Merge pull request #114 from ASFHyP3/develop
Release v1.4.1
AndrewPlayer3 authored Jan 15, 2025
2 parents e3a1ed7 + e9d0d39 commit 70777c3
Showing 38 changed files with 246 additions and 187 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/changelog-check.yml
@@ -13,4 +13,4 @@ on:

jobs:
call-changelog-check-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.14.0
2 changes: 1 addition & 1 deletion .github/workflows/create-jira-issue.yml
@@ -6,7 +6,7 @@ on:

jobs:
call-create-jira-issue-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.14.0
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
2 changes: 1 addition & 1 deletion .github/workflows/labeled-pr-check.yml
@@ -12,4 +12,4 @@ on:

jobs:
call-labeled-pr-check-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.14.0
2 changes: 1 addition & 1 deletion .github/workflows/release-checklist-comment.yml
@@ -9,7 +9,7 @@ on:

jobs:
call-release-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.14.0
permissions:
pull-requests: write
secrets:
2 changes: 1 addition & 1 deletion .github/workflows/release.yml
@@ -8,7 +8,7 @@
jobs:
call-release-workflow:
# Docs: https://github.com/ASFHyP3/actions
-    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.14.0
with:
release_prefix: burst2safe
secrets:
17 changes: 6 additions & 11 deletions .github/workflows/static-analysis.yml
@@ -5,17 +5,12 @@ on: [pull_request]
jobs:
call-secrets-analysis-workflow:
# Docs: https://github.com/ASFHyP3/actions
uses: ASFHyP3/actions/.github/workflows/[email protected]

check-with-black:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: psf/black@stable
with:
options: "--check --diff --color"
src: "."
uses: ASFHyP3/actions/.github/workflows/[email protected]

call-ruff-workflow:
# Docs: https://github.com/ASFHyP3/actions
uses: ASFHyP3/actions/.github/workflows/[email protected]
uses: ASFHyP3/actions/.github/workflows/[email protected]

+  call-mypy-workflow:
+    # Docs: https://github.com/ASFHyP3/actions
+    uses: ASFHyP3/actions/.github/workflows/[email protected]
2 changes: 1 addition & 1 deletion .github/workflows/tag-version.yml
@@ -9,6 +9,6 @@ jobs:
call-bump-version-workflow:
# For first-time setup, create a v0.0.0 tag as shown here:
# https://github.com/ASFHyP3/actions#reusable-bump-versionyml
-    uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.13.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.14.0
secrets:
USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }}
11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [1.4.1]
+
+### Added
+* The [`static-analysis`](.github/workflows/static-analysis.yml) workflow now includes `mypy` for type checking.
+
+### Removed
+* Removed `black` from the [`static-analysis`](.github/workflows/static-analysis.yml) workflow.
+
+### Fixed
+* Add missing [`src/burst2safe/__init__.py`](./src/burst2safe/__init__.py) file.
+
## [1.4.0]

### Added
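The `__init__.py` fix matters because setuptools' default discovery only includes regular packages in built distributions; without the file, `src/burst2safe` is an implicit namespace package and can be silently omitted. A quick sanity check, assuming the package is installed (hypothetical snippet, not part of this commit):

```python
import burst2safe

# A regular package resolves __file__ to its __init__.py; an implicit
# namespace package reports None (or no __file__ at all, depending on version).
print(burst2safe.__file__)  # e.g. .../site-packages/burst2safe/__init__.py
```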
1 change: 1 addition & 0 deletions environment.yml
@@ -18,3 +18,4 @@ dependencies:
- pytest-cov
- pytest-dependency
- ruff
+  - mypy
14 changes: 9 additions & 5 deletions pyproject.toml
@@ -29,6 +29,7 @@ classifiers=[
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
@@ -64,11 +65,6 @@ addopts = '-ra -q -m "not integration and not golden"'
markers = ["integration", "golden"]
testpaths = ["tests"]

-[tool.black]
-line-length = 120
-skip-string-normalization = true
-include = '\.pyx?$'
-
[tool.ruff]
line-length = 120
# The directories to consider when resolving first- vs. third-party imports.
@@ -95,3 +91,11 @@ convention = "google"
[tool.ruff.lint.isort]
case-sensitive = true
lines-after-imports = 2

+[tool.mypy]
+python_version = "3.9"
+warn_redundant_casts = true
+warn_unused_ignores = true
+warn_unreachable = true
+strict_equality = true
+check_untyped_defs = true
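For context on what the new mypy settings buy: `strict_equality` flags equality checks between types that can never overlap, and `warn_unreachable` flags branches mypy can prove never execute. A minimal sketch of the kind of defect they would surface (hypothetical snippet, not from this repo):

```python
def status_ok(code: int) -> bool:
    # strict_equality would flag this as a non-overlapping comparison (int vs. str),
    # and with warn_unreachable the branch body would be reported as unreachable.
    if code == '200':
        return True
    return code == 200
```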
Empty file added src/burst2safe/__init__.py
7 changes: 4 additions & 3 deletions src/burst2safe/auth.py
@@ -2,7 +2,7 @@
import os
from pathlib import Path
from platform import system
-from typing import Tuple
+from typing import Tuple, Union


EARTHDATA_HOST = 'urs.earthdata.nasa.gov'
@@ -20,7 +20,7 @@ def get_netrc() -> Path:
return netrc_file


-def find_creds_in_env(username_name, password_name) -> Tuple[str, str]:
+def find_creds_in_env(username_name, password_name) -> Union[Tuple[str, str], Tuple[None, None]]:
"""Find credentials for a service in the environment.
Args:
@@ -38,7 +38,7 @@ def find_creds_in_env(username_name, password_name) -> Tuple[str, str]:
return None, None


-def find_creds_in_netrc(service) -> Tuple[str, str]:
+def find_creds_in_netrc(service) -> Union[Tuple[str, str], Tuple[None, None]]:
"""Find credentials for a service in the netrc file.
Args:
@@ -53,6 +53,7 @@ def find_creds_in_netrc(service) -> Tuple[str, str]:
if service in netrc_credentials.hosts:
username = netrc_credentials.hosts[service][0]
password = netrc_credentials.hosts[service][2]
+assert password is not None
return username, password

return None, None
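The widened return types tell mypy that both helpers can return `(None, None)`, so callers must narrow before using the values as strings. A minimal sketch of the calling pattern, assuming the `EARTHDATA_HOST` constant and module layout shown above:

```python
from burst2safe.auth import EARTHDATA_HOST, find_creds_in_netrc

username, password = find_creds_in_netrc(EARTHDATA_HOST)
if username is None or password is None:
    raise ValueError('No Earthdata credentials found')
# mypy now treats username and password as str on this path.
print(f'Authenticating as {username}')
```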
44 changes: 27 additions & 17 deletions src/burst2safe/base.py
@@ -1,9 +1,8 @@
import hashlib
-from collections.abc import Iterable
from copy import deepcopy
from datetime import datetime, timedelta
from pathlib import Path
-from typing import List, Optional
+from typing import List, Optional, Union, cast

import lxml.etree as ET

@@ -24,9 +23,9 @@ def __init__(
start_line: The starting line number of the first element.
slc_lengths: The total line lengths of the SLCs corresponding to each element.
"""
-self.inputs = inputs
-self.start_line = start_line
-self.slc_lengths = slc_lengths
+self.inputs: list[ET.Element] = inputs
+self.start_line: Optional[int] = start_line
+self.slc_lengths: Optional[list[int]] = slc_lengths

self.name = self.inputs[0].tag
elements = flatten([element.findall('*') for element in self.inputs])
@@ -75,6 +74,7 @@ def get_unique_elements(self) -> List[ET.Element]:
last_time = datetime.fromisoformat(list_of_element_lists[0][-1].find(self.time_field).text)
uniques = [deepcopy(element) for element in list_of_element_lists[0]]
if self.has_line:
+assert self.slc_lengths is not None
previous_line_count = self.slc_lengths[0]

for i, element_list in enumerate(list_of_element_lists[1:]):
@@ -84,7 +84,9 @@

if self.has_line:
new_lines = [int(elem.find('line').text) + previous_line_count for elem in to_keep]
-[set_text(elem.find('line'), line) for elem, line in zip(to_keep, new_lines)]
+for elem, line in zip(to_keep, new_lines):
+    set_text(elem.find('line'), line)
+assert self.slc_lengths is not None
previous_line_count += self.slc_lengths[i]

last_time = max([times[index] for index in keep_index])
@@ -116,10 +118,11 @@ def update_line_numbers(self, elements: List[ET.Element]) -> None:
"""
for element in elements:
standard_line = int(element.find('line').text)
+assert self.start_line is not None
element.find('line').text = str(standard_line - self.start_line)

def filter_by_time(
-self, elements: List[ET.Element], anx_bounds: tuple[float, float], buffer: timedelta
+self, elements: List[ET.Element], anx_bounds: tuple[datetime, datetime], buffer: timedelta
) -> List[ET.Element]:
"""Filter elements by time.
@@ -143,8 +146,8 @@

def create_filtered_list(
self,
-anx_bounds: Optional[tuple[float, float]],
-buffer: Optional[timedelta] = timedelta(seconds=3),
+anx_bounds: tuple[datetime, datetime],
+buffer: timedelta = timedelta(seconds=3),
line_bounds: Optional[tuple[float, float]] = None,
) -> ET.Element:
"""Filter elements by time/line. Adjust line number if present.
@@ -210,7 +213,7 @@ def create_metadata_object(simple_name: str) -> ET.Element:


def create_data_object(
-simple_name: str, relative_path: Path, rep_id: str, mime_type: str, size_bytes: int, md5: str
+simple_name: str, relative_path: Union[Path, str], rep_id: str, mime_type: str, size_bytes: int, md5: str
) -> ET.Element:
"""Create a data object element for a manifest.safe file.
@@ -241,7 +244,7 @@ def create_data_object(


class Annotation:
-def __init__(self, burst_infos: Iterable[BurstInfo], metadata_type: str, ipf_version: str, image_number: int):
+def __init__(self, burst_infos: list[BurstInfo], metadata_type: str, ipf_version: str, image_number: int):
"""Initialize the Annotation object.
Args:
@@ -256,11 +259,12 @@ def __init__(self, burst_infos: Iterable[BurstInfo], metadata_type: str, ipf_version: str, image_number: int):
self.major_version, self.minor_version = [int(v) for v in ipf_version.split('.')]
self.metadata_paths = drop_duplicates([x.metadata_path for x in burst_infos])
self.swath, self.pol = burst_infos[0].swath, burst_infos[0].polarization
+assert burst_infos[0].length is not None
self.start_line = burst_infos[0].burst_index * burst_infos[0].length
self.total_lines = len(burst_infos) * burst_infos[0].length
self.stop_line = self.start_line + self.total_lines
-self.min_anx = min([x.start_utc for x in burst_infos])
-self.max_anx = max([x.stop_utc for x in burst_infos])
+self.min_anx = min(cast(datetime, x.start_utc) for x in burst_infos)
+self.max_anx = max(cast(datetime, x.stop_utc) for x in burst_infos)

self.inputs = [
get_subxml_from_metadata(path, metadata_type, self.swath, self.pol) for path in self.metadata_paths
@@ -276,11 +280,11 @@ def __init__(self, burst_infos: Iterable[BurstInfo], metadata_type: str, ipf_version: str, image_number: int):

# annotation components to be extended by subclasses
self.ads_header = None
-self.xml = None
+self.xml: Optional[ET.Element] = None

# these attributes are updated when the annotation is written to a file
-self.size_bytes = None
-self.md5 = None
+self.size_bytes: Optional[int] = None
+self.md5: Optional[str] = None

def create_ads_header(self):
"""Create the ADS header for the annotation."""
@@ -301,7 +305,7 @@ def merge_lists(self, list_name: str, line_bounds: Optional[tuple[int, int]] = None):
"""
list_elements = [input_xml.find(list_name) for input_xml in self.inputs]
list_of_list_elements = ListOfListElements(list_elements, self.start_line, self.slc_lengths)
-merged_list = list_of_list_elements.create_filtered_list([self.min_anx, self.max_anx], line_bounds=line_bounds)
+merged_list = list_of_list_elements.create_filtered_list((self.min_anx, self.max_anx), line_bounds=line_bounds)
return merged_list

def write(self, out_path: Path, update_info=True) -> None:
@@ -311,6 +315,7 @@ def write(self, out_path: Path, update_info=True) -> None:
out_path: The path to write the annotation to.
update_info: Whether to update the size and md5 attributes of the annotation.
"""
+assert self.xml is not None
self.xml.write(out_path, pretty_print=True, xml_declaration=True, encoding='utf-8')

if update_info:
@@ -345,6 +350,11 @@ def create_manifest_components(self):

content_unit = create_content_unit(simple_name, unit_type, rep_id)
metadata_object = create_metadata_object(simple_name)
+assert self.size_bytes is not None
+assert self.md5 is not None
data_object = create_data_object(simple_name, rel_path, rep_id, mime_type, self.size_bytes, self.md5)

return content_unit, metadata_object, data_object

+def assemble(self):
+    raise NotImplementedError()
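Most of the `base.py` changes follow a single pattern: attributes that start out as `None` get explicit `Optional[...]` annotations, and each use site asserts non-`None` so mypy can narrow the type (the `cast(datetime, ...)` calls serve the same purpose where a per-element assert would be awkward). A stripped-down sketch of the pattern, with hypothetical names:

```python
from typing import Optional


class Report:
    def __init__(self) -> None:
        # Unknown until write() runs, so declared Optional and filled in later.
        self.size_bytes: Optional[int] = None

    def write(self, data: bytes) -> None:
        self.size_bytes = len(data)

    def kilobytes(self) -> float:
        # Narrows Optional[int] to int; without the assert mypy flags the division.
        assert self.size_bytes is not None
        return self.size_bytes / 1024
```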
4 changes: 2 additions & 2 deletions src/burst2safe/burst2safe.py
@@ -23,8 +23,8 @@ def burst2safe(
granules: Optional[Iterable[str]] = None,
orbit: Optional[int] = None,
extent: Optional[Polygon] = None,
-polarizations: Optional[Iterable[str]] = None,
-swaths: Optional[Iterable[str]] = None,
+polarizations: Optional[list[str]] = None,
+swaths: Optional[list[str]] = None,
mode: str = 'IW',
min_bursts: int = 1,
all_anns: bool = False,
6 changes: 3 additions & 3 deletions src/burst2safe/burst2stack.py
@@ -3,7 +3,7 @@
from argparse import ArgumentParser
from datetime import datetime
from pathlib import Path
-from typing import Iterable, List, Optional
+from typing import List, Optional

from shapely.geometry import Polygon

@@ -25,8 +25,8 @@
start_date: Optional[datetime] = None,
end_date: Optional[datetime] = None,
extent: Optional[Polygon] = None,
-polarizations: Optional[Iterable[str]] = None,
-swaths: Optional[Iterable[str]] = None,
+polarizations: Optional[list[str]] = None,
+swaths: Optional[list[str]] = None,
mode: str = 'IW',
min_bursts: int = 1,
all_anns: bool = False,
3 changes: 1 addition & 2 deletions src/burst2safe/calibration.py
@@ -1,4 +1,3 @@
-from collections.abc import Iterable
from copy import deepcopy

import lxml.etree as ET
@@ -10,7 +9,7 @@
class Calibration(Annotation):
"""Class representing a calibration XML."""

-def __init__(self, burst_infos: Iterable[BurstInfo], ipf_version: str, image_number: int):
+def __init__(self, burst_infos: list[BurstInfo], ipf_version: str, image_number: int):
"""Create a calibration object.
Args:
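The `Iterable[BurstInfo]` to `list[BurstInfo]` narrowing in `base.py`, `burst2safe.py`, `burst2stack.py`, and `calibration.py` matches how the arguments are actually used: the constructors index `burst_infos[0]` and call `len(...)`, neither of which a bare `Iterable` guarantees. A minimal illustration (hypothetical names):

```python
from typing import Iterable


def first_swath(burst_swaths: Iterable[str]) -> str:
    return burst_swaths[0]  # mypy: "Iterable[str]" is not indexable


def first_swath_ok(burst_swaths: list[str]) -> str:
    return burst_swaths[0]  # fine: list supports indexing and len()
```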
6 changes: 4 additions & 2 deletions src/burst2safe/download.py
@@ -25,6 +25,7 @@ def get_url_dict(burst_infos: Iterable[BurstInfo], force: bool = False) -> dict:
"""
url_dict = {}
for burst_info in burst_infos:
+assert burst_info.data_path is not None
if force or not burst_info.data_path.exists():
url_dict[burst_info.data_path] = burst_info.data_url
if force or not burst_info.metadata_path.exists():
@@ -60,11 +61,12 @@ async def download_burst_url_async(session: aiohttp.ClientSession, url: str, file_path: Path) -> None:
file_path: The path to save the downloaded data to
"""
response = await get_async(session, url)

+assert response.content_disposition is not None
if file_path.suffix in ['.tif', '.tiff']:
returned_filename = response.content_disposition.filename
elif file_path.suffix == '.xml':
url_parts = str(response.url).split('/')
+assert response.content_disposition.filename is not None
ext = response.content_disposition.filename.split('.')[-1]
returned_filename = f'{url_parts[3]}_{url_parts[5]}.{ext}'
else:
Expand Down Expand Up @@ -116,4 +118,4 @@ def download_bursts(burst_infos: Iterable[BurstInfo]) -> None:
full_dict = get_url_dict(burst_infos, force=True)
missing_data = [x for x in full_dict.keys() if not x.exists]
if missing_data:
-raise ValueError(f'Error downloading, missing files: {", ".join(missing_data.name)}')
+raise ValueError(f'Error downloading, missing files: {", ".join([x.name for x in missing_data])}')
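The old message called `.name` on the list itself rather than on each `Path`, which raises `AttributeError` as soon as the error path runs; the fix joins the individual file names. A standalone illustration with hypothetical paths:

```python
from pathlib import Path

missing_data = [Path('scratch/burst1.tiff'), Path('scratch/burst2.xml')]

# missing_data.name would raise AttributeError: 'list' object has no attribute 'name'
message = ', '.join([x.name for x in missing_data])
print(f'Error downloading, missing files: {message}')  # burst1.tiff, burst2.xml
```

(Note that the `not x.exists` guard in the unchanged context line references the `Path.exists` method without calling it; `x.exists()` appears to be the intended check.)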