Add mypy for Type Checking #325

Merged · 11 commits · Jan 13, 2025
6 changes: 4 additions & 2 deletions .github/workflows/static-analysis.yml
@@ -3,10 +3,12 @@
on: push

jobs:
  # Docs: https://github.com/ASFHyP3/actions
  call-secrets-analysis-workflow:
    # Docs: https://github.com/ASFHyP3/actions
    uses: ASFHyP3/actions/.github/workflows/[email protected]

  call-ruff-workflow:
    # Docs: https://github.com/ASFHyP3/actions
    uses: ASFHyP3/actions/.github/workflows/[email protected]

  call-mypy-workflow:
    uses: ASFHyP3/actions/.github/workflows/[email protected]

Check warning (Code scanning / CodeQL): Workflow does not contain permissions (Medium). Actions Job or Workflow does not set permissions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [3.1.4]

### Added
- The [`static-analysis`](.github/workflows/static-analysis.yml) GitHub Actions workflow now includes `mypy` for type checking.

## [3.1.3]

### Changed
2 changes: 2 additions & 0 deletions environment.yml
@@ -19,6 +19,8 @@ dependencies:
  # For packaging, and testing
  - python-build
  - ruff
  - mypy
  - types-lxml
  - setuptools
  - setuptools_scm
  - pytest
11 changes: 11 additions & 0 deletions pyproject.toml
@@ -98,3 +98,14 @@ convention = "google"
[tool.ruff.lint.isort]
case-sensitive = true
lines-after-imports = 2

[tool.mypy]
python_version = "3.10"
warn_redundant_casts = true
warn_unused_ignores = true
warn_unreachable = true
strict_equality = true
check_untyped_defs = true
exclude = [
"/build/"
]
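As a quick illustration of what these settings enable, here is a small, hypothetical snippet (not from this repository) that the configuration above would flag: `strict_equality` rejects the impossible `int == str` comparison, `warn_unreachable` reports the dead statement, and `check_untyped_defs` makes mypy check the body of the unannotated helper.

```python
def describe(count: int) -> str:
    if count == 'zero':  # strict_equality: non-overlapping equality check (int vs str)
        return 'nothing'
    if count >= 0:
        return f'{count} item(s)'
    else:
        return 'negative count'
    return 'never happens'  # warn_unreachable: statement is unreachable


def legacy_helper(values):  # no annotations, but still checked because check_untyped_defs = true
    return sum(v for v in values if v is not None)
```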
6 changes: 3 additions & 3 deletions src/hyp3lib/asf_geometry.py
@@ -70,7 +70,7 @@ def geotiff2boundary_mask(inGeotiff, tsEPSG, threshold, use_closing=True):
    data[np.isnan(data) is True] = noDataValue
    if threshold is not None:
        print('Applying threshold ({0}) ...'.format(threshold))
        data[data < np.float(threshold)] = noDataValue
        data[data < np.float64(threshold)] = noDataValue
    if np.isnan(noDataValue):
        data[np.isnan(data) is False] = 1
    else:
@@ -400,7 +400,7 @@ def raster2csv(fields, values, csvFile):
            line.append(value[name])

    with open(csvFile, 'wb') as outF:
        writer = csv.writer(outF, delimiter=';')
        writer = csv.writer(outF, delimiter=';')  # type: ignore [arg-type]
        writer.writerow(header)
        writer.writerow(line)

@@ -816,7 +816,7 @@ def generate_tile_shape(shapeFile, minLat, maxLat, minLon, maxLon, step):
def list2shape(csvFile, shapeFile):
    # Set up shapefile attributes
    fields = []
    field = {}
    field: dict = {}
    values = []
    field['name'] = 'granule'
    field['type'] = ogr.OFTString
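Note on the `np.float(threshold)` change above: `np.float` was a deprecated alias for the builtin `float` that NumPy removed in 1.24, so both NumPy and mypy reject it; `np.float64` (or plain `float`) is the drop-in replacement. A minimal sketch of the same thresholding pattern, with made-up values:

```python
import numpy as np

data = np.array([[0.2, 0.7], [0.9, 0.1]])
threshold = '0.5'  # thresholds often arrive as strings from argparse
no_data_value = -9999.0

# data[data < np.float(threshold)] = no_data_value  # AttributeError on NumPy >= 1.24
data[data < np.float64(threshold)] = no_data_value
print(data)
```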
9 changes: 4 additions & 5 deletions src/hyp3lib/asf_time_series.py
@@ -182,7 +182,7 @@ def filter_change(image, kernelSize, iterations):
            if image[ii, kk] == 1:
                negativeChange[ii, kk] = 1
            elif image[ii, kk] == 2:
                noChange = 1
                noChange = np.ones_like(noChange)
            elif image[ii, kk] == 3:
                positiveChange[ii, kk] = 1
    image = None
@@ -222,7 +222,7 @@ def vector_meta(vectorFile):
    features = []
    featureCount = layer.GetFeatureCount()
    for kk in range(featureCount):
        value = {}
        value: dict = {}
        feature = layer.GetFeature(kk)
        for ii in range(fieldCount):
            if fields[ii]['type'] == ogr.OFTInteger:
@@ -241,7 +241,7 @@
def raster_metadata(input):  # noqa: A002
    # Set up shapefile attributes
    fields = []
    field = {}
    field: dict = {}
    values = []
    field['name'] = 'granule'
    field['type'] = ogr.OFTString
@@ -427,11 +427,10 @@ def time_series_slice(ncFile, x, y, typeXY):
            index = datestamp.index(refDates[ii])
            allValues.append(value[index])
            refType.append('acquired')
    allValues = np.asarray(allValues)

    # Smoothing the time line with localized regression (LOESS)
    lowess = sm.nonparametric.lowess
    smooth = lowess(allValues, np.arange(len(allValues)), frac=0.08, it=0)[:, 1]
    smooth = lowess(np.asarray(allValues), np.arange(len(allValues)), frac=0.08, it=0)[:, 1]

    sd = seasonal_decompose(x=smooth, model='additive', freq=4)

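The removed `allValues = np.asarray(allValues)` line is the classic pattern mypy rejects: once `allValues` is inferred as a `list`, re-binding it to an `ndarray` is an incompatible assignment. Converting at the call site keeps the variable's type stable. A rough sketch of the idea, with hypothetical names:

```python
import numpy as np

samples: list[float] = [1.0, 2.5, 3.5]

# samples = np.asarray(samples)          # mypy: incompatible types in assignment (ndarray vs list[float])
mean_value = np.asarray(samples).mean()  # convert where the array is actually needed
print(mean_value)
```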
2 changes: 2 additions & 0 deletions src/hyp3lib/execute.py
@@ -80,6 +80,8 @@ def execute(
        raise ExecuteError(tool + ': ' + last)

    if expected is not None:
        if isinstance(expected, Path):
            expected = str(expected)
        if uselogging:
            logging.info('Checking for expected output: ' + expected)
        else:
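Note on the `expected` handling above: converting a `Path` to `str` before the string concatenation and logging lets the rest of the block type-check, and `isinstance` is a narrowing form mypy understands. A small sketch of the pattern, assuming an `expected: str | Path | None` parameter (the real signature in `execute.py` may differ):

```python
from pathlib import Path


def check_expected(expected: str | Path | None) -> None:
    if expected is None:
        return
    if isinstance(expected, Path):  # mypy narrows str | Path to Path here
        expected = str(expected)    # and treats expected as str from here on
    print('Checking for expected output: ' + expected)


check_expected(Path('/tmp/output.tif'))
```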
3 changes: 2 additions & 1 deletion src/hyp3lib/fetch.py
@@ -27,7 +27,7 @@ def write_credentials_to_netrc_file(
        f.write(f'machine {domain} login {username} password {password}\n')


def _get_download_path(url: str, content_disposition: str = None, directory: Union[Path, str] = '.'):
def _get_download_path(url: str, content_disposition: str | None = None, directory: Union[Path, str] = '.'):
    filename = None
    if content_disposition is not None:
        message = Message()
@@ -37,6 +37,7 @@ def _get_download_path(url: str, content_disposition: str = None, directory: Union[Path, str] = '.'):
        filename = basename(urlparse(url).path)
    if not filename:
        raise ValueError(f'could not determine download path for: {url}')
    assert isinstance(filename, str)
    return Path(directory) / filename


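The signature change above is about implicit `Optional`: `content_disposition: str = None` declares a `str` parameter with a `None` default, which mypy rejects by default (and which PEP 484 no longer sanctions). Spelling it `str | None` makes the contract explicit. A minimal sketch with a made-up function name:

```python
# Rejected by mypy (implicit Optional): default has type None, argument has type str
# def filename_from_header(value: str = None) -> str: ...

def filename_from_header(value: str | None = None) -> str:
    return value or 'download.bin'


print(filename_from_header())
print(filename_from_header('granule.zip'))
```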
10 changes: 7 additions & 3 deletions src/hyp3lib/get_orb.py
@@ -33,7 +33,7 @@ def __init__(self, username: str, password: str):
"""
self.username = username
self.password = password
self.token = None
self.token: str | None = None
self.session_id = None

def __enter__(self) -> str:
@@ -47,6 +47,7 @@ def __enter__(self) -> str:
        response.raise_for_status()
        self.session_id = response.json()['session_state']
        self.token = response.json()['access_token']
        assert self.token is not None
        return self.token

    def __exit__(self, exc_type, exc_val, exc_tb):
@@ -67,6 +68,7 @@ def _get_asf_orbit_url(orbit_type, platform, timestamp):
        backoff_factor=10,
        status_forcelist=[429, 500, 503, 504],
    )
    assert hostname is not None
    session.mount(hostname, HTTPAdapter(max_retries=retries))
    response = session.get(search_url)
    response.raise_for_status()
@@ -75,7 +77,7 @@
        file for file in tree.xpath('//a[@href]//@href') if file.startswith(platform) and file.endswith('.EOF')
    ]

    d1 = 0
    d1 = 0.0
    best = None
    for file in file_list:
        file = file.strip()
@@ -99,7 +101,7 @@ def _get_esa_orbit_url(orbit_type: str, platform: str, start_time: datetime, end
    search_url = 'https://catalogue.dataspace.copernicus.eu/odata/v1/Products'

    date_format = '%Y-%m-%dT%H:%M:%SZ'
    params = {
    params: dict = {
        '$filter': f"Collection/Name eq 'SENTINEL-1' and "
        f"startswith(Name, '{platform}_OPER_{orbit_type}_OPOD_') and "
        f'ContentDate/Start lt {start_time.strftime(date_format)} and '
@@ -173,7 +175,9 @@ def downloadSentinelOrbitFile(
    for provider in providers:
        try:
            url = get_orbit_url(granule, orbit_type, provider=provider)
            orbit_file: str | None = None
            if provider == 'ESA':
                assert esa_credentials is not None
                with EsaToken(*esa_credentials) as token:
                    orbit_file = download_file(url, directory=directory, token=token)
            else:
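The added `assert self.token is not None` and `assert hostname is not None` lines exist to narrow `Optional` values: after the assert, mypy treats the value as the non-`None` type, so it can be returned as `str` or passed to APIs that require one. A small, self-contained sketch with hypothetical names:

```python
def read_token(config: dict[str, str]) -> str:
    token: str | None = config.get('token')
    assert token is not None, 'token must be configured'  # narrows str | None to str
    return token


print(read_token({'token': 'abc123'}))
```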
2 changes: 1 addition & 1 deletion src/hyp3lib/image.py
@@ -6,7 +6,7 @@
from PIL import Image


def create_thumbnail(input_image: Path, size: Tuple[int, int] = (100, 100), output_dir: Path = None) -> Path:
def create_thumbnail(input_image: Path, size: Tuple[int, int] = (100, 100), output_dir: Path | None = None) -> Path:
"""Create a thumbnail from an image

Args:
6 changes: 2 additions & 4 deletions src/hyp3lib/makeAsfBrowse.py
@@ -28,9 +28,7 @@ def makeAsfBrowse(geotiff: str, base_name: str, use_nn=False, width: int = 2048):
    tiff = None  # How to close with gdal

    if tiff_width < width:
        logging.warning(
            f'Requested image dimension of {width} exceeds GeoTIFF width {tiff_width}.' f' Using GeoTIFF width'
        )
        logging.warning(f'Requested image dimension of {width} exceeds GeoTIFF width {tiff_width}. Using GeoTIFF width')
        browse_width = tiff_width
    else:
        browse_width = width
@@ -55,7 +53,7 @@ def main():
        '-n',
        '--nearest-neighbor',
        action='store_true',
        help="use GDAL's GRIORA_NearestNeighbour interpolation instead" ' of GRIORA_Cubic to resample the GeoTIFF',
        help="use GDAL's GRIORA_NearestNeighbour interpolation instead of GRIORA_Cubic to resample the GeoTIFF",
    )
    parser.add_argument('-w', '--width', default=2048, help='browse image width')
    args = parser.parse_args()
4 changes: 2 additions & 2 deletions src/hyp3lib/saa_func_lib.py
@@ -208,7 +208,7 @@ def write_gdal_file_byte(filename, geotransform, geoproj, data, nodata=None):


def write_gdal_file_rgb(filename, geotransform, geoproj, b1, b2, b3, metadata=None):
    options = []
    options: list = []
    (x, y) = b1.shape
    image_format = 'GTiff'
    driver = gdal.GetDriverByName(image_format)
@@ -226,7 +226,7 @@ def write_gdal_file_rgb(filename, geotransform, geoproj, b1, b2, b3, metadata=None):


def write_gdal_file_rgba(filename, geotransform, geoproj, b1, b2, b3, b4):
    options = []
    options: list = []
    (x, y) = b1.shape
    image_format = 'GTiff'
    driver = gdal.GetDriverByName(image_format)
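The `options: list = []` and `field: dict = {}` annotations address a common mypy complaint: an empty literal gives it nothing to infer an element type from, so it asks for an annotation. A bare `list`/`dict` silences that without committing to element types; more precise parameters can be used when they are known. A brief sketch with assumed element types:

```python
# Loose annotations: enough to satisfy mypy without fixing the element types
options: list = []
field: dict = {}

# More precise, when the element types are known
creation_options: list[str] = ['COMPRESS=LZW', 'TILED=YES']
attribute: dict[str, object] = {'name': 'granule', 'type': 'string', 'width': 254}
```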
6 changes: 3 additions & 3 deletions src/hyp3lib/system.py
@@ -21,9 +21,9 @@ def gamma_version():
                gamma_ver = f.readlines()[-1].strip()
        except IOError:
            logging.warning(
                f"No GAMMA_VERSION environment variable or ASF_Gamma_version.txt "
                f"file found in GAMMA_HOME:\n {os.getenv('GAMMA_HOME')}\n"
                f"Attempting to parse GAMMA version from its install directory"
                f'No GAMMA_VERSION environment variable or ASF_Gamma_version.txt '
                f'file found in GAMMA_HOME:\n {os.getenv("GAMMA_HOME")}\n'
                f'Attempting to parse GAMMA version from its install directory'
            )
            gamma_ver = os.path.basename(gamma_home).split('-')[-1]
    try:
2 changes: 1 addition & 1 deletion tests/test_fetch.py
@@ -124,4 +124,4 @@ def request_callback(request):

def test_download_file_none():
    with pytest.raises(requests.exceptions.MissingSchema):
        _ = fetch.download_file(url=None)
        _ = fetch.download_file(url=None)  # type: ignore [arg-type]
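The `# type: ignore [arg-type]` comment above suppresses a single mypy error code on that line: the test intentionally passes `None` where a `str` URL is expected, and scoping the ignore to `arg-type` keeps any other errors on the same line visible. A toy version of the same pattern, using a stand-in function rather than `hyp3lib.fetch.download_file`:

```python
import pytest


def download_file(url: str) -> str:
    """Toy stand-in that fails on a non-string URL."""
    return url.split('/')[-1]


def test_download_file_none():
    with pytest.raises(AttributeError):
        # Intentionally wrong argument type; only the arg-type error is silenced for mypy
        download_file(None)  # type: ignore[arg-type]
```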