
49 typing on cached module #50

Merged
5 commits merged on Jun 13, 2024
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.8.5
current_version = 0.8.6
commit = True
tag = True

19 changes: 16 additions & 3 deletions CHANGELOG.md
@@ -1,5 +1,18 @@
# Changelog

## [0.8.6] - 2024-06-12
### Added
- More type hinting for composite_solution/cached.py
- Testing against Python 3.10, 3.11
### Changed
- Update to use solvis 0.11.1
- Update to use solvis-store 2.0.3
- Update to use nzshm-model 0.4.0
### Removed
- Testing against Python 3.7, 3.8
### Fixed
- parent_fault_names passes through sorting parameter

## [0.8.5] - 2023-08-02
### Changed
- solvis-store == v2.0.2
@@ -9,7 +22,7 @@
## [0.8.4] - 2023-07-19
### Added
- version config for package.json

## [0.8.3] - 2023-07-19
### Added
- new filter_set_options argument
@@ -18,7 +31,7 @@
### Changed
- updated upstream solvis libs
- removed monkeypatching for solvis/solvis-store

## [0.8.2] - 2023-07-04
### Changed
- added list support for corupture queries
@@ -93,7 +106,7 @@

## [0.2.0] - 2022-??-??
### Added
- InversionSolutoin
- InversionSolutoin

## [0.0.1] - 2022-??-??
Initial release of the NZ NSHM 2022 revision
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "solvis-graphql-api",
"version": "0.8.5",
"version": "0.8.6",
"description": "A graphql API for analysis and geojson visualisation from NZSHM Inversion Solutions.",
"dependencies": {
"serverless": "^3.30.1",
2,473 changes: 1,402 additions & 1,071 deletions poetry.lock

Large diffs are not rendered by default.

16 changes: 8 additions & 8 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "solvis-graphql-api"
version = "0.8.5"
version = "0.8.6"
description = "Graphql API for analysis of opensha modular Inversion Solutions"
authors = ["Chris Chamberlain <[email protected]>"]
license = "AGPL3"
@@ -14,6 +14,7 @@ classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
]
packages = [{include = "solvis_graphql_api"}]

@@ -25,17 +26,16 @@ Flask-GraphQL = "^2.0.1"
graphene = "<3"
pyyaml = "^6.0"

nzshm-model = "^0.3.0"
nzshm-model = "^0.4.0"
nzshm-common = "^0.6.0"
# solvis = "^0.7.0"
solvis = "^0.8.1"
solvis-store = {git = "https://github.com/GNS-Science/solvis-store", rev = "v2.0.2"}
solvis = "^0.11.1"
solvis-store = {git = "https://github.com/GNS-Science/solvis-store", rev = "v2.0.3"}

matplotlib = "^3.7.1"
werkzeug = "^2.3.3"

# see https://github.com/orgs/python-poetry/discussions/7937
urllib3 = "<2"
urllib3 = "<2"
numpy = "<1.25"

[tool.poetry.group.dev.dependencies]
Expand All @@ -44,7 +44,7 @@ black = { version = "^22.3"}
isort = { version = "^5.8.0"}
flake8 = { version = "^3.9.2"}
flake8-docstrings = { version = "^1.6.0", optional = true }
mypy = {version = "^0.900"}
mypy = {version = "^1.10"}
pytest = { version = "^6.2.4"}
pytest-cov = { version = "^2.12.0"}

@@ -98,7 +98,7 @@ doc = [
[tool.black]
line-length = 120
skip-string-normalization = true
target-version = ['py37', 'py38', 'py39']
target-version = ['py39', 'py310', 'py311']
include = '\.pyi?$'
exclude = '''
/(
3 changes: 2 additions & 1 deletion setup.cfg
@@ -44,10 +44,11 @@ exclude_lines =

[tox:tox]
isolated_build = true
envlist = py39, py310, format, lint, build
envlist = py39, py310, py311, format, lint, build

[gh-actions]
python =
3.11: py311
3.10: py310
3.9: py39, format, lint, build

2 changes: 1 addition & 1 deletion solvis_graphql_api/__init__.py
@@ -2,4 +2,4 @@

__author__ = """GNS Science"""
__email__ = '[email protected]'
__version__ = '0.8.5'
__version__ = '0.8.6'
45 changes: 31 additions & 14 deletions solvis_graphql_api/composite_solution/cached.py
@@ -5,7 +5,7 @@
import time
from functools import lru_cache
from pathlib import Path
from typing import Any, Callable, Iterable, Iterator, List, Set, Tuple, Union
from typing import Any, Callable, Iterable, Iterator, List, Set, Tuple, Union, TYPE_CHECKING

import geopandas as gpd
import nzshm_model
@@ -16,6 +16,10 @@

from .filter_set_logic_options import SetOperationEnum

if TYPE_CHECKING:
import shapely.geometry.polygon.Polygon
from nzshm_model.source_logic_tree.logic_tree import SourceLogicTree

Codecov check warning (codecov/patch): added lines #L20-L21 in solvis_graphql_api/composite_solution/cached.py were not covered by tests.

log = logging.getLogger(__name__)

FAULT_SECTION_LIMIT = 1e4
@@ -25,15 +29,16 @@


@lru_cache
def get_location_polygon(radius_km, lon, lat):
def get_location_polygon(radius_km: float, lon: float, lat: float) -> "shapely.geometry.polygon.Polygon":
return solvis.geometry.circle_polygon(radius_m=radius_km * 1000, lon=lon, lat=lat)


@lru_cache
def parent_fault_names(
sol: InversionSolutionProtocol, sort: Union[None, Callable[[Iterable], List]] = sorted
sol: InversionSolutionProtocol, sort: Union[None, Callable[[Iterable[str]], Iterable[str]]] = sorted
) -> List[str]:
return solvis.parent_fault_names(sol)
fault_names: List[str] = solvis.parent_fault_names(sol, sort)
return fault_names


@lru_cache
@@ -58,23 +63,29 @@


def get_rupture_ids_for_fault_names_stored(
model_id: str, fault_system: str, fault_names: Iterable[str], filter_set_options
) -> Iterator[int]:
model_id: str, fault_system: str, fault_names: Iterable[str], filter_set_options: Tuple[Any]
) -> Set[int]:
log.info('get_rupture_ids_for_fault_names_stored: %s %s %s' % (model_id, fault_system, fault_names))
filter_set_options_dict = dict(filter_set_options)
fss = get_fault_system_solution_for_model(model_id, fault_system)
ruptset_ids = list(set([branch.rupture_set_id for branch in fss.branches]))
assert len(ruptset_ids) == 1
rupture_set_id = ruptset_ids[0]
union = False if filter_set_options_dict["multiple_faults"] == SetOperationEnum.INTERSECTION else True
return get_fault_name_rupture_ids(rupture_set_id, fault_names, union)

rupture_id_set: Set[int] = get_fault_name_rupture_ids(rupture_set_id, fault_names, union)
return rupture_id_set

Codecov check warning (codecov/patch): added lines #L76-L77 in solvis_graphql_api/composite_solution/cached.py were not covered by tests.


def get_fault_system_solution_for_model(model_id, fault_system):
def get_fault_system_solution_for_model(
model_id: str, fault_system: str
) -> "nzshm_model.source_logic_tree.logic_tree.FaultSystemLogicTree":
current_model = nzshm_model.get_model_version(model_id)
slt = current_model.source_logic_tree()

def get_fss(slt, fault_system):
def get_fss(
slt: "SourceLogicTree", fault_system: str
) -> "nzshm_model.source_logic_tree.logic_tree.FaultSystemLogicTree":
for fss in slt.fault_system_lts:
if fss.short_name == fault_system:
return fss
@@ -86,7 +97,10 @@


def get_rupture_ids_for_location_radius(
fault_system_solution, location_ids, radius_km, filter_set_options: Tuple[Any]
fault_system_solution: InversionSolutionProtocol,
location_ids: Iterable[str],
radius_km: float,
filter_set_options: Tuple[Any],
) -> Set[int]:
log.info('get_rupture_ids_for_location_radius: %s %s %s' % (fault_system_solution, radius_km, location_ids))
filter_set_options_dict = dict(filter_set_options)
@@ -130,9 +144,10 @@
union = False if filter_set_options_dict["multiple_locations"] == SetOperationEnum.INTERSECTION else True
# print("filter_dataframe_by_radius_stored", radius_km)
# print("get_rupture_ids_for_location_radius_stored", radius_km)
return get_location_radius_rupture_ids(
rupture_ids: Iterator[int] = get_location_radius_rupture_ids(
rupture_set_id=rupture_set_id, locations=location_ids, radius=radius_km, union=union
)
return rupture_ids


@lru_cache
@@ -141,7 +156,9 @@


def get_rupture_ids_for_fault_names(
fault_system_solution, corupture_fault_names, filter_set_options: Tuple[Any]
fault_system_solution: InversionSolutionProtocol,
corupture_fault_names: Iterable[str],
filter_set_options: Tuple[Any],
) -> Set[int]:
filter_set_options_dict = dict(filter_set_options)
fss = fault_system_solution
@@ -198,7 +215,7 @@
tic1 = time.perf_counter()
log.debug('matched_rupture_sections_gdf(): time to load fault system solution: %2.3f seconds' % (tic1 - tic0))

df0 = fss.ruptures_with_rates
df0 = fss.ruptures_with_rupture_rates

# attribute filters
df0 = df0 if not max_mag else df0[df0.Magnitude <= max_mag]
@@ -282,7 +299,7 @@
tic2 = time.perf_counter()
log.debug('fault_section_aggregates_gdf(): time to filter rupture sections: %2.3f seconds' % (tic2 - tic1))

fsr = fss.fault_sections_with_rates
fsr = fss.fault_sections_with_rupture_rates
fsr = fsr[fsr['Rupture Index'].isin(df0['Rupture Index'].unique())]

tic3 = time.perf_counter()
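A minimal, self-contained sketch of the typing pattern this diff adopts: TYPE_CHECKING-only imports for annotation-time dependencies, plus a sort callable that is forwarded to the underlying call instead of being ignored. The function and fault names below are purely illustrative, not the project's API.

```python
from functools import lru_cache
from typing import TYPE_CHECKING, Callable, Iterable, List, Tuple, Union

if TYPE_CHECKING:
    # Resolved only by type checkers (mypy); never imported at runtime.
    from shapely.geometry import Polygon  # noqa: F401


@lru_cache
def sorted_fault_names(
    fault_names: Tuple[str, ...],  # a tuple is hashable, so it works with lru_cache
    sort: Union[None, Callable[[Iterable[str]], Iterable[str]]] = sorted,
) -> List[str]:
    # The sort callable is passed through (or skipped when None), mirroring how
    # the PR forwards `sort` to solvis.parent_fault_names.
    return list(fault_names) if sort is None else list(sort(fault_names))


print(sorted_fault_names(("Wairau", "Awatere", "Clarence")))
# ['Awatere', 'Clarence', 'Wairau']
```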
@@ -23,7 +23,7 @@

@lru_cache
def rupture_detail(model_id: str, fault_system: str, rupture_index: int):
sr = get_composite_solution(model_id)._solutions[fault_system].ruptures_with_rates
sr = get_composite_solution(model_id)._solutions[fault_system].ruptures_with_rupture_rates
return sr[sr['Rupture Index'] == rupture_index]


@@ -224,13 +224,14 @@ def resolve_fault_surfaces(root, info, *args, **kwargs):
fault_sections_gdf['stroke-width'] = stroke_width
fault_sections_gdf['stroke-opacity'] = stroke_opacity

log.debug(f"columns: {fault_sections_gdf.columns}")
fault_sections_gdf = fault_sections_gdf.drop(
columns=[
'rate_weighted_mean.max',
'rate_weighted_mean.min',
'rate_weighted_mean.mean',
"SlipRate",
"SlipRateStdDev",
'Target Slip Rate',
'Target Slip Rate StdDev',
]
)
# import solvis
@@ -275,8 +276,8 @@
'rate_weighted_mean.max',
'rate_weighted_mean.min',
'rate_weighted_mean.mean',
"SlipRate",
"SlipRateStdDev",
'Target Slip Rate',
'Target Slip Rate StdDev',
]
)
# import solvis
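These last two hunks swap the dropped columns from "SlipRate"/"SlipRateStdDev" to 'Target Slip Rate'/'Target Slip Rate StdDev', presumably tracking renamed columns in the updated solvis dependency. A hedged sketch of a defensive variant, assuming pandas and the column names shown above; the helper name is hypothetical and not part of the project:

```python
import pandas as pd


def drop_style_columns(gdf: pd.DataFrame) -> pd.DataFrame:
    # Column names as they appear in the diff above; 'Target Slip Rate' and
    # 'Target Slip Rate StdDev' replace the earlier 'SlipRate' columns.
    unwanted = [
        'rate_weighted_mean.max',
        'rate_weighted_mean.min',
        'rate_weighted_mean.mean',
        'Target Slip Rate',
        'Target Slip Rate StdDev',
    ]
    # Drop only the columns that are actually present, so frames produced by
    # older solvis versions do not raise a KeyError.
    return gdf.drop(columns=[c for c in unwanted if c in gdf.columns])
```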