Inherit much of geojson parser from GMT #8

Open · wants to merge 8 commits into base: develop
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
types_or: [yaml, markdown, css, scss]
# https://docs.astral.sh/ruff/integrations/#pre-commit
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.9
rev: v0.9.2
hooks:
# Run the linter
- id: ruff
461 changes: 438 additions & 23 deletions poetry.lock

Large diffs are not rendered by default.

9 changes: 4 additions & 5 deletions pyproject.toml
@@ -10,23 +10,22 @@ packages = [{ include = "urbanopt_des", from = "." }]
python = ">=3.10,<3.13"
geopandas = "^1.0.1"
# release mode
modelica-builder = "^0.6.0"
# geojson-modelica-translator = "^0.9.1"
# pre-release mode, use github
# modelica-builder = { git = "https://github.com/urbanopt/modelica-builder.git", branch = "develop" }
geojson-modelica-translator = { git = "https://github.com/urbanopt/geojson-modelica-translator.git", branch = "more-geojson-parsing" }
# dev mode
# modelica-builder = { path = "../modelica-builder", develop = true }
buildingspy = "^5.1.0"
# buildingspy = "^5.1.0"

[tool.poetry.group.dev.dependencies]
pre-commit = "^3.7.0"
autopep8 = "~2.0"
coveralls = "~3.3"
mypy = "~1.6"
pytest = "~7.4"
pytest-cov = "~4.1"

[build-system]
requires = ["poetry-core"]
requires = ["poetry-core>=2.0.1"]
build-backend = "poetry.core.masonry.api"

# TODO: add in other Ruff configs from CBL or GMT
4 changes: 2 additions & 2 deletions tests/test_geojson.py
@@ -1,7 +1,7 @@
import unittest
from pathlib import Path

from urbanopt_des.urbanopt_geojson import URBANoptGeoJSON
from urbanopt_des.urbanopt_geojson import DESGeoJSON
Contributor (Author) commented:

I renamed the uo-des geojson parsing class so it does not conflict with the one from the GMT. Should it be different? Happy to change the name to something else if that makes more sense.

Member commented:

I think we should move all the methods out of DESGeoJSON... looks like the only one remaining is create_aggregated_representation. Can we move that over to GMT's GeoJSON?



class GeoJsonTest(unittest.TestCase):
@@ -12,7 +12,7 @@ def setUp(self):
def test_load_geojson(self):
"""Simple test to make sure we can load the geojson file"""
filename = self.data_dir / "nrel_campus.json"
geojson = URBANoptGeoJSON(filename)
geojson = DESGeoJSON(filename)

assert "Outdoor Test Facility" in geojson.get_building_names()
assert "Research Support Facility" in geojson.get_building_names()
15 changes: 7 additions & 8 deletions urbanopt_des/modelica_results.py
@@ -1,7 +1,6 @@
import json
from datetime import datetime, timedelta
from pathlib import Path
from typing import Union

import numpy as np
import pandas as pd
@@ -217,18 +216,18 @@ def retrieve_variable_data(self, variable_name: str, len_of_time: int, default_v

def resample_and_convert_to_df(
self,
building_ids: Union[list[str], None] = None,
other_vars: Union[list[str], None] = None,
building_ids: list[str] | None = None,
other_vars: list[str] | None = None,
year_of_data: int = 2017,
) -> None:
"""The Modelica data (self.modelica_data) are stored in a Reader object and the timesteps are non ideal for comparison across models. The method handles
a very specific set of variables which are extracted from the Reader object. After the data are stored in a DataFrame with the correct timesteps and units,
then the data will be resampled to 5min, 15min, and 60min.

Args:
building_ids (Union[list[str], None], optional): Name of the buildings to process out of the Modelica data. Defaults to None.
other_vars (Union[list[str], None], optional): Other variables to extract and store in the dataframe. Defaults to None.
year_of_data (int, optional): Year of the data, should match the URBANopt/OpenStudio/EnergyPlus value and correct starting day of week. Defaults to 2017.
building_ids (list[str] | None): Name of the buildings to process out of the Modelica data. Defaults to None.
other_vars (list[str] | None): Other variables to extract and store in the dataframe. Defaults to None.
year_of_data (int): Year of the data, should match the URBANopt/OpenStudio/EnergyPlus value and correct starting day of week. Defaults to 2017.

Raises:
Exception: errors
@@ -480,7 +479,7 @@ def resample_and_convert_to_df(

def combine_with_openstudio_results(
self,
building_ids: Union[list[str], None],
building_ids: list[str] | None,
openstudio_df: pd.DataFrame,
openstudio_df_15: pd.DataFrame,
) -> None:
@@ -489,7 +488,7 @@ def combine_with_openstudio_results(
HVAC related.

Args:
building_ids (Union[list[str], None]): Name of the buildings
building_ids (list[str] | None): Name of the buildings
openstudio_df (pd.DataFrame): dataframe of URBANopt/OpenStudio hourly results
openstudio_df_15 (pd.DataFrame): dataframe of URBANopt/OpenStudio 15min results
Returns:
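
The Union-to-pipe rewrites in this file use PEP 604 union syntax, which needs no typing import on Python 3.10+ and matches the python = ">=3.10,<3.13" constraint in pyproject.toml. A minimal sketch of the pattern, with an illustrative signature:

# Python 3.10+: builtin union syntax replaces typing.Union / typing.Optional,
# so "from typing import Union" can be dropped entirely.
def resample(building_ids: list[str] | None = None) -> None:
    if building_ids is None:
        building_ids = []  # illustrative handling of the None default
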
15 changes: 7 additions & 8 deletions urbanopt_des/urbanopt_analysis.py
@@ -4,13 +4,12 @@
import json
import math
from pathlib import Path
from typing import Tuple, Union

import pandas as pd

from .emissions import HourlyEmissionsData
from .modelica_results import ModelicaResults
from .urbanopt_geojson import URBANoptGeoJSON
from .urbanopt_geojson import DESGeoJSON
from .urbanopt_results import URBANoptResults


@@ -35,7 +34,7 @@ def __init__(self, geojson_file: Path, analysis_dir: Path, year_of_data: int = 2
"""
self.geojson_file = geojson_file
if geojson_file.exists():
self.geojson = URBANoptGeoJSON(geojson_file)
self.geojson = DESGeoJSON(geojson_file)
else:
raise Exception(f"GeoJSON file does not exist: {geojson_file}")

@@ -342,8 +341,8 @@ def resample_actual_data(self) -> None:

def resample_and_convert_modelica_results(
self,
building_ids: Union[list[str], None] = None,
other_vars: Union[list[str], None] = None,
building_ids: list[str] | None = None,
other_vars: list[str] | None = None,
) -> None:
"""Run the resample and convert method for each of the analyses in the modelica object

@@ -756,7 +755,7 @@ def update_geojson_from_seed_data(self, **kwargs) -> dict:
}

new_dict = None
# load the GeoJSON file as a dictionary, NOT an URBANoptGeoJSON object.
# load the GeoJSON file as a dictionary, NOT an DESGeoJSON object.
with open(self.geojson_file) as f:
geojson = json.load(f)
# insert project dict and move to after the type object
@@ -1168,7 +1167,7 @@ def create_summary_results(self) -> None:
return True

@classmethod
def get_list_of_valid_result_folders(cls, root_analysis_path: Path) -> Tuple[dict, dict]:
def get_list_of_valid_result_folders(cls, root_analysis_path: Path) -> (dict, dict):
"""Parse through the root_analysis_path and return a dict of valid
result folders that can be loaded and processed. Also return dict of
folders that have simulation errors or empty results
@@ -1177,7 +1176,7 @@ def get_list_of_valid_result_folders(cls, root_analysis_path: Path) -> Tuple[dic
root_analysis_path (Path): Analysis folder to analyze.

Returns:
Tuple[list, list]: Tuple of lists, first is a dict of valid results, second list is bad or empty results
(dict, dict): Tuple of dicts, first is a dict of valid results, second is bad or empty results
"""
results = {}
bad_or_empty_results = {}
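
A hedged usage sketch of the classmethod whose return annotation changes above. The enclosing class name URBANoptAnalysis is an assumption (it is not visible in this hunk), and the analysis path is illustrative:

from pathlib import Path

from urbanopt_des.urbanopt_analysis import URBANoptAnalysis  # class name assumed

# Per the docstring: the first dict holds valid result folders, the second holds
# folders with simulation errors or empty results.
valid, bad_or_empty = URBANoptAnalysis.get_list_of_valid_result_folders(Path("analysis_root"))
for name in valid:
    print(f"will process: {name}")
for name in bad_or_empty:
    print(f"skipping: {name}")
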
143 changes: 3 additions & 140 deletions urbanopt_des/urbanopt_geojson.py
@@ -1,143 +1,15 @@
import json
import tempfile
from pathlib import Path
from typing import Union

from geojson_modelica_translator.geojson.urbanopt_geojson import UrbanOptGeoJson
from geopandas import GeoDataFrame
from shapely.geometry import box


class URBANoptGeoJSON:
class DESGeoJSON(UrbanOptGeoJson):
def __init__(self, filename: Path):
self._filename = filename
self.data = None

# read in the JSON file and store it in data
with open(filename) as f:
self.data = json.load(f)

def get_building_paths(self, scenario_name: str) -> list[Path]:
"""Return a list of Path objects for the building GeoJSON files"""
result = []
for feature in self.data["features"]:
if feature["properties"]["type"] == "Building":
building_path = self._filename.parent / "run" / scenario_name / feature["properties"]["id"]
result.append(building_path)
# result.append(Path(feature["properties"]["file"]))

# verify that the paths exist
for path in result:
if not path.exists():
raise FileNotFoundError(f"File not found: {path}")

return result

def get_building_ids(self) -> list:
"""Return a list of building names"""
result = []
for feature in self.data["features"]:
if "type" in feature["properties"] and feature["properties"]["type"] == "Building":
result.append(feature["properties"]["id"])
elif "name" in feature["properties"] and feature["properties"]["name"] == "Site Origin":
pass
else:
# need to implement a reasonable logger.
pass
# print(f"Feature does not have a type Building: {feature}")
# print("Did you forget to call the `update_geojson_from_seed_data` method?")

return result

def get_building_names(self) -> list:
"""Return a list of building names. Typically this field is only used for visual display name only."""
result = []
for feature in self.data["features"]:
if feature["properties"]["type"] == "Building":
result.append(feature["properties"]["name"])

return result

def get_buildings(self, ids: Union[list[str], None] = None) -> list:
"""Return a list of all the properties of type Building"""
result = []
for feature in self.data["features"]:
if feature["properties"]["type"] == "Building" and (ids is None or feature["properties"]["id"] in ids):
# TODO: eventually add a list of building ids to keep, for now it
# will be all buildings.
result.append(feature)

return result

def get_building_properties_by_id(self, building_id: str) -> dict:
"""Get the list of building ids in the GeoJSON file. The Building id is what
is used in URBANopt as the identifier. It is common that this is used to name
the building, more than the GeoJSON's building name field.

Args:
building_id (str): building id, this is the property.id values in the geojson's feature

Returns:
dict: building properties
"""
result = {}
for feature in self.data["features"]:
if feature["properties"]["type"] == "Building" and feature["properties"]["id"] == building_id:
result = feature["properties"]

return result

def get_meters_for_building(self, building_id: str) -> list:
"""Return a list of meters for the building_id"""
result = []
for feature in self.data["features"]:
if feature["properties"]["type"] == "Building" and feature["properties"]["id"] == building_id:
for meter in feature["properties"].get("meters", []):
result.append(meter["type"])

return result

def get_meter_readings_for_building(self, building_id: str, meter_type: str) -> list:
"""Return a list of meter readings for the building_id"""
result = []
for feature in self.data["features"]:
if feature["properties"]["type"] == "Building" and feature["properties"]["id"] == building_id:
for meter in feature["properties"].get("meters", []):
if meter["type"] == meter_type:
result = meter["readings"]

return result

def get_monthly_readings(self, building_id: str, meter_type: str) -> list:
"""Return a list of monthly electricity consumption for the building_id"""
result = []
for feature in self.data["features"]:
if feature["properties"]["type"] == "Building" and feature["properties"]["id"] == building_id:
result = feature["properties"]["monthly_electricity"]

return result

def set_property_on_building_id(self, building_id: str, property_name: str, property_value: str, overwrite=True) -> None:
"""Set a property on a building_id"""
for feature in self.data["features"]:
if (
feature["properties"]["type"] == "Building"
and feature["properties"]["id"] == building_id
and (overwrite or property_name not in feature["properties"])
):
feature["properties"][property_name] = property_value

def get_property_on_building_id(self, building_id: str, property_name: str) -> str:
"""Get a property on a building_id"""
for feature in self.data["features"]:
if feature["properties"]["type"] == "Building" and feature["properties"]["id"] == building_id:
return feature["properties"].get(property_name, None)

def get_site_lat_lon(self) -> tuple:
"""Return the site's latitude and longitude"""
for feature in self.data["features"]:
if feature["properties"]["name"] == "Site Origin":
# reverse the order of the coordinates
return feature["geometry"]["coordinates"][::-1]
super().__init__(filename)

def create_aggregated_representation(self, building_names: list[str]) -> None:
"""Go through the GeoJSON file and if it is of type Building, then aggregate the characteristics.
@@ -209,12 +81,3 @@ def create_aggregated_representation(self, building_names: list[str]) -> None:
gdf_2["project"] = project_data

return gdf_2

def save(self) -> None:
"""Save the GeoJSON file"""
self.save_as(self._filename)

def save_as(self, filename: Path) -> None:
"""Save the GeoJSON file"""
with open(filename, "w") as f:
json.dump(self.data, f, indent=2)
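
To summarize this file's change: the accessors deleted above are expected to come from the GMT base class UrbanOptGeoJson, leaving create_aggregated_representation as the only local method. A minimal sketch with an illustrative GeoJSON path; note that the visible body builds and returns a GeoDataFrame even though the signature is annotated -> None:

from pathlib import Path

from urbanopt_des.urbanopt_geojson import DESGeoJSON

geo = DESGeoJSON(Path("example.json"))  # illustrative path

# Accessors such as get_building_names() are now provided by the GMT parent class
building_names = geo.get_building_names()

# The one method kept locally; per the visible body it returns a GeoDataFrame
aggregate = geo.create_aggregated_representation(building_names)
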