Commit

reduce complexity
marcosmc committed Dec 19, 2024
1 parent 27ab07d commit b2998f7
Showing 3 changed files with 75 additions and 66 deletions.
33 changes: 19 additions & 14 deletions src/dgcv/files/manage_files.py
@@ -23,6 +23,21 @@
 ProducerFiles = namedtuple("ProducerFiles", ["producer_dyd", "producer_par"])
 
 
+def _copy_files(
+    path: Path,
+    target_path: Path,
+):
+    pattern = re.compile(r".*")
+    exclude_pattern1 = re.compile(r".*__init__.py")
+    exclude_pattern2 = re.compile(r".*__pycache__*")
+    for file in path.iterdir():
+        matching = pattern.match(str(file))
+        matching1 = exclude_pattern1.match(str(file))
+        matching2 = exclude_pattern2.match(str(file))
+        if matching and not matching1 and not matching2:
+            shutil.copy(file, target_path / (file.stem + file.suffix.lower()))
+
+
 def create_config_file(config_file: Path, target_file: Path) -> None:
     """Create a commented config file in target from the input config file.
@@ -202,23 +217,13 @@ def copy_base_case_files(
     target_path: Path
         Target path
     """
+
+    _copy_files(model_files.model_path, target_path)
+    _copy_files(model_files.omega_path, target_path)
+
     pattern = re.compile(r".*")
     exclude_pattern1 = re.compile(r".*__init__.py")
     exclude_pattern2 = re.compile(r".*__pycache__*")
-    for file in model_files.model_path.iterdir():
-        matching = pattern.match(str(file))
-        matching1 = exclude_pattern1.match(str(file))
-        matching2 = exclude_pattern2.match(str(file))
-        if matching and not matching1 and not matching2:
-            shutil.copy(file, target_path / (file.stem + file.suffix.lower()))
-
-    for file in model_files.omega_path.iterdir():
-        matching = pattern.match(str(file))
-        matching1 = exclude_pattern1.match(str(file))
-        matching2 = exclude_pattern2.match(str(file))
-        if matching and not matching1 and not matching2:
-            shutil.copy(file, target_path / (file.stem + file.suffix.lower()))
-
     exclude_pattern3 = re.compile(r".*.[iI][nN][iI]$")
     exclude_pattern4 = re.compile(r".*.[cC][rR][vV]$")
     for file in model_files.pcs_path.iterdir():
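For reference, a minimal usage sketch of the extracted helper (the paths below are hypothetical and introduced only for illustration; the helper copies every file in the source directory except __init__.py and __pycache__ entries, lower-casing each file's suffix):

    from pathlib import Path

    from dgcv.files.manage_files import _copy_files  # private helper added in this commit

    # Hypothetical source and target directories, for illustration only.
    source_dir = Path("/tmp/dgcv_demo/model")
    target_dir = Path("/tmp/dgcv_demo/case")
    target_dir.mkdir(parents=True, exist_ok=True)

    # Copies every file in source_dir except __init__.py and __pycache__ entries,
    # writing each one to target_dir with a lower-cased suffix (e.g. .DYD -> .dyd).
    _copy_files(source_dir, target_dir)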
41 changes: 26 additions & 15 deletions src/dgcv/files/producer_curves.py
@@ -261,39 +261,50 @@ def _get_performance_templates(
     return producer_curves_txt, curves_names_txt
 
 
-def _get_model_templates(
+def _get_xmfrs_models(
     model_path: Path,
-    template: str,
-):
+    zone: str,
+) -> list:
     producer_dyd_tree = etree.parse(
-        model_path / "Zone3" / "Producer.dyd", etree.XMLParser(remove_blank_text=True)
+        model_path / zone / "Producer.dyd", etree.XMLParser(remove_blank_text=True)
     )
     producer_dyd_root = producer_dyd_tree.getroot()
     xfmrs = []
     for xfmr in find_bbmodel_by_type(producer_dyd_root, "Transformer"):
         if "StepUp_Xfmr" in xfmr.get("id"):
             xfmrs.append(xfmr)
 
-    z3_gen_ppms = []
-    if template == "model_PPM":
-        for model in dynawo_translator.get_power_park_models():
-            z3_gen_ppms.extend(find_bbmodel_by_type(producer_dyd_root, model))
-    elif template == "model_BESS":
-        for model in dynawo_translator.get_storage_models():
-            z3_gen_ppms.extend(find_bbmodel_by_type(producer_dyd_root, model))
+    return xfmrs
 
+
+def _get_generator_models(
+    model_path: Path,
+    template: str,
+    zone: str,
+) -> list:
     producer_dyd_tree = etree.parse(
-        model_path / "Zone1" / "Producer.dyd", etree.XMLParser(remove_blank_text=True)
+        model_path / zone / "Producer.dyd", etree.XMLParser(remove_blank_text=True)
     )
     producer_dyd_root = producer_dyd_tree.getroot()
 
-    z1_gen_ppms = []
+    gen_ppms = []
     if template == "model_PPM":
         for model in dynawo_translator.get_power_park_models():
-            z1_gen_ppms.extend(find_bbmodel_by_type(producer_dyd_root, model))
+            gen_ppms.extend(find_bbmodel_by_type(producer_dyd_root, model))
     elif template == "model_BESS":
         for model in dynawo_translator.get_storage_models():
-            z1_gen_ppms.extend(find_bbmodel_by_type(producer_dyd_root, model))
+            gen_ppms.extend(find_bbmodel_by_type(producer_dyd_root, model))
 
+    return gen_ppms
+
+
+def _get_model_templates(
+    model_path: Path,
+    template: str,
+):
+    xfmrs = _get_xmfrs_models(model_path, "Zone3")
+    z3_gen_ppms = _get_generator_models(model_path, template, "Zone3")
+    z1_gen_ppms = _get_generator_models(model_path, template, "Zone1")
+
     producer_curves_txt = _get_model_file_template()
     curves_names_txt = _get_model_curves_template(xfmrs, z1_gen_ppms, z3_gen_ppms)
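For illustration, a hedged sketch of how the two new helpers compose (the model_path value is an assumption; it is expected to contain Zone1/Producer.dyd and Zone3/Producer.dyd, and each helper re-parses the Producer.dyd of the requested zone):

    from pathlib import Path

    from dgcv.files.producer_curves import _get_generator_models, _get_xmfrs_models

    # Hypothetical producer model directory containing Zone1/ and Zone3/ subfolders.
    model_path = Path("/tmp/dgcv_demo/producer_model")

    # Step-up transformers are only collected from the Zone3 model.
    xfmrs = _get_xmfrs_models(model_path, "Zone3")

    # Generator models (power-park or storage, depending on the template) per zone.
    z3_gen_ppms = _get_generator_models(model_path, "model_PPM", "Zone3")
    z1_gen_ppms = _get_generator_models(model_path, "model_PPM", "Zone1")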
67 changes: 30 additions & 37 deletions src/dgcv/model/operating_condition.py
@@ -17,6 +17,7 @@
 from dgcv.curves.manager import CurvesManager
 from dgcv.files import manage_files
 from dgcv.logging.logging import dgcv_logging
+import pandas as pd
 
 
 class OperatingCondition:
@@ -154,6 +155,29 @@ def __validate(

         return results
 
+    def _check_curves(
+        self, curves: pd.DataFrame, curves_name: str, review_curves_set: bool
+    ) -> bool:
+        measurement_names = self._validator.get_measurement_names()
+        has_curves = True
+        if review_curves_set:
+            if curves.empty:
+                dgcv_logging.get_logger("Operating Condition").warning(
+                    f"Test without {curves_name} curves file"
+                )
+                has_curves = False
+            else:
+                missed_curves = []
+                for key in measurement_names:
+                    if key not in curves:
+                        missed_curves.append(key)
+                        has_curves = False
+                if not has_curves:
+                    dgcv_logging.get_logger("Operating Condition").warning(
+                        f"Test without {curves_name} curve for keys {missed_curves}"
+                    )
+        return has_curves
+
     def validate(
         self,
         pcs_bm_name: str,
@@ -262,46 +286,15 @@ def has_required_curves(
             bm_name,
         )
 
-        measurement_names = self._validator.get_measurement_names()
-
         # If the tool has the model, it is assumed that the simulated curves are always available,
         # if they are not available it is due to a failure in the simulation, this event is
         # handled differently.
-        sim_curves = True
-        if not self._producer.is_dynawo_model():
-            if curves["calculated"].empty:
-                dgcv_logging.get_logger("Operating Condition").warning(
-                    "Test without producer curves file"
-                )
-                sim_curves = False
-            else:
-                missed_curves = []
-                for key in measurement_names:
-                    if key not in curves["calculated"]:
-                        missed_curves.append(key)
-                        sim_curves = False
-                if not sim_curves:
-                    dgcv_logging.get_logger("Operating Condition").warning(
-                        f"Test without producer curve for keys {missed_curves}"
-                    )
-
-        ref_curves = True
-        if self.__has_reference_curves():
-            if curves["reference"].empty:
-                dgcv_logging.get_logger("Operating Condition").warning(
-                    "Test without reference curves file"
-                )
-                ref_curves = False
-            else:
-                missed_curves = []
-                for key in measurement_names:
-                    if key not in curves["reference"]:
-                        missed_curves.append(key)
-                        ref_curves = False
-                if not ref_curves:
-                    dgcv_logging.get_logger("Operating Condition").warning(
-                        f"Test without reference curve for keys {missed_curves}"
-                    )
+        sim_curves = self._check_curves(
+            curves["calculated"], "producer", not self._producer.is_dynawo_model()
+        )
+        ref_curves = self._check_curves(
+            curves["reference"], "reference", self.__has_reference_curves()
+        )
 
         if sim_curves and ref_curves:
             has_curves = 0
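To see the consolidated check in isolation, here is a standalone sketch that mirrors the new _check_curves logic outside the class (the column names and the plain print logging are assumptions for the example; the real method obtains the expected keys from self._validator.get_measurement_names() and logs through dgcv_logging):

    import pandas as pd

    def check_curves(
        curves: pd.DataFrame, curves_name: str, review_curves_set: bool, measurement_names: list
    ) -> bool:
        # Mirrors OperatingCondition._check_curves: the set fails when it is reviewed
        # and is either empty or missing one of the expected measurement columns.
        has_curves = True
        if review_curves_set:
            if curves.empty:
                print(f"Test without {curves_name} curves file")
                has_curves = False
            else:
                missed_curves = [key for key in measurement_names if key not in curves]
                if missed_curves:
                    print(f"Test without {curves_name} curve for keys {missed_curves}")
                    has_curves = False
        return has_curves

    # Example: a reference curves set missing one expected column -> returns False.
    curves = pd.DataFrame({"time": [0.0, 0.1], "BusPDR_BUS_ActivePower": [1.0, 1.0]})
    print(check_curves(curves, "reference", True, ["BusPDR_BUS_ActivePower", "BusPDR_BUS_Voltage"]))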
