Merge pull request #207 from bknueven/model_data
Making ModelData.__init__ more useful
michaelbynum authored Feb 19, 2021
2 parents 49fcdf9 + 97de5eb commit 614ba40
Showing 4 changed files with 84 additions and 55 deletions.
36 changes: 36 additions & 0 deletions egret/data/data_utils.py
@@ -91,3 +91,39 @@ def _get_sub_list_indicies(master_list, sub_list):
raise Exception("Could not find element {} in the list {}".format(sub_list[sub_list_pos], master_list))
return sub_index_list

def _read_from_file(filename, file_type):
valid_file_types = ['json', 'json.gz', 'm', 'dat', 'pglib-uc']
if file_type is not None and file_type not in valid_file_types:
raise Exception("Unrecognized file_type {}. Valid file types are {}".format(file_type, valid_file_types))
elif file_type is None:
## identify the file type
if filename[-5:] == '.json':
file_type = 'json'
elif filename[-8:] == '.json.gz':
file_type = 'json.gz'
elif filename[-2:] == '.m':
file_type = 'm'
elif filename[-4:] == '.dat':
file_type = 'dat'
else:
raise Exception("Could not infer type of file {} from its extension!".format(filename))

if file_type == 'json':
import json
with open(filename) as f:
data = json.load(f)
elif file_type == 'json.gz':
import json
import gzip
with gzip.open(filename, 'rt') as f:
data = json.load(f)
elif file_type == 'm':
from egret.parsers.matpower_parser import create_model_data_dict
data = create_model_data_dict(filename)
elif file_type == 'dat':
from egret.parsers.prescient_dat_parser import create_model_data_dict
data = create_model_data_dict(filename)
elif file_type == 'pglib-uc':
from egret.parsers.pglib_uc_parser import create_model_data_dict
data = create_model_data_dict(filename)
return data
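
For orientation (not part of the diff): a minimal sketch of how the new helper could be exercised directly, assuming egret is importable and that the paths below point at real files.

from egret.data import data_utils as du

# An explicit file_type skips extension inference entirely.
data = du._read_from_file('case.json', file_type='json')       # 'case.json' is a hypothetical path

# With file_type=None the type is inferred from the extension
# ('.json', '.json.gz', '.m', or '.dat').
data = du._read_from_file('case.m', file_type=None)             # 'case.m' is a hypothetical path

# pglib-uc instances use a plain .json extension, so that type is never
# inferred and must be requested explicitly.
data = du._read_from_file('pglib_uc_case.json', file_type='pglib-uc')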
68 changes: 23 additions & 45 deletions egret/data/model_data.py
@@ -140,20 +140,35 @@ def empty_model_data_dict():
"""
return {"elements": dict(), "system": dict()}

def __init__(self, data=None):
def __init__(self, source=None, file_type=None):
"""
Create a new ModelData object to wrap a model_data dictionary with some helper methods.
Parameters
----------
data : dict or None
An initial model_data dictionary if it is available, otherwise, a new model_data
dictionary is created.
source : dict, str, ModelData, or None (optional)
If dict, an initial model_data dictionary.
If str, a path to a file which is parsable by EGRET.
If ModelData, the original is copied into the new ModelData.
If None, a blank model_data dictionary is created.
file_type : str or None (optional)
If source is a str, this specifies the file type.
Valid values are 'json', 'json.gz' for json-ed EGRET ModelData
objects, 'm' for MATPOWER files, 'dat' for Prescient data files, and
'pglib-uc' for json files from pglib-uc. If None, the file type is
inferred from the extension.
"""
if data:
self.data = data
else:
if isinstance(source, dict):
self.data = source
elif isinstance(source, str):
self.data = du._read_from_file(source, file_type)
elif isinstance(source, ModelData):
self.data = source.clone().data
elif source is None:
self.data = ModelData.empty_model_data_dict()
else:
raise RuntimeError("Unrecognized source for ModelData")

@classmethod
def read(cls, filename, file_type=None):
@@ -170,44 +185,7 @@ def read(cls, filename, file_type=None):
'pglib-uc' for json files from pglib-uc. If None, the file type is inferred from the
extension.
"""
valid_file_types = ['json', 'json.gz', 'm', 'dat', 'pglib-uc']
if file_type is not None and file_type not in valid_file_types:
raise Exception("Unrecognized file_type {}. Valid file types are {}".format(file_type, valid_file_types))
elif file_type is None:
## identify the file type
if filename[-5:] == '.json':
file_type = 'json'
elif filename[-8:] == '.json.gz':
file_type = 'json.gz'
elif filename[-2:] == '.m':
file_type = 'm'
elif filename[-4:] == '.dat':
file_type = 'dat'
else:
raise Exception("Could not infer type of file {} from its extension!".format(filename))

if file_type == 'json':
import json
with open(filename) as f:
data = json.load(f)
elif file_type == 'json.gz':
import json
import gzip
with gzip.open(filename, 'rt') as f:
data = json.load(f)
elif file_type == 'm':
from egret.parsers.matpower_parser import create_model_data_dict
data = create_model_data_dict(filename)
elif file_type == 'dat':
from egret.parsers.prescient_dat_parser import create_model_data_dict
data = create_model_data_dict(filename)
elif file_type == 'pglib-uc':
from egret.parsers.pglib_uc_parser import create_model_data_dict
data = create_model_data_dict(filename)

logger.debug("ModelData read from {}".format(filename))

return cls(data=data)
return cls(source=du._read_from_file(filename, file_type))

def elements(self, element_type, **kwargs):
"""
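Taken together, the new constructor accepts any of the four source types described in the docstring above, and ModelData.read now simply delegates to the same helper. A minimal usage sketch (not part of the commit; the import path follows the repository layout and the file name is hypothetical):

from egret.data.model_data import ModelData

md_blank = ModelData()                                  # fresh, empty model_data dict
md_dict  = ModelData({'elements': {}, 'system': {}})    # wrap an existing dict as-is
md_file  = ModelData('case.json')                       # parse a file; type inferred from the extension
md_copy  = ModelData(md_file)                           # copy of another ModelData via clone()

assert md_copy.data == md_file.data
assert md_copy.data is not md_file.data                 # a copy, not the same dict object
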
15 changes: 15 additions & 0 deletions egret/data/tests/test_model_data.py
@@ -226,3 +226,18 @@ def test_json_gz_read_write():
md_read = ModelData.read('testdata.json.gz')

assert md.data == md_read.data

def test_init_read():
md = ModelData(testdata)
md.write('testdata.json')

md_read = ModelData('testdata.json')

assert md.data == md_read.data

def test_init_clone():
md = ModelData(testdata)
md_clone = ModelData(md)

assert md.data == md_clone.data
assert id(md.data) != id(md_clone.data)
20 changes: 10 additions & 10 deletions egret/models/tests/test_unit_commitment.py
@@ -159,11 +159,11 @@ def test_uc_runner():
test_names = ['tiny_uc_{}'.format(i) for i in range(1,10+1)]
for test_name in test_names:
input_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'.json')
md_in = ModelData.read(input_json_file_name)
md_in = ModelData(input_json_file_name)
md_results = solve_unit_commitment(md_in, solver=test_solver, mipgap=0.0)

reference_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'_results.json')
md_reference = ModelData.read(reference_json_file_name)
md_reference = ModelData(reference_json_file_name)
assert math.isclose(md_reference.data['system']['total_cost'], md_results.data['system']['total_cost'], rel_tol=rel_tol)

def test_uc_transmission_models():
@@ -176,39 +176,39 @@ def test_uc_transmission_models():

for test_name in test_names:
input_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'.json')
md_in = ModelData.read(input_json_file_name)
md_in = ModelData(input_json_file_name)
for tc in tc_networks:
for kwargs in tc_networks[tc]:

md_results = solve_unit_commitment(md_in, solver=test_solver, mipgap=0.0, uc_model_generator = _make_get_dcopf_uc_model(tc), **kwargs)
reference_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'_results.json')
md_reference = ModelData.read(reference_json_file_name)
md_reference = ModelData(reference_json_file_name)
assert math.isclose(md_reference.data['system']['total_cost'], md_results.data['system']['total_cost'], rel_tol=rel_tol)

## test copperplate
test_name = 'tiny_uc_1'
md_in = ModelData.read(os.path.join(current_dir, 'uc_test_instances', 'tiny_uc_tc_2.json'))
md_in = ModelData(os.path.join(current_dir, 'uc_test_instances', 'tiny_uc_tc_2.json'))
md_results = solve_unit_commitment(md_in, solver=test_solver, mipgap=0.0, uc_model_generator = _make_get_dcopf_uc_model(no_network))
reference_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'_results.json')
md_reference = ModelData.read(reference_json_file_name)
md_reference = ModelData(reference_json_file_name)
assert math.isclose(md_reference.data['system']['total_cost'], md_results.data['system']['total_cost'], rel_tol=rel_tol)

def test_uc_relaxation():
test_name = 'tiny_uc_tc'
input_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'.json')

md_in = ModelData(json.load(open(input_json_file_name, 'r')))
md_in = ModelData(input_json_file_name)

md_results = solve_unit_commitment(md_in, solver=test_solver, relaxed=True)
reference_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'_relaxed_results.json')
md_reference = ModelData(json.load(open(reference_json_file_name, 'r')))
md_reference = ModelData(reference_json_file_name)
assert math.isclose(md_reference.data['system']['total_cost'], md_results.data['system']['total_cost'], rel_tol=rel_tol)

def test_uc_lazy_ptdf_thresholding():
test_name = 'tiny_uc_tc'
input_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'.json')

md_in = ModelData(json.load(open(input_json_file_name, 'r')))
md_in = ModelData(input_json_file_name)

tc_sol_fn = test_name + '_relaxed_results.json'
ntc_sol_fn = test_name + '_relaxed_unconstrained_results.json'
@@ -239,7 +239,7 @@ def test_uc_ptdf_termination():
test_name = 'tiny_uc_tc_3'
input_json_file_name = os.path.join(current_dir, 'uc_test_instances', test_name+'.json')

md_in = ModelData(json.load(open(input_json_file_name, 'r')))
md_in = ModelData(input_json_file_name)

kwargs = {'ptdf_options':{'lazy': True, 'rel_ptdf_tol':10.}}
md_results, results = solve_unit_commitment(md_in, solver=test_solver, relaxed=True, return_results=True, **kwargs)
