Skip to content

Commit

Permalink
fix(thermal_map): Add support for NumPy results
Browse files Browse the repository at this point in the history
  • Loading branch information
mikkelkp authored and chriswmackey committed Sep 9, 2024
1 parent 98d0c4f commit 8d9b3da
Show file tree
Hide file tree
Showing 8 changed files with 69 additions and 97 deletions.
Binary file modified honeybee_grasshopper_energy/icon/HB Read Environment Matrix.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified honeybee_grasshopper_energy/icon/HB Read Thermal Matrix.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"version": "1.8.0",
"version": "1.8.1",
"nickname": "EnvMtx",
"outputs": [
[
Expand Down Expand Up @@ -36,7 +36,7 @@
}
],
"subcategory": "7 :: Thermal Map",
"code": "\nimport subprocess\nimport os\nimport shutil\nimport json\n\ntry:\n from ladybug.datatype.temperature import AirTemperature, \\\n MeanRadiantTemperature, RadiantTemperature\n from ladybug.datatype.temperaturedelta import RadiantTemperatureDelta\n from ladybug.datatype.fraction import RelativeHumidity\n from ladybug.header import Header\n from ladybug.datacollection import HourlyContinuousCollection, \\\n HourlyDiscontinuousCollection\n from ladybug.futil import csv_to_num_matrix\nexcept ImportError as e:\n raise ImportError('\\nFailed to import ladybug:\\n\\t{}'.format(e))\n\ntry:\n from honeybee.config import folders\nexcept ImportError as e:\n raise ImportError('\\nFailed to import honeybee:\\n\\t{}'.format(e))\n\ntry:\n from ladybug_{{cad}}.{{plugin}} import all_required_inputs, objectify_output\nexcept ImportError as e:\n raise ImportError('\\nFailed to import ladybug_{{cad}}:\\n\\t{}'.format(e))\n\nENV_CONDS_MAP = {\n '0': 'mrt',\n 'mrt': 'mrt',\n 'mean radiant temperature': 'mrt',\n '1': 'air_temperature',\n 'air temperature': 'air_temperature',\n '2': 'longwave_mrt',\n 'longwave mrt': 'longwave_mrt',\n '3': 'shortwave_mrt',\n 'shortwave mrt': 'shortwave_mrt',\n 'shortwave mrt delta': 'shortwave_mrt',\n '4': 'rel_humidity',\n 'relative humidity': 'rel_humidity'\n}\n\n\ndef load_matrix(comf_result):\n \"\"\"Load a matrix of data into an object that can be output in {{Plugin}}.\n\n Args:\n comf_result: Path to a folder with CSV data to be loaded into {{Plugin}}.\n \"\"\"\n # parse the result_info.json into a data collection header\n with open(os.path.join(comf_result, 'results_info.json')) as json_file:\n data_header = Header.from_dict(json.load(json_file))\n a_per = data_header.analysis_period\n continuous = True if a_per.st_hour == 0 and a_per.end_hour == 23 else False\n if not continuous:\n dates = a_per.datetimes\n\n # parse the grids_info.json with the correct order of the grid files\n with open(os.path.join(comf_result, 'grids_info.json')) as 
json_file:\n grid_list = json.load(json_file)\n\n # loop through the grid CSV files, parse their results, and build data collections\n comf_matrix = []\n for grid in grid_list:\n grid_name = grid['full_id'] if 'full_id' in grid else 'id'\n metadata = {'grid': grid_name}\n grid_file = os.path.join(comf_result, '{}.csv'.format(grid_name))\n data_matrix = csv_to_num_matrix(grid_file)\n grid_data = []\n for i, row in enumerate(data_matrix):\n header = data_header.duplicate()\n header.metadata = metadata.copy()\n header.metadata['sensor_index'] = i\n data = HourlyContinuousCollection(header, row) if continuous else \\\n HourlyDiscontinuousCollection(header, row, dates)\n grid_data.append(data)\n comf_matrix.append(grid_data)\n\n # wrap the maptrix into an object so that it does not slow the {{Plugin}} UI\n comf_mtx = objectify_output(\n '{} Matrix'.format(data_header.data_type.name), comf_matrix)\n return comf_mtx\n\n\ndef create_result_header(env_conds, sub_path):\n \"\"\"Create a DataCollection Header for a given metric.\"\"\"\n with open(os.path.join(env_conds, 'results_info.json')) as json_file:\n base_head = Header.from_dict(json.load(json_file))\n if sub_path == 'mrt':\n return Header(MeanRadiantTemperature(), 'C', base_head.analysis_period)\n elif sub_path == 'air_temperature':\n return Header(AirTemperature(), 'C', base_head.analysis_period)\n elif sub_path == 'longwave_mrt':\n return Header(RadiantTemperature(), 'C', base_head.analysis_period)\n elif sub_path == 'shortwave_mrt':\n return Header(RadiantTemperatureDelta(), 'dC', base_head.analysis_period)\n elif sub_path == 'rel_humidity':\n return Header(RelativeHumidity(), '%', base_head.analysis_period)\n\n\ndef sum_matrices(mtxs_1, mtxs_2, dest_dir):\n \"\"\"Sum together matrices of two folders.\"\"\"\n if not os.path.isdir(dest_dir):\n os.makedirs(dest_dir)\n for mtx_file in os.listdir(mtxs_1):\n if mtx_file.endswith('.csv'):\n mtx_file1 = os.path.join(mtxs_1, mtx_file)\n mtx_file2 = os.path.join(mtxs_2, 
mtx_file)\n matrix_1 = csv_to_num_matrix(mtx_file1)\n matrix_2 = csv_to_num_matrix(mtx_file2)\n data = [[d1 + d2 for d1, d2 in zip(r1, r2)]\n for r1, r2 in zip(matrix_1, matrix_2)]\n csv_path = os.path.join(dest_dir, mtx_file)\n with open(csv_path, 'w') as csv_file:\n for dat in data:\n str_data = (str(v) for v in dat)\n csv_file.write(','.join(str_data) + '\\n')\n elif mtx_file == 'grids_info.json':\n shutil.copyfile(\n os.path.join(mtxs_1, mtx_file),\n os.path.join(dest_dir, mtx_file)\n )\n\n\nif all_required_inputs(ghenv.Component) and _load:\n # get the folders and that correspond with the requested metric\n _metric_ = _metric_ if _metric_ is not None else 'mrt'\n try:\n sub_path = ENV_CONDS_MAP[_metric_.lower()]\n except KeyError:\n raise ValueError(\n 'Input metric \"{}\" is not recognized. Choose from: {}'.format(\n _metric_, '\\n'.join(ENV_CONDS_MAP.keys()))\n )\n source_folder = os.path.join(_env_conds, sub_path)\n dest_folder = os.path.join(_env_conds, 'final', sub_path)\n\n # if the results have already been processed, then load them up\n if os.path.isdir(dest_folder):\n comf_mtx = load_matrix(dest_folder)\n else: # otherwise, process them into a load-able format\n # make sure the requested metric is valid for the study\n if sub_path == 'mrt':\n source_folders = [os.path.join(_env_conds, 'longwave_mrt'),\n os.path.join(_env_conds, 'shortwave_mrt')]\n dest_folders = [os.path.join(_env_conds, 'final', 'longwave_mrt'),\n os.path.join(_env_conds, 'final', 'shortwave_mrt')]\n else:\n assert os.path.isdir(source_folder), \\\n 'Metric \"{}\" does not exist for this comfort study.'.format(sub_path)\n source_folders, dest_folders = [source_folder], [dest_folder]\n # restructure the results to align with the sensor grids\n dist_info = os.path.join(_env_conds, '_redist_info.json')\n for src_f, dst_f in zip(source_folders, dest_folders):\n if not os.path.isdir(dst_f):\n os.makedirs(dst_f)\n cmds = [folders.python_exe_path, '-m', 'honeybee_radiance', 'grid',\n 
'merge-folder', src_f, dst_f, 'csv',\n '--dist-info', dist_info]\n shell = True if os.name == 'nt' else False\n custom_env = os.environ.copy()\n custom_env['PYTHONHOME'] = ''\n process = subprocess.Popen(\n cmds, stdout=subprocess.PIPE, shell=shell, env=custom_env)\n stdout = process.communicate()\n grid_info_src = os.path.join(_env_conds, 'grids_info.json')\n grid_info_dst = os.path.join(dst_f, 'grids_info.json')\n shutil.copyfile(grid_info_src, grid_info_dst)\n data_header = create_result_header(_env_conds, os.path.split(dst_f)[-1])\n result_info_path = os.path.join(dst_f, 'results_info.json')\n with open(result_info_path, 'w') as fp:\n json.dump(data_header.to_dict(), fp, indent=4)\n # if MRT was requested, sum together the longwave and shortwave\n if sub_path == 'mrt':\n sum_matrices(dest_folders[0], dest_folders[1], dest_folder)\n data_header = create_result_header(_env_conds, sub_path)\n result_info_path = os.path.join(dest_folder, 'results_info.json')\n with open(result_info_path, 'w') as fp:\n json.dump(data_header.to_dict(), fp, indent=4)\n # load the resulting matrix into {{Plugin}}\n comf_mtx = load_matrix(dest_folder)\n",
"code": "\nimport subprocess\nimport os\nimport shutil\nimport json\n\ntry:\n from ladybug.datatype.temperature import AirTemperature, \\\n MeanRadiantTemperature, RadiantTemperature\n from ladybug.datatype.temperaturedelta import RadiantTemperatureDelta\n from ladybug.datatype.fraction import RelativeHumidity\n from ladybug.header import Header\n from ladybug.datacollection import HourlyContinuousCollection, \\\n HourlyDiscontinuousCollection\n from ladybug.futil import csv_to_num_matrix\nexcept ImportError as e:\n raise ImportError('\\nFailed to import ladybug:\\n\\t{}'.format(e))\n\ntry:\n from honeybee.config import folders\nexcept ImportError as e:\n raise ImportError('\\nFailed to import honeybee:\\n\\t{}'.format(e))\n\ntry:\n from ladybug_{{cad}}.{{plugin}} import all_required_inputs, objectify_output\nexcept ImportError as e:\n raise ImportError('\\nFailed to import ladybug_{{cad}}:\\n\\t{}'.format(e))\n\nENV_CONDS_MAP = {\n '0': 'mrt',\n 'mrt': 'mrt',\n 'mean radiant temperature': 'mrt',\n '1': 'air_temperature',\n 'air temperature': 'air_temperature',\n '2': 'longwave_mrt',\n 'longwave mrt': 'longwave_mrt',\n '3': 'shortwave_mrt',\n 'shortwave mrt': 'shortwave_mrt',\n 'shortwave mrt delta': 'shortwave_mrt',\n '4': 'rel_humidity',\n 'relative humidity': 'rel_humidity'\n}\n\n\ndef load_matrix(comf_result):\n \"\"\"Load a matrix of data into an object that can be output in {{Plugin}}.\n\n Args:\n comf_result: Path to a folder with CSV data to be loaded into {{Plugin}}.\n \"\"\"\n # parse the result_info.json into a data collection header\n with open(os.path.join(comf_result, 'results_info.json')) as json_file:\n data_header = Header.from_dict(json.load(json_file))\n a_per = data_header.analysis_period\n continuous = True if a_per.st_hour == 0 and a_per.end_hour == 23 else False\n if not continuous:\n dates = a_per.datetimes\n\n # parse the grids_info.json with the correct order of the grid files\n with open(os.path.join(comf_result, 'grids_info.json')) as 
json_file:\n grid_list = json.load(json_file)\n\n # loop through the grid CSV files, parse their results, and build data collections\n comf_matrix = []\n for grid in grid_list:\n grid_name = grid['full_id'] if 'full_id' in grid else 'id'\n metadata = {'grid': grid_name}\n grid_file = os.path.join(comf_result, '{}.csv'.format(grid_name))\n data_matrix = csv_to_num_matrix(grid_file)\n grid_data = []\n for i, row in enumerate(data_matrix):\n header = data_header.duplicate()\n header.metadata = metadata.copy()\n header.metadata['sensor_index'] = i\n data = HourlyContinuousCollection(header, row) if continuous else \\\n HourlyDiscontinuousCollection(header, row, dates)\n grid_data.append(data)\n comf_matrix.append(grid_data)\n\n # wrap the maptrix into an object so that it does not slow the {{Plugin}} UI\n comf_mtx = objectify_output(\n '{} Matrix'.format(data_header.data_type.name), comf_matrix)\n return comf_mtx\n\n\nif all_required_inputs(ghenv.Component) and _load:\n # get the folders and that correspond with the requested metric\n _metric_ = _metric_ if _metric_ is not None else 'mrt'\n try:\n sub_path = ENV_CONDS_MAP[_metric_.lower()]\n except KeyError:\n raise ValueError(\n 'Input metric \"{}\" is not recognized. 
Choose from: {}'.format(\n _metric_, '\\n'.join(ENV_CONDS_MAP.keys()))\n )\n source_folder = os.path.join(_env_conds, sub_path)\n dest_folder = os.path.join(_env_conds, 'final', sub_path)\n\n # if the results have already been processed, then load them up\n if os.path.isdir(dest_folder):\n comf_mtx = load_matrix(dest_folder)\n else: # otherwise, process them into a load-able format\n # make sure the requested metric is valid for the study\n if sub_path != 'mrt':\n assert os.path.isdir(source_folder), \\\n 'Metric \"{}\" does not exist for this comfort study.'.format(sub_path)\n cmds = [folders.python_exe_path, '-m', 'ladybug_comfort', 'map',\n 'restructure-env-conditions', _env_conds, dest_folder, sub_path]\n shell = True if os.name == 'nt' else False\n custom_env = os.environ.copy()\n custom_env['PYTHONHOME'] = ''\n process = subprocess.Popen(\n cmds, stdout=subprocess.PIPE, shell=shell, env=custom_env)\n stdout = process.communicate()\n # load the resulting matrix into {{Plugin}}\n comf_mtx = load_matrix(dest_folder)\n",
"category": "HB-Energy",
"name": "HB Read Environment Matrix",
"description": "Read the detailed environmental conditions of a thermal mapping analysis from\nthe env_conds output by a thermal mapping component.\n_\nEnvironmental conditions include raw inputs to the thermal comfort model, such as\nair temperature, MRT, longwave MRT, and shortwave MRT delta.\n-"
Expand Down
4 changes: 2 additions & 2 deletions honeybee_grasshopper_energy/json/HB_Read_Thermal_Matrix.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"version": "1.8.0",
"version": "1.8.1",
"nickname": "ThermalMtx",
"outputs": [
[
Expand Down Expand Up @@ -29,7 +29,7 @@
}
],
"subcategory": "7 :: Thermal Map",
"code": "\nimport os\nimport json\n\ntry:\n from ladybug.header import Header\n from ladybug.datacollection import HourlyContinuousCollection, \\\n HourlyDiscontinuousCollection\n from ladybug.futil import csv_to_num_matrix\nexcept ImportError as e:\n raise ImportError('\\nFailed to import ladybug:\\n\\t{}'.format(e))\n\ntry:\n from ladybug_{{cad}}.{{plugin}} import all_required_inputs, objectify_output\nexcept ImportError as e:\n raise ImportError('\\nFailed to import ladybug_{{cad}}:\\n\\t{}'.format(e))\n\n\nif all_required_inputs(ghenv.Component) and _load:\n # parse the result_info.json into a data collection header\n with open(os.path.join(_comf_result, 'results_info.json')) as json_file:\n data_header = Header.from_dict(json.load(json_file))\n a_per = data_header.analysis_period\n continuous = True if a_per.st_hour == 0 and a_per.end_hour == 23 else False\n if not continuous:\n dates = a_per.datetimes\n\n # parse the grids_info.json with the correct order of the grid files\n with open(os.path.join(_comf_result, 'grids_info.json')) as json_file:\n grid_list = json.load(json_file)\n\n # loop through the grid CSV files, parse their results, and build data collections\n comf_matrix = []\n for grid in grid_list:\n grid_name = grid['full_id'] if 'full_id' in grid else 'id'\n metadata = {'grid': grid_name}\n grid_file = os.path.join(_comf_result, '{}.csv'.format(grid_name))\n data_matrix = csv_to_num_matrix(grid_file)\n grid_data = []\n for i, row in enumerate(data_matrix):\n header = data_header.duplicate()\n header.metadata = metadata.copy()\n header.metadata['sensor_index'] = i\n data = HourlyContinuousCollection(header, row) if continuous else \\\n HourlyDiscontinuousCollection(header, row, dates)\n grid_data.append(data)\n comf_matrix.append(grid_data)\n\n # wrap the maptrix into an object so that it does not slow the {{Plugin}} UI\n comf_mtx = objectify_output(\n '{} Matrix'.format(data_header.data_type.name), comf_matrix)\n",
"code": "\nimport os\nimport json\nimport subprocess\n\ntry:\n from honeybee.config import folders\nexcept ImportError as e:\n raise ImportError('\\nFailed to import honeybee:\\n\\t{}'.format(e))\n\ntry:\n from ladybug.header import Header\n from ladybug.datacollection import HourlyContinuousCollection, \\\n HourlyDiscontinuousCollection\n from ladybug.futil import csv_to_num_matrix\n from ladybug.datautil import collections_from_csv\nexcept ImportError as e:\n raise ImportError('\\nFailed to import ladybug:\\n\\t{}'.format(e))\n\ntry:\n from ladybug_{{cad}}.{{plugin}} import all_required_inputs, objectify_output\nexcept ImportError as e:\n raise ImportError('\\nFailed to import ladybug_{{cad}}:\\n\\t{}'.format(e))\n\n\nif all_required_inputs(ghenv.Component) and _load:\n # parse the result_info.json into a data collection header\n with open(os.path.join(_comf_result, 'results_info.json')) as json_file:\n data_header = Header.from_dict(json.load(json_file))\n a_per = data_header.analysis_period\n continuous = True if a_per.st_hour == 0 and a_per.end_hour == 23 else False\n if not continuous:\n dates = a_per.datetimes\n\n # parse the grids_info.json with the correct order of the grid files\n with open(os.path.join(_comf_result, 'grids_info.json')) as json_file:\n grid_list = json.load(json_file)\n\n # check file extension\n grid_file = os.path.join(_comf_result, '{}.csv'.format(grid_list[0]['full_id']))\n extension = 'csv'\n if not os.path.exists(grid_file):\n extension = 'npy'\n\n comf_matrix = []\n if extension == 'csv':\n # loop through the grid CSV files, parse their results, and build data collections\n for grid in grid_list:\n grid_name = grid['full_id'] if 'full_id' in grid else 'id'\n metadata = {'grid': grid_name}\n grid_file = os.path.join(_comf_result, '{}.csv'.format(grid_name))\n data_matrix = csv_to_num_matrix(grid_file)\n grid_data = []\n for i, row in enumerate(data_matrix):\n header = data_header.duplicate()\n header.metadata = metadata.copy()\n 
header.metadata['sensor_index'] = i\n data = HourlyContinuousCollection(header, row) if continuous else \\\n HourlyDiscontinuousCollection(header, row, dates)\n grid_data.append(data)\n comf_matrix.append(grid_data)\n else:\n csv_files = []\n csv_exists = []\n # collect csv files and check if they already exists\n for grid in grid_list:\n grid_name = grid['full_id'] if 'full_id' in grid else 'id'\n grid_file = os.path.join(_comf_result, 'datacollections', '{}.csv'.format(grid_name))\n csv_files.append(grid_file)\n csv_exists.append(os.path.exists(grid_file))\n # run command if csv files do not exist\n if not all(csv_exists):\n cmds = [folders.python_exe_path, '-m', 'honeybee_radiance_postprocess',\n 'data-collection', 'folder-to-datacollections', _comf_result,\n os.path.join(_comf_result, 'results_info.json')]\n use_shell = True if os.name == 'nt' else False\n custom_env = os.environ.copy()\n custom_env['PYTHONHOME'] = ''\n process = subprocess.Popen(\n cmds, cwd=_comf_result, shell=use_shell, env=custom_env,\n stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n stdout = process.communicate() # wait for the process to finish\n for grid_file in csv_files:\n grid_data = collections_from_csv(grid_file)\n comf_matrix.append(grid_data)\n\n # wrap the maptrix into an object so that it does not slow the {{Plugin}} UI\n comf_mtx = objectify_output(\n '{} Matrix'.format(data_header.data_type.name), comf_matrix)\n",
"category": "HB-Energy",
"name": "HB Read Thermal Matrix",
"description": "Read the detailed results of a thermal mapping analysis from a folder of CSV\nfiles output by a thermal mapping component.\n_\nDetailed results include temperature and thermal condition results. It also\nincludes metrics that give a sense of how hot or cold conditions are, like\npmv, utci category, or adaptive comfort degrees from neutral temperature.\n-"
Expand Down
Loading

0 comments on commit 8d9b3da

Please sign in to comment.