From 75c7b667229d65cb7eee56b9f32fac1e4f2361ec Mon Sep 17 00:00:00 2001
From: Joanne <71201494+Joanneylin@users.noreply.github.com>
Date: Wed, 24 Sep 2025 16:21:10 -0700
Subject: [PATCH 1/4] Fixes #1

---
 rtp_spatial_analysis/configs/config.yaml  |  12 +-
 rtp_spatial_analysis/src/run.py           |   7 +-
 .../src/transit_stop_intersections.py     | 104 ++++++++++++++++++
 3 files changed, 119 insertions(+), 4 deletions(-)
 create mode 100644 rtp_spatial_analysis/src/transit_stop_intersections.py

diff --git a/rtp_spatial_analysis/configs/config.yaml b/rtp_spatial_analysis/configs/config.yaml
index c73ef9c..2d2aa65 100644
--- a/rtp_spatial_analysis/configs/config.yaml
+++ b/rtp_spatial_analysis/configs/config.yaml
@@ -1,9 +1,15 @@
-run_demo: false
-run_density_and_freight: true
+run_demo: true
+run_density_and_freight: false
+run_transit_stop_intersections: false
 
+# rtp_transit_data_path: C:/Users/cpeak/Puget Sound Regional Council/GIS - Sharing/Projects/Transportation/RTP_2026/transit
 rtp_transit_data_path: /GIS - Sharing/Projects/Transportation/RTP_2026/transit
+rtp_transit_network_path: /GIS - Sharing/Projects/Transportation/RTP_2026/transit/Transit_Network_2050_Scenario2b.gdb
+rtp_efa_path: /GIS - Sharing/Projects/Transportation/RTP_2026/equity_focus_areas/efa_3groupings_1SD/equity_focus_areas_2023_acs.gdb
 
 activity_units_path: /GIS - Sharing/Projects/Transportation/RTP_2026/activity_units/Activity_Units_2026_RTP.gdb
 
 fgtswa_path: /GIS - Sharing/Projects/Transportation/RTP_2026/freight/FGTSWA.gdb
-rtp_output_path: /GIS - Sharing/Projects/Transportation/RTP_2026/future_system_output
\ No newline at end of file
+rtp_output_path: /GIS - Sharing/Projects/Transportation/RTP_2026/future_system_output
+
+mile_in_ft: 5280
diff --git a/rtp_spatial_analysis/src/run.py b/rtp_spatial_analysis/src/run.py
index c3934f1..bc23615 100644
--- a/rtp_spatial_analysis/src/run.py
+++ b/rtp_spatial_analysis/src/run.py
@@ -3,6 +3,7 @@ from pathlib import Path
 
 import demo
 import density_and_freight
+import transit_stop_intersections
 import getpass
 
 file = Path().joinpath(configuration.args.configs_dir, "config.yaml")
@@ -14,4 +15,8 @@
     demo.run(config)
 
 if config['run_density_and_freight']:
-    density_and_freight.run(config)
\ No newline at end of file
+    density_and_freight.run(config)
+
+if config['run_transit_stop_intersections']:
+    transit_stop_intersections.run(config)
+
diff --git a/rtp_spatial_analysis/src/transit_stop_intersections.py b/rtp_spatial_analysis/src/transit_stop_intersections.py
new file mode 100644
index 0000000..ece68a7
--- /dev/null
+++ b/rtp_spatial_analysis/src/transit_stop_intersections.py
@@ -0,0 +1,104 @@
+import geopandas as gpd
+import pandas as pd
+import psrcelmerpy
+from pathlib import Path
+import utils
+
+list_transit_type = ['local', 'all_day', 'frequent', 'hct', 'brt']
+list_efa = ['equity_focus_areas_2023__efa_poc',
+            'equity_focus_areas_2023__efa_pov200',
+            'equity_focus_areas_2023__efa_lep',
+            'equity_focus_areas_2023__efa_youth',
+            'equity_focus_areas_2023__efa_older',
+            'equity_focus_areas_2023__efa_dis']
+
+def export_csv(df, config, file_nm):
+    """export to a pre-defined file location"""
+    path_to_output = f"{config['user_onedrive']}/{config['rtp_output_path']}"
+    pth = Path(path_to_output,file_nm)
+    df.to_csv(pth)
+
+def get_service_au(df_au, buffered_stops, col_suffix):
+
+    sum_fields = ['sum_pop_20', 'sum_jobs_2', 'sum_au_205']
+    total_au = df_au[sum_fields].sum().to_list()
+    data = {}
+
+    for type in list_transit_type:
+
+        transit_by_type = buffered_stops[buffered_stops[type]>0]
+        gdf = gpd.clip(df_au, transit_by_type)
+
+        _list = gdf[sum_fields].sum().to_list()
+        _list_without = [total_au[i] - _list[i] for i in range(len(_list))]
+        data[type + col_suffix] = _list +_list_without
+
+    df = pd.DataFrame.from_dict(data, orient='index', columns=['people with service', 'jobs with service', 'activity units with service',
+                                                               'people w/o service', 'jobs w/o service', 'activity units w/o service'])
+    df = df.rename_axis('Route Type')
+
+    return df
+
+def run(config):
+
+    # 2050 Transit Stops
+    transit_stops_2050 = gpd.read_file(Path(config['user_onedrive'])/config['rtp_transit_network_path'], layer='Transit_Stops_2050')
+    transit_stops_2050 = transit_stops_2050.to_crs(2285)
+
+    # buffer 1/4 and 1/2 mile
+    buf2_transit_stops_2050 = utils.buffer_layer(transit_stops_2050, config['mile_in_ft']/2)
+    buf4_transit_stops_2050 = utils.buffer_layer(transit_stops_2050, config['mile_in_ft']/4)
+
+    # Intersection of transit stops and future density ----
+    activity_units = gpd.read_file(Path(config['user_onedrive'])/config['activity_units_path'], layer='peope_and_jobs_2050')
+    activity_units = activity_units.to_crs(2285)
+    # number of people and jobs that are in supportive densities
+    activity_units_dense = activity_units[activity_units['density']>=30]
+
+    # get number of people and jobs that are in supportive densities with service and in those in supportive densities without service (Gap)
+    test = get_service_au(activity_units_dense, buf2_transit_stops_2050, '_half_mi')
+    test2 = get_service_au(activity_units_dense, buf4_transit_stops_2050, '_quarter_mi')
+    df_service_dense = pd.concat([test, test2])
+
+    export_csv(df_service_dense, config, "transit_stops_density_intersect.csv")
+
+    # Intersection of transit stops and Equity Focus Areas ----
+
+    # Load blocks layer from ElmerGeo
+    # eg_conn = psrcelmerpy.ElmerGeoConn()
+    # blocks = eg_conn.read_geolayer("block2020_nowater")
+    # blocks = blocks.to_crs(2285)
+
+    # # get list of all layers in file: gpd.list_layers(Path(user_path)/config['rtp_efa_path'])
+    # # 2023 Equity Focused Areas
+    # df_efa_overall_2023 = gpd.read_file(Path(user_path)/config['rtp_efa_path'], layer='overall')
+    # df_efa_overall_2023 = pd.DataFrame(df_efa_overall_2023.drop(columns=['Shape_Length', 'Shape_Area', 'geometry']))
+    # df_efa_overall_2023.loc[:,'tractce20'] = df_efa_overall_2023['L0ElmerGeo_DBO_tract2020_nowater_geoid20'].str[-6:]
+
+    # # get block-level population with EFA assignments
+    # block_efa_2023 = blocks[['geoid20', 'county_name', 'tractce20', 'total_pop20', 'geometry']].\
+    #     merge(df_efa_overall_2023, on='tractce20')
+    # block_efa_2023['total_pop20'] = block_efa_2023['total_pop20'].astype('float')
+
+
+
+
+
+
+    # local_transit = buf_half_transit_stops_2050[buf_half_transit_stops_2050['local']>0]
+    # block_efa_poc = block_efa_2023[block_efa_2023['equity_focus_areas_2023__efa_poc']>0]
+
+    # gdf = gpd.clip(block_efa_poc, local_transit)
+    # df = pd.DataFrame(gdf.drop(columns='geometry'))
+
+    # efa_poc_block = utils.intersect_layers(blocks, equity_focus_areas_2023__efa_poc)
+
+    # plt = combined_gdf.plot(figsize=(10, 6))
+    # plt.savefig('world_map.png')
+
+    # transit_stops_2050 = points_in_polygon(transit_stops_2050, blocks, "in_city_100ft", buffer=100)
+    print ('done')
+
+
+

From 205da408785ba356c77b5ad828c06d96a58398dc Mon Sep 17 00:00:00 2001
From: Joanne <71201494+Joanneylin@users.noreply.github.com>
Date: Fri, 26 Sep 2025 10:26:38 -0700
Subject: [PATCH 2/4] update function - move read activity units file to function

---
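Note: this patch swaps the direct gpd.read_file() calls for utils.get_onedrive_layer().
Only that helper's signature and docstring appear in this series (as context in
PATCH 4/4), so the following is a minimal sketch of what it presumably does, inferred
from the calls it replaces here; the body shown is an assumption, not the repo's
actual implementation:

    import geopandas as gpd
    from pathlib import Path

    def get_onedrive_layer(config, path_name, layer):
        """Load a specific layer from a geodatabase file stored in OneDrive."""
        # mirrors the removed gpd.read_file(Path(config['user_onedrive'])/config[...], layer=...) calls
        return gpd.read_file(Path(config['user_onedrive']) / config[path_name], layer=layer)
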
 .../src/transit_stop_intersections.py | 39 ++++++++++---------
 1 file changed, 20 insertions(+), 19 deletions(-)

diff --git a/rtp_spatial_analysis/src/transit_stop_intersections.py b/rtp_spatial_analysis/src/transit_stop_intersections.py
index ece68a7..4aa8468 100644
--- a/rtp_spatial_analysis/src/transit_stop_intersections.py
+++ b/rtp_spatial_analysis/src/transit_stop_intersections.py
@@ -18,16 +18,23 @@ def export_csv(df, config, file_nm):
     pth = Path(path_to_output,file_nm)
     df.to_csv(pth)
 
-def get_service_au(df_au, buffered_stops, col_suffix):
+def get_service_au(config, buffered_stops, col_suffix):
+
+    supportive_density = 30
+
+    gdf = utils.get_onedrive_layer(config, 'activity_units_path', 'peope_and_jobs_2050')
+    gdf = gdf.to_crs(2285)
+    # number of people and jobs that are in supportive densities
+    gdf_au = gdf[gdf['density']>=supportive_density]
 
     sum_fields = ['sum_pop_20', 'sum_jobs_2', 'sum_au_205']
-    total_au = df_au[sum_fields].sum().to_list()
+    total_au = gdf_au[sum_fields].sum().to_list()
     data = {}
 
     for type in list_transit_type:
 
         transit_by_type = buffered_stops[buffered_stops[type]>0]
-        gdf = gpd.clip(df_au, transit_by_type)
+        gdf = gpd.clip(gdf_au, transit_by_type)
 
         _list = gdf[sum_fields].sum().to_list()
         _list_without = [total_au[i] - _list[i] for i in range(len(_list))]
@@ -42,28 +49,22 @@ def get_service_au(df_au, buffered_stops, col_suffix):
 def run(config):
 
     # 2050 Transit Stops
-    transit_stops_2050 = gpd.read_file(Path(config['user_onedrive'])/config['rtp_transit_network_path'], layer='Transit_Stops_2050')
+    transit_stops_2050 = utils.get_onedrive_layer(config, 'rtp_transit_network_path', 'Transit_Stops_2050')
     transit_stops_2050 = transit_stops_2050.to_crs(2285)
 
     # buffer 1/4 and 1/2 mile
     buf2_transit_stops_2050 = utils.buffer_layer(transit_stops_2050, config['mile_in_ft']/2)
     buf4_transit_stops_2050 = utils.buffer_layer(transit_stops_2050, config['mile_in_ft']/4)
 
-    # Intersection of transit stops and future density ----
-    activity_units = gpd.read_file(Path(config['user_onedrive'])/config['activity_units_path'], layer='peope_and_jobs_2050')
-    activity_units = activity_units.to_crs(2285)
-    # number of people and jobs that are in supportive densities
-    activity_units_dense = activity_units[activity_units['density']>=30]
-
+    # 1. Intersection of transit stops and future density ----
     # get number of people and jobs that are in supportive densities with service and in those in supportive densities without service (Gap)
-    test = get_service_au(activity_units_dense, buf2_transit_stops_2050, '_half_mi')
-    test2 = get_service_au(activity_units_dense, buf4_transit_stops_2050, '_quarter_mi')
+    test = get_service_au(config, buf2_transit_stops_2050, '_half_mi')
+    test2 = get_service_au(config, buf4_transit_stops_2050, '_quarter_mi')
     df_service_dense = pd.concat([test, test2])
 
-    export_csv(df_service_dense, config, "transit_stops_density_intersect.csv")
-
-    # Intersection of transit stops and Equity Focus Areas ----
+    # export_csv(df_service_dense, config, "transit_stops_density_intersect.csv")
 
+    # 2. Intersection of transit stops and Equity Focus Areas ----
     # Load blocks layer from ElmerGeo
     # eg_conn = psrcelmerpy.ElmerGeoConn()
     # blocks = eg_conn.read_geolayer("block2020_nowater")
     # blocks = blocks.to_crs(2285)
@@ -71,13 +72,13 @@ def run(config):
 
     # # get list of all layers in file: gpd.list_layers(Path(user_path)/config['rtp_efa_path'])
     # # 2023 Equity Focused Areas
-    # df_efa_overall_2023 = gpd.read_file(Path(user_path)/config['rtp_efa_path'], layer='overall')
-    # df_efa_overall_2023 = pd.DataFrame(df_efa_overall_2023.drop(columns=['Shape_Length', 'Shape_Area', 'geometry']))
-    # df_efa_overall_2023.loc[:,'tractce20'] = df_efa_overall_2023['L0ElmerGeo_DBO_tract2020_nowater_geoid20'].str[-6:]
+    # efa_2023 = utils.get_onedrive_layer(config, 'rtp_efa_path', 'overall')
+    # efa_2023 = pd.DataFrame(efa_2023.drop(columns=['Shape_Length', 'Shape_Area', 'geometry']))
+    # efa_2023.loc[:,'tractce20'] = efa_2023['L0ElmerGeo_DBO_tract2020_nowater_geoid20'].str[-6:]
 
     # # get block-level population with EFA assignments
     # block_efa_2023 = blocks[['geoid20', 'county_name', 'tractce20', 'total_pop20', 'geometry']].\
-    #     merge(df_efa_overall_2023, on='tractce20')
+    #     merge(efa_2023, on='tractce20')
     # block_efa_2023['total_pop20'] = block_efa_2023['total_pop20'].astype('float')
 
 

From d023d8685596d6e460e7905c78512f669804c39d Mon Sep 17 00:00:00 2001
From: Joanne <71201494+Joanneylin@users.noreply.github.com>
Date: Fri, 26 Sep 2025 14:24:45 -0700
Subject: [PATCH 3/4] add supportive densities for transit types

---
 .../src/transit_stop_intersections.py | 36 ++++++++++---------
 1 file changed, 20 insertions(+), 16 deletions(-)

diff --git a/rtp_spatial_analysis/src/transit_stop_intersections.py b/rtp_spatial_analysis/src/transit_stop_intersections.py
index 4aa8468..1e71d6f 100644
--- a/rtp_spatial_analysis/src/transit_stop_intersections.py
+++ b/rtp_spatial_analysis/src/transit_stop_intersections.py
@@ -4,13 +4,18 @@
 from pathlib import Path
 import utils
 
-list_transit_type = ['local', 'all_day', 'frequent', 'hct', 'brt']
-list_efa = ['equity_focus_areas_2023__efa_poc',
-            'equity_focus_areas_2023__efa_pov200',
-            'equity_focus_areas_2023__efa_lep',
-            'equity_focus_areas_2023__efa_youth',
-            'equity_focus_areas_2023__efa_older',
-            'equity_focus_areas_2023__efa_dis']
+transit_supportive_density = {'local':7,
+                              'all_day':15,
+                              'frequent':25,
+                              'hct':40,
+                              'brt':15}
+
+# list_efa = ['equity_focus_areas_2023__efa_poc',
+#             'equity_focus_areas_2023__efa_pov200',
+#             'equity_focus_areas_2023__efa_lep',
+#             'equity_focus_areas_2023__efa_youth',
+#             'equity_focus_areas_2023__efa_older',
+#             'equity_focus_areas_2023__efa_dis']
 
 def export_csv(df, config, file_nm):
     """export to a pre-defined file location"""
@@ -20,25 +25,24 @@ def export_csv(df, config, file_nm):
 def get_service_au(config, buffered_stops, col_suffix):
 
-    supportive_density = 30
-
     gdf = utils.get_onedrive_layer(config, 'activity_units_path', 'peope_and_jobs_2050')
     gdf = gdf.to_crs(2285)
-    # number of people and jobs that are in supportive densities
-    gdf_au = gdf[gdf['density']>=supportive_density]
 
     sum_fields = ['sum_pop_20', 'sum_jobs_2', 'sum_au_205']
-    total_au = gdf_au[sum_fields].sum().to_list()
     data = {}
 
-    for type in list_transit_type:
+    for key, density in transit_supportive_density.items():
+
+        # number of people and jobs that are in supportive densities
+        gdf_au = gdf[gdf['au_acre']>=density]
+        total_au = gdf_au[sum_fields].sum().to_list()
 
-        transit_by_type = buffered_stops[buffered_stops[type]>0]
+        transit_by_type = buffered_stops[buffered_stops[key]>0]
         gdf = gpd.clip(gdf_au, transit_by_type)
 
         _list = gdf[sum_fields].sum().to_list()
         _list_without = [total_au[i] - _list[i] for i in range(len(_list))]
-        data[type + col_suffix] = _list +_list_without
+        data[key + col_suffix] = _list +_list_without
 
     df = pd.DataFrame.from_dict(data, orient='index', columns=['people with service', 'jobs with service', 'activity units with service',
                                                                'people w/o service', 'jobs w/o service', 'activity units w/o service'])
     df = df.rename_axis('Route Type')
@@ -62,7 +66,7 @@ def run(config):
     test2 = get_service_au(config, buf4_transit_stops_2050, '_quarter_mi')
     df_service_dense = pd.concat([test, test2])
 
-    # export_csv(df_service_dense, config, "transit_stops_density_intersect.csv")
+    export_csv(df_service_dense, config, "transit_stops_density_intersect.csv")
 
     # 2. Intersection of transit stops and Equity Focus Areas ----
     # Load blocks layer from ElmerGeo

From 2630c5496862d9cfcdd8c38bacc174e61ce978c3 Mon Sep 17 00:00:00 2001
From: Joanne <71201494+Joanneylin@users.noreply.github.com>
Date: Fri, 26 Sep 2025 14:27:21 -0700
Subject: [PATCH 4/4] add export csv function to utils.py

---
 rtp_spatial_analysis/src/transit_stop_intersections.py | 8 +-------
 rtp_spatial_analysis/src/utils.py                       | 7 +++++++
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/rtp_spatial_analysis/src/transit_stop_intersections.py b/rtp_spatial_analysis/src/transit_stop_intersections.py
index 1e71d6f..394be5d 100644
--- a/rtp_spatial_analysis/src/transit_stop_intersections.py
+++ b/rtp_spatial_analysis/src/transit_stop_intersections.py
@@ -17,12 +17,6 @@
 #             'equity_focus_areas_2023__efa_older',
 #             'equity_focus_areas_2023__efa_dis']
 
-def export_csv(df, config, file_nm):
-    """export to a pre-defined file location"""
-    path_to_output = f"{config['user_onedrive']}/{config['rtp_output_path']}"
-    pth = Path(path_to_output,file_nm)
-    df.to_csv(pth)
-
 def get_service_au(config, buffered_stops, col_suffix):
 
     gdf = utils.get_onedrive_layer(config, 'activity_units_path', 'peope_and_jobs_2050')
     gdf = gdf.to_crs(2285)
@@ -66,7 +60,7 @@ def run(config):
     test2 = get_service_au(config, buf4_transit_stops_2050, '_quarter_mi')
     df_service_dense = pd.concat([test, test2])
 
-    export_csv(df_service_dense, config, "transit_stops_density_intersect.csv")
+    utils.export_csv(df_service_dense, config, "transit_stops_density_intersect.csv")
 
     # 2. Intersection of transit stops and Equity Focus Areas ----
     # Load blocks layer from ElmerGeo
diff --git a/rtp_spatial_analysis/src/utils.py b/rtp_spatial_analysis/src/utils.py
index 6c24460..ae1b611 100644
--- a/rtp_spatial_analysis/src/utils.py
+++ b/rtp_spatial_analysis/src/utils.py
@@ -30,6 +30,13 @@ def export_layer(gdf, config, lyr_nm):
     pth = Path(path_to_output, lyr_nm)
     gdf.to_file(pth)
 
+
+def export_csv(df, config, file_nm):
+    """export to a pre-defined file location"""
+    path_to_output = f"{config['user_onedrive']}/{config['rtp_output_path']}"
+    pth = Path(path_to_output,file_nm)
+    df.to_csv(pth)
+
 def get_onedrive_layer(config, path_name, layer):
     """
     Load a specific layer from a geodatabase file stored in OneDrive.
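
Note (outside the patch series): utils.buffer_layer() is called above with distances in
feet (config['mile_in_ft']/2 and /4) on stops already reprojected to EPSG:2285, but its
body is not shown in these patches. A minimal sketch consistent with that usage might
look like the following; the implementation, including whether buffers are dissolved,
is an assumption rather than the repo's actual code:

    import geopandas as gpd

    def buffer_layer(gdf, distance):
        """Return a copy of gdf with each geometry buffered by `distance`
        (in the layer's CRS units; feet for EPSG:2285)."""
        out = gdf.copy()
        out['geometry'] = out.geometry.buffer(distance)
        return out

    # usage mirroring run(): half-mile buffers around the 2050 stops
    # buf2_transit_stops_2050 = buffer_layer(transit_stops_2050, 5280 / 2)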