Commit: Stashing Changes

lachlan-git committed Aug 16, 2024
1 parent 8142d63 commit 8f8923a

Showing 7 changed files with 72 additions and 14 deletions.
25 changes: 16 additions & 9 deletions scripts/compare_skims.py
@@ -6,8 +6,11 @@

import numpy as np

-network_fid_path = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.1_network_fidelity\run_result")
-output_path = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.1_network_fidelity\output_summaries\skim_data")
+# network_fid_path = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.1_network_fidelity\run_result")
+# output_path = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.1_network_fidelity\output_summaries\skim_data")
+
+network_fid_path = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.2_remove_cosmetic_nodes\run_result")
+output_path = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.2_remove_cosmetic_nodes\output_summaries\skim_data")
output_csv = False

def read_matrix_as_long_df(path: Path, run_name):
@@ -19,7 +22,7 @@ def read_matrix_as_long_df(path: Path, run_name):
#%%
all_skims = []
# runs_to_include = ['run_1\\', 'run_3', 'run_5', 'run_11', 'run_12', 'run_15', 'run_16', 'run_17']
-runs_to_include = ['run_15', 'run_16', 'run_17']
+runs_to_include = ['run_18', 'run_20']
for skim_matrix_path in network_fid_path.rglob("*AM_taz.omx"):
for run_label in runs_to_include:
if run_label in str(skim_matrix_path):
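The body of read_matrix_as_long_df is collapsed in the hunk above. A minimal sketch of the pattern it implies, assuming the openmatrix package and that the first core in each OMX file is the skim being compared (both are assumptions, not confirmed by this diff):

from pathlib import Path

import numpy as np
import openmatrix as omx
import pandas as pd

def read_matrix_as_long_df(path: Path, run_name):
    # Assumed behavior: stack one skim core into a long O-D frame with a
    # single column named after the run, so runs can be joined side by side.
    with omx.open_file(str(path)) as omx_file:
        core_name = omx_file.list_matrices()[0]  # assumption: first core
        matrix = np.array(omx_file[core_name])
    df = pd.DataFrame(matrix).stack().rename(run_name).to_frame()
    df.index.names = ["origin", "destination"]
    return df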
@@ -34,22 +37,26 @@ def read_matrix_as_long_df(path: Path, run_name):
if output_csv:
    all_skims.to_csv(output_path / "skims.csv")
else:
-    print("warnin not outputting")
+    print("warning: not outputting")
#%%
scatterplots = []
skims_dropped = all_skims.copy()
for col in skims_dropped.columns:
    skims_dropped = skims_dropped[skims_dropped[col] <= 1e19]

-scatter_plot = px.scatter(skims_dropped.sample(100_000), x="run_15", y="run_16")
-scatter_plot.write_html(output_path / "run_15_and_16.html")
+scatter_plot = px.scatter(skims_dropped.sample(100_000), x="run_18", y="run_20")
+scatter_plot.write_html(output_path / "run_18_and_20.html")
#%%
import matplotlib.pyplot as plt
plt.scatter(skims_dropped["run_15"], skims_dropped["run_16"])
plt.scatter(skims_dropped["run_18"], skims_dropped["run_20"])
plt.xlabel("run_18 skim (time)")
plt.ylabel("run_20 skim (time)")

plt.plot([0, 0], [250, 250], color='red', linestyle='--')
#%%
from scipy.stats import pearsonr, linregress
pearsonr(skims_dropped["run_15"], skims_dropped["run_16"])
linregress(skims_dropped["run_15"], skims_dropped["run_16"])
pearsonr(skims_dropped["run_18"], skims_dropped["run_20"])
linregress(skims_dropped["run_18"], skims_dropped["run_20"])
# %%
# %%
# import geopandas as gpd
59 changes: 54 additions & 5 deletions scripts/compile_model_runs.py
@@ -6,8 +6,8 @@
from tqdm import tqdm
from shapely.geometry import LineString

-input_dir = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.1_network_fidelity\run_result")
-output_dir = input_dir / "consolidated_3"
+input_dir = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.2_remove_cosmetic_nodes")
+output_dir = Path(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.2_remove_cosmetic_nodes\output_summaries")



@@ -21,7 +21,8 @@
# scenarios_to_consolidate = (11, 12, 13, 14, 15)
scenarios_to_consolidate = (12, )#(11, 12, 13, 14, 15)
# runs_to_consolidate = (3, 4, 8, 11, 15)
-runs_to_consolidate = (1, 15, 16, 17)
+# runs_to_consolidate = (1, 11, 15, 16, 17)
+runs_to_consolidate = (18, 19, 20)
#%%
# run_3 = gpd.read_file(r"Z:\MTC\US0024934.9168\Task_3_runtime_improvements\3.1_network_fidelity\run_result\run_3\Scenario_12\emme_links.shp")
#%%
@@ -90,8 +91,53 @@ def get_linestring_direction(linestring: LineString) -> str:

print("done")
#%%
links_table["@tollbooth"] > 0 & links_table["@tollbooth"] < 11
toll_booths = links_table[(links_table["@tollbooth"] > 0) & (links_table["@tollbooth"] < 11)]
toll_booths_geom = toll_booths[["@tollbooth", "geometry"]].drop_duplicates()
toll_booths = toll_booths.groupby(["@tollbooth", "run_number", "@capacity", "TIMAU"]).agg({"VOLAU": "sum"})
toll_booths.to_csv(output_dir / "toll_plaza_summary.csv")
toll_booths_geom.to_file(output_dir / "toll_booth_geom.shp")
#%%
toll_booths_geom

# %% -------------- SCATTER PLOT -------------------------

#%%

length_cutoff = 9999999999
vc_cutoff = 0

compare_run_1 = 11
compare_run_2 = 16
# run_11_and_16 =
# [['ID', "#link_id", 'TIMAU', 'INODE', 'JNODE', "run", "run_number",
# "scenario", "scenario_number", "LENGTH", "@capacity", "VOLAU", "geometry"]]
short_road_high_vc = links_table.copy()
short_road_high_vc["vc"] = short_road_high_vc["VOLAU"] / short_road_high_vc["@capacity"]


short_road_high_vc = short_road_high_vc[(short_road_high_vc["vc"] > vc_cutoff) & (short_road_high_vc["LENGTH"] < length_cutoff) & (short_road_high_vc["@ft"] == 7)]

# short_road_high_vc.to_file(output_dir / "short_links_high_vc" / "short_links_high_vc.shp")
print(short_road_high_vc["run_number"].value_counts())

only_once = ["geometry", "#link_id", "@ft"]
columns = ["ID", "@capacity", "VOLAU", "TIMAU", "vc"]
rename_dict_1 = {"@capacity": f"cap_{compare_run_1}", "VOLAU": f"volau_{compare_run_1}", "vc": f"vc_{compare_run_1}", "TIMAU": f"timeau_{compare_run_1}"}
rename_dict_2 = {"@capacity": f"cap_{compare_run_2}", "VOLAU": f"volau_{compare_run_2}", "vc": f"vc_{compare_run_2}", "TIMAU": f"timeau_{compare_run_2}"}

compare_run_1_df = short_road_high_vc[short_road_high_vc["run_number"] == compare_run_1][columns + only_once].rename(columns=rename_dict_1)
compare_run_2_df = short_road_high_vc[short_road_high_vc["run_number"] == compare_run_2][columns].rename(columns=rename_dict_2)

compared_vol_au = pd.merge(compare_run_1_df, compare_run_2_df, how="inner", on=["ID"])
compared_vol_au["11_over_16"] = (compared_vol_au["timeau_11"] / compared_vol_au["timeau_16"])
compared_vol_au_slicer = (compared_vol_au["11_over_16"] > 10) & (compared_vol_au["timeau_11"] > 10)
compared_vol_au = compared_vol_au[compared_vol_au_slicer]

# (compared_vol_au["cap_11"] != compared_vol_au["cap_16"]).sum()
compared_vol_au.to_file(output_dir / "short_links_high_vc" / "short_links_high_vc.shp")
#%%

links_table[links_table["run_number"] == 3]
#%%
all_link_counts = {}
@@ -131,9 +177,10 @@ def combine_tables(dfs, columns_same):
scen_number = scen_map[scen_number]
df["saturation"] = df["VOLAU"] / df["@capacity"]

-    df = df[["#link_id", "@capacity", "VOLAU", "geometry", "@ft"]].rename(columns = {
+    df = df[["#link_id", "@capacity", "VOLAU", "TIMAU", "geometry", "@ft"]].rename(columns = {
         "@capacity": f"capacity_run{run_number}_scen{scen_number}",
         "VOLAU": f"@volau_run{run_number}_scen{scen_number}",
+        "TIMAU": f"TIMAU_run{run_number}_scen{scen_number}",
         "saturation": f"@saturation_run{run_number}_scen{scen_number}",
         "geometry": f"geometry_run{run_number}_scen{scen_number}",
         "@ft": f"ft_run{run_number}_scen{scen_number}"
@@ -151,6 +198,8 @@ def combine_tables(dfs, columns_same):
links_wide_table = combine_tables(all_links_no_none, ["#link_id", "geometry"])

links_wide_table["direction"] = links_wide_table["geometry"].apply(get_linestring_direction)

links_wide_table.to_file(output_dir / "wide_links.gpkg")
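combine_tables is likewise collapsed above. Given the call site, a minimal sketch under the assumption that it merges the per-run link tables on their shared key columns:

from functools import reduce

import pandas as pd

def combine_tables(dfs, columns_same):
    # Outer-merge each run's links onto the shared keys
    # (here "#link_id" and "geometry") to build one wide table.
    return reduce(
        lambda left, right: pd.merge(left, right, on=columns_same, how="outer"),
        dfs,
    )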
#%%
ft_cols = [col for col in links_wide_table.columns if "ft_" in col]

1 change: 1 addition & 0 deletions scripts/toll_booth_geom.cpg
@@ -0,0 +1 @@
UTF-8
Binary file added scripts/toll_booth_geom.dbf
1 change: 1 addition & 0 deletions scripts/toll_booth_geom.prj
@@ -0,0 +1 @@
PROJCS["NAD_1983_HARN_StatePlane_California_VI_FIPS_0406_Feet",GEOGCS["GCS_North_American_1983_HARN",DATUM["D_North_American_1983_HARN",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic"],PARAMETER["False_Easting",6561666.667],PARAMETER["False_Northing",1640416.667],PARAMETER["Central_Meridian",-116.25],PARAMETER["Standard_Parallel_1",33.8833333333333],PARAMETER["Standard_Parallel_2",32.7833333333333],PARAMETER["Latitude_Of_Origin",32.1666666666667],UNIT["US survey foot",0.304800609601219]]
Binary file added scripts/toll_booth_geom.shp
Binary file added scripts/toll_booth_geom.shx
