
Commit dac9317: black
DavidOry committed Mar 4, 2024
1 parent 681a6fc commit dac9317
Showing 14 changed files with 1,578 additions and 1,044 deletions.
2 changes: 1 addition & 1 deletion tests/test_union_city.py
@@ -68,7 +68,7 @@ def test_highway():
os.path.join(_EXAMPLES_DIR, r"scenario_config.toml"),
os.path.join(_EXAMPLES_DIR, r"model_config.toml"),
],
- run_dir=union_city_root
+ run_dir=union_city_root,
)
controller.run()

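For context on the one-line change above: when a call cannot fit within black's line length, black puts each argument on its own line and appends a trailing comma after the last one (an existing trailing comma, the "magic trailing comma", likewise keeps the call exploded). A minimal, runnable sketch of that behavior using black's format_str API; black only parses and reformats the text, so the names in the snippet need not exist (RunController is assumed here, since the hunk above does not show the constructor):

import black

src = """controller = RunController(
    [
        os.path.join(_EXAMPLES_DIR, r"scenario_config.toml"),
        os.path.join(_EXAMPLES_DIR, r"model_config.toml"),
    ],
    run_dir=union_city_root
)
"""
# format_str returns the black-formatted source; the only difference from src
# is the trailing comma after run_dir=union_city_root, matching the diff above.
print(black.format_str(src, mode=black.Mode()))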
8 changes: 5 additions & 3 deletions tm2py/acceptance/acceptance.py
@@ -165,7 +165,7 @@ def _make_roadway_network_comparisons(self):
s_bridge_df = self.s.simulated_bridge_details_df.copy()

o_df["time_period"] = o_df.time_period.str.lower()
- #o_df = o_df.drop(columns = ["standard_link_id"])
+ # o_df = o_df.drop(columns = ["standard_link_id"])
s_trim_df = s_df[
s_df["ft"] <= self.MAX_FACILITY_TYPE_FOR_ROADWAY_COMPARISONS
].copy()
@@ -347,7 +347,7 @@ def _make_transit_network_comparisons(self):
right_on=["standard_line_name", "daily_line_name", "time_period"],
)

- boards_df = pd.concat([rail_df, non_df], axis = "rows",ignore_index=True)
+ boards_df = pd.concat([rail_df, non_df], axis="rows", ignore_index=True)

boards_df["operator"] = np.where(
boards_df["operator"].isnull(),
@@ -378,7 +378,9 @@
].copy()
)
daily_shape_df = pd.merge(c_df, b_df, how="left", on="LINE_ID")
- daily_shape_df = daily_shape_df.rename(columns={"INODE":"emme_a_node_id","JNODE":"emme_b_node_id"})
+ daily_shape_df = daily_shape_df.rename(
+ columns={"INODE": "emme_a_node_id", "JNODE": "emme_b_node_id"}
+ )

# step 4 -- join the shapes to the boardings
# for daily, join boardings to shape, as I care about the boardings more than the daily shapes
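A note on the merge/rename pair above: pd.merge(..., how="left") keeps every row of the left frame and fills unmatched columns with NaN, and the rename maps Emme's INODE/JNODE fields onto the emme_a_node_id/emme_b_node_id names used throughout the comparisons. A minimal sketch with toy data (the values and the total_boarding column are invented; LINE_ID, INODE, JNODE, and the renamed column names come from the diff):

import pandas as pd

# Toy stand-ins: a shape table with Emme node columns and a per-line table
# keyed on LINE_ID (values invented).
c_df = pd.DataFrame({"LINE_ID": [1, 1, 2], "INODE": [10, 11, 20], "JNODE": [11, 12, 21]})
b_df = pd.DataFrame({"LINE_ID": [1, 2], "total_boarding": [125.0, 40.0]})

# how="left" keeps every c_df row and attaches b_df columns where LINE_ID matches.
daily_shape_df = pd.merge(c_df, b_df, how="left", on="LINE_ID")

# Rename Emme's INODE/JNODE to the node-id names used elsewhere in the comparisons.
daily_shape_df = daily_shape_df.rename(
    columns={"INODE": "emme_a_node_id", "JNODE": "emme_b_node_id"}
)
print(daily_shape_df)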
1 change: 0 additions & 1 deletion tm2py/acceptance/canonical.py
@@ -240,4 +240,3 @@ def _read_standard_to_emme_node_crosswalk(self) -> pd.DataFrame:
self.standard_to_emme_node_crosswalk_df = df

return

20 changes: 12 additions & 8 deletions tm2py/acceptance/observed.py
@@ -869,10 +869,7 @@ def _reduce_pems_counts(self, read_file_from_disk=True):
)

out_df = pd.merge(
- self.c.pems_to_link_crosswalk_df,
- out_df,
- how="left",
- on="station_id"
+ self.c.pems_to_link_crosswalk_df, out_df, how="left", on="station_id"
)

out_df = self._join_tm2_node_ids(out_df)
@@ -893,16 +890,23 @@ def _reduce_pems_counts(self, read_file_from_disk=True):
.reset_index()
)
join_df = out_df[
["emme_a_node_id","emme_b_node_id", "time_period", "station_id", "type", "vehicle_class"]
[
"emme_a_node_id",
"emme_b_node_id",
"time_period",
"station_id",
"type",
"vehicle_class",
]
].copy()
return_df = pd.merge(
median_df,
join_df,
how="left",
on=["emme_a_node_id","emme_b_node_id", "time_period", "vehicle_class"]
on=["emme_a_node_id", "emme_b_node_id", "time_period", "vehicle_class"],
).reset_index(drop=True)

- #return_df = return_df.rename(columns = {"model_link_id" : "standard_link_id"})
+ # return_df = return_df.rename(columns = {"model_link_id" : "standard_link_id"})
return_df = self._join_ohio_standards(return_df)
return_df = self._identify_key_arterials_and_bridges(return_df)
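The reformatted block above is a reduce-then-rejoin pattern: take the median of repeated station observations per link, time period, and vehicle class, then left-merge the station attributes (station_id, type) back onto the medians. A minimal sketch with invented toy counts (the column names come from the diff; the observed_flow value column, the toy values, and the drop_duplicates call are assumptions added to keep the example well formed):

import pandas as pd

# Invented toy observations: repeated AM counts at one station, repeated
# midday counts at another.
out_df = pd.DataFrame(
    {
        "emme_a_node_id": [1, 1, 1, 2, 2],
        "emme_b_node_id": [5, 5, 5, 6, 6],
        "time_period": ["am", "am", "am", "md", "md"],
        "vehicle_class": ["all"] * 5,
        "station_id": [401001, 401001, 401001, 402002, 402002],
        "type": ["ML"] * 5,
        "observed_flow": [1000.0, 1100.0, 1200.0, 480.0, 520.0],
    }
)

# Median across repeated observations per link / time period / vehicle class.
median_df = (
    out_df.groupby(
        ["emme_a_node_id", "emme_b_node_id", "time_period", "vehicle_class"]
    )["observed_flow"]
    .median()
    .reset_index()
)

# Re-attach station attributes with a left merge; drop_duplicates keeps the
# toy join table to one row per link so the merge does not fan out.
join_df = out_df[
    [
        "emme_a_node_id",
        "emme_b_node_id",
        "time_period",
        "station_id",
        "type",
        "vehicle_class",
    ]
].drop_duplicates()
return_df = pd.merge(
    median_df,
    join_df,
    how="left",
    on=["emme_a_node_id", "emme_b_node_id", "time_period", "vehicle_class"],
).reset_index(drop=True)
print(return_df)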

@@ -955,7 +959,7 @@ def _reduce_caltrans_counts(self):
out_df = out_df[out_df["observed_flow"].notna()]

# convert to one-way flow
out_df["observed_flow"] = out_df["observed_flow"]/2.0
out_df["observed_flow"] = out_df["observed_flow"] / 2.0

return_df = self._join_tm2_node_ids(out_df)
return_df["time_period"] = self.c.ALL_DAY_WORD
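The Caltrans counts above get a simpler treatment: rows without an observed count are dropped, and the remaining totals are halved per the "convert to one-way flow" comment. A tiny sketch with invented station names and values:

import pandas as pd

# Invented daily totals; one station has no usable count.
out_df = pd.DataFrame(
    {"station": ["A", "B", "C"], "observed_flow": [24000.0, None, 18000.0]}
)
out_df = out_df[out_df["observed_flow"].notna()]
out_df["observed_flow"] = out_df["observed_flow"] / 2.0  # halve to one-way flow
print(out_df)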
(Diffs for the remaining changed files are not shown.)
