From d7053df9196c14c9f5661846ac5418205c7f358e Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 7 Dec 2023 16:04:27 -0500 Subject: [PATCH 01/43] New import_network from TNED --- .../emme/toolbox/import/import_network.py | 1725 ++++++----------- src/main/emme/toolbox/utilities/general.py | 17 +- 2 files changed, 644 insertions(+), 1098 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index d40a52f24..cf65b7f74 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -16,7 +16,7 @@ # # # Inputs: -# source: path to the location of the input network files +# source: path to the location of the input network geodatabase # traffic_scenario_id: optional scenario to store the imported network from the traffic files only # transit_scenario_id: optional scenario to store the imported network from the transit files only # merged_scenario_id: scenario to store the combined traffic and transit data from all network files @@ -25,18 +25,24 @@ # data_table_name: prefix to use to identify all data tables # overwrite: check to overwrite any existing data tables or scenarios with the same ID or name # emmebank: the Emme database in which to create the scenario. Default is the current open database +# create_time_periods: if True (default), also create per-time period scenarios (required to run assignments) # # Files referenced: -# hwycov.e00: base nodes and links for traffic network with traffic attributes in ESRI input exchange format -# linktypeturns.dbf: fixed turn travel times by to/from link type (field IFC) pairs -# turns.csv: turn bans and fixed costs by link from/to ID (field HWYCOV-ID) -# trcov.e00: base nodes and links for transit network in ESRI input exchange format -# trrt.csv: transit routes and their attributes -# trlink.csv: itineraries for each route as sequence of link IDs (TRCOV-ID field) -# trstop.csv: transit stop attributes -# timexfer_period.csv: table of timed transfer pairs of lines, by period +# +# *.gdb: A Geodatabase file with the network data for both highway and transit. The following tables are used +# - TNED_HwyNet +# - TNED_HwyNodes +# - TNED_RailNet +# - TNED_RailNodes +# - trrt +# - trlink +# - trstop +# - Turns +# The following files are also used (in the same directory as the *.gdb) +# # mode5tod.csv: global (per-mode) transit cost and perception attributes -# special_fares.txt: table listing special fares in terms of boarding and incremental in-vehicle costs. +# timexfer_.csv (optional): table of timed transfer pairs of lines, by period +# special_fares.txt (optional): table listing special fares in terms of boarding and incremental in-vehicle costs. 
# off_peak_toll_factors.csv (optional): factors to calculate the toll for EA, MD, and EV periods from the OP toll input for specified facilities # vehicle_class_toll_factors.csv (optional): factors to adjust the toll cost by facility name and class (DA, S2, S3, TRK_L, TRK_M, TRK_H) # @@ -46,10 +52,10 @@ import os modeller = inro.modeller.Modeller() main_directory = os.path.dirname(os.path.dirname(modeller.desktop.project.path)) - source_dir = os.path.join(main_directory, "input") + source_file = os.path.join(main_directory, "input", "EMMEOutputs.gdb") title = "Base 2012 scenario" import_network = modeller.tool("sandag.import.import_network") - import_network(output_dir, merged_scenario_id=100, title=title, + import_network(source_file, merged_scenario_id=100, title=title, data_table_name="2012_base", overwrite=True) """ @@ -85,22 +91,22 @@ FILE_NAMES = { "FARES": "special_fares.txt", + "TIMEXFER": "timexfer_%s.csv", "OFF_PEAK": "off_peak_toll_factors.csv", "VEHICLE_CLASS": "vehicle_class_toll_factors.csv", - "node_taz_map": "node_taz_map.csv", + "MODE5TOD": "MODE5TOD.csv", } class ImportNetwork(_m.Tool(), gen_utils.Snapshot): source = _m.Attribute(unicode) - traffic_scenario_id = _m.Attribute(int) - transit_scenario_id = _m.Attribute(int) - merged_scenario_id = _m.Attribute(int) + scenario_id = _m.Attribute(int) overwrite = _m.Attribute(bool) title = _m.Attribute(unicode) save_data_tables = _m.Attribute(bool) data_table_name = _m.Attribute(unicode) + create_time_periods = _m.Attribute(bool) tool_run_msg = "" @@ -116,37 +122,43 @@ def __init__(self): self.overwrite = False self.title = "" self.data_table_name = "" + self.create_time_periods = True self.attributes = [ - "source", "traffic_scenario_id", "transit_scenario_id", "merged_scenario_id", - "overwrite", "title", "save_data_tables", "data_table_name"] + "source", "scenario_id", "overwrite", "title", "save_data_tables", "data_table_name", "create_time_periods" + ] def page(self): if not self.data_table_name: - load_properties = _m.Modeller().tool('sandag.utilities.properties') - props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) - self.data_table_name = props["scenarioYear"] + try: + load_properties = _m.Modeller().tool('sandag.utilities.properties') + props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) + self.data_table_name = props["scenarioYear"] + except: + pass pb = _m.ToolPageBuilder(self) pb.title = "Import network" pb.description = """ - Create an Emme network from the E00 and associated files - generated from TCOVED. - The timed transfer is stored in data tables with the suffix "_timed_xfers_period". -
-        The following files are used:
-          • hwycov.e00
-          • LINKTYPETURNS.DBF
-          • turns.csv
-          • trcov.e00
-          • trrt.csv
-          • trlink.csv
-          • trstop.csv
-          • timexfer_period.csv, where period = EA,AM,MD,PM,EV
-          • MODE5TOD.csv
-          • special_fares.txt
+        Create an Emme network from TNED geodatabase (*.gdb) and associated files.
+        The following layers in the gdb are used:
+          • TNED_HwyNet
+          • TNED_HwyNodes
+          • TNED_RailNet
+          • TNED_RailNodes
+          • trrt
+          • trlink
+          • trstop
+          • Turns
+        The following files are also used (in the same directory as the *.gdb):
+          • mode5tod.csv
+          • timexfer_period.csv (optional), where period = EA,AM,MD,PM,EV
+          • special_fares.txt (optional)
           • off_peak_toll_factors.csv (optional)
           • vehicle_class_toll_factors.csv (optional)
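
A minimal usage sketch of the updated interface, adapted from the docstring example earlier in this patch; note that the keyword is now scenario_id (the old traffic_scenario_id / transit_scenario_id / merged_scenario_id trio is removed), and the project path and EMMEOutputs.gdb name are taken from that example rather than fixed requirements:

    import os
    import inro.modeller as _m

    modeller = _m.Modeller()
    main_directory = os.path.dirname(os.path.dirname(modeller.desktop.project.path))
    # the TNED geodatabase replaces the old directory of E00/csv network inputs
    source_file = os.path.join(main_directory, "input", "EMMEOutputs.gdb")
    import_network = modeller.tool("sandag.import.import_network")
    # with create_time_periods=True (the default) the five time-period
    # scenarios (EA, AM, MD, PM, EV) are copied from this base scenario
    import_network(source_file, scenario_id=100, title="Base 2012 scenario",
                   data_table_name="2012_base", overwrite=True,
                   create_time_periods=True)
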
@@ -158,16 +170,15 @@ def page(self): pb.tool_run_status(self.tool_run_msg_status) pb.add_select_file("source", window_type="directory", file_filter="", - title="Source directory:",) + title="Source gdb:",) - pb.add_text_box("traffic_scenario_id", size=6, title="Scenario ID for traffic (optional):") - pb.add_text_box("transit_scenario_id", size=6, title="Scenario ID for transit (optional):") - pb.add_text_box("merged_scenario_id", size=6, title="Scenario ID for merged network:") + pb.add_text_box("scenario_id", size=6, title="Scenario ID for imported network:") pb.add_text_box("title", size=80, title="Scenario title:") pb.add_checkbox("save_data_tables", title=" ", label="Save reference data tables of file data") pb.add_text_box("data_table_name", size=80, title="Name for data tables:", note="Prefix name to use for all saved data tables") pb.add_checkbox("overwrite", title=" ", label="Overwrite existing scenarios and data tables") + pb.add_checkbox("create_time_periods", title=" ", label="Copy base scenario to all time periods and set modes (required for assignments)") return pb.render() @@ -186,15 +197,12 @@ def run(self): error, _traceback.format_exc()) raise - def __call__(self, source, - traffic_scenario_id=None, transit_scenario_id=None, merged_scenario_id=None, + def __call__(self, source, scenario_id, title="", save_data_tables=False, data_table_name="", overwrite=False, - emmebank=None): + emmebank=None, create_time_periods=True): self.source = source - self.traffic_scenario_id = traffic_scenario_id - self.transit_scenario_id = transit_scenario_id - self.merged_scenario_id = merged_scenario_id + self.scenario_id = scenario_id self.title = title self.save_data_tables = save_data_tables self.data_table_name = data_table_name @@ -203,11 +211,12 @@ def __call__(self, source, self.emmebank = _m.Modeller().emmebank else: self.emmebank = emmebank + self.create_time_periods = create_time_periods with self.setup(): self.execute() - return self.emmebank.scenario(merged_scenario_id) + return self.emmebank.scenario(scenario_id) @_context def setup(self): @@ -217,13 +226,12 @@ def setup(self): attributes = OrderedDict([ ("self", str(self)), ("source", self.source), - ("traffic_scenario_id", self.traffic_scenario_id), - ("transit_scenario_id", self.transit_scenario_id), - ("merged_scenario_id", self.merged_scenario_id), + ("scenario_id", self.scenario_id), ("title", self.title), ("save_data_tables", self.save_data_tables), ("data_table_name", self.data_table_name), ("overwrite", self.overwrite), + ("create_time_periods", self.create_time_periods) ]) self._log = [{ "content": attributes.items(), @@ -232,6 +240,8 @@ def setup(self): }] with _m.logbook_trace("Import network", attributes=attributes) as trace: gen_utils.log_snapshot("Import network", str(self), attributes) + load_properties = _m.Modeller().tool('sandag.utilities.properties') + self._props = load_properties(_join(_dir(_dir(self.source)), "conf", "sandag_abm.properties")) try: yield except Exception as error: @@ -242,133 +252,76 @@ def setup(self): fatal_error = True raise finally: + self._props = None self.log_report() + self._auto_mode_lookup = None + self._transit_mode_lookup = None if self._error: if fatal_error: - trace.write("Import network failed (%s errors)" % len(self._error), attributes=attributes) + trace.write("Import network failed (%s errors)" % len(self._error), attributes=attributes) else: trace.write("Import network completed (%s non-fatal errors)" % len(self._error), attributes=attributes) def execute(self): - 
traffic_attr_map = { - "NODE": { - "interchange": ("@interchange", "DERIVED", "EXTRA", "is interchange node"), - "HNODE": ("@hnode_hwy", "DERIVED","EXTRA", "HNODE label from hwycov" ), - "zone_id": ("@zone_id", "DERIVED", "EXTRA", "TAZ number a node is in"), - }, - "LINK": OrderedDict([ - ("HWYCOV-ID", ("@tcov_id", "TWO_WAY", "EXTRA", "SANDAG-assigned link ID")), - ("SPHERE", ("@sphere", "TWO_WAY", "EXTRA", "Jurisdiction sphere of influence")), - ("NM", ("#name", "TWO_WAY", "STRING", "Street name")), - ("FXNM", ("#name_from", "TWO_WAY", "STRING", "Cross street at the FROM end")), - ("TXNM", ("#name_to", "TWO_WAY", "STRING", "Cross street name at the TO end")), - ("DIR", ("@direction_cardinal", "TWO_WAY", "EXTRA", "Link direction")), - ("ASPD", ("@speed_adjusted", "TWO_WAY", "EXTRA", "Adjusted link speed (miles/hr)")), - ("IYR", ("@year_open_traffic", "TWO_WAY", "EXTRA", "The year the link opened to traffic")), - ("IPROJ", ("@project_code", "TWO_WAY", "EXTRA", "Project number for use with hwyproj.xls")), - ("IJUR", ("@jurisdiction_type", "TWO_WAY", "EXTRA", "Link jurisdiction type")), - ("IFC", ("type", "TWO_WAY", "STANDARD", "")), - ("IHOV", ("@lane_restriction", "TWO_WAY", "EXTRA", "Link operation type")), - ("ITRUCK", ("@truck_restriction", "TWO_WAY", "EXTRA", "Truck restriction code (ITRUCK)")), - ("ISPD", ("@speed_posted", "TWO_WAY", "EXTRA", "Posted speed limit (mph)")), - ("IMED", ("@median", "TWO_WAY", "EXTRA", "Median type")), - ("AU", ("@lane_auxiliary", "ONE_WAY", "EXTRA", "Number of auxiliary lanes")), - ("CNT", ("@traffic_control", "ONE_WAY", "EXTRA", "Intersection control type")), - ("TL", ("@turn_thru", "ONE_WAY", "EXTRA", "Intersection approach through lanes")), - ("RL", ("@turn_right", "ONE_WAY", "EXTRA", "Intersection approach right-turn lanes")), - ("LL", ("@turn_left", "ONE_WAY", "EXTRA", "Intersection approach left-turn lanes")), - ("GC", ("@green_to_cycle_init", "ONE_WAY", "EXTRA", "Initial green-to-cycle ratio")), - ("CHO", ("@capacity_hourly_op", "ONE_WAY", "EXTRA", "Off-Peak hourly mid-link capacity")), - ("CHA", ("@capacity_hourly_am", "ONE_WAY", "EXTRA", "AM Peak hourly mid-link capacity")), - ("CHP", ("@capacity_hourly_pm", "ONE_WAY", "EXTRA", "PM Peak hourly mid-link capacity")), - # These attributes are expanded from 3 time periods to 5 - ("ITOLLO", ("toll_op", "TWO_WAY", "INTERNAL", "Expanded to EA, MD and EV")), - ("ITOLLA", ("toll_am", "TWO_WAY", "INTERNAL", "")), - ("ITOLLP", ("toll_pm", "TWO_WAY", "INTERNAL", "")), - ("LNO", ("lane_op", "ONE_WAY", "INTERNAL", "Expanded to EA, MD and EV")), - ("LNA", ("lane_am", "ONE_WAY", "INTERNAL", "")), - ("LNP", ("lane_pm", "ONE_WAY", "INTERNAL", "")), - ("CPO", ("capacity_link_op", "ONE_WAY", "INTERNAL", "Expanded to EA, MD and EV")), - ("CPA", ("capacity_link_am", "ONE_WAY", "INTERNAL", "")), - ("CPP", ("capacity_link_pm", "ONE_WAY", "INTERNAL", "")), - ("CXO", ("capacity_inter_op", "ONE_WAY", "INTERNAL", "Expanded to EA, MD and EV")), - ("CXA", ("capacity_inter_am", "ONE_WAY", "INTERNAL", "")), - ("CXP", ("capacity_inter_pm", "ONE_WAY", "INTERNAL", "")), - ("TMO", ("time_link_op", "ONE_WAY", "INTERNAL", "Expanded to EA, MD and EV")), - ("TMA", ("time_link_am", "ONE_WAY", "INTERNAL", "")), - ("TMP", ("time_link_pm", "ONE_WAY", "INTERNAL", "")), - ("TXO", ("time_inter_op", "ONE_WAY", "INTERNAL", "Expanded to EA, MD and EV")), - ("TXA", ("time_inter_am", "ONE_WAY", "INTERNAL", "")), - ("TXP", ("time_inter_pm", "ONE_WAY", "INTERNAL", "")), - # These three attributes are used to cross-reference the turn directions - 
("TLB", ("through_link", "ONE_WAY", "INTERNAL", "")), - ("RLB", ("right_link", "ONE_WAY", "INTERNAL", "")), - ("LLB", ("left_link", "ONE_WAY", "INTERNAL", "")), - ("@cost_operating", ("@cost_operating","DERIVED", "EXTRA", "Fuel and maintenance cost")), - ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), - ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), - ]) - } - time_period_attrs = OrderedDict([ - ("@cost_auto", "toll + cost autos"), - ("@cost_hov2", "toll (non-mngd) + cost HOV2"), - ("@cost_hov3", "toll (non-mngd) + cost HOV3+"), - ("@cost_lgt_truck", "toll + cost light trucks"), - ("@cost_med_truck", "toll + cost medium trucks"), - ("@cost_hvy_truck", "toll + cost heavy trucks"), - ("@cycle", "cycle length (minutes)"), - ("@green_to_cycle", "green to cycle ratio"), - ("@capacity_link", "mid-link capacity"), - ("@capacity_inter", "approach capacity"), - ("@toll", "toll cost (cent)"), - ("@lane", "number of lanes"), - ("@time_link", "link time in minutes"), - ("@time_inter", "intersection delay time"), - ("@sta_reliability", "static reliability") - ]) - time_name = { - "_ea": "Early AM ", "_am": "AM Peak ", "_md": "Mid-day ", "_pm": "PM Peak ", "_ev": "Evening " - } - time_periods = ["_ea", "_am", "_md", "_pm", "_ev"] - for attr, desc_tmplt in time_period_attrs.iteritems(): - for time in time_periods: - traffic_attr_map["LINK"][attr + time] = \ - (attr + time, "DERIVED", "EXTRA", time_name[time] + desc_tmplt) - - transit_attr_map = { - "NODE": OrderedDict([ - ("IPARK", ("@ipark", "DERIVED", "EXTRA", "parking indicator" )), - ("HNODE", ("@hnode_tr", "DERIVED", "EXTRA", "HNODE label from trcov" )), + attr_map = { + "NODE": OrderedDict([ + ("HNODE", ("@hnode", "BOTH", "EXTRA", "HNODE label from TNED" )), + ("TAP", ("@tap_id", "BOTH", "EXTRA", "TAP number")), + ("PARK", ("@park", "BOTH", "EXTRA", "parking indicator" )), + ("STOPTYPE", ("@stoptype", "BOTH", "EXTRA", "stop type indicator" )), + ("ELEV", ("@elev", "BOTH", "EXTRA", "station/stop elevation in feet")), + ("interchange", ("@interchange", "DERIVED", "EXTRA", "is interchange node")), ]), - "LINK": OrderedDict([ - ("TRCOV-ID", ("@tcov_id", "TWO_WAY", "EXTRA", "SANDAG-assigned link ID")), - ("NM", ("#name", "TWO_WAY", "STRING", "Street name")), - ("FXNM", ("#name_from", "TWO_WAY", "STRING", "Cross street at the FROM end")), - ("TXNM", ("#name_to", "TWO_WAY", "STRING", "Cross street name at the TO end")), - ("DIR", ("@direction_cardinal", "TWO_WAY", "EXTRA", "Link direction")), - ("OSPD", ("@speed_observed", "TWO_WAY", "EXTRA", "Observed speed")), - ("IYR", ("@year_open_traffic", "TWO_WAY", "EXTRA", "The year the link opened to traffic ")), - ("IFC", ("type", "TWO_WAY", "STANDARD", "")), - ("IHOV", ("@lane_restriction_tr", "TWO_WAY", "EXTRA", "Link operation type")), - ("ISPD", ("@speed_posted_tr_l", "TWO_WAY", "EXTRA", "Posted speed limit (mph)")), - ("IMED", ("@median", "TWO_WAY", "EXTRA", "Median type")), - ("TMO", ("trtime_link_op", "ONE_WAY", "INTERNAL", "Expanded to EA, MD and EV")), - ("TMEA", ("@trtime_link_ea", "DERIVED", "EXTRA", "Early AM transit link time in minutes")), - ("TMA", ("@trtime_link_am", "ONE_WAY", "EXTRA", "AM Peak transit link time in minutes")), - ("TMMD", ("@trtime_link_md", "DERIVED", "EXTRA", "Mid-day transit link time in minutes")), - ("TMP", ("@trtime_link_pm", "ONE_WAY", "EXTRA", "PM Peak transit link time in minutes")), - ("TMEV", ("@trtime_link_ev", "DERIVED", "EXTRA", "Evening transit link time in minutes")), 
- ("MINMODE", ("@mode_hierarchy", "TWO_WAY", "EXTRA", "Transit mode type")), + "LINK": OrderedDict([ + ("HWYCOV0_ID",("@tcov_id", "TWO_WAY", "EXTRA", "SANDAG-assigned link ID")), + ("SPHERE", ("@sphere", "HWY_TWO_WAY", "EXTRA", "Jurisdiction sphere of influence")), + ("HWYSegGUID",("#hwyseg_guid", "TWO_WAY", "STRING", "HWYSegGUID")), + ("NM", ("#name", "TWO_WAY", "STRING", "Street name")), + ("FXNM", ("#name_from", "TWO_WAY", "STRING", "Cross street at the FROM end")), + ("TXNM", ("#name_to", "TWO_WAY", "STRING", "Cross street name at the TO end")), + ("DIR", ("@direction_cardinal", "TWO_WAY", "EXTRA", "Link direction")), + ("ASPD", ("@speed_adjusted", "HWY_TWO_WAY", "EXTRA", "Adjusted link speed (miles/hr)")), + ("YR", ("@year_open_traffic", "HWY_TWO_WAY", "EXTRA", "The year the link opened to traffic")), + ("PROJ", ("@project_code", "HWY_TWO_WAY", "EXTRA", "Project number for use with hwyproj.xls")), + ("FC", ("type", "TWO_WAY", "STANDARD", "")), + ("HOV", ("@hov", "TWO_WAY", "EXTRA", "Link operation type")), + ("MINMODE", ("@minmode", "TWO_WAY", "EXTRA", "Transit mode type")), + ("EATRUCK", ("@truck_ea", "HWY_TWO_WAY", "EXTRA", "Early AM truck restriction code ")), + ("AMTRUCK", ("@truck_am", "HWY_TWO_WAY", "EXTRA", "AM Peak truck restriction code ")), + ("MDTRUCK", ("@truck_md", "HWY_TWO_WAY", "EXTRA", "Mid-day truck restriction code ")), + ("PMTRUCK", ("@truck_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak truck restriction code ")), + ("EVTRUCK", ("@truck_ev", "HWY_TWO_WAY", "EXTRA", "Evening truck restriction code ")), + ("TOLLEA", ("@toll_ea", "HWY_TWO_WAY", "EXTRA", "Early AM toll cost (cent)")), + ("TOLLA", ("@toll_am", "HWY_TWO_WAY", "EXTRA", "AM Peak toll cost (cent)")), + ("TOLLMD", ("@toll_md", "HWY_TWO_WAY", "EXTRA", "Mid-day toll cost (cent)")), + ("TOLLP", ("@toll_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak toll cost (cent)")), + ("TOLLEV", ("@toll_ev", "HWY_TWO_WAY", "EXTRA", "Evening toll cost (cent)")), + + ("SPD", ("@speed_posted", "HWY_TWO_WAY", "EXTRA", "Posted speed limit (mph)")), + ("MED", ("@median", "TWO_WAY", "EXTRA", "Median type")), + ("AU", ("@lane_auxiliary", "HWY_ONE_WAY", "EXTRA", "Number of auxiliary lanes")), + ("CNT", ("@traffic_control", "HWY_ONE_WAY", "EXTRA", "Intersection control type")), + ("TL", ("@turn_thru", "HWY_ONE_WAY", "EXTRA", "Intersection approach through lanes")), + ("RL", ("@turn_right", "HWY_ONE_WAY", "EXTRA", "Intersection approach right-turn lanes")), + ("LL", ("@turn_left", "HWY_ONE_WAY", "EXTRA", "Intersection approach left-turn lanes")), + ("GC", ("@green_to_cycle_init", "HWY_ONE_WAY", "EXTRA", "Initial green-to-cycle ratio")), + ("WAY", ("way", "HWY_TWO_WAY", "INTERNAL", "")), + ("TRANSIT_MODES", ("transit_modes", "DERIVED", "INTERNAL", "")), + ("@cost_operating", ("@cost_operating", "DERIVED", "EXTRA", "Fuel and maintenance cost")), + ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), + ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), + + ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), + ]), "TRANSIT_LINE": OrderedDict([ ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), - ("OP_Headway", ("@headway_op", "TRRT", "EXTRA", "Off-Peak actual headway")), - ("Night_Headway", ("@headway_night", "TRRT", "EXTRA", "Night actual headway")), - ("Night_Headway_rev", ("@headway_rev_night", "DERIVED", "EXTRA", "Night Peak revised headway")), + 
("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), + ("Evening_Headway", ("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), + ("EarlyAM_Headway", ("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), - ("OP_Headway_rev", ("@headway_rev_op", "DERIVED", "EXTRA", "Off-Peak revised headway")), ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), @@ -377,522 +330,424 @@ def execute(self): ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), - ("Night_Hours", ("@night_hours", "TRRT", "EXTRA", "Night hours")), - ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), + ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), + ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), + ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), ]), "TRANSIT_SEGMENT": OrderedDict([ ("Stop_ID", ("@stop_id", "TRSTOP", "EXTRA", "Stop ID from trcov")), ("Pass_Count", ("@pass_count", "TRSTOP", "EXTRA", "Number of times this stop is passed")), ("Milepost", ("@milepost", "TRSTOP", "EXTRA", "Distance from start of line")), - ("FareZone", ("@fare_zone", "TRSTOP", "EXTRA", "Fare zone ID")), ("StopName", ("#stop_name", "TRSTOP", "STRING", "Name of stop")), ("@coaster_fare_board", ("@coaster_fare_board", "DERIVED", "EXTRA", "Boarding fare for coaster")), ("@coaster_fare_inveh", ("@coaster_fare_inveh", "DERIVED", "EXTRA", "Incremental fare for Coaster")), ]) } + time_name = { + "_ea": "Early AM ", "_am": "AM Peak ", "_md": "Mid-day ", "_pm": "PM Peak ", "_ev": "Evening " + } + time_name_dst = ["_ea", "_am", "_md", "_pm", "_ev"] + time_name_src = ["EA", "A", "MD", "P", "EV"] + time_period_attrs = [ + ("CP", "@capacity_link", "mid-link capacity"), + ("CX", "@capacity_inter", "approach capacity"), + ("CH", "@capacity_hourly", "hourly mid-link capacity"), + ("LN", "@lane", "number of lanes"), + ("TM", "@time_link", "link time in minutes"), + ("TX", "@time_inter", "intersection delay time"), + ] + for src_attr, dst_attr, desc_tmplt in time_period_attrs: + for time_s, time_d in zip(time_name_src, time_name_dst): + attr_map["LINK"][src_attr + time_s] = \ + (dst_attr + time_d, "HWY_ONE_WAY", "EXTRA", time_name[time_d] + desc_tmplt) + derived_period_attrs = [ + ("@cost_auto", "toll + cost autos"), + ("@cost_hov2", "toll (non-mngd) + cost HOV2"), + ("@cost_hov3", "toll (non-mngd) + cost HOV3+"), + ("@cost_lgt_truck", "toll + cost light trucks"), + ("@cost_med_truck", "toll + cost medium trucks"), + ("@cost_hvy_truck", "toll + cost heavy trucks"), + ("@cycle", "cycle length (minutes)"), + ("@green_to_cycle", "green to cycle ratio"), + ("@sta_reliability", "static reliability") + ] + for attr, desc_tmplt in derived_period_attrs: + for time in time_name_dst: + attr_map["LINK"][attr + time] = \ + (attr + time, "DERIVED", "EXTRA", time_name[time] + desc_tmplt) + create_scenario = _m.Modeller().tool( "inro.emme.data.scenario.create_scenario") - file_names = [ - 
"hwycov.e00", "LINKTYPETURNS.DBF", "turns.csv", - "trcov.e00", "trrt.csv", "trlink.csv", "trstop.csv", - "timexfer_EA.csv", "timexfer_AM.csv","timexfer_MD.csv", - "timexfer_PM.csv","timexfer_EV.csv","MODE5TOD.csv", - ] - for name in file_names: - file_path = _join(self.source, name) - if not os.path.exists(file_path): - raise Exception("missing file '%s' in directory %s" % (name, self.source)) - title = self.title if not title: - existing_scenario = self.emmebank.scenario(self.merged_scenario_id) + existing_scenario = self.emmebank.scenario(self.scenario_id) if existing_scenario: title = existing_scenario.title - def create_attributes(scenario, attr_map): - for elem_type, mapping in attr_map.iteritems(): - for name, _tcoved_type, emme_type, desc in mapping.values(): - if emme_type == "EXTRA": - if not scenario.extra_attribute(name): - xatt = scenario.create_extra_attribute(elem_type, name) + scenario = create_scenario(self.scenario_id, title, overwrite=self.overwrite, emmebank=self.emmebank) + scenarios = [scenario] + if self.create_time_periods: + periods=["EA", "AM", "MD", "PM", "EV"] + period_ids = list(enumerate(periods, start=int(self.scenario_id) + 1)) + for ident, period in period_ids: + scenarios.append(create_scenario(ident, "%s - %s assign" % (title, period), + overwrite=self.overwrite, emmebank=self.emmebank)) + # create attributes in scenario + for elem_type, mapping in attr_map.iteritems(): + for name, _tcoved_type, emme_type, desc in mapping.values(): + if emme_type == "EXTRA": + for s in scenarios: + if not s.extra_attribute(name): + xatt = s.create_extra_attribute(elem_type, name) xatt.description = desc - elif emme_type == "STRING": - if not scenario.network_field(elem_type, name): - scenario.create_network_field(elem_type, name, 'STRING', description=desc) - - if self.traffic_scenario_id: - traffic_scenario = create_scenario( - self.traffic_scenario_id, title + " Traffic", - overwrite=self.overwrite, emmebank=self.emmebank) - create_attributes(traffic_scenario, traffic_attr_map) - else: - traffic_scenario = None - if self.transit_scenario_id: - transit_scenario = create_scenario( - self.transit_scenario_id, title + " Transit", - overwrite=self.overwrite, emmebank=self.emmebank) - create_attributes(transit_scenario, transit_attr_map) - else: - transit_scenario = None - if self.merged_scenario_id: - scenario = create_scenario( - self.merged_scenario_id, title, - overwrite=self.overwrite, emmebank=self.emmebank) - create_attributes(scenario, traffic_attr_map) - create_attributes(scenario, transit_attr_map) - else: - scenario = traffic_scenario or transit_scenario - - traffic_network = _network.Network() - transit_network = _network.Network() + elif emme_type == "STRING": + for s in scenarios: + if not s.network_field(elem_type, name): + s.create_network_field(elem_type, name, 'STRING', description=desc) + + log_content = [] + for k, v in mapping.iteritems(): + if v[3] == "DERIVED": + k = "--" + log_content.append([k] + list(v)) + self._log.append({ + "content": log_content, + "type": "table", + "header": ["TNED", "Emme", "Source", "Type", "Description"], + "title": "Network %s attributes" % elem_type.lower().replace("_", " "), + "disclosure": True + }) + + network = _network.Network() + for elem_type, mapping in attr_map.iteritems(): + for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): + if emme_type == "STANDARD": + continue + default = "" if emme_type == "STRING" else 0 + network.create_attribute(elem_type, attr, default) try: - if 
self.traffic_scenario_id or self.merged_scenario_id: - for elem_type, attrs in traffic_attr_map.iteritems(): - log_content = [] - for k, v in attrs.iteritems(): - if v[3] == "DERIVED": - k = "--" - log_content.append([k] + list(v)) - self._log.append({ - "content": log_content, - "type": "table", - "header": ["TCOVED", "Emme", "Source", "Type", "Description"], - "title": "Traffic %s attributes" % elem_type.lower().replace("_", " "), - "disclosure": True - }) - try: - self.create_traffic_base(traffic_network, traffic_attr_map) - self.create_turns(traffic_network) - self.calc_traffic_attributes(traffic_network) - self.check_zone_access(traffic_network, traffic_network.mode("d")) - finally: - if traffic_scenario: - traffic_scenario.publish_network(traffic_network, resolve_attributes=True) - - if self.transit_scenario_id or self.merged_scenario_id: - for elem_type, attrs in transit_attr_map.iteritems(): - log_content = [] - for k, v in attrs.iteritems(): - if v[3] == "DERIVED": - k = "--" - log_content.append([k] + list(v)) - self._log.append({ - "content": log_content, - "type": "table", - "header": ["TCOVED", "Emme", "Source", "Type", "Description"], - "title": "Transit %s attributes" % elem_type.lower().replace("_", " "), - "disclosure": True - }) - try: - self.create_transit_base(transit_network, transit_attr_map) - self.create_transit_lines(transit_network, transit_attr_map) - self.calc_transit_attributes(transit_network) - new_node_id = max( - max(n.number for n in traffic_network.nodes()), - max(n.number for n in transit_network.nodes()) - ) - new_node_id = int(_ceiling(new_node_id / 10000.0) * 10000) - new_node_id = self.renumber_transit_nodes(transit_network, new_node_id) - finally: - if transit_scenario: - for link in transit_network.links(): - if link.type <= 0: - link.type = 99 - transit_scenario.publish_network(transit_network, resolve_attributes=True) - if self.merged_scenario_id: - self.add_transit_to_traffic(traffic_network, transit_network, new_node_id) + self.create_modes(network) + self.create_road_base(network, attr_map) + self.create_turns(network) + self.calc_traffic_attributes(network) + self.check_zone_access(network, network.mode("d")) + self.create_rail_base(network, attr_map) + self.create_transit_lines(network, attr_map) + self.calc_transit_attributes(network) finally: - if self.merged_scenario_id: - scenario.publish_network(traffic_network, resolve_attributes=True) + # TAP connectors included in network, fix type setting and renumber node IDs + for link in network.links(): + if link.type <= 0: + link.type = 99 + self.renumber_base_nodes(network) + scenario.publish_network(network, resolve_attributes=True) self.set_functions(scenario) self.check_connectivity(scenario) - def create_traffic_base(self, network, attr_map): - self._log.append({"type": "header", "content": "Import traffic base network from hwycov.e00"}) - hwy_data = gen_utils.DataTableProc("ARC", _join(self.source, "hwycov.e00")) - - if self.save_data_tables: - hwy_data.save("%s_hwycov" % self.data_table_name, self.overwrite) - - for elem_type in "NODE", "TURN": - mapping = attr_map.get(elem_type) - if not mapping: - continue - for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): - default = "" if emme_type == "STRING" else 0 - network.create_attribute(elem_type, attr, default) - - # Create Modes - dummy_auto = network.create_mode("AUTO", "d") - hov2 = network.create_mode("AUX_AUTO", "h") - hov2_toll = network.create_mode("AUX_AUTO", "H") - hov3 = network.create_mode("AUX_AUTO", "i") - 
hov3_toll = network.create_mode("AUX_AUTO", "I") - sov = network.create_mode("AUX_AUTO", "s") - sov_toll = network.create_mode("AUX_AUTO", "S") - heavy_trk = network.create_mode("AUX_AUTO", "v") - heavy_trk_toll = network.create_mode("AUX_AUTO", "V") - medium_trk = network.create_mode("AUX_AUTO", "m") - medium_trk_toll = network.create_mode("AUX_AUTO", "M") - light_trk = network.create_mode("AUX_AUTO", "t") - light_trk_toll = network.create_mode("AUX_AUTO", "T") - - dummy_auto.description = "dummy auto" - sov.description = "SOV" - hov2.description = "HOV2" - hov3.description = "HOV3+" - light_trk.description = "TRKL" - medium_trk.description = "TRKM" - heavy_trk.description = "TRKH" - - sov_toll.description = "SOV TOLL" - hov2_toll.description = "HOV2 TOLL" - hov3_toll.description = "HOV3+ TOLL" - light_trk_toll.description = "TRKL TOLL" - medium_trk_toll.description = "TRKM TOLL" - heavy_trk_toll.description = "TRKH TOLL" - - is_centroid = lambda arc, node : (arc["IFC"] == 10) and (node == "AN") - - # Note: only truck types 1, 3, 4, and 7 found in 2012 base network - modes_gp_lanes= { - 1: set([dummy_auto, sov, hov2, hov3, light_trk, medium_trk, heavy_trk, - sov_toll, hov2_toll, hov3_toll, light_trk_toll, medium_trk_toll, - heavy_trk_toll]), - 2: set([dummy_auto, sov, hov2, hov3, light_trk, medium_trk, - sov_toll, hov2_toll, hov3_toll, light_trk_toll, medium_trk_toll]), - 3: set([dummy_auto, sov, hov2, hov3, light_trk, sov_toll, hov2_toll, - hov3_toll, light_trk_toll]), - 4: set([dummy_auto, sov, hov2, hov3, sov_toll, hov2_toll, hov3_toll]), - 5: set([dummy_auto, heavy_trk, heavy_trk_toll]), - 6: set([dummy_auto, medium_trk, heavy_trk, medium_trk_toll, heavy_trk_toll]), - 7: set([dummy_auto, light_trk, medium_trk, heavy_trk, light_trk_toll, - medium_trk_toll, heavy_trk_toll]), + if "modify_network.py" in os.listdir(os.getcwd()): + try: + with _m.logbook_trace("Modify network script"): + import modify_network + reload(modify_network) + modify_network.run(base_scenario) + except ImportError as e: + pass + network = base_scenario.get_network() + network.create_attribute("LINK", "transit_modes") + + if self.create_time_periods: + for link in network.links(): + link.transit_modes = link.modes + for ident, period in period_ids: + self.set_auto_modes(network, period) + scenario = self.emmebank.scenario(ident) + scenario.publish_network(network, resolve_attributes=True) + + def create_modes(self, network): + # combined traffic and transit mode creation + mode_table = { + "AUTO": [("d", "dummy auto")], + "AUX_AUTO": [ + ("h", "SOV"), + ("H", "HOV2"), + ("i", "HOV3+"), + ("I", "TRKL"), + ("s", "TRKM"), + ("S", "TRKH"), + ("v", "SOV TOLL"), + ("V", "HOV2 TOLL"), + ("m", "HOV3+ TOLL"), + ("M", "TRKL TOLL"), + ("t", "TRKM TOLL"), + ("T", "TRKH TOLL"), + ], + "TRANSIT": [ + ("b", "BUS" ), # (vehicle type 100, PCE=3.0) + ("e", "EXP BUS"), # (vehicle type 90 , PCE=3.0) + ("p", "LTDEXP BUS"), # (vehicle type 80 , PCE=3.0) + ("l", "LRT"), # (vehicle type 50) + ("y", "BRT YEL"), # (vehicle type 60 , PCE=3.0) + ("r", "BRT RED"), # (vehicle type 70 , PCE=3.0) + ("c", "CMR"), # (vehicle type 40) + ("o", "TIER1"), # (vehicle type 45) + ], + "AUX_TRANSIT": [ + ("a", "ACCESS", 3), + ("x", "TRANSFER", 3), + ("w", "WALK", 3), + ("u", "ACCESS_WLK", 3), + ("k", "EGRESS_WLK", 3), + ("f", "ACCESS_PNR", 25), + ("g", "EGRESS_PNR", 25), + ("q", "ACCESS_KNR", 25), + ("j", "EGRESS_KNR", 25), + ("Q", "ACCESS_TNC", 25), + ("J", "EGRESS_TNC", 25), + ], } - modes_toll_lanes = { - 1: set([dummy_auto, sov_toll, hov2_toll, 
hov3_toll, light_trk_toll, - medium_trk_toll, heavy_trk_toll]), - 2: set([dummy_auto, sov_toll, hov2_toll, hov3_toll, light_trk_toll, - medium_trk_toll]), - 3: set([dummy_auto, sov_toll, hov2_toll, hov3_toll, light_trk_toll]), - 4: set([dummy_auto, sov_toll, hov2_toll, hov3_toll]), - 5: set([dummy_auto, heavy_trk_toll]), - 6: set([dummy_auto, medium_trk_toll, heavy_trk_toll]), - 7: set([dummy_auto, light_trk_toll, medium_trk_toll, heavy_trk_toll]), + for mode_type, modes in mode_table.iteritems(): + for mode_info in modes: + mode = network.create_mode(mode_type, mode_info[0]) + mode.description = mode_info[1] + if len(mode_info) == 3: + mode.speed = mode_info[2] + self._transit_mode_lookup = { + 0: set([]), + 1: set([network.mode(m_id) for m_id in "x"]), # 1 = special transfer walk links between certain nearby stops + 2: set([network.mode(m_id) for m_id in "w"]), # 2 = walk links in the downtown area + 3: set([network.mode(m_id) for m_id in "a"]), # 3 = the special TAP connectors + 400: set([network.mode(m_id) for m_id in "c"]), # 4 = Coaster Rail Line + 500: set([network.mode(m_id) for m_id in "l"]), # 5 = Trolley & Light Rail Transit (LRT) + 600: set([network.mode(m_id) for m_id in "bpeyr"]), # 6 = Yellow Car Bus Rapid Transit (BRT) + 700: set([network.mode(m_id) for m_id in "bpeyr"]), # 7 = Red Car Bus Rapid Transit (BRT) + 800: set([network.mode(m_id) for m_id in "bpe"]), # 8 = Limited Express Bus + 900: set([network.mode(m_id) for m_id in "bpe"]), # 9 = Express Bus + 1000: set([network.mode(m_id) for m_id in "bpe"]), # 10 = Local Bus + 11: set([network.mode(m_id) for m_id in "u"]), # = access walk links + 12: set([network.mode(m_id) for m_id in "k"]), # = egress walk links + 13: set([network.mode(m_id) for m_id in "f"]), # = access PNR links + 14: set([network.mode(m_id) for m_id in "g"]), # = egress PNR links + 15: set([network.mode(m_id) for m_id in "q"]), # = access KNR links + 16: set([network.mode(m_id) for m_id in "j"]), # = egress KNR links + 17: set([network.mode(m_id) for m_id in "Q"]), # = access TNC links + 18: set([network.mode(m_id) for m_id in "J"]), # = egress TNC links + } + modes_gp_lanes = { + 0: set([]), + 1: set([network.mode(m_id) for m_id in "dhHiIsSvVmMtT"]), # all modes + 2: set([network.mode(m_id) for m_id in "dhHiIsvVmMt"]), # no heavy truck + 3: set([network.mode(m_id) for m_id in "dhHiIvVmM"]), # no heavy or medium truck + 4: set([network.mode(m_id) for m_id in "dhHivVm"]), # no truck + 5: set([network.mode(m_id) for m_id in "dST"]), # only heavy trucks + 6: set([network.mode(m_id) for m_id in "dsStT"]), # heavy and medium trucks + 7: set([network.mode(m_id) for m_id in "dIsSMtT"]), # all trucks only + } + non_toll_modes = set([network.mode(m_id) for m_id in "hHiIsS"]) + self._auto_mode_lookup = { + "GP": modes_gp_lanes, + "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), + "HOV2": set([network.mode(m_id) for m_id in "dHiVm"]), + "HOV3": set([network.mode(m_id) for m_id in "dim"]), } - modes_HOV2 = set([dummy_auto, hov2, hov3, hov2_toll, hov3_toll]) - modes_HOV3 = set([dummy_auto, hov3, hov3_toll]) - - def define_modes(arc): - if arc["IFC"] == 10: # connector - return modes_gp_lanes[1] - elif arc["IHOV"] == 1: - return modes_gp_lanes[arc["ITRUCK"]] - elif arc["IHOV"] == 2: + def set_auto_modes(self, network, period): + # time periods + # need to update the modes from the XTRUCK for their time of day + # Note: only truck types 1, 3, 4, and 7 found in 2012 base network + truck = "@truck_%s" % period.lower() + lookup = 
self._auto_mode_lookup + for link in network.links(): + auto_modes = set([]) + if link.type == 10: # connector + auto_modes = lookup["GP"][link[truck]] + elif link.type in [11, 12]: + pass # no auto modes, rail only (11) or bus only (12) + elif link["@hov"] == 1: + auto_modes = lookup["GP"][link[truck]] + elif link["@hov"] in [2, 3]: # managed lanes, free for HOV2 and HOV3+, tolls for SOV - if arc["ITOLLO"] + arc["ITOLLA"] + arc["ITOLLP"] > 0: - return modes_toll_lanes[arc["ITRUCK"]] | modes_HOV2 + if link["@toll_ea"] + link["@toll_am"] + link["@toll_md"] + link["@toll_pm"] + link["@toll_ev"] > 0: + auto_modes = lookup["TOLL"][link[truck]] # special case of I-15 managed lanes base year and 2020, no build - elif arc["IFC"] == 1 and arc["IPROJ"] in [41, 42, 486, 373, 711]: - return modes_toll_lanes[arc["ITRUCK"]] | modes_HOV2 - elif arc["IFC"] == 8 or arc["IFC"] == 9: - return modes_toll_lanes[arc["ITRUCK"]] | modes_HOV2 + elif link.type == 1 and link["@project_code"] in [41, 42, 486, 373, 711]: + auto_modes = lookup["TOLL"][link[truck]] + elif link.type == 8 or link.type == 9: + auto_modes = lookup["TOLL"][link[truck]] + if link["@hov"] == 2: + auto_modes = auto_modes | lookup["HOV2"] else: - return modes_HOV2 - elif arc["IHOV"] == 3: - # managed lanes, free for HOV3+, tolls for SOV and HOV2 - if arc["ITOLLO"] + arc["ITOLLA"] + arc["ITOLLP"] > 0: - return modes_toll_lanes[arc["ITRUCK"]] | modes_HOV3 - # special case of I-15 managed lanes for base year and 2020, no build - elif arc["IFC"] == 1 and arc["IPROJ"] in [41, 42, 486, 373, 711]: - return modes_toll_lanes[arc["ITRUCK"]] | modes_HOV3 - elif arc["IFC"] == 8 or arc["IFC"] == 9: - return modes_toll_lanes[arc["ITRUCK"]] | modes_HOV3 - else: - return modes_HOV3 - elif arc["IHOV"] == 4: - return modes_toll_lanes[arc["ITRUCK"]] - else: - return modes_gp_lanes[arc["ITRUCK"]] + auto_modes = auto_modes | lookup["HOV3"] + elif link["@hov"] == 4: + auto_modes = lookup["TOLL"][link[truck]] + link.modes = link.transit_modes | auto_modes + + def create_road_base(self, network, attr_map): + self._log.append({"type": "header", "content": "Import roadway base network from TNED_HwyNet %s" % self.source}) + hwy_data = gen_utils.DataTableProc("TNED_HwyNet", self.source) + # TEMP workaround: BN field is string + bn_index = hwy_data._attr_names.index("BN") + hwy_data._values[bn_index] = hwy_data._values[bn_index].astype(int) + + if self.save_data_tables: + hwy_data.save("%s_TNED_HwyNet" % self.data_table_name, self.overwrite) + + is_centroid = lambda arc, node : (arc["FC"] == 10) and (node == "AN") + link_attr_map = {} + for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): + if tcoved_type in ("TWO_WAY", "HWY_TWO_WAY", "ONE_WAY", "HWY_ONE_WAY"): + link_attr_map[field] = (name, tcoved_type.replace("HWY_", ""), emme_type, desc) + + def define_modes(arc): + if arc["FC"] in [11, 12] or arc["ABLNA"] == 0: #or ((arc["HOV"] < 1 or arc["HOV"] > 4) and arc["FC"] != 10): + vehicle_index = int(arc["MINMODE"] / 100)*100 + aux_index = int(arc["MINMODE"] % 100) + return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] + return [network.mode('d')] self._create_base_net( - hwy_data, network, mode_callback=define_modes, centroid_callback=is_centroid, - arc_id_name="HWYCOV-ID", link_attr_map=attr_map["LINK"]) - hwy_node_data = gen_utils.E00FileProc("HWYCOV.NAT", _join(self.source, "hwycov.e00")) + hwy_data, network, mode_callback=define_modes, centroid_callback=is_centroid, link_attr_map=link_attr_map) + + 
hwy_node_data = gen_utils.DataTableProc("TNED_HwyNodes", self.source) + node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() + if v[1] in ("BOTH", "HWY")] for record in hwy_node_data: - node = network.node(record["HWYCOV-ID"]) + node = network.node(record["HNODE"]) if node: - node["@hnode_hwy"] = record["HNODE"] + for src, dst in node_attrs: + node[dst] = record[src] + else: + self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) self._log.append({"type": "text", "content": "Import traffic base network complete"}) - def create_transit_base(self, network, attr_map): - self._log.append({"type": "header", "content": "Import transit base network from trcov.e00"}) - load_properties = _m.Modeller().tool('sandag.utilities.properties') - props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) - transit_data = gen_utils.DataTableProc("ARC", _join(self.source, "trcov.e00")) + def create_rail_base(self, network, attr_map): + self._log.append({"type": "header", "content": "Import rail base network from TNED_RailNet %s" % self.source}) + transit_data = gen_utils.DataTableProc("TNED_RailNet", self.source) if self.save_data_tables: - transit_data.save("%s_trcov" % self.data_table_name, self.overwrite) - - # aux mode speed is always 3 (miles/hr) - access = network.create_mode("AUX_TRANSIT", "a") - transfer = network.create_mode("AUX_TRANSIT", "x") - walk = network.create_mode("AUX_TRANSIT", "w") - AccWlk = network.create_mode("AUX_TRANSIT", "u") - EgrWlk = network.create_mode("AUX_TRANSIT", "k") - AccPNR = network.create_mode("AUX_TRANSIT", "f") - EgrPNR = network.create_mode("AUX_TRANSIT", "g") - AccKNR = network.create_mode("AUX_TRANSIT", "q") - EgrKNR = network.create_mode("AUX_TRANSIT", "j") - AccTNC = network.create_mode("AUX_TRANSIT", "Q") - EgrTNC = network.create_mode("AUX_TRANSIT", "J") - - bus = network.create_mode("TRANSIT", "b") - express_bus = network.create_mode("TRANSIT", "e") - ltdexp_bus = network.create_mode("TRANSIT", "p") - brt_red = network.create_mode("TRANSIT", "r") - brt_yellow = network.create_mode("TRANSIT", "y") - lrt = network.create_mode("TRANSIT", "l") - coaster_rail = network.create_mode("TRANSIT", "c") - tier1 = network.create_mode("TRANSIT", "o") - - access.description = "ACCESS" - transfer.description = "TRANSFER" - walk.description = "WALK" - AccWlk.description = "ACCESS_WLK" - EgrWlk.description = "EGRESS_WLK" - AccPNR.description = "ACCESS_PNR" - EgrPNR.description = "EGRESS_PNR" - AccKNR.description = "ACCESS_KNR" - EgrKNR.description = "EGRESS_KNR" - AccTNC.description = "ACCESS_TNC" - EgrTNC.description = "EGRESS_TNC" - bus.description = "BUS" # (vehicle type 100, PCE=3.0) - express_bus.description = "EXP BUS" # (vehicle type 90 , PCE=3.0) - ltdexp_bus.description = "LTDEXP BUS" # (vehicle type 80 , PCE=3.0) - lrt.description = "LRT" # (vehicle type 50) - brt_yellow.description = "BRT YEL" # (vehicle type 60 , PCE=3.0) - brt_red.description = "BRT RED" # (vehicle type 70 , PCE=3.0) - coaster_rail.description = "CMR" # (vehicle type 40) - tier1.description = "TIER1" # (vehicle type 45) - - access.speed = 3 - transfer.speed = 3 - walk.speed = 3 - AccWlk.speed = 3 - EgrWlk.speed = 3 - AccPNR.speed = 25 - EgrPNR.speed = 25 - AccKNR.speed = 25 - EgrKNR.speed = 25 - AccTNC.speed = 25 - EgrTNC.speed = 25 - - ## define TAP connectors as centroids - #is_centroid = lambda arc, node: (int(arc["MINMODE"]) == 3) and (node == "BN") - - #replaced with centroid definition of highway - is_centroid = lambda arc, 
node : (arc["IFC"] == 10) and (node == "AN") - ##Do not create any centroird related stuff - #is_centroid = lambda arc, node : False - #network.delete_node() - mode_setting = { - 1: set([transfer]), # 1 = special transfer walk links between certain nearby stops - 2: set([walk]), # 2 = walk links in the downtown area - 3: set([access]), # 3 = the special TAP connectors - 4: set([coaster_rail]), # 4 = Coaster Rail Line - 5: set([lrt]), # 5 = Light Rail Transit (LRT) Line - 6: set([brt_yellow, ltdexp_bus, express_bus, bus]), # 6 = Yellow Car Bus Rapid Transit (BRT) - 7: set([brt_red, ltdexp_bus, express_bus, bus]), # 7 = Red Car Bus Rapid Transit (BRT) - 8: set([ltdexp_bus, express_bus, bus]), # 8 = Limited Express Bus - 9: set([ltdexp_bus, express_bus, bus]), # 9 = Express Bus - 10: set([ltdexp_bus, express_bus, bus]), # 10 = Local Bus - 11: set([AccWlk]), # 4 = access walk links - 12: set([EgrWlk]), # 5 = egress walk links - 13: set([AccPNR]), # 6 = access PNR links - 14: set([EgrPNR]), # 7 = egress PNR links - 15: set([AccKNR]), # 8 = access KNR links - 16: set([EgrKNR]), # 9 = egress KNR links - 17: set([AccTNC]), # 8 = access TNC links - 18: set([EgrTNC]), # 9 = egress TNC links - } - tier1_rail_link_name = props["transit.newMode"] + transit_data.save("%s_TNED_RailNet" % self.data_table_name, self.overwrite) + + link_attr_map = {} + for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): + if tcoved_type in ("TWO_WAY", "RAIL_TWO_WAY", "ONE_WAY", "RAIL_ONE_WAY"): + link_attr_map[field] = (name, tcoved_type.replace("RAIL_", ""), emme_type, desc) + + tier1_modes = set([network.mode(m_id) for m_id in "o"]) + tier1_rail_link_name = self._props["transit.newMode"] def define_modes(arc): if arc["NM"] == tier1_rail_link_name: - return set([tier1]) - return mode_setting[arc["MINMODE"]] - - arc_filter = lambda arc: (arc["MINMODE"] > 2) + return tier1_modes + vehicle_index = int(arc["MINMODE"] / 100)*100 + aux_index = int(arc["MINMODE"] % 100) + return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] - # first pass to create the main base network for vehicles, xfer links and TAPs self._create_base_net( - transit_data, network, mode_callback=define_modes, centroid_callback=is_centroid, - arc_id_name="TRCOV-ID", link_attr_map=attr_map["LINK"], arc_filter=arc_filter) + transit_data, network, mode_callback=define_modes, link_attr_map=link_attr_map) - # second pass to add special walk links / modify modes on existing links - reverse_dir_map = {1:3, 3:1, 2:4, 4:2, 0:0} - - def set_reverse_link(link, modes): - reverse_link = link.reverse_link - if reverse_link: - reverse_link.modes |= modes - else: - reverse_link = network.create_link(link.j_node, link.i_node, modes) - for attr in network.attributes("LINK"): - reverse_link[attr] = link[attr] - reverse_link["@direction_cardinal"] = reverse_dir_map[link["@direction_cardinal"]] - reverse_link["@tcov_id"] = -1*link["@tcov_id"] - reverse_link.vertices = list(reversed(link.vertices)) - - def epsilon_compare(a, b, epsilon): - return abs((a - b) / (a if abs(a) > 1 else 1)) <= epsilon - - for arc in transit_data: - # possible improvement: snap walk nodes to nearby node if not matched and within distance - if arc_filter(arc): - continue - if float(arc["AN"]) == 0 or float(arc["BN"]) == 0: - self._log.append({"type": "text", - "content": "Node ID 0 in AN (%s) or BN (%s) for link ID %s." 
% - (arc["AN"], arc["BN"], arc["TRCOV-ID"])}) - continue - coordinates = arc["geo_coordinates"] - arc_length = arc["LENGTH"] / 5280.0 # convert feet to miles - i_node = get_node(network, arc['AN'], coordinates[0]) - j_node = get_node(network, arc['BN'], coordinates[-1]) - modes = define_modes(arc) - link = network.link(i_node, j_node) - split_link_case = False - if link: - link.modes |= modes - else: - # Note: additional cases of "tunnel" walk links could be - # considered to optimize network matching - # check if this a special "split" link case where - # we do not need to add a "tunnel" walk link - for link1 in i_node.outgoing_links(): - if split_link_case: - break - for link2 in link1.j_node.outgoing_links(): - if link2.j_node == j_node: - if epsilon_compare(link1.length + link2.length, arc_length, 10**-5): - self._log.append({"type": "text", - "content": "Walk link AN %s BN %s matched to two links TCOV-ID %s, %s" % - (arc['AN'], arc['BN'], link1["@tcov_id"], link2["@tcov_id"])}) - link1.modes |= modes - link2.modes |= modes - set_reverse_link(link1, modes) - set_reverse_link(link2, modes) - split_link_case = True - break - if not split_link_case: - link = network.create_link(i_node, j_node, modes) - link.length = arc_length - if len(coordinates) > 2: - link.vertices = coordinates[1:-1] - if not split_link_case: - set_reverse_link(link, modes) - - for attr, _, emme_type, _ in attr_map["NODE"].itervalues(): - default = "" if emme_type == "STRING" else 0 - network.create_attribute("NODE", attr, default) - transit_node_data = gen_utils.E00FileProc("TRCOV.NAT", _join(self.source, "trcov.e00")) - # Load IPARK data onto transit nodes + transit_node_data = gen_utils.DataTableProc("TNED_RailNodes", self.source) + # Load PARK, elevation, stop type data onto transit nodes + node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() + if v[1] in ("BOTH", "RAIL")] for record in transit_node_data: - node = network.node(record["TRCOV-ID"]) + node = network.node(record["HNODE"]) if node: - node["@ipark"] = record["IPARK"] - node["@hnode_tr"] = record["HNODE"] + for src, dst in node_attrs: + node[dst] = record[src] + else: + self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) self._log.append({"type": "text", "content": "Import transit base network complete"}) - def _create_base_net(self, data, network, mode_callback, centroid_callback, arc_id_name, link_attr_map, arc_filter=None): + def _create_base_net(self, data, network, link_attr_map, mode_callback, centroid_callback=None): forward_attr_map = {} reverse_attr_map = {} + arc_id_name = "HWYCOV0_ID" + arc_guid_name = "HWYSegGUID" for field, (name, tcoved_type, emme_type, desc) in link_attr_map.iteritems(): - if emme_type != "STANDARD": - default = "" if emme_type == "STRING" else 0 - network.create_attribute("LINK", name, default) - - if field in [arc_id_name, "DIR"]: + if field in [arc_id_name, arc_guid_name, "DIR"]: # these attributes are special cases for reverse link forward_attr_map[field] = name - elif tcoved_type == "TWO_WAY": + elif tcoved_type in "TWO_WAY": forward_attr_map[field] = name reverse_attr_map[field] = name - elif tcoved_type == "ONE_WAY": + elif tcoved_type in "ONE_WAY": forward_attr_map["AB" + field] = name reverse_attr_map["BA" + field] = name emme_id_name = forward_attr_map[arc_id_name] + emme_guid_name = forward_attr_map[arc_guid_name] dir_name = forward_attr_map["DIR"] - reverse_dir_map = {1:3, 3:1, 2:4, 4:2, 0:0} + reverse_dir_map = {1: 3, 3: 1, 2: 4, 4: 2, 0: 0} new_node_id = 
max(data.values("AN").max(), data.values("BN").max()) + 1 - if arc_filter is None: - arc_filter = lambda arc : True + + if centroid_callback is None: + centroid_callback = lambda a,n: False # Create nodes and links for arc in data: - if not arc_filter(arc): - continue if float(arc["AN"]) == 0 or float(arc["BN"]) == 0: self._log.append({"type": "text", - "content": "Node ID 0 in AN (%s) or BN (%s) for link ID %s." % - (arc["AN"], arc["BN"], arc[arc_id_name])}) + "content": "Node ID 0 in AN (%s) or BN (%s) for link GUID/ID %s/%s." % + (arc["AN"], arc["BN"], arc[arc_guid_name], arc[arc_id_name])}) continue coordinates = arc["geo_coordinates"] i_node = get_node(network, arc['AN'], coordinates[0], centroid_callback(arc, "AN")) j_node = get_node(network, arc['BN'], coordinates[-1], centroid_callback(arc, "BN")) - existing_link = network.link(i_node, j_node) - if existing_link: - msg = "Duplicate link between AN %s and BN %s. Link IDs %s and %s." % \ - (arc["AN"], arc["BN"], existing_link[emme_id_name], arc[arc_id_name]) + link = network.link(i_node, j_node) + if link: + msg = "Duplicate link between AN %s and BN %s. Link GUID/IDs %s/%s and %s/%s." % \ + (arc["AN"], arc["BN"], link[emme_guid_name], link[emme_id_name], arc[arc_guid_name], arc[arc_id_name]) self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - self._split_link(network, i_node, j_node, new_node_id) - new_node_id += 1 - - modes = mode_callback(arc) - link = network.create_link(i_node, j_node, modes) - link.length = arc["LENGTH"] / 5280.0 # convert feet to miles - if len(coordinates) > 2: - link.vertices = coordinates[1:-1] + if link[emme_guid_name] == arc[arc_guid_name]: + self._log.append({"type": "text", "content": "... but GUIDs match (not an error)"}) + else: + self._error.append(msg) + else: + modes = mode_callback(arc) + link = network.create_link(i_node, j_node, modes) + link.length = arc["LENGTH"] + if len(coordinates) > 2: + link.vertices = coordinates[1:-1] for field, attr in forward_attr_map.iteritems(): link[attr] = arc[field] - if arc["IWAY"] == 2 or arc["IWAY"] == 0: - reverse_link = network.create_link(j_node, i_node, modes) - reverse_link.length = link.length - reverse_link.vertices = list(reversed(link.vertices)) + if arc["WAY"] == 2 or arc["WAY"] == 0: + reverse_link = network.link(j_node, i_node) + if not reverse_link: + reverse_link = network.create_link(j_node, i_node, modes) + reverse_link.length = link.length + reverse_link.vertices = list(reversed(link.vertices)) for field, attr in reverse_attr_map.iteritems(): reverse_link[attr] = arc[field] reverse_link[emme_id_name] = -1*arc[arc_id_name] + reverse_link[emme_guid_name] = "-" + arc[arc_guid_name] reverse_link[dir_name] = reverse_dir_map[arc["DIR"]] def create_transit_lines(self, network, attr_map): self._log.append({"type": "header", "content": "Import transit lines"}) - load_properties = _m.Modeller().tool('sandag.utilities.properties') - props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) fatal_errors = 0 - # Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,OP_Headway,Night_Headway,Night_Hours,Config,Fare - transit_line_data = gen_utils.DataTableProc("trrt", _join(self.source, "trrt.csv")) - # Route_ID,Link_ID,Direction - transit_link_data = gen_utils.DataTableProc("trlink", _join(self.source, "trlink.csv")) - # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,FareZone,StopName - transit_stop_data = gen_utils.DataTableProc("trstop", _join(self.source, "trstop.csv")) + # 
Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,Midday_Headway,Evening_Headway,EarlyAM_Headway,Night_Headway,Night_Hours,Config,Fare + transit_line_data = gen_utils.DataTableProc("trrt", self.source) + # Route_ID,Link_ID,Link_GUID,Direction + transit_link_data = gen_utils.DataTableProc("trlink", self.source) + # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,StopName + #transit_stop_data = gen_utils.DataTableProc("trstop", self.source) + transit_stop_data = gen_utils.DataTableProc("trstop", _join(_dir(self.source), "trstop.csv")) # From_line,To_line,Board_stop,Wait_time # Note: Board_stop is not used # Timed xfer data periods = ['EA', 'AM', 'MD', 'PM', 'EV'] timed_xfer_data = {} for period in periods: - timed_xfer_data[period] = gen_utils.DataTableProc("timexfer_"+period, _join(self.source, "timexfer_"+period+".csv")) + file_path = _join(_dir(self.source), FILE_NAMES["TIMEXFER"] % period) + if os.path.exists(file_path): + timed_xfer_data[period] = gen_utils.DataTableProc("timexfer_"+period, file_path) + else: + timed_xfer_data[period] = [] - mode_properties = gen_utils.DataTableProc("MODE5TOD", _join(self.source, "MODE5TOD.csv"), convert_numeric=True) + mode_properties = gen_utils.DataTableProc("MODE5TOD", _join(_dir(self.source), FILE_NAMES["MODE5TOD"]), convert_numeric=True) mode_details = {} for record in mode_properties: mode_details[int(record["MODE_ID"])] = record @@ -910,7 +765,7 @@ def create_transit_lines(self, network, attr_map): premium_bus = network.create_transit_vehicle(80, 'p') # 8 prem express express_bus = network.create_transit_vehicle(90, 'e') # 9 regular express local_bus = network.create_transit_vehicle(100, 'b') # 10 local bus - tier1 = network.create_transit_vehicle(45, 'o') # 11 Tier 1 + tier1 = network.create_transit_vehicle(45, 'o') # 11 Tier 1 brt_yellow.auto_equivalent = 3.0 brt_red.auto_equivalent = 3.0 @@ -932,55 +787,60 @@ def create_transit_lines(self, network, attr_map): for elem_type in "TRANSIT_LINE", "TRANSIT_SEGMENT": mapping = attr_map[elem_type] for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): - default = "" if emme_type == "STRING" else 0 - network.create_attribute(elem_type, attr, default) if tcoved_type == "TRRT": trrt_attrs.append((field, attr)) elif tcoved_type == "MODE5TOD": mode5tod_attrs.append((field, attr)) + network.create_attribute("TRANSIT_SEGMENT", "milepost") - # Pre-process transit line (trrt.csv) to know the route names for errors / warnings + # Pre-process transit line (trrt) to know the route names for errors / warnings transit_line_records = list(transit_line_data) line_names = {} for record in transit_line_records: - line_names[int(record["Route_ID"])] = record["Route_Name"].strip() + line_names[int(record["Route_ID"])] = str(record["Route_Name"]) - links = dict((link["@tcov_id"], link) for link in network.links()) + links = dict((link["#hwyseg_guid"], link) for link in network.links()) transit_routes = _defaultdict(lambda: []) for record in transit_link_data: line_ref = line_names.get(int(record["Route_ID"]), record["Route_ID"]) - link_id = int(record["Link_ID"]) - if "+" in record["Direction"]: - link = links.get(link_id) - else: - link = links.get(-1*link_id) - if not link: - link = links.get(link_id) - if link and not link.reverse_link: - reverse_link = network.create_link(link.j_node, link.i_node, link.modes) - reverse_link.vertices = list(reversed(link.vertices)) - for attr in network.attributes("LINK"): - if attr not in set(["vertices"]): - reverse_link[attr] = 
link[attr] - reverse_link["@tcov_id"] = -1 * link["@tcov_id"] - msg = "Transit line %s : Missing reverse link with ID %s (%s) (reverse link created)" % ( - line_ref, record["Link_ID"], link) - self._log.append({"type": "text", "content": msg}) - self._error.append("Transit route import: " + msg) - link = reverse_link + link_id = record["Link_GUID"] + if "-" in record["Direction"]: + link_id = "-" + link_id + link = links.get(link_id) + if not link: + if "-" in record["Direction"]: + reverse_link = links.get("-" + link_id) + else: + reverse_link = links.get(link_id[1:]) + if reverse_link: + link = network.create_link(reverse_link.j_node, reverse_link.i_node, reverse_link.modes) + link.vertices = list(reversed(reverse_link.vertices)) + for attr in network.attributes("LINK"): + if attr not in set(["vertices"]): + link[attr] = reverse_link[attr] + link["@tcov_id"] = -1 * reverse_link["@tcov_id"] + link["#hwyseg_guid"] = link_id + links[link_id] = link + msg = "Transit line %s : Missing reverse link with ID %s (%s) (reverse link created)" % ( + line_ref, record["Link_GUID"], link) + self._log.append({"type": "text", "content": msg}) + self._error.append("Transit route import: " + msg) + link = reverse_link if not link: - msg = "Transit line %s : No link with ID %s, line not created" % ( - line_ref, record["Link_ID"]) + msg = "Transit line %s : No link with GUID %s, routing may not be correct" % ( + line_ref, record["Link_GUID"]) self._log.append({"type": "text", "content": msg}) self._error.append("Transit route import: " + msg) fatal_errors += 1 continue + transit_routes[int(record["Route_ID"])].append(link) # lookup list of special tier 1 mode route names - tier1_rail_route_names = [str(n) for n in props["transit.newMode.route"]] + tier1_rail_route_names = [str(n) for n in self._props["transit.newMode.route"]] dummy_links = set([]) transit_lines = {} + auto_mode = network.mode("d") for record in transit_line_records: try: route = transit_routes[int(record["Route_ID"])] @@ -988,7 +848,6 @@ def create_transit_lines(self, network, attr_map): is_tier1_rail = False for name in tier1_rail_route_names: if str(record["Route_Name"]).startswith(name): - print('record["Route_Name"]2', record["Route_Name"]) is_tier1_rail = True break if is_tier1_rail: @@ -1001,8 +860,8 @@ def create_transit_lines(self, network, attr_map): itinerary = [prev_link] for link in route[1:]: if prev_link.j_node != link.i_node: # filling in the missing gap - msg = "line %s : Links not adjacent, shortest path interpolation used (%s and %s)" % ( - record["Route_Name"], prev_link["@tcov_id"], link["@tcov_id"]) + msg = "Transit line %s (index %s): Links not adjacent, shortest path interpolation used (%s and %s)" % ( + record["Route_Name"], record["Route_ID"], prev_link["#hwyseg_guid"], link["#hwyseg_guid"]) log_record = {"type": "text", "content": msg} self._log.append(log_record) sub_path = find_path(prev_link, link, mode) @@ -1012,17 +871,17 @@ def create_transit_lines(self, network, attr_map): prev_link = link node_itinerary = [itinerary[0].i_node] + [l.j_node for l in itinerary] - try: - tline = network.create_transit_line( - record["Route_Name"].strip(), vehicle_type, node_itinerary) - except: - msg = "Transit line %s : missing mode added to at least one link" % ( - record["Route_Name"]) - self._log.append({"type": "text", "content": msg}) - for link in itinerary: + missing_mode = 0 + for link in itinerary: + if mode not in link.modes: link.modes |= set([mode]) - tline = network.create_transit_line( - 
record["Route_Name"].strip(), vehicle_type, node_itinerary) + missing_mode += 1 + if missing_mode: + msg = "Transit line %s (index %s): missing mode added to %s link(s)" % ( + str(record["Route_Name"]), record["Route_ID"], missing_mode) + self._log.append({"type": "text", "content": msg}) + tline = network.create_transit_line( + str(record["Route_Name"]), vehicle_type, node_itinerary) for field, attr in trrt_attrs: tline[attr] = float(record[field]) @@ -1041,12 +900,18 @@ def create_transit_lines(self, network, attr_map): tline.layover_time = 5 transit_lines[int(record["Route_ID"])] = tline + milepost = 0 for segment in tline.segments(): + segment.milepost = milepost + milepost += segment.link.length segment.allow_boardings = False segment.allow_alightings = False - segment.transit_time_func = 2 - # ft2 = ul2 -> copied @trtime_link_XX - # segments on links matched to auto network (with auto mode) are changed to ft1 = timau + if auto_mode in segment.link.modes: + # segments on links with auto mode are ft1 = timau + segment.transit_time_func = 1 + else: + # ft2 = ul2 -> copied @trtime_link_XX (fixed speed) + segment.transit_time_func = 2 except Exception as error: msg = "Transit line %s: %s %s" % (record["Route_Name"], type(error), error) self._log.append({"type": "text", "content": msg}) @@ -1065,8 +930,10 @@ def create_transit_lines(self, network, attr_map): except KeyError: self._log.append( {"type": "text", - "content": "Stop %s: could not find transit line by ID %s (link ID %s)" % ( - record["Stop_ID"], record["Route_ID"], record["Link_ID"])}) + "content": "Stop %s: could not find transit line by ID %s (link GUID %s)" % ( + record["Stop_ID"], record["Route_ID"], record["Link_GUID"])}) + for stops in line_stops.itervalues(): + stops.sort(key=lambda stop: float(stop["Milepost"])) seg_float_attr_map = [] seg_string_attr_map = [] @@ -1083,41 +950,35 @@ def create_transit_lines(self, network, attr_map): continue itinerary = tline.segments(include_hidden=True) segment = prev_segment = itinerary.next() - tcov_id = abs(segment.link["@tcov_id"]) for stop in stops: if "DUMMY" in stop["StopName"]: continue - link_id = int(stop['Link_ID']) - node_id = int(stop['TrnNode']) - while segment.link and tcov_id != link_id: + stop_link_id = stop['Link_GUID'] + node_id = int(stop['Node']) + while segment.link and segment.link["#hwyseg_guid"].lstrip("-") != stop_link_id: segment = itinerary.next() - if segment.link is None: - break - tcov_id = abs(segment.link["@tcov_id"]) if node_id == segment.i_node.number: pass elif segment.j_node and node_id == segment.j_node.number: - segment = itinerary.next() # its the next segment + # if matches the J-node then the stop is on the next segment + segment = itinerary.next() else: - next_segment = None - if segment.j_node: - next_segment = itinerary.next() - if next_segment and abs(next_segment.link["@tcov_id"]) == link_id and \ - node_id == next_segment.j_node.number: - # split link case, where stop is at the end of the next segment - segment = next_segment + if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: + msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( + line_name, stop["Route_ID"], segment, stop_link_id, node_id) else: - msg = "Transit line %s: could not find stop on link ID %s at node ID %s" % (line_name, link_id, node_id) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - fatal_errors += 1 - # reset iterator to start back from previous segment - 
itinerary = tline.segments(include_hidden=True) + msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( + line_name, stop["Route_ID"], stop_link_id, node_id) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + fatal_errors += 1 + # reset iterator to start back from previous segment + itinerary = tline.segments(include_hidden=True) + segment = itinerary.next() + while segment.id != prev_segment.id: segment = itinerary.next() - while segment.id != prev_segment.id: - segment = itinerary.next() - continue + continue segment.allow_boardings = True segment.allow_alightings = True segment.dwell_time = min(tline.default_dwell_time, 99.99) @@ -1137,9 +998,9 @@ def lookup_line(ident): raise Exception("'%s' is not a route name or route ID" % ident) # Normalizing the case of the headers as different examples have been seen - for period in periods: + for period, data in timed_xfer_data.iteritems(): norm_data = [] - for record in timed_xfer_data[period]: + for record in data: norm_record = {} for key, val in record.iteritems(): norm_record[key.lower()] = val @@ -1165,78 +1026,18 @@ def lookup_line(ident): gen_utils.DataTableProc("%s_timed_xfer_%s" % (self.data_table_name, period), data=timed_xfer) if fatal_errors > 0: - raise Exception("Cannot create transit network, %s fatal errors found" % fatal_errors) + raise Exception("Import of transit lines: %s fatal errors found" % fatal_errors) self._log.append({"type": "text", "content": "Import transit lines complete"}) def calc_transit_attributes(self, network): - self._log.append({"type": "header", "content": "Calculate derived transit attributes"}) - # - TM by 5 TOD periods copied from TM for 3 time periods - # NOTE: the values of @trtime_link_## are only used for - # separate guideway. - # Links shared with the traffic network use the - # assignment results in timau - for link in network.links(): - for time in ["_ea", "_md", "_ev"]: - link["@trtime_link" + time] = link["trtime_link_op"] - if link.type == 0: # walk only links have IFC ==0 - link.type = 99 + # for link in network.links(): + # if link.type == 0: # walk only links have FC ==0 + # link.type = 99 - # ON TRANSIT LINES - # Set 4-period headway based on revised headway calculation - for line in network.transit_lines(): - for period in ["am", "pm", "op", "night"]: - line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) - - # Special incremental boarding and in-vehicle fares - # to recreate the coaster zone fares fares_file_name = FILE_NAMES["FARES"] special_fare_path = _join(self.source, fares_file_name) - if os.path.isfile(special_fare_path): - with open(special_fare_path) as fare_file: - self._log.append({"type": "text", "content": "Using fare details (for coaster) from %s" % fares_file_name}) - special_fares = None - yaml_installed = True - try: - import yaml - special_fares = yaml.load(fare_file) - self._log.append({"type": "text", "content": yaml.dump(special_fares).replace("\n", "
")}) - except ImportError: - yaml_installed = False - except: - pass - if special_fares is None: - try: - import json - special_fares = json.load(fare_file) - self._log.append({"type": "text", "content": json.dumps(special_fares, indent=4).replace("\n", "
")}) - except: - pass - if special_fares is None: - msg = "YAML or JSON" if yaml_installed else "JSON (YAML parser not installed)" - raise Exception(fares_file_name + ": file could not be parsed as " + msg) - else: - # Default coaster fare for 2012 base year - special_fares = { - "boarding_cost": { - "base": [ - {"line": "398104", "cost" : 4.0}, - {"line": "398204", "cost" : 4.0} - ], - "stop_increment": [ - {"line": "398104", "stop": "SORRENTO VALLEY", "cost": 0.5}, - {"line": "398204", "stop": "SORRENTO VALLEY", "cost": 0.5} - ] - }, - "in_vehicle_cost": [ - {"line": "398104", "from": "SOLANA BEACH", "cost": 1.0}, - {"line": "398104", "from": "SORRENTO VALLEY", "cost": 0.5}, - {"line": "398204", "from": "OLD TOWN", "cost": 1.0}, - {"line": "398204", "from": "SORRENTO VALLEY", "cost": 0.5} - ], - "day_pass": 5.0, - "regional_pass": 12.0 - } - self._log.append({"type": "text", "content": "Using default coaster fare based on 2012 base year setup."}) + if not os.path.isfile(special_fare_path): + return def get_line(line_id): line = network.transit_line(line_id) @@ -1244,6 +1045,33 @@ def get_line(line_id): raise Exception("%s: line does not exist: %s" % (fares_file_name, line_id)) return line + # Special incremental boarding and in-vehicle fares + # to recreate the coaster zone fares + self._log.append({"type": "header", "content": "Apply special_fares to transit lines"}) + with open(special_fare_path) as fare_file: + self._log.append({"type": "text", "content": "Using fare details (for coaster) from %s" % fares_file_name}) + special_fares = None + yaml_installed = True + try: + import yaml + special_fares = yaml.load(fare_file) + self._log.append({"type": "text", "content": yaml.dump(special_fares).replace("\n", "
")}) + except ImportError: + yaml_installed = False + except: + pass + if special_fares is None: + try: + import json + special_fares = json.load(fare_file) + self._log.append({"type": "text", "content": json.dumps(special_fares, indent=4).replace("\n", "
")}) + except: + pass + if special_fares is None: + msg = "YAML or JSON" if yaml_installed else "JSON (YAML parser not installed)" + raise Exception(fares_file_name + ": file could not be parsed as " + msg) + + for record in special_fares["boarding_cost"]["base"]: line = get_line(record["line"]) line["@fare"] = 0 @@ -1272,104 +1100,58 @@ def get_line(line_id): pass_values.add_attribute(_dt.Attribute("pass_type", _np.array(pass_cost_keys).astype("O"))) pass_values.add_attribute(_dt.Attribute("cost", _np.array(pass_costs).astype("f8"))) gen_utils.DataTableProc("%s_transit_passes" % self.data_table_name, data=pass_values) - self._log.append({"type": "text", "content": "Calculate derived transit attributes complete"}) - return - - def renumber_transit_nodes(self, network, new_node_id): - nodes_to_renumber = [] - # 1. find all node which have valid HNODE IDs, - # and renumber all other nodes to their new IDs - for node in network.nodes(): - if node["@hnode_tr"] > 0: - nodes_to_renumber.append(node) - else: - node.number = new_node_id - new_node_id += 1 - # 2. renumber nodes with HNODE values to move them - # out of the way - hnode_new_id = new_node_id - for node in nodes_to_renumber: - node.number = hnode_new_id - hnode_new_id += 1 - # 3. renumber nodes with HNODE values to their - # final IDs - for node in nodes_to_renumber: - node.number = node["@hnode_tr"] - return new_node_id + self._log.append({"type": "text", "content": "Apply special_fares to transit lines complete"}) + + def renumber_base_nodes(self, network): + # TODO: log node renumberings + tracker = AvailableNodeIDTracker(network) + nodes = [n for n in network.nodes() if n.number > 999999] + nodes = sorted(nodes, key=lambda x: x.number, reverse=True) + if nodes: + self._log.append({"type": "text", "content": "Renumbered %s nodes" % len(nodes)}) + for n in nodes: + old_number = n.number + n.number = tracker.get_id() + self._log.append({"type": "text", "content": " - renumbered %s to %s " % (old_number, n.number)}) def create_turns(self, network): self._log.append({"type": "header", "content": "Import turns and turn restrictions"}) - self._log.append({"type": "text", "content": "Process LINKTYPETURNS.DBF for turn prohibited by type"}) - # Process LINKTYPETURNS.DBF for turn prohibited by type - with _fiona.open(_join(self.source, "LINKTYPETURNS.DBF"), 'r') as f: - link_type_turns = _defaultdict(lambda: {}) - for record in f: - record = record['properties'] - link_type_turns[record["FROM"]][record["TO"]] = { - "LEFT": record["LEFT"], - "RIGHT": record["RIGHT"], - "STRAIGHT": record["STRAIGHT"], - "UTURN": record["UTURN"] - } - for from_link in network.links(): - if from_link.type in link_type_turns: - to_link_turns = link_type_turns[from_link.type] - for to_link in from_link.j_node.outgoing_links(): - if to_link.type in to_link_turns: - record = to_link_turns[to_link.type] - if not from_link.j_node.is_intersection: - network.create_intersection(from_link.j_node) - turn = network.turn(from_link.i_node, from_link.j_node, to_link.j_node) - turn.penalty_func = 1 - if to_link["@tcov_id"] == from_link["left_link"]: - turn.data1 = record["LEFT"] - elif to_link["@tcov_id"] == from_link["through_link"]: - turn.data1 = record["STRAIGHT"] - elif to_link["@tcov_id"] == from_link["right_link"]: - turn.data1 = record["RIGHT"] - else: - turn.data1 = record["UTURN"] - - self._log.append({"type": "text", "content": "Process turns.csv for turn prohibited by ID"}) - turn_data = gen_utils.DataTableProc("turns", _join(self.source, "turns.csv")) + 
self._log.append({"type": "text", "content": "Process turns for turn prohibited by ID"}) + turn_data = gen_utils.DataTableProc("Turns", self.source) if self.save_data_tables: turn_data.save("%s_turns" % self.data_table_name, self.overwrite) - links = dict((link["@tcov_id"], link) for link in network.links()) - - # Process turns.csv for prohibited turns from_id, to_id, penalty + # Process turns.csv for prohibited turns penalty for i, record in enumerate(turn_data): - from_link_id, to_link_id = int(record["from_id"]), int(record["to_id"]) - from_link, to_link = links[from_link_id], links[to_link_id] - if from_link.j_node == to_link.i_node: - pass - elif from_link.j_node == to_link.j_node: - to_link = to_link.reverse_link - elif from_link.i_node == to_link.i_node: - from_link = from_link.reverse_link - elif from_link.i_node == to_link.j_node: - from_link = from_link.reverse_link - to_link = to_link.reverse_link + from_node_id, to_node_id, at_node_id = record["FromNode"], record["ToNode"], record["MidNode"] + at_node = network.node(at_node_id) + if at_node and not at_node.is_intersection: + try: + network.create_intersection(at_node) + except Exception as error: + text = ("record %s turn from %s, at %s, to %s: cannot create intersection" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + trace_text = _traceback.format_exc().replace("\n", "
") + self._log.append({"type": "text", "content": trace_text}) + self._error.append(text) + continue + turn = network.turn(from_node_id, at_node_id, to_node_id) + if at_node is None: + text = ("record %s turn from %s, at %s, to %s: at node does not exist" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + self._error.append(text) + elif turn is None: + text = ("record %s turn from %s, at %s, to %s: does not form a turn" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + self._error.append(text) else: - msg = "Record %s: links are not adjacent %s - %s." % (i, from_link_id, to_link_id) - self._log.append({"type": "text", "content": msg}) - self._error.append("Turn import: " + msg) - continue - if not from_link or not to_link: - msg = "Record %s: links adjacent but in reverse direction %s - %s." % (i, from_link_id, to_link_id) - self._log.append({"type": "text", "content": msg}) - self._error.append("Turn import: " + msg) - continue - - node = from_link.j_node - if not node.is_intersection: - network.create_intersection(node) - turn = network.turn(from_link.i_node, node, to_link.j_node) - if not record["penalty"]: turn.penalty_func = 0 # prohibit turn - else: - turn.penalty_func = 1 - turn.data1 = float(record["penalty"]) - self._log.append({"type": "text", "content": "Import turns and turn restrictions complete"}) + # NOTE: could support penalty value + # turn.penalty_func = 1 + # turn.data1 = float(record["penalty"]) + self._log.append({"type": "text", "content": "Import turns and turn prohibitions complete"}) def calc_traffic_attributes(self, network): self._log.append({"type": "header", "content": "Calculate derived traffic attributes"}) @@ -1382,13 +1164,11 @@ def calc_traffic_attributes(self, network): # "ITOLL4": "@cost_med_truck" # ITOLL4 - Toll * 1.03 + AOC # "ITOLL5": "@cost_hvy_truck" # ITOLL5 - Toll * 2.33 + AOC fatal_errors = 0 - load_properties = _m.Modeller().tool('sandag.utilities.properties') - props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) try: - aoc = float(props["aoc.fuel"]) + float(props["aoc.maintenance"]) + aoc = float(self._props["aoc.fuel"]) + float(self._props["aoc.maintenance"]) except ValueError: raise Exception("Error during float conversion for aoc.fuel or aoc.maintenance from sandag_abm.properties file") - scenario_year = int(props["scenarioYear"]) + scenario_year = int(self._props["scenarioYear"]) periods = ["EA", "AM", "MD", "PM", "EV"] time_periods = ["_ea", "_am", "_md", "_pm", "_ev"] src_time_periods = ["_op", "_am", "_op", "_pm", "_op"] @@ -1415,42 +1195,6 @@ def calc_traffic_attributes(self, network): for node in network.nodes(): node["@interchange"] = node.is_interchange - #add zone_id attribute - # map_file_name = FILE_NAMES["node_taz_map"] - # map_file_name = _join(self.source, map_file_name) - # node_taz_map = pd.read_csv(map_file_name) - - # # Loop through all nodes in the network to - # for n in network.nodes(): - # # Check if the node is in the list of nodes we have (ext not there) - # if n.number in node_taz_map.HNODE: - # n["@zone_id"] = int(node_taz_map[node_taz_map.HNODE == n.number]["TAZ"].values[0]) - # else: - # n["@zone_id"] = 0 - - # #read in csv file with parking node numbers and parking counts - # parking_file_name = FILE_NAMES["PARKING"] - # parking_file_path = _join(self.source, parking_file_name) - # if os.path.isfile(parking_file_path): - # with open(parking_file_path) as parking_file: - # 
self._log.append({"type": "text", "content": "Using parking node details from %s" % parking_file_name}) - # parking_nodes = [] - # for line in parking_file: - # #grab the second column - # node_number = line.split("\t")[0] - # parking_nodes.append(int(node_number)) - # self._log.append({"type": "text", "content": parking_nodes}) - - # # Loop through all nodes in the network - # for n in network.nodes(): - # # Check if the node is in the list of parking nodes - # if n.number in parking_nodes: - # # If it is, add the attribute to the node - # self._log.append({"type": "text", "content": "node %s has parking" % n.number}) - # n["@parking"] = 1 - # else: - # n["@parking"] = 0 - for link in network.links(): if link.type == 1 and mode_d in link.modes: link["@intdist_down"] = interchange_distance(link, "DOWNSTREAM") @@ -1493,69 +1237,22 @@ def calc_traffic_attributes(self, network): } for link in network.links(): # Change SR125 toll speed to 70MPH - if link["@lane_restriction"] == 4 and link.type == 1: + if link["@hov"] == 4 and link.type == 1: link["@speed_posted"] = 70 - link["@cost_operating"] = link.length * aoc - - # Expand off-peak TOD attributes, copy peak period attributes - for time, src_time in zip(time_periods, src_time_periods): - link["@lane" + time] = link["lane" + src_time] - link["@time_link" + time] = link["time_link" + src_time] - + for time in time_periods: # add link delay (30 sec=0.5mins) to HOV connectors to discourage travel - if link.type == 8 and (link["@lane_restriction"] == 2 or link["@lane_restriction"] == 3): + if link.type == 8 and (link["@hov"] == 2 or link["@hov"] == 3): link["@time_link" + time] = link["@time_link" + time] + 0.375 # make speed on HOV lanes (70mph) the same as parallel GP lanes (65mph) # - set speed back to posted speed - increase travel time by (speed_adj/speed_posted) - if link.type == 1 and (link["@lane_restriction"] == 2 or link["@lane_restriction"] == 3): + if link.type == 1 and (link["@hov"] == 2 or link["@hov"] == 3): speed_adj = link["@speed_adjusted"] speed_posted = link["@speed_posted"] if speed_adj>0: link["@time_link" + time] = (speed_adj/(speed_posted*1.0)) * link["@time_link" + time] - link["@time_inter" + time] = link["time_inter" + src_time] - link["@toll" + time] = link["toll" + src_time] - - off_peak_factor_file = FILE_NAMES["OFF_PEAK"] - if os.path.exists(_join(self.source, off_peak_factor_file)): - msg = "Adjusting off-peak tolls based on factors from %s" % off_peak_factor_file - self._log.append({"type": "text", "content": msg}) - tolled_links = list(link for link in network.links() if link["toll_op"] > 0) - # NOTE: CSV Reader sets the field names to UPPERCASE for consistency - with gen_utils.CSVReader(_join(self.source, off_peak_factor_file)) as r: - for row in r: - name = row["FACILITY_NAME"] - ea_factor = float(row["OP_EA_FACTOR"]) - md_factor = float(row["OP_MD_FACTOR"]) - ev_factor = float(row["OP_EV_FACTOR"]) - count = 0 - for link in tolled_links: - if name in link["#name"]: - count += 1 - link["@toll_ea"] = link["@toll_ea"] * ea_factor - link["@toll_md"] = link["@toll_md"] * md_factor - link["@toll_ev"] = link["@toll_ev"] * ev_factor - - msg = "Facility name '%s' matched to %s links." 
% (name, count) - msg += " Adjusted off-peak period tolls EA: %s, MD: %s, EV: %s" % (ea_factor, md_factor, ev_factor) - self._log.append({"type": "text2", "content": msg}) - - for link in network.links(): - factors = [(3.0/12.0), 1.0, (6.5/12.0), (3.5/3.0), (8.0/12.0)] - for f, time, src_time in zip(factors, time_periods, src_time_periods): - if link["capacity_link" + src_time] != 999999: - link["@capacity_link" + time] = f * link["capacity_link" + src_time] - else: - link["@capacity_link" + time] = 999999 - if link["capacity_inter" + src_time] != 999999: - link["@capacity_inter" + time] = f * link["capacity_inter" + src_time] - else: - link["@capacity_inter" + time] = 999999 - if link["@capacity_hourly" + src_time] != 0: - link["@capacity_hourly" + src_time] = round(link["@capacity_hourly" + src_time]) - # Required file vehicle_class_factor_file = FILE_NAMES["VEHICLE_CLASS"] facility_factors = _defaultdict(lambda: {}) @@ -1622,12 +1319,12 @@ def match_facility_factors(link): factors["count"] += 1 factors = _copy(factors) del factors["count"] - # @lane_restriction = 2 or 3 overrides hov2 and hov3 costs - if link["@lane_restriction"] == 2: + # @hov = 2 or 3 overrides hov2 and hov3 costs + if link["@hov"] == 2: for _, time_factors in factors.iteritems(): time_factors["hov2"] = 0.0 time_factors["hov3"] = 0.0 - elif link["@lane_restriction"] == 3: + elif link["@hov"] == 3: for _, time_factors in factors.iteritems(): time_factors["hov3"] = 0.0 return factors @@ -1648,7 +1345,9 @@ def match_facility_factors(link): msg = "Facility name '%s' matched to %s links." % (name, class_factors["count"]) self._log.append({"type": "text2", "content": msg}) - self._log.append({"type": "text", "content": "Calculation and time period expansion of costs, tolls, capacities and times complete"}) + self._log.append({ + "type": "text", + "content": "Calculation and time period expansion of costs, tolls, capacities and times complete"}) # calculate static reliability for link in network.links(): @@ -1664,7 +1363,7 @@ def match_facility_factors(link): # arterial/ramp/other apply road parameters elif link["type"] <= 9 and link["@lane" + time] > 0: lane_factor = road_rel["lanes"].get(link["@lane" + time], 0.0) - speed_bin = link["@speed_posted"] + speed_bin = int(link["@speed_posted"] / 5) * 5 # truncate to multiple of 5 if speed_bin < 35: speed_bin = "<35" elif speed_bin > 50: @@ -1680,7 +1379,7 @@ def match_facility_factors(link): # Cycle length matrix # Intersecting Link # Approach Link 2 3 4 5 6 7 8 9 - # IFC Description + # FC Description # 2 Prime Arterial 2.5 2 2 2 2 2 2 2 # 3 Major Arterial 2 2 2 2 2 2 2 2 # 4 Collector 2 2 1.5 1.5 1.5 1.5 1.5 1.5 @@ -1800,175 +1499,6 @@ def check_zone_access(self, network, mode): if not access: raise Exception("No access permitted to zone %s" % centroid.id) - def add_transit_to_traffic(self, hwy_network, tr_network, new_node_id): - if not self.merged_scenario_id or not hwy_network or not tr_network: - return - self._log.append({"type": "header", "content": "Merge transit network to traffic network"}) - fatal_errors = 0 - for tr_mode in tr_network.modes(): - hwy_mode = hwy_network.create_mode(tr_mode.type, tr_mode.id) - hwy_mode.description = tr_mode.description - hwy_mode.speed = tr_mode.speed - for tr_veh in tr_network.transit_vehicles(): - hwy_veh = hwy_network.create_transit_vehicle(tr_veh.id, tr_veh.mode.id) - hwy_veh.description = tr_veh.description - hwy_veh.auto_equivalent = tr_veh.auto_equivalent - hwy_veh.seated_capacity = tr_veh.seated_capacity - 
hwy_veh.total_capacity = tr_veh.total_capacity - - for elem_type in ["NODE", "LINK", "TRANSIT_LINE", "TRANSIT_SEGMENT"]: - for attr in tr_network.attributes(elem_type): - if not attr in hwy_network.attributes(elem_type): - default = "" if attr.startswith("#") else 0 - new_attr = hwy_network.create_attribute(elem_type, attr, default) - - hwy_link_index = dict((l["@tcov_id"], l) for l in hwy_network.links()) - hwy_node_position_index = dict(((n.x, n.y), n) for n in hwy_network.nodes()) - hwy_node_index = dict() - not_matched_links = [] - for tr_link in tr_network.links(): - tcov_id = tr_link["@tcov_id"] - if tcov_id == 0: - i_node = hwy_node_position_index.get((tr_link.i_node.x, tr_link.i_node.y)) - j_node = hwy_node_position_index.get((tr_link.j_node.x, tr_link.j_node.y)) - if i_node and j_node: - hwy_link = hwy_network.link(i_node, j_node) - else: - hwy_link = None - else: - hwy_link = hwy_link_index.get(tcov_id) - if not hwy_link: - not_matched_links.append(tr_link) - else: - if tr_link.i_node not in hwy_node_index: - hwy_node_index[tr_link.i_node] = hwy_link.i_node - for attr in tr_network.attributes("NODE"): - hwy_link.i_node[attr] = tr_link.i_node[attr] - if tr_link.j_node not in hwy_node_index: - hwy_node_index[tr_link.j_node] = hwy_link.j_node - for attr in tr_network.attributes("NODE"): - hwy_link.j_node[attr] = tr_link.j_node[attr] - - hwy_link.modes |= tr_link.modes - - bus_mode = tr_network.mode("b") - - def lookup_node(src_node, new_node_id): - node = hwy_node_index.get(src_node) - if not node: - node = hwy_node_position_index.get((src_node.x, src_node.y)) - if not node: - if hwy_network.node(src_node.number): - node = hwy_network.create_regular_node(new_node_id) - new_node_id += 1 - self._log.append({ - "type": "text", - "content": "Duplicate node ID, renumber transit node %s to %s" % - (src_node.number, new_node_id) - }) - else: - node = hwy_network.create_regular_node(src_node.number) - for attr in tr_network.attributes("NODE"): - node[attr] = src_node[attr] - hwy_node_index[src_node] = node - return node, new_node_id - - for tr_link in not_matched_links: - i_node, new_node_id = lookup_node(tr_link.i_node, new_node_id) - j_node, new_node_id = lookup_node(tr_link.j_node, new_node_id) - # check for duplicate but different links - # All cases to be logged and then an error raised at end - ex_link = hwy_network.link(i_node, j_node) - if ex_link: - self._log.append({ - "type": "text", - "content": "Duplicate links between the same nodes with different IDs in traffic/transit merge. " - "Traffic link ID %s, transit link ID %s." % (ex_link["@tcov_id"], tr_link["@tcov_id"]) - }) - self._error.append("Duplicate links with different IDs between traffic (%s) and transit (%s) networks" % - (ex_link["@tcov_id"], tr_link["@tcov_id"])) - self._split_link(hwy_network, i_node, j_node, new_node_id) - new_node_id += 1 - fatal_errors += 1 - try: - link = hwy_network.create_link(i_node, j_node, tr_link.modes) - except Exception as error: - self._log.append({ - "type": "text", - "content": "Error creating link '%s', I-node '%s', J-node '%s'. 
Error message %s" % - (tr_link["@tcov_id"], i_node, j_node, error) - }) - self._error.append("Cannot create transit link '%s' in traffic network" % tr_link["@tcov_id"]) - fatal_errors += 1 - continue - hwy_link_index[tr_link["@tcov_id"]] = link - for attr in tr_network.attributes("LINK"): - link[attr] = tr_link[attr] - link.vertices = tr_link.vertices - - # Create transit lines and copy segment data - for tr_line in tr_network.transit_lines(): - itinerary = [] - for seg in tr_line.segments(True): - itinerary.append(hwy_node_index[seg.i_node]) - try: - hwy_line = hwy_network.create_transit_line(tr_line.id, tr_line.vehicle.id, itinerary) - except Exception as error: - msg = "Transit line %s, error message %s" % (tr_line.id, error) - self._log.append({"type": "text", "content": msg}) - self._error.append("Cannot create transit line '%s' in traffic network" % tr_line.id) - fatal_errors += 1 - continue - for attr in hwy_network.attributes("TRANSIT_LINE"): - hwy_line[attr] = tr_line[attr] - for tr_seg, hwy_seg in _izip(tr_line.segments(True), hwy_line.segments(True)): - for attr in hwy_network.attributes("TRANSIT_SEGMENT"): - hwy_seg[attr] = tr_seg[attr] - - # Change ttf from ft2 (fixed speed) to ft1 (congested auto time) - auto_mode = hwy_network.mode("d") - for hwy_link in hwy_network.links(): - if auto_mode in hwy_link.modes: - for seg in hwy_link.segments(): - seg.transit_time_func = 1 - if fatal_errors > 0: - raise Exception("Cannot merge traffic and transit network, %s fatal errors found" % fatal_errors) - - self._log.append({"type": "text", "content": "Merge transit network to traffic network complete"}) - - def _split_link(self, network, i_node, j_node, new_node_id): - # Attribute types to maintain consistency for correspondence with incoming / outgoing link data - periods = ["ea", "am", "md", "pm", "ev"] - approach_attrs = ["@traffic_control", "@turn_thru", "@turn_right", "@turn_left", - "@lane_auxiliary", "@green_to_cycle_init"] - for p_attr in ["@green_to_cycle_", "@time_inter_", "@cycle_"]: - approach_attrs.extend([p_attr + p for p in periods]) - capacity_inter = ["@capacity_inter_" + p for p in periods] - cost_attrs = ["@cost_operating"] - for p_attr in ["@cost_lgt_truck_", "@cost_med_truck_", "@cost_hvy_truck_", "@cost_hov2_", - "@cost_hov3_", "@cost_auto_", "@time_link_", "@trtime_link_", "@toll_"]: - cost_attrs.extend([p_attr + p for p in periods]) - approach_attrs = [a for a in approach_attrs if a in network.attributes("LINK")] - capacity_inter = [a for a in capacity_inter if a in network.attributes("LINK")] - cost_attrs = [a for a in cost_attrs if a in network.attributes("LINK")] - - new_node = network.split_link(i_node, j_node, new_node_id) - - # Correct attributes on the split links - for link in new_node.incoming_links(): - link["#name_to"] = "" - for attr in approach_attrs: - link[attr] = 0 - for attr in capacity_inter: - link[attr] = 999999 - for attr in cost_attrs: - link[attr] = 0.5 * link[attr] - link.volume_delay_func = 10 - for link in new_node.outgoing_links(): - link["#name_from"] = "" - for attr in cost_attrs: - link[attr] = 0.5 * link[attr] - @_m.logbook_trace("Set database functions (VDF, TPF and TTF)") def set_functions(self, scenario): create_function = _m.Modeller().tool( @@ -1982,12 +1512,10 @@ def set_functions(self, scenario): if function: emmebank.delete_function(function) - load_properties = _m.Modeller().tool('sandag.utilities.properties') - props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) - smartSignalf_CL = 
props["smartSignal.factor.LC"] - smartSignalf_MA = props["smartSignal.factor.MA"] - smartSignalf_PA = props["smartSignal.factor.PA"] - atdmf = props["atdm.factor"] + smartSignalf_CL = self._props["smartSignal.factor.LC"] + smartSignalf_MA = self._props["smartSignal.factor.MA"] + smartSignalf_PA = self._props["smartSignal.factor.PA"] + atdmf = self._props["atdm.factor"] reliability_tmplt = ( "* (1 + el2 + {0}*(".format(atdmf)+ @@ -2097,7 +1625,7 @@ def check_connectivity(self, scenario): # Note matrix is also created in initialize_matrices create_matrix("ms1", "zero", "zero", scenario=scenario, overwrite=True) with gen_utils.temp_matrices(emmebank, "FULL", 1) as (result_matrix,): - result_matrix.name = "TEMP_SOV_TRAVEL_TIME" + result_matrix.name = "TEMP_AUTO_TRAVEL_TIME" set_extra_function_para( el1="@green_to_cycle_am", el2="@sta_reliability_am", @@ -2110,7 +1638,7 @@ def check_connectivity(self, scenario): "background_traffic": None, "classes": [ { - "mode": "S", # SOV toll mode + "mode": "d", "demand": 'ms"zero"', "generalized_cost": None, "results": { @@ -2150,7 +1678,7 @@ def check_connectivity(self, scenario): scenario.has_traffic_results = False def log_report(self): - report = _m.PageBuilder(title="Import network from TCOVED files report") + report = _m.PageBuilder(title="Import network from TNED files report") try: if self._error: report.add_html("
Errors detected during import: %s
" % len(self._error)) @@ -2160,7 +1688,7 @@ def log_report(self): error_msg.append("") report.add_html("".join(error_msg)) else: - report.add_html("No errors detected during import") + report.add_html("

No errors detected during import :-)") for item in self._log: if item["type"] == "text": @@ -2246,6 +1774,17 @@ def find_path(orig_link, dest_link, mode): return list(reversed(route)) +class AvailableNodeIDTracker(object): + def __init__(self, network, start=999999): + self._network = network + self._node_id = start + + def get_id(self): + while self._network.node(self._node_id): + self._node_id -= 1 + return self._node_id + + class NoPathException(Exception): pass diff --git a/src/main/emme/toolbox/utilities/general.py b/src/main/emme/toolbox/utilities/general.py index 27a8e9aef..14ee8ca41 100644 --- a/src/main/emme/toolbox/utilities/general.py +++ b/src/main/emme/toolbox/utilities/general.py @@ -141,10 +141,10 @@ def __init__(self, table_name, path=None, data=None, convert_numeric=False): self._dt_db = dt_db = project.data_tables() self._convert_numeric = convert_numeric if path: - #try: - source = _dt.DataSource(path) - #except: - # raise Exception("Cannot open file at %s" % path) + try: + source = _dt.DataSource(path) + except _dt.Error as error: + raise Exception("Cannot open file at %s" % path) layer = source.layer(table_name) self._data = layer.get_data() elif data: @@ -179,7 +179,14 @@ def _load_data(self): attr = data.attribute("geometry") for record in attr.values: geo_obj = _ogr.CreateGeometryFromWkt(record.text) - geo_coords.append(geo_obj.GetPoints()) + if _ogr.GeometryTypeToName(geo_obj.GetGeometryType()) == 'Multi Line String': + coords = [] + for line in geo_obj: + coords.extend(line.GetPoints()) + else: + coords = geo_obj.GetPoints() + coords = [point[:2] for point in coords] + geo_coords.append(coords) self._values.append(geo_coords) self._attr_names.append("geo_coordinates") From 8a15a13550c0de262efa48b5bc8ed61fa7148828 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 7 Dec 2023 16:06:59 -0500 Subject: [PATCH 02/43] Changed @lane_restriction to @hov, and removed @truck_restriction (now per time period) --- .../toolbox/assignment/traffic_assignment.py | 20 +++++++++---------- .../export/export_data_loader_network.py | 4 ++-- .../toolbox/export/export_for_transponder.py | 4 ++-- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/main/emme/toolbox/assignment/traffic_assignment.py b/src/main/emme/toolbox/assignment/traffic_assignment.py index ca11c066f..042c49c89 100644 --- a/src/main/emme/toolbox/assignment/traffic_assignment.py +++ b/src/main/emme/toolbox/assignment/traffic_assignment.py @@ -565,7 +565,7 @@ def run_assignment(self, period, relative_gap, max_iterations, num_processors, s # create_attribute("LINK", "@cost_hov2_%s" % p, "toll (non-mngd) + cost for HOV2", # 0, overwrite=True, scenario=scenario) # net_calc("@cost_hov2_%s" % p, "@cost_hov_%s" % p, "modes=d") - # net_calc("@cost_hov2_%s" % p, "@cost_auto_%s" % p, "@lane_restriction=3") + # net_calc("@cost_hov2_%s" % p, "@cost_auto_%s" % p, "@hov=3") with _m.logbook_trace("Transit line headway and background traffic"): # set headway for the period @@ -734,7 +734,7 @@ def run_stochastic_assignment( # create_attribute("LINK", "@cost_hov2_%s" % p, "toll (non-mngd) + cost for HOV2", # 0, overwrite=True, scenario=scenario) # net_calc("@cost_hov2_%s" % p, "@cost_hov_%s" % p, "modes=d") - # net_calc("@cost_hov2_%s" % p, "@cost_auto_%s" % p, "@lane_restriction=3") + # net_calc("@cost_hov2_%s" % p, "@cost_auto_%s" % p, "@hov=3") with _m.logbook_trace("Transit line headway and background traffic"): # set headway for the period: format is (attribute_name, period duration in hours) @@ -829,17 +829,17 @@ 
def calc_network_results(self, period, num_processors, scenario): create_attribute("TURN", "@auto_time_turn", "traffic turn time (ptimau)", overwrite=True, scenario=scenario) - net_calc("@hovdist", "length", {"link": "@lane_restriction=2,3"}) + net_calc("@hovdist", "length", {"link": "@hov=2,3"}) net_calc("@tollcost", "@cost_auto_%s - @cost_operating" % p) - net_calc("@h2tollcost", "@cost_hov2_%s - @cost_operating" % p, {"link": "@lane_restriction=3,4"}) - net_calc("@h3tollcost", "@cost_hov3_%s - @cost_operating" % p, {"link": "@lane_restriction=4"}) + net_calc("@h2tollcost", "@cost_hov2_%s - @cost_operating" % p, {"link": "@hov=3,4"}) + net_calc("@h3tollcost", "@cost_hov3_%s - @cost_operating" % p, {"link": "@hov=4"}) net_calc("@trk_ltollcost", "@cost_lgt_truck_%s - @cost_operating" % p) net_calc("@trk_mtollcost", "@cost_med_truck_%s - @cost_operating" % p) net_calc("@trk_htollcost", "@cost_hvy_truck_%s - @cost_operating" % p) - net_calc("@mlcost", "@toll_%s" % p, {"link": "not @lane_restriction=4"}) - net_calc("@tolldist", "length", {"link": "@lane_restriction=2,4"}) - net_calc("@h2tolldist", "length", {"link": "@lane_restriction=3,4"}) - net_calc("@h3tolldist", "length", {"link": "@lane_restriction=4"}) + net_calc("@mlcost", "@toll_%s" % p, {"link": "not @hov=4"}) + net_calc("@tolldist", "length", {"link": "@hov=2,4"}) + net_calc("@h2tolldist", "length", {"link": "@hov=3,4"}) + net_calc("@h3tolldist", "length", {"link": "@hov=4"}) net_calc("@auto_volume", "volau", {"link": "modes=d"}) net_calc("ul2", "volau+volad", {"link": "modes=d"}) vdfs = [f for f in emmebank.functions() if f.type == "VOLUME_DELAY"] @@ -1026,7 +1026,7 @@ def change_mode_sovntp(self, scenario): gen_sov_mode = 's' sov_mode = scenario.mode(gen_sov_mode) change_link_modes(modes=[sov_mode], action="ADD", - selection="@lane_restriction=4", scenario=scenario) + selection="@hov=4", scenario=scenario) def report(self, period, scenario, classes): emmebank = scenario.emmebank diff --git a/src/main/emme/toolbox/export/export_data_loader_network.py b/src/main/emme/toolbox/export/export_data_loader_network.py index de4585537..a5cca0d5e 100644 --- a/src/main/emme/toolbox/export/export_data_loader_network.py +++ b/src/main/emme/toolbox/export/export_data_loader_network.py @@ -209,8 +209,8 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, ("IPROJ", "@project_code"), ("IJUR", "@jurisdiction_type"), ("IFC", "type"), - ("IHOV", "@lane_restriction"), - ("ITRUCK", "@truck_restriction"), + ("IHOV", "@hov"), + #("ITRUCK", "@truck_restriction"), ("ISPD", "@speed_posted"), ("ITSPD", "zero"), ("IWAY", "iway"), diff --git a/src/main/emme/toolbox/export/export_for_transponder.py b/src/main/emme/toolbox/export/export_for_transponder.py index 17300c6ae..7f16293e6 100644 --- a/src/main/emme/toolbox/export/export_for_transponder.py +++ b/src/main/emme/toolbox/export/export_for_transponder.py @@ -122,7 +122,7 @@ def ml_facility_dist(self, network): ml_link_coords = [] ml_links = [] for link in network.links(): - if link["type"] == 1 and link["@lane_restriction"] in (2,3) and ( + if link["type"] == 1 and link["@hov"] in (2,3) and ( link["@toll_am"] + link["@toll_md"] + link["@toll_pm"]) > 0: ml_link_coords.append(LineString(link.shape)) ml_links.append(link) @@ -201,7 +201,7 @@ def percent_detour(self, scenario, network, props, num_processors): ml_link_coords = [] freeway_links = [] for link in network.links(): - if link["@lane_restriction"] in [2, 3] and link["type"] == 1 and ( + if link["@hov"] in [2, 3] and 
link["type"] == 1 and ( link["@toll_am"] + link["@toll_md"] + link["@toll_pm"]) > 0: ml_link_coords.append(LineString(link.shape)) if sov_non_toll_mode in link.modes: From a2edf46042a750caecad089aab845edb0643cf7b Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Mon, 11 Dec 2023 13:44:58 -0500 Subject: [PATCH 03/43] Updating master run to have compatible call to new Import network (for TNED); Import network: correcting mode setting for tolls to be by time of day period --- .../emme/toolbox/import/import_network.py | 3 +- src/main/emme/toolbox/master_run.py | 35 +++++-------------- 2 files changed, 11 insertions(+), 27 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index cf65b7f74..09fa2207d 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -558,6 +558,7 @@ def set_auto_modes(self, network, period): # need to update the modes from the XTRUCK for their time of day # Note: only truck types 1, 3, 4, and 7 found in 2012 base network truck = "@truck_%s" % period.lower() + toll = "@toll_%s" % period.lower() lookup = self._auto_mode_lookup for link in network.links(): auto_modes = set([]) @@ -569,7 +570,7 @@ def set_auto_modes(self, network, period): auto_modes = lookup["GP"][link[truck]] elif link["@hov"] in [2, 3]: # managed lanes, free for HOV2 and HOV3+, tolls for SOV - if link["@toll_ea"] + link["@toll_am"] + link["@toll_md"] + link["@toll_pm"] + link["@toll_ev"] > 0: + if link[toll] > 0: auto_modes = lookup["TOLL"][link[truck]] # special case of I-15 managed lanes base year and 2020, no build elif link.type == 1 and link["@project_code"] in [41, 42, 486, 373, 711]: diff --git a/src/main/emme/toolbox/master_run.py b/src/main/emme/toolbox/master_run.py index 0409ef83e..c3f607e42 100644 --- a/src/main/emme/toolbox/master_run.py +++ b/src/main/emme/toolbox/master_run.py @@ -86,6 +86,7 @@ import pyodbc import win32com.client as win32 import shutil +import glob import multiprocessing @@ -490,23 +491,19 @@ def __call__(self, main_directory, scenario_id, scenario_title, emmebank_title, self.complete_work(scenarioYear, input_dir, output_dir, mgraFile, "walkMgraEquivMinutes.csv") if not skipBuildNetwork: + source_gdb = glog.glob(os.path.join(input_dir, "*.gdb")) + if len(source_gdb) > 1: + raise Exception("Multiple *.gdb files found in input directory") + if len(source_gdb) < 1: + raise Exception("No *.gdb file found in input directory") base_scenario = import_network( - source=input_dir, - merged_scenario_id=scenario_id, + source=source_gdb[0], + scenario_id=scenario_id, title=scenario_title, data_table_name=scenarioYear, overwrite=True, emmebank=main_emmebank) - if "modify_network.py" in os.listdir(os.getcwd()): - try: - with _m.logbook_trace("Modify network script"): - import modify_network - reload(modify_network) - modify_network.run(base_scenario) - except ImportError as e: - pass - if not skipInputChecker: input_checker(path=self._path) @@ -515,21 +512,7 @@ def __call__(self, main_directory, scenario_id, scenario_title, emmebank_title, availabilities = self.parse_availability_file(_join(input_dir, availability_file), periods) # initialize per time-period scenarios for number, period in period_ids: - title = "%s - %s assign" % (base_scenario.title, period) - # copy_scenario(base_scenario, number, title, overwrite=True) - _m.logbook_write( - name="Copy scenario %s to %s" % (base_scenario.number, number), - attributes={ - 'from_scenario': base_scenario.number, 
- 'scenario_id': number, - 'overwrite': True, - 'scenario_title': title - } - ) - if main_emmebank.scenario(number): - main_emmebank.delete_scenario(number) - scenario = main_emmebank.copy_scenario(base_scenario.number, number) - scenario.title = title + scenario = main_emmebank.scenario(number) # Apply availabilities by facility and vehicle class to this time period self.apply_availabilities(period, scenario, availabilities) else: From fac7f625908e3f8b077457a0e7710ce6efe76057 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 14 Dec 2023 14:55:05 -0500 Subject: [PATCH 04/43] Updated comments in import_network to reference trstop.csv; used already imported glob as _glob in master_run --- src/main/emme/toolbox/import/import_network.py | 4 ++-- src/main/emme/toolbox/master_run.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 09fa2207d..6a4aa2a95 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -36,10 +36,10 @@ # - TNED_RailNodes # - trrt # - trlink -# - trstop # - Turns # The following files are also used (in the same directory as the *.gdb) # +# trstop.csv: stop data for the transit lines # mode5tod.csv: global (per-mode) transit cost and perception attributes # timexfer_.csv (optional): table of timed transfer pairs of lines, by period # special_fares.txt (optional): table listing special fares in terms of boarding and incremental in-vehicle costs. @@ -151,11 +151,11 @@ def page(self):
  • TNED_RailNodes
  • trrt
  • trlink
  • -
  • trstop
  • Turns
  • The following files are also used (in the same directory as the *.gdb):
      +
    • trstop.csv
    • mode5tod.csv
    • timexfer_<period>.csv (optional)
    • special_fares.txt (optional)
    • diff --git a/src/main/emme/toolbox/master_run.py b/src/main/emme/toolbox/master_run.py index c3f607e42..084e7fac7 100644 --- a/src/main/emme/toolbox/master_run.py +++ b/src/main/emme/toolbox/master_run.py @@ -86,7 +86,6 @@ import pyodbc import win32com.client as win32 import shutil -import glob import multiprocessing @@ -491,7 +490,7 @@ def __call__(self, main_directory, scenario_id, scenario_title, emmebank_title, self.complete_work(scenarioYear, input_dir, output_dir, mgraFile, "walkMgraEquivMinutes.csv") if not skipBuildNetwork: - source_gdb = glog.glob(os.path.join(input_dir, "*.gdb")) + source_gdb = _glog.glob(os.path.join(input_dir, "*.gdb")) if len(source_gdb) > 1: raise Exception("Multiple *.gdb files found in input directory") if len(source_gdb) < 1: From 069e2655803148dfe7a6efa4889ec7aeef3cdfb7 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 14 Dec 2023 16:01:02 -0500 Subject: [PATCH 05/43] Adding back in TMO -> @trtime, removing references to @trtime_link_
<period>
      , now using @trtime for all time periods --- .../emme/toolbox/assignment/build_transit_scenario.py | 8 +++----- src/main/emme/toolbox/assignment/transit_assignment.py | 8 ++++---- .../emme/toolbox/export/export_data_loader_network.py | 3 +-- src/main/emme/toolbox/import/import_network.py | 3 ++- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/src/main/emme/toolbox/assignment/build_transit_scenario.py b/src/main/emme/toolbox/assignment/build_transit_scenario.py index 0a59b4a5d..54f3150a1 100644 --- a/src/main/emme/toolbox/assignment/build_transit_scenario.py +++ b/src/main/emme/toolbox/assignment/build_transit_scenario.py @@ -322,7 +322,7 @@ def __call__(self, period, base_scenario, transit_emmebank, scenario_id, scenari self.timed_transfers(network, timed_transfers_with_walk, period) #self.connect_circle_lines(network) #self.duplicate_tap_adajcent_stops(network) - # The fixed guideway travel times are stored in "@trtime_link_xx" + # The fixed guideway travel times are stored in "@trtime" # and copied to data2 (ul2) for the ttf # The congested auto times for mixed traffic are in "@auto_time" # (output from traffic assignment) which needs to be copied to auto_time (a.k.a. timau) @@ -449,13 +449,11 @@ def split_link(link, node_id, lines, split_links, stop_attr, waits=None): if near_side_stop: in_link.length = length out_link.length = 0 - for p in ["ea", "am", "md", "pm", "ev"]: - out_link["@trtime_link_" + p] = 0 + out_link["@trtime"] = 0 else: out_link.length = length in_link.length = 0 - for p in ["ea", "am", "md", "pm", "ev"]: - in_link["@trtime_link_" + p] = 0 + in_link["@trtime"] = 0 for seg in in_link.segments(): if not near_side_stop: diff --git a/src/main/emme/toolbox/assignment/transit_assignment.py b/src/main/emme/toolbox/assignment/transit_assignment.py index 721ec1ab1..e30c9c649 100644 --- a/src/main/emme/toolbox/assignment/transit_assignment.py +++ b/src/main/emme/toolbox/assignment/transit_assignment.py @@ -267,7 +267,7 @@ def get_perception_parameters(self, period): "xfer_headway": "@headway_op", "fare": "@fare_per_op", "in_vehicle": "@vehicle_per_op", - "fixed_link_time": "@trtime_link_ea" + "fixed_link_time": "@trtime" }, "AM": { "access" : access, @@ -280,7 +280,7 @@ def get_perception_parameters(self, period): "xfer_headway": "@headway_am", "fare": "@fare_per_pk", "in_vehicle": "@vehicle_per_pk", - "fixed_link_time": "@trtime_link_am" + "fixed_link_time": "@trtime" }, "MD": { "access" : access, @@ -293,7 +293,7 @@ def get_perception_parameters(self, period): "xfer_headway": "@headway_op", "fare": "@fare_per_op", "in_vehicle": "@vehicle_per_op", - "fixed_link_time": "@trtime_link_md" + "fixed_link_time": "@trtime" }, "PM": { "access" : access, @@ -306,7 +306,7 @@ def get_perception_parameters(self, period): "xfer_headway": "@headway_pm", "fare": "@fare_per_pk", "in_vehicle": "@vehicle_per_pk", - "fixed_link_time": "@trtime_link_pm" + "fixed_link_time": "@trtime" }, "EV": { "access" : access, diff --git a/src/main/emme/toolbox/export/export_data_loader_network.py b/src/main/emme/toolbox/export/export_data_loader_network.py index a5cca0d5e..c8907bffc 100644 --- a/src/main/emme/toolbox/export/export_data_loader_network.py +++ b/src/main/emme/toolbox/export/export_data_loader_network.py @@ -970,8 +970,7 @@ def collapse_network_adjustments(self, network, segment_results, link_results): link_result_attrs = link_results.values() + ["aux_transit_volume"] link_attrs = network.attributes("LINK") link_modified_attrs = [ - "length", "@trtime_link_ea", 
"@trtime_link_am", "@trtime_link_md", - "@trtime_link_pm", "@trtime_link_ev", link_results["link_transit_flow"]] + "length", "@trtime", link_results["link_transit_flow"]] seg_attrs = network.attributes("TRANSIT_SEGMENT") line_attrs = network.attributes("TRANSIT_LINE") diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 6a4aa2a95..444c97981 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -311,6 +311,7 @@ def execute(self): ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), + ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes",)), ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), ]), @@ -911,7 +912,7 @@ def create_transit_lines(self, network, attr_map): # segments on links with auto mode are ft1 = timau segment.transit_time_func = 1 else: - # ft2 = ul2 -> copied @trtime_link_XX (fixed speed) + # ft2 = ul2 -> copied @trtime (fixed speed) segment.transit_time_func = 2 except Exception as error: msg = "Transit line %s: %s %s" % (record["Route_Name"], type(error), error) From 59bc617d68f5d9d9f1b953200e98a4ec3f73f96d Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 14 Dec 2023 16:09:37 -0500 Subject: [PATCH 06/43] Fixed typo glog to glob --- src/main/emme/toolbox/master_run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/emme/toolbox/master_run.py b/src/main/emme/toolbox/master_run.py index 084e7fac7..7908045fb 100644 --- a/src/main/emme/toolbox/master_run.py +++ b/src/main/emme/toolbox/master_run.py @@ -490,7 +490,7 @@ def __call__(self, main_directory, scenario_id, scenario_title, emmebank_title, self.complete_work(scenarioYear, input_dir, output_dir, mgraFile, "walkMgraEquivMinutes.csv") if not skipBuildNetwork: - source_gdb = _glog.glob(os.path.join(input_dir, "*.gdb")) + source_gdb = _glob.glob(os.path.join(input_dir, "*.gdb")) if len(source_gdb) > 1: raise Exception("Multiple *.gdb files found in input directory") if len(source_gdb) < 1: From 3ac7374e09366854f11842dddde2d3f91f223fe0 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Tue, 19 Dec 2023 11:45:53 -0500 Subject: [PATCH 07/43] Adjusting Available node ID logic in build_transit_scenario to match import_network: counting down from 999999 to find available node ID. 
Centralized implementation in general.py --- .../assignment/build_transit_scenario.py | 18 ++++++------------ src/main/emme/toolbox/import/import_network.py | 14 +------------- src/main/emme/toolbox/utilities/general.py | 11 +++++++++++ 3 files changed, 18 insertions(+), 25 deletions(-) diff --git a/src/main/emme/toolbox/assignment/build_transit_scenario.py b/src/main/emme/toolbox/assignment/build_transit_scenario.py index 54f3150a1..052602fa3 100644 --- a/src/main/emme/toolbox/assignment/build_transit_scenario.py +++ b/src/main/emme/toolbox/assignment/build_transit_scenario.py @@ -118,6 +118,7 @@ def __init__(self): self.attributes = [ "period", "scenario_id", "base_scenario_id", "data_table_name", "scenario_title", "overwrite"] + self._node_id_tracker = None def page(self): if not self.data_table_name: @@ -218,6 +219,7 @@ def __call__(self, period, base_scenario, transit_emmebank, scenario_id, scenari for field in base_scenario.network_fields(): scenario.create_network_field(field.type, field.name, field.atype, field.description) network = base_scenario.get_network() + self._node_id_tracker = gen_utils.AvailableNodeIDTracker(network) new_attrs = [ ("TRANSIT_LINE", "@xfer_from_day", "Fare for xfer from daypass/trolley"), ("TRANSIT_LINE", "@xfer_from_premium", "Fare for first xfer from premium"), @@ -235,7 +237,6 @@ def __call__(self, period, base_scenario, transit_emmebank, scenario_id, scenari attr.description = desc network.create_attribute(elem, name) network.create_attribute("TRANSIT_LINE", "xfer_from_bus") - self._init_node_id(network) transit_passes = gen_utils.DataTableProc("%s_transit_passes" % data_table_name) transit_passes = {row["pass_type"]: row["cost"] for row in transit_passes} @@ -336,6 +337,7 @@ def __call__(self, period, base_scenario, transit_emmebank, scenario_id, scenari network.set_attribute_values("LINK", dst_attrs, values) scenario.publish_network(network) + self._node_id_tracker = None ##copying auto_time to ul1, so it does not get wiped when transit connectors are created. 
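         # As a sketch only (assuming ul1 is link data1 and "@auto_time" holds the congested
         # times written by the traffic assignment; the statement this tool actually uses
         # may differ):
         #     for link in network.links():
         #         link.data1 = link["@auto_time"]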
if scenario.has_traffic_results and "@auto_time" in scenario.attributes("LINK"): @@ -474,10 +476,10 @@ def split_link(link, node_id, lines, split_links, stop_attr, waits=None): split_links = {} for transfer in network_transfers: new_alight_node = split_link( - transfer["from_link"], self._get_node_id(), transfer["from_lines"], + transfer["from_link"], self._node_id_tracker.get_id(), transfer["from_lines"], split_links, "allow_alightings") new_board_node = split_link( - transfer["to_link"], self._get_node_id(), transfer["to_lines"], + transfer["to_link"], self._node_id_tracker.get_id(), transfer["to_lines"], split_links, "allow_boardings", waits=transfer["wait"]) walk_link = transfer["walk_link"] transfer_link = network.create_link( @@ -506,7 +508,7 @@ def offset_coords(node): if first_seg.i_node == last_seg.i_node: # Add new node, offset from existing node start_node = line.segment(0).i_node - xfer_node = network.create_node(self._get_node_id(), False) + xfer_node = network.create_node(self._node_id_tracker.get_id(), False) xfer_node["@network_adj"] = 2 xfer_node.x, xfer_node.y = offset_coords(start_node) network.create_link(start_node, xfer_node, [line.vehicle.mode]) @@ -554,11 +556,3 @@ def offset_coords(node): seg[k] = v network.delete_attribute("NODE", "circle_lines") - - def _init_node_id(self, network): - new_node_id = max(n.number for n in network.nodes()) - self._new_node_id = math.ceil(new_node_id / 10000.0) * 10000 - - def _get_node_id(self): - self._new_node_id += 1 - return self._new_node_id diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 444c97981..e16c5fbf2 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -1105,8 +1105,7 @@ def get_line(line_id): self._log.append({"type": "text", "content": "Apply special_fares to transit lines complete"}) def renumber_base_nodes(self, network): - # TODO: log node renumberings - tracker = AvailableNodeIDTracker(network) + tracker = gen_utils.AvailableNodeIDTracker(network) nodes = [n for n in network.nodes() if n.number > 999999] nodes = sorted(nodes, key=lambda x: x.number, reverse=True) if nodes: @@ -1776,17 +1775,6 @@ def find_path(orig_link, dest_link, mode): return list(reversed(route)) -class AvailableNodeIDTracker(object): - def __init__(self, network, start=999999): - self._network = network - self._node_id = start - - def get_id(self): - while self._network.node(self._node_id): - self._node_id -= 1 - return self._node_id - - class NoPathException(Exception): pass diff --git a/src/main/emme/toolbox/utilities/general.py b/src/main/emme/toolbox/utilities/general.py index 14ee8ca41..4d002d835 100644 --- a/src/main/emme/toolbox/utilities/general.py +++ b/src/main/emme/toolbox/utilities/general.py @@ -77,6 +77,17 @@ def __call__(self, result, expression, selections=None, aggregation=None): return self._network_calc(spec, self._scenario) +class AvailableNodeIDTracker(object): + def __init__(self, network, start=999999): + self._network = network + self._node_id = start + + def get_id(self): + while self._network.node(self._node_id): + self._node_id -= 1 + return self._node_id + + @_context def temp_matrices(emmebank, mat_type, total=1, default_value=0.0): matrices = [] From 28619a2f216eeead282eeb590cdfd4378de2b3da Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Tue, 19 Dec 2023 14:23:30 -0500 Subject: [PATCH 08/43] Assignment: split off-peak (op) times to ea, md and pm; added back calculation of 
revised_headway to import network --- .../toolbox/assignment/transit_assignment.py | 14 +++--- .../emme/toolbox/import/import_network.py | 45 +++++++++++-------- 2 files changed, 34 insertions(+), 25 deletions(-) diff --git a/src/main/emme/toolbox/assignment/transit_assignment.py b/src/main/emme/toolbox/assignment/transit_assignment.py index e30c9c649..f47b3d5b3 100644 --- a/src/main/emme/toolbox/assignment/transit_assignment.py +++ b/src/main/emme/toolbox/assignment/transit_assignment.py @@ -263,8 +263,8 @@ def get_perception_parameters(self, period): "init_wait": 1.5, "xfer_wait": 3.0, "walk": 2.0, - "init_headway": "@headway_rev_op", - "xfer_headway": "@headway_op", + "init_headway": "@headway_rev_ea", + "xfer_headway": "@headway_ea", "fare": "@fare_per_op", "in_vehicle": "@vehicle_per_op", "fixed_link_time": "@trtime" @@ -289,8 +289,8 @@ def get_perception_parameters(self, period): "init_wait": 1.5, "xfer_wait": 3.0, "walk": 2.0, - "init_headway": "@headway_rev_op", - "xfer_headway": "@headway_op", + "init_headway": "@headway_rev_md", + "xfer_headway": "@headway_md", "fare": "@fare_per_op", "in_vehicle": "@vehicle_per_op", "fixed_link_time": "@trtime" @@ -315,11 +315,11 @@ def get_perception_parameters(self, period): "init_wait": 1.5, "xfer_wait": 3.0, "walk": 2.0, - "init_headway": "@headway_rev_night", - "xfer_headway": "@headway_night", + "init_headway": "@headway_rev_ev", + "xfer_headway": "@headway_ev", "fare": "@fare_per_op", "in_vehicle": "@vehicle_per_op", - "fixed_link_time": "@trtime_link_ev" + "fixed_link_time": "@trtime" } } return perception_parameters[period] diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index e16c5fbf2..a32fdd5d1 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -316,24 +316,27 @@ def execute(self): ]), "TRANSIT_LINE": OrderedDict([ - ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), - ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), - ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), - ("Evening_Headway", ("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), - ("EarlyAM_Headway", ("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), - ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), - ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), - ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), - ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), - ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), - ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), - ("DWELLTIME", ("default_dwell_time", "MODE5TOD", "INTERNAL", "")), - ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), - ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), - ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), - ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), - ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), - ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), + ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), + ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual 
headway")), + ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), + ("Evening_Headway",("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), + ("EarlyAM_Headway",("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), + ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), + ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), + ("MD_Headway_rev", ("@headway_rev_md", "DERIVED", "EXTRA", "Midday revised headway")), + ("EV_Headway_rev", ("@headway_rev_ev", "DERIVED", "EXTRA", "Evening revised headway")), + ("EA_Headway_rev", ("@headway_rev_ea", "DERIVED", "EXTRA", "Early AM revised headway")), + ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), + ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), + ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), + ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), + ("DWELLTIME", ("default_dwell_time" "MODE5TOD", "INTERNAL", "")), + ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), + ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), + ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), + ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), + ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), + ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), ]), "TRANSIT_SEGMENT": OrderedDict([ ("Stop_ID", ("@stop_id", "TRSTOP", "EXTRA", "Stop ID from trcov")), @@ -1041,6 +1044,12 @@ def calc_transit_attributes(self, network): if not os.path.isfile(special_fare_path): return + # ON TRANSIT LINES + # Set 3-period headway based on revised headway calculation + for line in network.transit_lines(): + for period in ["ea", "am", "md", "pm", "ev"]: + line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) + def get_line(line_id): line = network.transit_line(line_id) if line is None: From c7eb821926771bc5c1790ed9aaa3bf81560306f7 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 20 Dec 2023 13:30:52 -0500 Subject: [PATCH 09/43] Changed @ipark to @park --- .../emme/toolbox/assignment/create_transit_connector.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/emme/toolbox/assignment/create_transit_connector.py b/src/main/emme/toolbox/assignment/create_transit_connector.py index 67423dc27..200c564b1 100644 --- a/src/main/emme/toolbox/assignment/create_transit_connector.py +++ b/src/main/emme/toolbox/assignment/create_transit_connector.py @@ -202,7 +202,7 @@ def create_tr_connectors(self, period, create_connector_flag, main_directory): delete_existing=True, selection={ "centroid":"all", - "node": "@ipark=1,9 and %s" % self.line_haul_mode_specs[i], + "node": "@park=1,9 and %s" % self.line_haul_mode_specs[i], "link":"none", "exclude_split_links":False, "only_midblock_nodes": False}, @@ -284,7 +284,7 @@ def create_tr_connectors(self, period, create_connector_flag, main_directory): delete_existing=True, selection={ "centroid":"all", - "node": "@ipark=1,9 and %s" % self.line_haul_mode_specs[i], + "node": "@park=1,9 and %s" % self.line_haul_mode_specs[i], "link":"none", "exclude_split_links":False, "only_midblock_nodes": False}, @@ -318,7 +318,7 @@ def create_tr_connectors(self, period, create_connector_flag, main_directory): 
delete_existing=True, selection={ "centroid":"all", - "node": "@ipark=1,9 and %s" % self.line_haul_mode_specs[i], + "node": "@park=1,9 and %s" % self.line_haul_mode_specs[i], "link":"none", "exclude_split_links":False, "only_midblock_nodes": False}, @@ -331,7 +331,7 @@ def create_tr_connectors(self, period, create_connector_flag, main_directory): delete_existing=True, selection={ "centroid":"i=1,4", - "node": "@ipark=1,9 and %s" % self.line_haul_mode_specs[i], + "node": "@park=1,9 and %s" % self.line_haul_mode_specs[i], "link":"none", "exclude_split_links":False, "only_midblock_nodes": False}, From e2901ef7ff53a27508c963882d3679fc5de9a107 Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Tue, 19 Dec 2023 16:01:30 -0800 Subject: [PATCH 10/43] Typos under import_network.py: - extra comma in line 314 - missing comma in line 333 --- .../emme/toolbox/import/import_network.py | 4 +- .../emme/toolbox/import/import_network.py.bak | 1836 +++++++++++++++++ 2 files changed, 1838 insertions(+), 2 deletions(-) create mode 100644 src/main/emme/toolbox/import/import_network.py.bak diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index a32fdd5d1..ae328727a 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -311,7 +311,7 @@ def execute(self): ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), - ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes",)), + ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes")), ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), ]), @@ -330,7 +330,7 @@ def execute(self): ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), - ("DWELLTIME", ("default_dwell_time" "MODE5TOD", "INTERNAL", "")), + ("DWELLTIME", ("default_dwell_time", "MODE5TOD", "INTERNAL", "")), ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), diff --git a/src/main/emme/toolbox/import/import_network.py.bak b/src/main/emme/toolbox/import/import_network.py.bak new file mode 100644 index 000000000..a32fdd5d1 --- /dev/null +++ b/src/main/emme/toolbox/import/import_network.py.bak @@ -0,0 +1,1836 @@ +#////////////////////////////////////////////////////////////////////////////// +#//// /// +#//// Copyright INRO, 2016-2017. /// +#//// Rights to use and modify are granted to the /// +#//// San Diego Association of Governments and partner agencies. /// +#//// This copyright notice must be preserved. /// +#//// /// +#//// import/import_network.py /// +#//// /// +#//// /// +#//// /// +#//// /// +#////////////////////////////////////////////////////////////////////////////// +# +# Imports the network from the input network files. 
+# +# +# Inputs: +# source: path to the location of the input network geodatabase +# traffic_scenario_id: optional scenario to store the imported network from the traffic files only +# transit_scenario_id: optional scenario to store the imported network from the transit files only +# merged_scenario_id: scenario to store the combined traffic and transit data from all network files +# title: the title to use for the imported scenario +# save_data_tables: if checked, create a data table for each reference file for viewing in the Emme Desktop +# data_table_name: prefix to use to identify all data tables +# overwrite: check to overwrite any existing data tables or scenarios with the same ID or name +# emmebank: the Emme database in which to create the scenario. Default is the current open database +# create_time_periods: if True (default), also create per-time period scenarios (required to run assignments) +# +# Files referenced: +# +# *.gdb: A Geodatabase file with the network data for both highway and transit. The following tables are used +# - TNED_HwyNet +# - TNED_HwyNodes +# - TNED_RailNet +# - TNED_RailNodes +# - trrt +# - trlink +# - Turns +# The following files are also used (in the same directory as the *.gdb) +# +# trstop.csv: stop data for the transit lines +# mode5tod.csv: global (per-mode) transit cost and perception attributes +# timexfer_.csv (optional): table of timed transfer pairs of lines, by period +# special_fares.txt (optional): table listing special fares in terms of boarding and incremental in-vehicle costs. +# off_peak_toll_factors.csv (optional): factors to calculate the toll for EA, MD, and EV periods from the OP toll input for specified facilities +# vehicle_class_toll_factors.csv (optional): factors to adjust the toll cost by facility name and class (DA, S2, S3, TRK_L, TRK_M, TRK_H) +# +# +# Script example: +""" + import os + modeller = inro.modeller.Modeller() + main_directory = os.path.dirname(os.path.dirname(modeller.desktop.project.path)) + source_file = os.path.join(main_directory, "input", "EMMEOutputs.gdb") + title = "Base 2012 scenario" + import_network = modeller.tool("sandag.import.import_network") + import_network(source_file, merged_scenario_id=100, title=title, + data_table_name="2012_base", overwrite=True) +""" + + +TOOLBOX_ORDER = 11 + + +import inro.modeller as _m +import inro.emme.datatable as _dt +import inro.emme.network as _network +from inro.emme.core.exception import Error as _NetworkError + +from itertools import izip as _izip +from collections import defaultdict as _defaultdict, OrderedDict +from contextlib import contextmanager as _context +import fiona as _fiona + +from math import ceil as _ceiling +from copy import deepcopy as _copy +import numpy as _np +import heapq as _heapq +import pandas as pd + +import traceback as _traceback +import os + +_join = os.path.join +_dir = os.path.dirname + + +gen_utils = _m.Modeller().module("sandag.utilities.general") +dem_utils = _m.Modeller().module("sandag.utilities.demand") + +FILE_NAMES = { + "FARES": "special_fares.txt", + "TIMEXFER": "timexfer_%s.csv", + "OFF_PEAK": "off_peak_toll_factors.csv", + "VEHICLE_CLASS": "vehicle_class_toll_factors.csv", + "MODE5TOD": "MODE5TOD.csv", +} + + +class ImportNetwork(_m.Tool(), gen_utils.Snapshot): + + source = _m.Attribute(unicode) + scenario_id = _m.Attribute(int) + overwrite = _m.Attribute(bool) + title = _m.Attribute(unicode) + save_data_tables = _m.Attribute(bool) + data_table_name = _m.Attribute(unicode) + create_time_periods = _m.Attribute(bool) + + 
tool_run_msg = "" + + @_m.method(return_type=_m.UnicodeType) + def tool_run_msg_status(self): + return self.tool_run_msg + + def __init__(self): + self._log = [] + self._error = [] + project_dir = _dir(_m.Modeller().desktop.project.path) + self.source = _join(_dir(project_dir), "input") + self.overwrite = False + self.title = "" + self.data_table_name = "" + self.create_time_periods = True + self.attributes = [ + "source", "scenario_id", "overwrite", "title", "save_data_tables", "data_table_name", "create_time_periods" + ] + + def page(self): + if not self.data_table_name: + try: + load_properties = _m.Modeller().tool('sandag.utilities.properties') + props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) + self.data_table_name = props["scenarioYear"] + except: + pass + + pb = _m.ToolPageBuilder(self) + pb.title = "Import network" + pb.description = """ +

      + Create an Emme network from TNED geodatabase (*.gdb) and associated files. +
      +
      + The following layers in the gdb are used: +
        +
      • TNED_HwyNet
      • +
      • TNED_HwyNodes
      • +
      • TNED_RailNet
      • +
      • TNED_RailNodes
      • +
      • trrt
      • +
      • trlink
      • +
      • Turns
      • +
      + The following files are also used (in the same directory as the *.gdb): +
        +
      • trstop.csv
      • +
      • mode5tod.csv
      • +
      • timexfer_.csv (optional)
      • +
      • special_fares.txt (optional)
      • +
      • off_peak_toll_factors.csv (optional)
      • +
      • vehicle_class_toll_factors.csv (optional)
      • +
      +
      + """ + pb.branding_text = "- SANDAG - Import" + + if self.tool_run_msg != "": + pb.tool_run_status(self.tool_run_msg_status) + + pb.add_select_file("source", window_type="directory", file_filter="", + title="Source gdb:",) + + pb.add_text_box("scenario_id", size=6, title="Scenario ID for imported network:") + pb.add_text_box("title", size=80, title="Scenario title:") + pb.add_checkbox("save_data_tables", title=" ", label="Save reference data tables of file data") + pb.add_text_box("data_table_name", size=80, title="Name for data tables:", + note="Prefix name to use for all saved data tables") + pb.add_checkbox("overwrite", title=" ", label="Overwrite existing scenarios and data tables") + pb.add_checkbox("create_time_periods", title=" ", label="Copy base scenario to all time periods and set modes (required for assignments)") + + return pb.render() + + def run(self): + self.tool_run_msg = "" + try: + self.emmebank = _m.Modeller().emmebank + with self.setup(): + self.execute() + run_msg = "Network import complete" + if self._error: + run_msg += " with %s non-fatal errors. See logbook for details" % len(self._error) + self.tool_run_msg = _m.PageBuilder.format_info(run_msg, escape=False) + except Exception as error: + self.tool_run_msg = _m.PageBuilder.format_exception( + error, _traceback.format_exc()) + raise + + def __call__(self, source, scenario_id, + title="", save_data_tables=False, data_table_name="", overwrite=False, + emmebank=None, create_time_periods=True): + + self.source = source + self.scenario_id = scenario_id + self.title = title + self.save_data_tables = save_data_tables + self.data_table_name = data_table_name + self.overwrite = overwrite + if not emmebank: + self.emmebank = _m.Modeller().emmebank + else: + self.emmebank = emmebank + self.create_time_periods = create_time_periods + + with self.setup(): + self.execute() + + return self.emmebank.scenario(scenario_id) + + @_context + def setup(self): + self._log = [] + self._error = [] + fatal_error = False + attributes = OrderedDict([ + ("self", str(self)), + ("source", self.source), + ("scenario_id", self.scenario_id), + ("title", self.title), + ("save_data_tables", self.save_data_tables), + ("data_table_name", self.data_table_name), + ("overwrite", self.overwrite), + ("create_time_periods", self.create_time_periods) + ]) + self._log = [{ + "content": attributes.items(), + "type": "table", "header": ["name", "value"], + "title": "Tool input values" + }] + with _m.logbook_trace("Import network", attributes=attributes) as trace: + gen_utils.log_snapshot("Import network", str(self), attributes) + load_properties = _m.Modeller().tool('sandag.utilities.properties') + self._props = load_properties(_join(_dir(_dir(self.source)), "conf", "sandag_abm.properties")) + try: + yield + except Exception as error: + self._log.append({"type": "text", "content": error}) + trace_text = _traceback.format_exc().replace("\n", "
      ") + self._log.append({"type": "text", "content": trace_text}) + self._error.append(error) + fatal_error = True + raise + finally: + self._props = None + self.log_report() + self._auto_mode_lookup = None + self._transit_mode_lookup = None + if self._error: + if fatal_error: + trace.write("Import network failed (%s errors)" % len(self._error), attributes=attributes) + else: + trace.write("Import network completed (%s non-fatal errors)" % len(self._error), attributes=attributes) + + def execute(self): + attr_map = { + "NODE": OrderedDict([ + ("HNODE", ("@hnode", "BOTH", "EXTRA", "HNODE label from TNED" )), + ("TAP", ("@tap_id", "BOTH", "EXTRA", "TAP number")), + ("PARK", ("@park", "BOTH", "EXTRA", "parking indicator" )), + ("STOPTYPE", ("@stoptype", "BOTH", "EXTRA", "stop type indicator" )), + ("ELEV", ("@elev", "BOTH", "EXTRA", "station/stop elevation in feet")), + ("interchange", ("@interchange", "DERIVED", "EXTRA", "is interchange node")), + ]), + "LINK": OrderedDict([ + ("HWYCOV0_ID",("@tcov_id", "TWO_WAY", "EXTRA", "SANDAG-assigned link ID")), + ("SPHERE", ("@sphere", "HWY_TWO_WAY", "EXTRA", "Jurisdiction sphere of influence")), + ("HWYSegGUID",("#hwyseg_guid", "TWO_WAY", "STRING", "HWYSegGUID")), + ("NM", ("#name", "TWO_WAY", "STRING", "Street name")), + ("FXNM", ("#name_from", "TWO_WAY", "STRING", "Cross street at the FROM end")), + ("TXNM", ("#name_to", "TWO_WAY", "STRING", "Cross street name at the TO end")), + ("DIR", ("@direction_cardinal", "TWO_WAY", "EXTRA", "Link direction")), + ("ASPD", ("@speed_adjusted", "HWY_TWO_WAY", "EXTRA", "Adjusted link speed (miles/hr)")), + ("YR", ("@year_open_traffic", "HWY_TWO_WAY", "EXTRA", "The year the link opened to traffic")), + ("PROJ", ("@project_code", "HWY_TWO_WAY", "EXTRA", "Project number for use with hwyproj.xls")), + ("FC", ("type", "TWO_WAY", "STANDARD", "")), + ("HOV", ("@hov", "TWO_WAY", "EXTRA", "Link operation type")), + ("MINMODE", ("@minmode", "TWO_WAY", "EXTRA", "Transit mode type")), + ("EATRUCK", ("@truck_ea", "HWY_TWO_WAY", "EXTRA", "Early AM truck restriction code ")), + ("AMTRUCK", ("@truck_am", "HWY_TWO_WAY", "EXTRA", "AM Peak truck restriction code ")), + ("MDTRUCK", ("@truck_md", "HWY_TWO_WAY", "EXTRA", "Mid-day truck restriction code ")), + ("PMTRUCK", ("@truck_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak truck restriction code ")), + ("EVTRUCK", ("@truck_ev", "HWY_TWO_WAY", "EXTRA", "Evening truck restriction code ")), + ("TOLLEA", ("@toll_ea", "HWY_TWO_WAY", "EXTRA", "Early AM toll cost (cent)")), + ("TOLLA", ("@toll_am", "HWY_TWO_WAY", "EXTRA", "AM Peak toll cost (cent)")), + ("TOLLMD", ("@toll_md", "HWY_TWO_WAY", "EXTRA", "Mid-day toll cost (cent)")), + ("TOLLP", ("@toll_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak toll cost (cent)")), + ("TOLLEV", ("@toll_ev", "HWY_TWO_WAY", "EXTRA", "Evening toll cost (cent)")), + + ("SPD", ("@speed_posted", "HWY_TWO_WAY", "EXTRA", "Posted speed limit (mph)")), + ("MED", ("@median", "TWO_WAY", "EXTRA", "Median type")), + ("AU", ("@lane_auxiliary", "HWY_ONE_WAY", "EXTRA", "Number of auxiliary lanes")), + ("CNT", ("@traffic_control", "HWY_ONE_WAY", "EXTRA", "Intersection control type")), + ("TL", ("@turn_thru", "HWY_ONE_WAY", "EXTRA", "Intersection approach through lanes")), + ("RL", ("@turn_right", "HWY_ONE_WAY", "EXTRA", "Intersection approach right-turn lanes")), + ("LL", ("@turn_left", "HWY_ONE_WAY", "EXTRA", "Intersection approach left-turn lanes")), + ("GC", ("@green_to_cycle_init", "HWY_ONE_WAY", "EXTRA", "Initial green-to-cycle ratio")), + ("WAY", ("way", "HWY_TWO_WAY", "INTERNAL", 
"")), + ("TRANSIT_MODES", ("transit_modes", "DERIVED", "INTERNAL", "")), + ("@cost_operating", ("@cost_operating", "DERIVED", "EXTRA", "Fuel and maintenance cost")), + ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), + ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), + + ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes",)), + ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), + + ]), + "TRANSIT_LINE": OrderedDict([ + ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), + ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), + ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), + ("Evening_Headway",("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), + ("EarlyAM_Headway",("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), + ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), + ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), + ("MD_Headway_rev", ("@headway_rev_md", "DERIVED", "EXTRA", "Midday revised headway")), + ("EV_Headway_rev", ("@headway_rev_ev", "DERIVED", "EXTRA", "Evening revised headway")), + ("EA_Headway_rev", ("@headway_rev_ea", "DERIVED", "EXTRA", "Early AM revised headway")), + ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), + ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), + ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), + ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), + ("DWELLTIME", ("default_dwell_time" "MODE5TOD", "INTERNAL", "")), + ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), + ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), + ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), + ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), + ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), + ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), + ]), + "TRANSIT_SEGMENT": OrderedDict([ + ("Stop_ID", ("@stop_id", "TRSTOP", "EXTRA", "Stop ID from trcov")), + ("Pass_Count", ("@pass_count", "TRSTOP", "EXTRA", "Number of times this stop is passed")), + ("Milepost", ("@milepost", "TRSTOP", "EXTRA", "Distance from start of line")), + ("StopName", ("#stop_name", "TRSTOP", "STRING", "Name of stop")), + ("@coaster_fare_board", ("@coaster_fare_board", "DERIVED", "EXTRA", "Boarding fare for coaster")), + ("@coaster_fare_inveh", ("@coaster_fare_inveh", "DERIVED", "EXTRA", "Incremental fare for Coaster")), + ]) + } + + time_name = { + "_ea": "Early AM ", "_am": "AM Peak ", "_md": "Mid-day ", "_pm": "PM Peak ", "_ev": "Evening " + } + time_name_dst = ["_ea", "_am", "_md", "_pm", "_ev"] + time_name_src = ["EA", "A", "MD", "P", "EV"] + time_period_attrs = [ + ("CP", "@capacity_link", "mid-link capacity"), + ("CX", "@capacity_inter", "approach capacity"), + ("CH", "@capacity_hourly", "hourly mid-link capacity"), + ("LN", "@lane", "number of lanes"), + ("TM", "@time_link", "link time in minutes"), + ("TX", "@time_inter", "intersection delay time"), + ] + for src_attr, dst_attr, desc_tmplt in time_period_attrs: + for time_s, time_d in zip(time_name_src, time_name_dst): + attr_map["LINK"][src_attr + 
time_s] = \ + (dst_attr + time_d, "HWY_ONE_WAY", "EXTRA", time_name[time_d] + desc_tmplt) + derived_period_attrs = [ + ("@cost_auto", "toll + cost autos"), + ("@cost_hov2", "toll (non-mngd) + cost HOV2"), + ("@cost_hov3", "toll (non-mngd) + cost HOV3+"), + ("@cost_lgt_truck", "toll + cost light trucks"), + ("@cost_med_truck", "toll + cost medium trucks"), + ("@cost_hvy_truck", "toll + cost heavy trucks"), + ("@cycle", "cycle length (minutes)"), + ("@green_to_cycle", "green to cycle ratio"), + ("@sta_reliability", "static reliability") + ] + for attr, desc_tmplt in derived_period_attrs: + for time in time_name_dst: + attr_map["LINK"][attr + time] = \ + (attr + time, "DERIVED", "EXTRA", time_name[time] + desc_tmplt) + + create_scenario = _m.Modeller().tool( + "inro.emme.data.scenario.create_scenario") + + title = self.title + if not title: + existing_scenario = self.emmebank.scenario(self.scenario_id) + if existing_scenario: + title = existing_scenario.title + + scenario = create_scenario(self.scenario_id, title, overwrite=self.overwrite, emmebank=self.emmebank) + scenarios = [scenario] + if self.create_time_periods: + periods=["EA", "AM", "MD", "PM", "EV"] + period_ids = list(enumerate(periods, start=int(self.scenario_id) + 1)) + for ident, period in period_ids: + scenarios.append(create_scenario(ident, "%s - %s assign" % (title, period), + overwrite=self.overwrite, emmebank=self.emmebank)) + # create attributes in scenario + for elem_type, mapping in attr_map.iteritems(): + for name, _tcoved_type, emme_type, desc in mapping.values(): + if emme_type == "EXTRA": + for s in scenarios: + if not s.extra_attribute(name): + xatt = s.create_extra_attribute(elem_type, name) + xatt.description = desc + elif emme_type == "STRING": + for s in scenarios: + if not s.network_field(elem_type, name): + s.create_network_field(elem_type, name, 'STRING', description=desc) + + log_content = [] + for k, v in mapping.iteritems(): + if v[3] == "DERIVED": + k = "--" + log_content.append([k] + list(v)) + self._log.append({ + "content": log_content, + "type": "table", + "header": ["TNED", "Emme", "Source", "Type", "Description"], + "title": "Network %s attributes" % elem_type.lower().replace("_", " "), + "disclosure": True + }) + + network = _network.Network() + for elem_type, mapping in attr_map.iteritems(): + for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): + if emme_type == "STANDARD": + continue + default = "" if emme_type == "STRING" else 0 + network.create_attribute(elem_type, attr, default) + try: + self.create_modes(network) + self.create_road_base(network, attr_map) + self.create_turns(network) + self.calc_traffic_attributes(network) + self.check_zone_access(network, network.mode("d")) + self.create_rail_base(network, attr_map) + self.create_transit_lines(network, attr_map) + self.calc_transit_attributes(network) + finally: + # TAP connectors included in network, fix type setting and renumber node IDs + for link in network.links(): + if link.type <= 0: + link.type = 99 + self.renumber_base_nodes(network) + scenario.publish_network(network, resolve_attributes=True) + + self.set_functions(scenario) + self.check_connectivity(scenario) + + if "modify_network.py" in os.listdir(os.getcwd()): + try: + with _m.logbook_trace("Modify network script"): + import modify_network + reload(modify_network) + modify_network.run(base_scenario) + except ImportError as e: + pass + network = base_scenario.get_network() + network.create_attribute("LINK", "transit_modes") + + if self.create_time_periods: + 
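+            # keep the full transit mode set on each link so set_auto_modes()
+            # can rebuild the per-period auto modes (from @truck_<period> and
+            # @toll_<period>) before each time-period scenario is published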
for link in network.links(): + link.transit_modes = link.modes + for ident, period in period_ids: + self.set_auto_modes(network, period) + scenario = self.emmebank.scenario(ident) + scenario.publish_network(network, resolve_attributes=True) + + def create_modes(self, network): + # combined traffic and transit mode creation + mode_table = { + "AUTO": [("d", "dummy auto")], + "AUX_AUTO": [ + ("h", "SOV"), + ("H", "HOV2"), + ("i", "HOV3+"), + ("I", "TRKL"), + ("s", "TRKM"), + ("S", "TRKH"), + ("v", "SOV TOLL"), + ("V", "HOV2 TOLL"), + ("m", "HOV3+ TOLL"), + ("M", "TRKL TOLL"), + ("t", "TRKM TOLL"), + ("T", "TRKH TOLL"), + ], + "TRANSIT": [ + ("b", "BUS" ), # (vehicle type 100, PCE=3.0) + ("e", "EXP BUS"), # (vehicle type 90 , PCE=3.0) + ("p", "LTDEXP BUS"), # (vehicle type 80 , PCE=3.0) + ("l", "LRT"), # (vehicle type 50) + ("y", "BRT YEL"), # (vehicle type 60 , PCE=3.0) + ("r", "BRT RED"), # (vehicle type 70 , PCE=3.0) + ("c", "CMR"), # (vehicle type 40) + ("o", "TIER1"), # (vehicle type 45) + ], + "AUX_TRANSIT": [ + ("a", "ACCESS", 3), + ("x", "TRANSFER", 3), + ("w", "WALK", 3), + ("u", "ACCESS_WLK", 3), + ("k", "EGRESS_WLK", 3), + ("f", "ACCESS_PNR", 25), + ("g", "EGRESS_PNR", 25), + ("q", "ACCESS_KNR", 25), + ("j", "EGRESS_KNR", 25), + ("Q", "ACCESS_TNC", 25), + ("J", "EGRESS_TNC", 25), + ], + } + for mode_type, modes in mode_table.iteritems(): + for mode_info in modes: + mode = network.create_mode(mode_type, mode_info[0]) + mode.description = mode_info[1] + if len(mode_info) == 3: + mode.speed = mode_info[2] + self._transit_mode_lookup = { + 0: set([]), + 1: set([network.mode(m_id) for m_id in "x"]), # 1 = special transfer walk links between certain nearby stops + 2: set([network.mode(m_id) for m_id in "w"]), # 2 = walk links in the downtown area + 3: set([network.mode(m_id) for m_id in "a"]), # 3 = the special TAP connectors + 400: set([network.mode(m_id) for m_id in "c"]), # 4 = Coaster Rail Line + 500: set([network.mode(m_id) for m_id in "l"]), # 5 = Trolley & Light Rail Transit (LRT) + 600: set([network.mode(m_id) for m_id in "bpeyr"]), # 6 = Yellow Car Bus Rapid Transit (BRT) + 700: set([network.mode(m_id) for m_id in "bpeyr"]), # 7 = Red Car Bus Rapid Transit (BRT) + 800: set([network.mode(m_id) for m_id in "bpe"]), # 8 = Limited Express Bus + 900: set([network.mode(m_id) for m_id in "bpe"]), # 9 = Express Bus + 1000: set([network.mode(m_id) for m_id in "bpe"]), # 10 = Local Bus + 11: set([network.mode(m_id) for m_id in "u"]), # = access walk links + 12: set([network.mode(m_id) for m_id in "k"]), # = egress walk links + 13: set([network.mode(m_id) for m_id in "f"]), # = access PNR links + 14: set([network.mode(m_id) for m_id in "g"]), # = egress PNR links + 15: set([network.mode(m_id) for m_id in "q"]), # = access KNR links + 16: set([network.mode(m_id) for m_id in "j"]), # = egress KNR links + 17: set([network.mode(m_id) for m_id in "Q"]), # = access TNC links + 18: set([network.mode(m_id) for m_id in "J"]), # = egress TNC links + } + modes_gp_lanes = { + 0: set([]), + 1: set([network.mode(m_id) for m_id in "dhHiIsSvVmMtT"]), # all modes + 2: set([network.mode(m_id) for m_id in "dhHiIsvVmMt"]), # no heavy truck + 3: set([network.mode(m_id) for m_id in "dhHiIvVmM"]), # no heavy or medium truck + 4: set([network.mode(m_id) for m_id in "dhHivVm"]), # no truck + 5: set([network.mode(m_id) for m_id in "dST"]), # only heavy trucks + 6: set([network.mode(m_id) for m_id in "dsStT"]), # heavy and medium trucks + 7: set([network.mode(m_id) for m_id in "dIsSMtT"]), # all trucks only + } + 
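+        # the integer keys above mirror the truck restriction codes carried in
+        # @truck_<period>; set_auto_modes() indexes these sets per link and
+        # period, and the toll lookups below reuse the same groups with the
+        # non-toll modes removed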
non_toll_modes = set([network.mode(m_id) for m_id in "hHiIsS"]) + self._auto_mode_lookup = { + "GP": modes_gp_lanes, + "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), + "HOV2": set([network.mode(m_id) for m_id in "dHiVm"]), + "HOV3": set([network.mode(m_id) for m_id in "dim"]), + } + + def set_auto_modes(self, network, period): + # time periods + # need to update the modes from the XTRUCK for their time of day + # Note: only truck types 1, 3, 4, and 7 found in 2012 base network + truck = "@truck_%s" % period.lower() + toll = "@toll_%s" % period.lower() + lookup = self._auto_mode_lookup + for link in network.links(): + auto_modes = set([]) + if link.type == 10: # connector + auto_modes = lookup["GP"][link[truck]] + elif link.type in [11, 12]: + pass # no auto modes, rail only (11) or bus only (12) + elif link["@hov"] == 1: + auto_modes = lookup["GP"][link[truck]] + elif link["@hov"] in [2, 3]: + # managed lanes, free for HOV2 and HOV3+, tolls for SOV + if link[toll] > 0: + auto_modes = lookup["TOLL"][link[truck]] + # special case of I-15 managed lanes base year and 2020, no build + elif link.type == 1 and link["@project_code"] in [41, 42, 486, 373, 711]: + auto_modes = lookup["TOLL"][link[truck]] + elif link.type == 8 or link.type == 9: + auto_modes = lookup["TOLL"][link[truck]] + if link["@hov"] == 2: + auto_modes = auto_modes | lookup["HOV2"] + else: + auto_modes = auto_modes | lookup["HOV3"] + elif link["@hov"] == 4: + auto_modes = lookup["TOLL"][link[truck]] + link.modes = link.transit_modes | auto_modes + + def create_road_base(self, network, attr_map): + self._log.append({"type": "header", "content": "Import roadway base network from TNED_HwyNet %s" % self.source}) + hwy_data = gen_utils.DataTableProc("TNED_HwyNet", self.source) + # TEMP workaround: BN field is string + bn_index = hwy_data._attr_names.index("BN") + hwy_data._values[bn_index] = hwy_data._values[bn_index].astype(int) + + if self.save_data_tables: + hwy_data.save("%s_TNED_HwyNet" % self.data_table_name, self.overwrite) + + is_centroid = lambda arc, node : (arc["FC"] == 10) and (node == "AN") + link_attr_map = {} + for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): + if tcoved_type in ("TWO_WAY", "HWY_TWO_WAY", "ONE_WAY", "HWY_ONE_WAY"): + link_attr_map[field] = (name, tcoved_type.replace("HWY_", ""), emme_type, desc) + + def define_modes(arc): + if arc["FC"] in [11, 12] or arc["ABLNA"] == 0: #or ((arc["HOV"] < 1 or arc["HOV"] > 4) and arc["FC"] != 10): + vehicle_index = int(arc["MINMODE"] / 100)*100 + aux_index = int(arc["MINMODE"] % 100) + return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] + return [network.mode('d')] + + self._create_base_net( + hwy_data, network, mode_callback=define_modes, centroid_callback=is_centroid, link_attr_map=link_attr_map) + + hwy_node_data = gen_utils.DataTableProc("TNED_HwyNodes", self.source) + node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() + if v[1] in ("BOTH", "HWY")] + for record in hwy_node_data: + node = network.node(record["HNODE"]) + if node: + for src, dst in node_attrs: + node[dst] = record[src] + else: + self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) + self._log.append({"type": "text", "content": "Import traffic base network complete"}) + + def create_rail_base(self, network, attr_map): + self._log.append({"type": "header", "content": "Import rail base network from TNED_RailNet %s" % self.source}) + transit_data = 
gen_utils.DataTableProc("TNED_RailNet", self.source) + + if self.save_data_tables: + transit_data.save("%s_TNED_RailNet" % self.data_table_name, self.overwrite) + + link_attr_map = {} + for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): + if tcoved_type in ("TWO_WAY", "RAIL_TWO_WAY", "ONE_WAY", "RAIL_ONE_WAY"): + link_attr_map[field] = (name, tcoved_type.replace("RAIL_", ""), emme_type, desc) + + tier1_modes = set([network.mode(m_id) for m_id in "o"]) + tier1_rail_link_name = self._props["transit.newMode"] + + def define_modes(arc): + if arc["NM"] == tier1_rail_link_name: + return tier1_modes + vehicle_index = int(arc["MINMODE"] / 100)*100 + aux_index = int(arc["MINMODE"] % 100) + return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] + + self._create_base_net( + transit_data, network, mode_callback=define_modes, link_attr_map=link_attr_map) + + transit_node_data = gen_utils.DataTableProc("TNED_RailNodes", self.source) + # Load PARK, elevation, stop type data onto transit nodes + node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() + if v[1] in ("BOTH", "RAIL")] + for record in transit_node_data: + node = network.node(record["HNODE"]) + if node: + for src, dst in node_attrs: + node[dst] = record[src] + else: + self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) + + self._log.append({"type": "text", "content": "Import transit base network complete"}) + + def _create_base_net(self, data, network, link_attr_map, mode_callback, centroid_callback=None): + forward_attr_map = {} + reverse_attr_map = {} + arc_id_name = "HWYCOV0_ID" + arc_guid_name = "HWYSegGUID" + for field, (name, tcoved_type, emme_type, desc) in link_attr_map.iteritems(): + if field in [arc_id_name, arc_guid_name, "DIR"]: + # these attributes are special cases for reverse link + forward_attr_map[field] = name + elif tcoved_type in "TWO_WAY": + forward_attr_map[field] = name + reverse_attr_map[field] = name + elif tcoved_type in "ONE_WAY": + forward_attr_map["AB" + field] = name + reverse_attr_map["BA" + field] = name + + emme_id_name = forward_attr_map[arc_id_name] + emme_guid_name = forward_attr_map[arc_guid_name] + dir_name = forward_attr_map["DIR"] + reverse_dir_map = {1: 3, 3: 1, 2: 4, 4: 2, 0: 0} + new_node_id = max(data.values("AN").max(), data.values("BN").max()) + 1 + + if centroid_callback is None: + centroid_callback = lambda a,n: False + + # Create nodes and links + for arc in data: + if float(arc["AN"]) == 0 or float(arc["BN"]) == 0: + self._log.append({"type": "text", + "content": "Node ID 0 in AN (%s) or BN (%s) for link GUID/ID %s/%s." % + (arc["AN"], arc["BN"], arc[arc_guid_name], arc[arc_id_name])}) + continue + coordinates = arc["geo_coordinates"] + i_node = get_node(network, arc['AN'], coordinates[0], centroid_callback(arc, "AN")) + j_node = get_node(network, arc['BN'], coordinates[-1], centroid_callback(arc, "BN")) + link = network.link(i_node, j_node) + if link: + msg = "Duplicate link between AN %s and BN %s. Link GUID/IDs %s/%s and %s/%s." % \ + (arc["AN"], arc["BN"], link[emme_guid_name], link[emme_id_name], arc[arc_guid_name], arc[arc_id_name]) + self._log.append({"type": "text", "content": msg}) + if link[emme_guid_name] == arc[arc_guid_name]: + self._log.append({"type": "text", "content": "... 
but GUIDs match (not an error)"}) + else: + self._error.append(msg) + else: + modes = mode_callback(arc) + link = network.create_link(i_node, j_node, modes) + link.length = arc["LENGTH"] + if len(coordinates) > 2: + link.vertices = coordinates[1:-1] + for field, attr in forward_attr_map.iteritems(): + link[attr] = arc[field] + if arc["WAY"] == 2 or arc["WAY"] == 0: + reverse_link = network.link(j_node, i_node) + if not reverse_link: + reverse_link = network.create_link(j_node, i_node, modes) + reverse_link.length = link.length + reverse_link.vertices = list(reversed(link.vertices)) + for field, attr in reverse_attr_map.iteritems(): + reverse_link[attr] = arc[field] + reverse_link[emme_id_name] = -1*arc[arc_id_name] + reverse_link[emme_guid_name] = "-" + arc[arc_guid_name] + reverse_link[dir_name] = reverse_dir_map[arc["DIR"]] + + def create_transit_lines(self, network, attr_map): + self._log.append({"type": "header", "content": "Import transit lines"}) + fatal_errors = 0 + # Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,Midday_Headway,Evening_Headway,EarlyAM_Headway,Night_Headway,Night_Hours,Config,Fare + transit_line_data = gen_utils.DataTableProc("trrt", self.source) + # Route_ID,Link_ID,Link_GUID,Direction + transit_link_data = gen_utils.DataTableProc("trlink", self.source) + # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,StopName + #transit_stop_data = gen_utils.DataTableProc("trstop", self.source) + transit_stop_data = gen_utils.DataTableProc("trstop", _join(_dir(self.source), "trstop.csv")) + # From_line,To_line,Board_stop,Wait_time + # Note: Board_stop is not used + # Timed xfer data + periods = ['EA', 'AM', 'MD', 'PM', 'EV'] + timed_xfer_data = {} + for period in periods: + file_path = _join(_dir(self.source), FILE_NAMES["TIMEXFER"] % period) + if os.path.exists(file_path): + timed_xfer_data[period] = gen_utils.DataTableProc("timexfer_"+period, file_path) + else: + timed_xfer_data[period] = [] + + mode_properties = gen_utils.DataTableProc("MODE5TOD", _join(_dir(self.source), FILE_NAMES["MODE5TOD"]), convert_numeric=True) + mode_details = {} + for record in mode_properties: + mode_details[int(record["MODE_ID"])] = record + + if self.save_data_tables: + transit_link_data.save("%s_trlink" % self.data_table_name, self.overwrite) + transit_line_data.save("%s_trrt" % self.data_table_name, self.overwrite) + transit_stop_data.save("%s_trstop" % self.data_table_name, self.overwrite) + mode_properties.save("%s_MODE5TOD" % self.data_table_name, self.overwrite) + + coaster = network.create_transit_vehicle(40, 'c') # 4 coaster + trolley = network.create_transit_vehicle(50, 'l') # 5 sprinter/trolley + brt_yellow = network.create_transit_vehicle(60, 'y') # 6 BRT yellow line (future line) + brt_red = network.create_transit_vehicle(70, 'r') # 7 BRT red line (future line) + premium_bus = network.create_transit_vehicle(80, 'p') # 8 prem express + express_bus = network.create_transit_vehicle(90, 'e') # 9 regular express + local_bus = network.create_transit_vehicle(100, 'b') # 10 local bus + tier1 = network.create_transit_vehicle(45, 'o') # 11 Tier 1 + + brt_yellow.auto_equivalent = 3.0 + brt_red.auto_equivalent = 3.0 + premium_bus.auto_equivalent = 3.0 + express_bus.auto_equivalent = 3.0 + local_bus.auto_equivalent = 3.0 + + # Capacities - for reference / post-assignment analysis + tier1.seated_capacity, tier1.total_capacity = 7 * 142, 7 * 276 + trolley.seated_capacity, trolley.total_capacity = 4 * 64, 4 * 200 + brt_yellow.seated_capacity, brt_yellow.total_capacity 
= 32, 70 + brt_red.seated_capacity, brt_red.total_capacity = 32, 70 + premium_bus.seated_capacity, premium_bus.total_capacity = 32, 70 + express_bus.seated_capacity, express_bus.total_capacity = 32, 70 + local_bus.seated_capacity, local_bus.total_capacity = 32, 70 + + trrt_attrs = [] + mode5tod_attrs = [] + for elem_type in "TRANSIT_LINE", "TRANSIT_SEGMENT": + mapping = attr_map[elem_type] + for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): + if tcoved_type == "TRRT": + trrt_attrs.append((field, attr)) + elif tcoved_type == "MODE5TOD": + mode5tod_attrs.append((field, attr)) + network.create_attribute("TRANSIT_SEGMENT", "milepost") + + # Pre-process transit line (trrt) to know the route names for errors / warnings + transit_line_records = list(transit_line_data) + line_names = {} + for record in transit_line_records: + line_names[int(record["Route_ID"])] = str(record["Route_Name"]) + + links = dict((link["#hwyseg_guid"], link) for link in network.links()) + transit_routes = _defaultdict(lambda: []) + for record in transit_link_data: + line_ref = line_names.get(int(record["Route_ID"]), record["Route_ID"]) + link_id = record["Link_GUID"] + if "-" in record["Direction"]: + link_id = "-" + link_id + link = links.get(link_id) + if not link: + if "-" in record["Direction"]: + reverse_link = links.get("-" + link_id) + else: + reverse_link = links.get(link_id[1:]) + if reverse_link: + link = network.create_link(reverse_link.j_node, reverse_link.i_node, reverse_link.modes) + link.vertices = list(reversed(reverse_link.vertices)) + for attr in network.attributes("LINK"): + if attr not in set(["vertices"]): + link[attr] = reverse_link[attr] + link["@tcov_id"] = -1 * reverse_link["@tcov_id"] + link["#hwyseg_guid"] = link_id + links[link_id] = link + msg = "Transit line %s : Missing reverse link with ID %s (%s) (reverse link created)" % ( + line_ref, record["Link_GUID"], link) + self._log.append({"type": "text", "content": msg}) + self._error.append("Transit route import: " + msg) + link = reverse_link + if not link: + msg = "Transit line %s : No link with GUID %s, routing may not be correct" % ( + line_ref, record["Link_GUID"]) + self._log.append({"type": "text", "content": msg}) + self._error.append("Transit route import: " + msg) + fatal_errors += 1 + continue + + transit_routes[int(record["Route_ID"])].append(link) + + # lookup list of special tier 1 mode route names + tier1_rail_route_names = [str(n) for n in self._props["transit.newMode.route"]] + dummy_links = set([]) + transit_lines = {} + auto_mode = network.mode("d") + for record in transit_line_records: + try: + route = transit_routes[int(record["Route_ID"])] + # Find if name matches one of the names listed in transit.newMode.route and convert to tier 1 rail + is_tier1_rail = False + for name in tier1_rail_route_names: + if str(record["Route_Name"]).startswith(name): + is_tier1_rail = True + break + if is_tier1_rail: + vehicle_type = 45 + mode = network.transit_vehicle(vehicle_type).mode + else: + vehicle_type = int(record["Mode"]) * 10 + mode = network.transit_vehicle(vehicle_type).mode + prev_link = route[0] + itinerary = [prev_link] + for link in route[1:]: + if prev_link.j_node != link.i_node: # filling in the missing gap + msg = "Transit line %s (index %s): Links not adjacent, shortest path interpolation used (%s and %s)" % ( + record["Route_Name"], record["Route_ID"], prev_link["#hwyseg_guid"], link["#hwyseg_guid"]) + log_record = {"type": "text", "content": msg} + self._log.append(log_record) + sub_path = 
find_path(prev_link, link, mode) + itinerary.extend(sub_path) + log_record["content"] = log_record["content"] + " through %s links" % (len(sub_path)) + itinerary.append(link) + prev_link = link + + node_itinerary = [itinerary[0].i_node] + [l.j_node for l in itinerary] + missing_mode = 0 + for link in itinerary: + if mode not in link.modes: + link.modes |= set([mode]) + missing_mode += 1 + if missing_mode: + msg = "Transit line %s (index %s): missing mode added to %s link(s)" % ( + str(record["Route_Name"]), record["Route_ID"], missing_mode) + self._log.append({"type": "text", "content": msg}) + tline = network.create_transit_line( + str(record["Route_Name"]), vehicle_type, node_itinerary) + + for field, attr in trrt_attrs: + tline[attr] = float(record[field]) + if is_tier1_rail: + line_details = mode_details[11] + else: + line_details = mode_details[int(record["Mode"])] + for field, attr in mode5tod_attrs: + tline[attr] = float(line_details[field]) + #"XFERPENTM": "Transfer penalty time: " + #"WTXFERTM": "Transfer perception:" + # NOTE: an additional transfer penalty perception factor of 5.0 is included + # in assignment + tline["@transfer_penalty"] = float(line_details["XFERPENTM"]) * float(line_details["WTXFERTM"]) + tline.headway = tline["@headway_am"] if tline["@headway_am"] > 0 else 999 + tline.layover_time = 5 + + transit_lines[int(record["Route_ID"])] = tline + milepost = 0 + for segment in tline.segments(): + segment.milepost = milepost + milepost += segment.link.length + segment.allow_boardings = False + segment.allow_alightings = False + if auto_mode in segment.link.modes: + # segments on links with auto mode are ft1 = timau + segment.transit_time_func = 1 + else: + # ft2 = ul2 -> copied @trtime (fixed speed) + segment.transit_time_func = 2 + except Exception as error: + msg = "Transit line %s: %s %s" % (record["Route_Name"], type(error), error) + self._log.append({"type": "text", "content": msg}) + trace_text = _traceback.format_exc().replace("\n", "
      ") + self._log.append({"type": "text", "content": trace_text}) + self._error.append("Transit route import: line %s not created" % record["Route_Name"]) + fatal_errors += 1 + for link in dummy_links: + network.delete_link(link.i_node, link.j_node) + + line_stops = _defaultdict(lambda: []) + for record in transit_stop_data: + try: + line_name = line_names[int(record["Route_ID"])] + line_stops[line_name].append(record) + except KeyError: + self._log.append( + {"type": "text", + "content": "Stop %s: could not find transit line by ID %s (link GUID %s)" % ( + record["Stop_ID"], record["Route_ID"], record["Link_GUID"])}) + for stops in line_stops.itervalues(): + stops.sort(key=lambda stop: float(stop["Milepost"])) + + seg_float_attr_map = [] + seg_string_attr_map = [] + for field, (attr, t_type, e_type, desc) in attr_map["TRANSIT_SEGMENT"].iteritems(): + if t_type == "TRSTOP": + if e_type == "STRING": + seg_string_attr_map.append([field, attr]) + else: + seg_float_attr_map.append([field, attr]) + + for line_name, stops in line_stops.iteritems(): + tline = network.transit_line(line_name) + if not tline: + continue + itinerary = tline.segments(include_hidden=True) + segment = prev_segment = itinerary.next() + for stop in stops: + if "DUMMY" in stop["StopName"]: + continue + stop_link_id = stop['Link_GUID'] + node_id = int(stop['Node']) + while segment.link and segment.link["#hwyseg_guid"].lstrip("-") != stop_link_id: + segment = itinerary.next() + + if node_id == segment.i_node.number: + pass + elif segment.j_node and node_id == segment.j_node.number: + # if matches the J-node then the stop is on the next segment + segment = itinerary.next() + else: + if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: + msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( + line_name, stop["Route_ID"], segment, stop_link_id, node_id) + else: + msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( + line_name, stop["Route_ID"], stop_link_id, node_id) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + fatal_errors += 1 + # reset iterator to start back from previous segment + itinerary = tline.segments(include_hidden=True) + segment = itinerary.next() + while segment.id != prev_segment.id: + segment = itinerary.next() + continue + segment.allow_boardings = True + segment.allow_alightings = True + segment.dwell_time = min(tline.default_dwell_time, 99.99) + for field, attr in seg_string_attr_map: + segment[attr] = stop[field] + for field, attr in seg_float_attr_map: + segment[attr] = float(stop[field]) + prev_segment = segment + + def lookup_line(ident): + line = network.transit_line(ident) + if line: + return line.id + line = transit_lines.get(int(ident)) + if line: + return line.id + raise Exception("'%s' is not a route name or route ID" % ident) + + # Normalizing the case of the headers as different examples have been seen + for period, data in timed_xfer_data.iteritems(): + norm_data = [] + for record in data: + norm_record = {} + for key, val in record.iteritems(): + norm_record[key.lower()] = val + norm_data.append(norm_record) + + from_line, to_line, wait_time = [], [], [] + for i, record in enumerate(norm_data, start=2): + try: + from_line.append(lookup_line(record["from_line"])) + to_line.append(lookup_line(record["to_line"])) + wait_time.append(float(record["wait_time"])) + except Exception as error: + msg = "Error processing timexfer_%s.csv on file line %s: %s" % 
(period, i, error) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + fatal_errors += 1 + + timed_xfer = _dt.Data() + timed_xfer.add_attribute(_dt.Attribute("from_line", _np.array(from_line).astype("O"))) + timed_xfer.add_attribute(_dt.Attribute("to_line", _np.array(to_line).astype("O"))) + timed_xfer.add_attribute(_dt.Attribute("wait_time", _np.array(wait_time))) + # Creates and saves the new table + gen_utils.DataTableProc("%s_timed_xfer_%s" % (self.data_table_name, period), data=timed_xfer) + + if fatal_errors > 0: + raise Exception("Import of transit lines: %s fatal errors found" % fatal_errors) + self._log.append({"type": "text", "content": "Import transit lines complete"}) + + def calc_transit_attributes(self, network): + # for link in network.links(): + # if link.type == 0: # walk only links have FC ==0 + # link.type = 99 + + fares_file_name = FILE_NAMES["FARES"] + special_fare_path = _join(self.source, fares_file_name) + if not os.path.isfile(special_fare_path): + return + + # ON TRANSIT LINES + # Set 3-period headway based on revised headway calculation + for line in network.transit_lines(): + for period in ["ea", "am", "md", "pm", "ev"]: + line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) + + def get_line(line_id): + line = network.transit_line(line_id) + if line is None: + raise Exception("%s: line does not exist: %s" % (fares_file_name, line_id)) + return line + + # Special incremental boarding and in-vehicle fares + # to recreate the coaster zone fares + self._log.append({"type": "header", "content": "Apply special_fares to transit lines"}) + with open(special_fare_path) as fare_file: + self._log.append({"type": "text", "content": "Using fare details (for coaster) from %s" % fares_file_name}) + special_fares = None + yaml_installed = True + try: + import yaml + special_fares = yaml.load(fare_file) + self._log.append({"type": "text", "content": yaml.dump(special_fares).replace("\n", "
      ")}) + except ImportError: + yaml_installed = False + except: + pass + if special_fares is None: + try: + import json + special_fares = json.load(fare_file) + self._log.append({"type": "text", "content": json.dumps(special_fares, indent=4).replace("\n", "
      ")}) + except: + pass + if special_fares is None: + msg = "YAML or JSON" if yaml_installed else "JSON (YAML parser not installed)" + raise Exception(fares_file_name + ": file could not be parsed as " + msg) + + + for record in special_fares["boarding_cost"]["base"]: + line = get_line(record["line"]) + line["@fare"] = 0 + for seg in line.segments(): + seg["@coaster_fare_board"] = record["cost"] + for record in special_fares["boarding_cost"].get("stop_increment", []): + line = get_line(record["line"]) + for seg in line.segments(True): + if record["stop"] in seg["#stop_name"]: + seg["@coaster_fare_board"] += record["cost"] + break + for record in special_fares["in_vehicle_cost"]: + line = get_line(record["line"]) + for seg in line.segments(True): + if record["from"] in seg["#stop_name"]: + seg["@coaster_fare_inveh"] = record["cost"] + break + pass_cost_keys = ['day_pass', 'regional_pass'] + pass_costs = [] + for key in pass_cost_keys: + cost = special_fares.get(key) + if cost is None: + raise Exception("key '%s' missing from %s" % (key, fares_file_name)) + pass_costs.append(cost) + pass_values = _dt.Data() + pass_values.add_attribute(_dt.Attribute("pass_type", _np.array(pass_cost_keys).astype("O"))) + pass_values.add_attribute(_dt.Attribute("cost", _np.array(pass_costs).astype("f8"))) + gen_utils.DataTableProc("%s_transit_passes" % self.data_table_name, data=pass_values) + self._log.append({"type": "text", "content": "Apply special_fares to transit lines complete"}) + + def renumber_base_nodes(self, network): + tracker = gen_utils.AvailableNodeIDTracker(network) + nodes = [n for n in network.nodes() if n.number > 999999] + nodes = sorted(nodes, key=lambda x: x.number, reverse=True) + if nodes: + self._log.append({"type": "text", "content": "Renumbered %s nodes" % len(nodes)}) + for n in nodes: + old_number = n.number + n.number = tracker.get_id() + self._log.append({"type": "text", "content": " - renumbered %s to %s " % (old_number, n.number)}) + + def create_turns(self, network): + self._log.append({"type": "header", "content": "Import turns and turn restrictions"}) + self._log.append({"type": "text", "content": "Process turns for turn prohibited by ID"}) + turn_data = gen_utils.DataTableProc("Turns", self.source) + if self.save_data_tables: + turn_data.save("%s_turns" % self.data_table_name, self.overwrite) + # Process turns.csv for prohibited turns penalty + for i, record in enumerate(turn_data): + from_node_id, to_node_id, at_node_id = record["FromNode"], record["ToNode"], record["MidNode"] + at_node = network.node(at_node_id) + if at_node and not at_node.is_intersection: + try: + network.create_intersection(at_node) + except Exception as error: + text = ("record %s turn from %s, at %s, to %s: cannot create intersection" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + trace_text = _traceback.format_exc().replace("\n", "
      ") + self._log.append({"type": "text", "content": trace_text}) + self._error.append(text) + continue + turn = network.turn(from_node_id, at_node_id, to_node_id) + if at_node is None: + text = ("record %s turn from %s, at %s, to %s: at node does not exist" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + self._error.append(text) + elif turn is None: + text = ("record %s turn from %s, at %s, to %s: does not form a turn" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + self._error.append(text) + else: + turn.penalty_func = 0 # prohibit turn + # NOTE: could support penalty value + # turn.penalty_func = 1 + # turn.data1 = float(record["penalty"]) + self._log.append({"type": "text", "content": "Import turns and turn prohibitions complete"}) + + def calc_traffic_attributes(self, network): + self._log.append({"type": "header", "content": "Calculate derived traffic attributes"}) + # "COST": "@cost_operating" + # "ITOLL": "@toll_flag" # ITOLL - Toll + 100 *[0,1] if managed lane (I-15 tolls) + # Note: toll_flag is no longer used + # "ITOLL2": "@toll" # ITOLL2 - Toll + # "ITOLL3": "@cost_auto" # ITOLL3 - Toll + AOC + # "@cost_hov" + # "ITOLL4": "@cost_med_truck" # ITOLL4 - Toll * 1.03 + AOC + # "ITOLL5": "@cost_hvy_truck" # ITOLL5 - Toll * 2.33 + AOC + fatal_errors = 0 + try: + aoc = float(self._props["aoc.fuel"]) + float(self._props["aoc.maintenance"]) + except ValueError: + raise Exception("Error during float conversion for aoc.fuel or aoc.maintenance from sandag_abm.properties file") + scenario_year = int(self._props["scenarioYear"]) + periods = ["EA", "AM", "MD", "PM", "EV"] + time_periods = ["_ea", "_am", "_md", "_pm", "_ev"] + src_time_periods = ["_op", "_am", "_op", "_pm", "_op"] + mode_d = network.mode('d') + + # Calculate upstream and downstream interchange distance + # First, label the intersection nodes as nodes with type 1 links (freeway) and + # type 8 links (freeway-to-freeway ramp) + network.create_attribute("NODE", "is_interchange") + interchange_points = [] + for node in network.nodes(): + adj_links = list(node.incoming_links()) + list(node.outgoing_links()) + has_freeway_links = bool( + [l for l in adj_links + if l.type == 1 and mode_d in l.modes]) + has_ramp_links = bool( + [l for l in adj_links + if l.type == 8 and mode_d in l.modes and not "HOV" in l["#name"]]) + if has_freeway_links and has_ramp_links: + node.is_interchange = True + interchange_points.append(node) + else: + node.is_interchange = False + for node in network.nodes(): + node["@interchange"] = node.is_interchange + + for link in network.links(): + if link.type == 1 and mode_d in link.modes: + link["@intdist_down"] = interchange_distance(link, "DOWNSTREAM") + link["@intdist_up"] = interchange_distance(link, "UPSTREAM") + self._log.append({"type": "text", "content": "Calculate of nearest interchange distance complete"}) + + # Static reliability parameters + # freeway coefficients + freeway_rel = { + "intercept": 0.1078, + "speed>70": 0.01393, + "upstream": 0.011, + "downstream": 0.0005445, + } + # arterial/ramp/other coefficients + road_rel = { + "intercept": 0.0546552, + "lanes": { + 1: 0.0, + 2: 0.0103589, + 3: 0.0361211, + 4: 0.0446958, + 5: 0.0 + }, + "speed": { + "<35": 0, + 35: 0.0075674, + 40: 0.0091012, + 45: 0.0080996, + 50: -0.0022938, + ">50": -0.0046211 + }, + "control": { + 0: 0, # Uncontrolled + 1: 0.0030973, # Signal + 2: -0.0063281, # Stop + 3: -0.0063281, # Stop + 4: 0.0127692, # Other, 
Railway, etc. + } + } + for link in network.links(): + # Change SR125 toll speed to 70MPH + if link["@hov"] == 4 and link.type == 1: + link["@speed_posted"] = 70 + link["@cost_operating"] = link.length * aoc + for time in time_periods: + # add link delay (30 sec=0.5mins) to HOV connectors to discourage travel + if link.type == 8 and (link["@hov"] == 2 or link["@hov"] == 3): + link["@time_link" + time] = link["@time_link" + time] + 0.375 + + # make speed on HOV lanes (70mph) the same as parallel GP lanes (65mph) + # - set speed back to posted speed - increase travel time by (speed_adj/speed_posted) + if link.type == 1 and (link["@hov"] == 2 or link["@hov"] == 3): + speed_adj = link["@speed_adjusted"] + speed_posted = link["@speed_posted"] + if speed_adj>0: + link["@time_link" + time] = (speed_adj/(speed_posted*1.0)) * link["@time_link" + time] + + # Required file + vehicle_class_factor_file = FILE_NAMES["VEHICLE_CLASS"] + facility_factors = _defaultdict(lambda: {}) + facility_factors["DEFAULT_FACTORS"] = { + "ALL": { + "auto": 1.0, + "hov2": 1.0, + "hov3": 1.0, + "lgt_truck": 1.0, + "med_truck": 1.03, + "hvy_truck": 2.03 + }, + "count": 0 + } + if os.path.exists(_join(self.source, vehicle_class_factor_file)): + msg = "Adjusting tolls based on factors from %s" % vehicle_class_factor_file + self._log.append({"type": "text", "content": msg}) + # NOTE: CSV Reader sets the field names to UPPERCASE for consistency + with gen_utils.CSVReader(_join(self.source, vehicle_class_factor_file)) as r: + for row in r: + if "YEAR" in r.fields and int(row["YEAR"]) != scenario_year: # optional year column + continue + name = row["FACILITY_NAME"] + # optional time-of-day entry, default to ALL if no column or blank + fac_time = row.get("TIME_OF_DAY") + if fac_time is None: + fac_time = "ALL" + facility_factors[name][fac_time] = { + "auto": float(row["DA_FACTOR"]), + "hov2": float(row["S2_FACTOR"]), + "hov3": float(row["S3_FACTOR"]), + "lgt_truck": float(row["TRK_L_FACTOR"]), + "med_truck": float(row["TRK_M_FACTOR"]), + "hvy_truck": float(row["TRK_H_FACTOR"]) + } + facility_factors[name]["count"] = 0 + + # validate ToD entry, either list EA, AM, MD, PM and EV, or ALL, but not both + for name, factors in facility_factors.iteritems(): + # default keys should be "ALL" and "count" + if "ALL" in factors: + if len(factors) > 2: + fatal_errors += 1 + msg = ("Individual time periods and 'ALL' (or blank) listed under " + "TIME_OF_DAY column in {} for facility {}").format(vehicle_class_factor_file, name) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + elif set(periods + ["count"]) != set(factors.keys()): + fatal_errors += 1 + msg = ("Missing time periods {} under TIME_OF_DAY column in {} for facility {}").format( + (set(periods) - set(factors.keys())), vehicle_class_factor_file, name) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + + def lookup_link_name(link): + for attr_name in ["#name", "#name_from", "#name_to"]: + for name, _factors in facility_factors.iteritems(): + if name in link[attr_name]: + return _factors + return facility_factors["DEFAULT_FACTORS"] + + def match_facility_factors(link): + factors = lookup_link_name(link) + factors["count"] += 1 + factors = _copy(factors) + del factors["count"] + # @hov = 2 or 3 overrides hov2 and hov3 costs + if link["@hov"] == 2: + for _, time_factors in factors.iteritems(): + time_factors["hov2"] = 0.0 + time_factors["hov3"] = 0.0 + elif link["@hov"] == 3: + for _, time_factors in factors.iteritems(): + 
time_factors["hov3"] = 0.0 + return factors + + vehicle_classes = ["auto", "hov2", "hov3", "lgt_truck", "med_truck", "hvy_truck"] + for link in network.links(): + if sum(link["@toll" + time] for time in time_periods) > 0: + factors = match_facility_factors(link) + for time, period in zip(time_periods, periods): + time_factors = factors.get(period, factors.get("ALL")) + for name in vehicle_classes: + link["@cost_" + name + time] = time_factors[name] * link["@toll" + time] + link["@cost_operating"] + else: + for time in time_periods: + for name in vehicle_classes: + link["@cost_" + name + time] = link["@cost_operating"] + for name, class_factors in facility_factors.iteritems(): + msg = "Facility name '%s' matched to %s links." % (name, class_factors["count"]) + self._log.append({"type": "text2", "content": msg}) + + self._log.append({ + "type": "text", + "content": "Calculation and time period expansion of costs, tolls, capacities and times complete"}) + + # calculate static reliability + for link in network.links(): + for time in time_periods: + sta_reliability = "@sta_reliability" + time + # if freeway apply freeway parameters to this link + if link["type"] == 1 and link["@lane" + time] > 0: + high_speed_factor = freeway_rel["speed>70"] if link["@speed_posted"] >= 70 else 0.0 + upstream_factor = freeway_rel["upstream"] * 1 / link["@intdist_up"] + downstream_factor = freeway_rel["downstream"] * 1 / link["@intdist_down"] + link[sta_reliability] = ( + freeway_rel["intercept"] + high_speed_factor + upstream_factor + downstream_factor) + # arterial/ramp/other apply road parameters + elif link["type"] <= 9 and link["@lane" + time] > 0: + lane_factor = road_rel["lanes"].get(link["@lane" + time], 0.0) + speed_bin = int(link["@speed_posted"] / 5) * 5 # truncate to multiple of 5 + if speed_bin < 35: + speed_bin = "<35" + elif speed_bin > 50: + speed_bin = ">50" + speed_factor = road_rel["speed"][speed_bin] + control_bin = min(max(link["@traffic_control"], 0), 4) + control_factor = road_rel["control"][control_bin] + link[sta_reliability] = road_rel["intercept"] + lane_factor + speed_factor + control_factor + else: + link[sta_reliability] = 0.0 + self._log.append({"type": "text", "content": "Calculate of link static reliability factors complete"}) + + # Cycle length matrix + # Intersecting Link + # Approach Link 2 3 4 5 6 7 8 9 + # FC Description + # 2 Prime Arterial 2.5 2 2 2 2 2 2 2 + # 3 Major Arterial 2 2 2 2 2 2 2 2 + # 4 Collector 2 2 1.5 1.5 1.5 1.5 1.5 1.5 + # 5 Local Collector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + # 6 Rural Collector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + # 7 Local Road 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + # 8 Freeway connector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + # 9 Local Ramp 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + + # Volume-delay functions + # fd10: freeway node approach + # fd11: non-intersection node approach + # fd20: cycle length 1.25 + # fd21: cycle length 1.5 + # fd22: cycle length 2.0 + # fd23: cycle length 2.5 + # fd24: cycle length 2.5 and metered ramp + # fd25: freeway node approach AM and PM only + network.create_attribute("LINK", "green_to_cycle") + network.create_attribute("LINK", "cycle") + vdf_cycle_map = {1.25: 20, 1.5: 21, 2.0: 22, 2.5: 23} + for node in network.nodes(): + incoming = list(node.incoming_links()) + outgoing = list(node.outgoing_links()) + is_signal = False + for link in incoming: + if link["@green_to_cycle_init"] > 0: + is_signal = True + break + if is_signal: + lcs = [link.type for link in incoming + outgoing] + min_lc = max(lcs) # Note: minimum class is 
actually the HIGHEST value, + max_lc = min(lcs) # and maximum is the LOWEST + + for link in incoming: + # Metered ramps + if link["@traffic_control"] in [4, 5]: + link["cycle"] = 2.5 + link["green_to_cycle"] = 0.42 + link.volume_delay_func = 24 + # Stops + elif link["@traffic_control"] in [2, 3]: + link["cycle"] = 1.25 + link["green_to_cycle"] = 0.42 + link.volume_delay_func = 20 + elif link["@green_to_cycle_init"] > 0 and is_signal: + if link.type == 2: + c_len = 2.5 if min_lc == 2 else 2.0 + elif link.type == 3: + c_len = 2.0 # Major arterial & anything + elif link.type == 4: + c_len = 1.5 if max_lc > 2 else 2.0 + elif link.type > 4: + if max_lc > 4: + c_len = 1.25 + elif max_lc == 4: + c_len = 1.5 + else: + c_len = 2.0 + if link["@green_to_cycle_init"] > 10: + link["green_to_cycle"] = link["@green_to_cycle_init"] / 100.0 + if link["green_to_cycle"] > 1.0: + link["green_to_cycle"] = 1.0 + link["cycle"] = c_len + link.volume_delay_func = vdf_cycle_map[c_len] + elif link.type == 1: + link.volume_delay_func = 10 # freeway + else: + link.volume_delay_func = 11 # non-controlled approach + self._log.append({"type": "text", "content": "Derive cycle, green_to_cycle, and VDF by approach node complete"}) + + for link in network.links(): + if link.volume_delay_func in [10, 11]: + continue + if link["@traffic_control"] in [4, 5]: + # Ramp meter controlled links are only enabled during the peak periods + for time in ["_am", "_pm"]: + link["@cycle" + time] = link["cycle"] + link["@green_to_cycle" + time] = link["green_to_cycle"] + else: + for time in time_periods: + link["@cycle" + time] = link["cycle"] + link["@green_to_cycle" + time] = link["green_to_cycle"] + self._log.append({"type": "text", "content": "Setting of time period @cycle and @green_to_cycle complete"}) + + network.delete_attribute("LINK", "green_to_cycle") + network.delete_attribute("LINK", "cycle") + network.delete_attribute("NODE", "is_interchange") + self._log.append({"type": "text", "content": "Calculate derived traffic attributes complete"}) + if fatal_errors > 0: + raise Exception("%s fatal errors during calculation of traffic attributes" % fatal_errors) + return + + def check_zone_access(self, network, mode): + # Verify that every centroid has at least one available + # access and egress connector + for centroid in network.centroids(): + access = egress = False + for link in centroid.outgoing_links(): + if mode in link.modes: + if link.j_node.is_intersection: + for turn in link.outgoing_turns(): + if turn.i_node != turn.k_node and turn.penalty_func != 0: + egress = True + else: + egress = True + if not egress: + raise Exception("No egress permitted from zone %s" % centroid.id) + for link in centroid.incoming_links(): + if mode in link.modes: + if link.j_node.is_intersection: + for turn in link.incoming_turns(): + if turn.i_node != turn.k_node and turn.penalty_func != 0: + access = True + else: + access = True + if not access: + raise Exception("No access permitted to zone %s" % centroid.id) + + @_m.logbook_trace("Set database functions (VDF, TPF and TTF)") + def set_functions(self, scenario): + create_function = _m.Modeller().tool( + "inro.emme.data.function.create_function") + set_extra_function_params = _m.Modeller().tool( + "inro.emme.traffic_assignment.set_extra_function_parameters") + emmebank = self.emmebank + for f_id in ["fd10", "fd11", "fd20", "fd21", "fd22", "fd23", "fd24", "fd25", + "fp1", "ft1", "ft2", "ft3", "ft4"]: + function = emmebank.function(f_id) + if function: + emmebank.delete_function(function) + + 
smartSignalf_CL = self._props["smartSignal.factor.LC"] + smartSignalf_MA = self._props["smartSignal.factor.MA"] + smartSignalf_PA = self._props["smartSignal.factor.PA"] + atdmf = self._props["atdm.factor"] + + reliability_tmplt = ( + "* (1 + el2 + {0}*(".format(atdmf)+ + "( {factor[LOS_C]} * ( put(get(1).min.1.5) - {threshold[LOS_C]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_C]})" + "+ ( {factor[LOS_D]} * ( get(2) - {threshold[LOS_D]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_D]})" + "+ ( {factor[LOS_E]} * ( get(2) - {threshold[LOS_E]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_E]})" + "+ ( {factor[LOS_FL]} * ( get(2) - {threshold[LOS_FL]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_FL]})" + "+ ( {factor[LOS_FH]} * ( get(2) - {threshold[LOS_FH]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_FH]})" + "))") + parameters = { + "freeway": { + "factor": { + "LOS_C": 0.2429, "LOS_D": 0.1705, "LOS_E": -0.2278, "LOS_FL": -0.1983, "LOS_FH": 1.022 + }, + "threshold": { + "LOS_C": 0.7, "LOS_D": 0.8, "LOS_E": 0.9, "LOS_FL": 1.0, "LOS_FH": 1.2 + }, + }, + "road": { # for arterials, ramps, collectors, local roads, etc. + "factor": { + "LOS_C": 0.1561, "LOS_D": 0.0, "LOS_E": 0.0, "LOS_FL": -0.449, "LOS_FH": 0.0 + }, + "threshold": { + "LOS_C": 0.7, "LOS_D": 0.8, "LOS_E": 0.9, "LOS_FL": 1.0, "LOS_FH": 1.2 + }, + } + } + # freeway fd10 + create_function( + "fd10", + "(ul1 * (1.0 + 0.24 * put((volau + volad) / ul3) ** 5.5))" + + reliability_tmplt.format(**parameters["freeway"]), + emmebank=emmebank) + # non-freeway link which is not an intersection approach fd11 + create_function( + "fd11", + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0))" + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd20", # Local collector and lower intersection and stop controlled approaches + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "1.25 / 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))" + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd21", # Collector intersection approaches + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "{0} * 1.5/ 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_CL) + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd22", # Major arterial and major or prime arterial intersection approaches + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "{0} * 2.0 / 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_MA) + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd23", # Primary arterial intersection approaches + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "{0} * 2.5/ 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_PA) + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd24", # Metered ramps + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "2.5/ 2 * (1-el1) ** 2 * (1.0 + 6.0 * ( (volau + volad) / el3 ) ** 2.0))" + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + # freeway fd25 (AM and PM only) + create_function( + "fd25", + "(ul1 * (1.0 + 0.6 * put((volau + volad) / ul3) ** 4))" + + reliability_tmplt.format(**parameters["freeway"]), + emmebank=emmebank) + + set_extra_function_params( + el1="@green_to_cycle", el2="@sta_reliability", el3="@capacity_inter_am", + 
emmebank=emmebank) + + create_function("fp1", "up1", emmebank=emmebank) # fixed cost turns stored in turn data 1 (up1) + + # buses in mixed traffic, use auto time + create_function("ft1", "ul1", emmebank=emmebank) + # fixed speed for separate guideway operations + create_function("ft2", "ul2", emmebank=emmebank) + # special 0-cost segments for prohibition of walk to different stop from centroid + create_function("ft3", "0", emmebank=emmebank) + # fixed guideway systems according to vehicle speed (not used at the moment) + create_function("ft4", "60 * length / speed", emmebank=emmebank) + + @_m.logbook_trace("Traffic zone connectivity check") + def check_connectivity(self, scenario): + modeller = _m.Modeller() + sola_assign = modeller.tool( + "inro.emme.traffic_assignment.sola_traffic_assignment") + set_extra_function_para = modeller.tool( + "inro.emme.traffic_assignment.set_extra_function_parameters") + create_matrix = _m.Modeller().tool( + "inro.emme.data.matrix.create_matrix") + net_calc = gen_utils.NetworkCalculator(scenario) + + emmebank = scenario.emmebank + zone_index = dict(enumerate(scenario.zone_numbers)) + num_processors = dem_utils.parse_num_processors("MAX-1") + + # Note matrix is also created in initialize_matrices + create_matrix("ms1", "zero", "zero", scenario=scenario, overwrite=True) + with gen_utils.temp_matrices(emmebank, "FULL", 1) as (result_matrix,): + result_matrix.name = "TEMP_AUTO_TRAVEL_TIME" + set_extra_function_para( + el1="@green_to_cycle_am", + el2="@sta_reliability_am", + el3="@capacity_inter_am", emmebank=emmebank) + net_calc("ul1", "@time_link_am", "modes=d") + net_calc("ul3", "@capacity_link_am", "modes=d") + net_calc("lanes", "@lane_am", "modes=d") + spec = { + "type": "SOLA_TRAFFIC_ASSIGNMENT", + "background_traffic": None, + "classes": [ + { + "mode": "d", + "demand": 'ms"zero"', + "generalized_cost": None, + "results": { + "od_travel_times": {"shortest_paths": result_matrix.named_id} + } + } + ], + "stopping_criteria": { + "max_iterations": 0, "best_relative_gap": 0.0, + "relative_gap": 0.0, "normalized_gap": 0.0 + }, + "performance_settings": {"number_of_processors": num_processors}, + } + sola_assign(spec, scenario=scenario) + travel_time = result_matrix.get_numpy_data(scenario) + + is_disconnected = (travel_time == 1e20) + disconnected_pairs = is_disconnected.sum() + if disconnected_pairs > 0: + error_msg = "Connectivity error(s) between %s O-D pairs" % disconnected_pairs + self._log.append({"type": "header", "content": error_msg}) + count_disconnects = [] + for axis, term in [(0, "from"), (1, "to")]: + axis_totals = is_disconnected.sum(axis=axis) + for i, v in enumerate(axis_totals): + if v > 0: + count_disconnects.append((zone_index[i], term, v)) + count_disconnects.sort(key=lambda x: x[2], reverse=True) + for z, direction, count in count_disconnects[:50]: + msg ="Zone %s disconnected %s %d other zones" % (z, direction, count) + self._log.append({"type": "text", "content": msg}) + if disconnected_pairs > 50: + self._log.append({"type": "text", "content": "[List truncated]"}) + raise Exception(error_msg) + self._log.append({"type": "header", "content": + "Zone connectivity verified for AM period on SOV toll ('S') mode"}) + scenario.has_traffic_results = False + + def log_report(self): + report = _m.PageBuilder(title="Import network from TNED files report") + try: + if self._error: + report.add_html("
Errors detected during import: %s" % len(self._error))
+                error_msg = ["<ul>"]
+                for error in self._error:
+                    error_msg.append("<li>%s</li>" % error)
+                error_msg.append("</ul>")
+                report.add_html("".join(error_msg))
+            else:
+                report.add_html("No errors detected during import :-)")
+
+            for item in self._log:
+                if item["type"] == "text":
+                    report.add_html("%s" % item["content"])
+                if item["type"] == "text2":
+                    report.add_html("%s" % item["content"])
+                elif item["type"] == "header":
+                    report.add_html("<h3>%s</h3>" % item["content"])
+                elif item["type"] == "table":
+                    table_msg = ["<table>", "<h3>%s</h3>" % item["title"]]
+                    if "header" in item:
+                        table_msg.append("<tr>")
+                        for label in item["header"]:
+                            table_msg.append("<th>%s</th>" % label)
+                        table_msg.append("</tr>")
+                    for row in item["content"]:
+                        table_msg.append("<tr>")
+                        for cell in row:
+                            table_msg.append("<td>%s</td>" % cell)
+                        table_msg.append("</tr>")
+                    table_msg.append("</table>
      ") + report.add_html("".join(table_msg)) + + except Exception as error: + # no raise during report to avoid masking real error + report.add_html("Error generating report") + report.add_html(unicode(error)) + report.add_html(_traceback.format_exc()) + + _m.logbook_write("Import network report", report.render()) + + +def get_node(network, number, coordinates, is_centroid=False): + node = network.node(number) + if not node: + node = network.create_node(number, is_centroid) + node.x, node.y = coordinates + return node + + +# shortest path interpolation +def find_path(orig_link, dest_link, mode): + visited = set([]) + visited_add = visited.add + back_links = {} + heap = [] + + for link in orig_link.j_node.outgoing_links(): + if mode in link.modes: + back_links[link] = None + _heapq.heappush(heap, (link["length"], link)) + + link_found = False + try: + while not link_found: + link_cost, link = _heapq.heappop(heap) + if link in visited: + continue + visited_add(link) + for outgoing in link.j_node.outgoing_links(): + if mode not in outgoing.modes: + continue + if outgoing in visited: + continue + back_links[outgoing] = link + if outgoing == dest_link: + link_found = True + break + outgoing_cost = link_cost + link["length"] + _heapq.heappush(heap, (outgoing_cost, outgoing)) + except IndexError: + pass # IndexError if heap is empty + if not link_found: + raise NoPathException( + "no path found between links with trcov_id %s and %s (Emme IDs %s and %s)" % ( + orig_link["@tcov_id"], dest_link["@tcov_id"], orig_link, dest_link)) + + prev_link = back_links[dest_link] + route = [] + while prev_link: + route.append(prev_link) + prev_link = back_links[prev_link] + return list(reversed(route)) + + +class NoPathException(Exception): + pass + + +def revised_headway(headway): + # CALCULATE REVISED HEADWAY + # new headway calculation is less aggressive; also only being used for initial wait + # It uses a negative exponential formula to calculate headway + # + if headway <= 10: + rev_headway = headway + else: + rev_headway = headway * (0.275 + 0.788 * _np.exp(-0.011*headway)) + return rev_headway + + +def interchange_distance(orig_link, direction): + visited = set([]) + visited_add = visited.add + back_links = {} + heap = [] + if direction == "DOWNSTREAM": + get_links = lambda l: l.j_node.outgoing_links() + check_far_node = lambda l: l.j_node.is_interchange + elif direction == "UPSTREAM": + get_links = lambda l: l.i_node.incoming_links() + check_far_node = lambda l: l.i_node.is_interchange + # Shortest path search for nearest interchange node along freeway + for link in get_links(orig_link): + _heapq.heappush(heap, (link["length"], link)) + interchange_found = False + try: + while not interchange_found: + link_cost, link = _heapq.heappop(heap) + if link in visited: + continue + visited_add(link) + if check_far_node(link): + interchange_found = True + break + for next_link in get_links(link): + if next_link in visited: + continue + next_cost = link_cost + link["length"] + _heapq.heappush(heap, (next_cost, next_link)) + except IndexError: + # IndexError if heap is empty + # case where start / end of highway, dist = 99 + return 99 + return orig_link["length"] / 2.0 + link_cost From e81789aba052c77dc15a3595078c89a5fea91603 Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Tue, 19 Dec 2023 17:09:43 -0800 Subject: [PATCH 11/43] Update headway by TOD Update @headway_op variable to relevant time-of-day --- .../toolbox/assignment/traffic_assignment.py | 18 
+++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/main/emme/toolbox/assignment/traffic_assignment.py b/src/main/emme/toolbox/assignment/traffic_assignment.py index 042c49c89..4fcfdc39d 100644 --- a/src/main/emme/toolbox/assignment/traffic_assignment.py +++ b/src/main/emme/toolbox/assignment/traffic_assignment.py @@ -569,11 +569,11 @@ def run_assignment(self, period, relative_gap, max_iterations, num_processors, s with _m.logbook_trace("Transit line headway and background traffic"): # set headway for the period - hdw = {"ea": "@headway_op", + hdw = {"ea": "@headway_ea", "am": "@headway_am", - "md": "@headway_op", + "md": "@headway_md", "pm": "@headway_pm", - "ev": "@headway_op"} + "ev": "@headway_ev"} net_calc("hdw", hdw[p], {"transit_line": "all"}) # transit vehicle as background flow with periods @@ -738,11 +738,11 @@ def run_stochastic_assignment( with _m.logbook_trace("Transit line headway and background traffic"): # set headway for the period: format is (attribute_name, period duration in hours) - hdw = {"ea": ("@headway_op", 3), + hdw = {"ea": ("@headway_ea", 3), "am": ("@headway_am", 3), - "md": ("@headway_op", 6.5), + "md": ("@headway_md", 6.5), "pm": ("@headway_pm", 3.5), - "ev": ("@headway_op", 5)} + "ev": ("@headway_ev", 5)} net_calc('ul2', '0', {'link': 'all'}) net_calc('hdw', '9999.99', {'transit_line': 'all'}) net_calc( @@ -789,11 +789,11 @@ def run_stochastic_assignment( with _m.logbook_trace("Reset transit line headways"): # set headway for the period - hdw = {"ea": "@headway_op", + hdw = {"ea": "@headway_ea", "am": "@headway_am", - "md": "@headway_op", + "md": "@headway_md", "pm": "@headway_pm", - "ev": "@headway_op"} + "ev": "@headway_ev"} net_calc("hdw", hdw[p], {"transit_line": "all"}) return From ae6342f01c366f8c4f0d8fa8f1f57347d5d624e0 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 20 Dec 2023 16:20:22 -0500 Subject: [PATCH 12/43] Fixing skipping of @headway_rev calculation in case the special fares file was missing; also centralized the attribute auto_time copying to the same in-memory network object --- .../assignment/build_transit_scenario.py | 14 ++++---------- .../emme/toolbox/import/import_network.py | 19 +++++++++---------- 2 files changed, 13 insertions(+), 20 deletions(-) diff --git a/src/main/emme/toolbox/assignment/build_transit_scenario.py b/src/main/emme/toolbox/assignment/build_transit_scenario.py index 052602fa3..82949e324 100644 --- a/src/main/emme/toolbox/assignment/build_transit_scenario.py +++ b/src/main/emme/toolbox/assignment/build_transit_scenario.py @@ -328,24 +328,18 @@ def __call__(self, period, base_scenario, transit_emmebank, scenario_id, scenari # The congested auto times for mixed traffic are in "@auto_time" # (output from traffic assignment) which needs to be copied to auto_time (a.k.a. timau) # (The auto_time attribute is generated from the VDF values which include reliability factor) + ## also copying auto_time to ul1, so it does not get wiped when transit connectors are created. 
+ src_attrs = [params["fixed_link_time"]] dst_attrs = ["data2"] if scenario.has_traffic_results and "@auto_time" in scenario.attributes("LINK"): - src_attrs.append("@auto_time") - dst_attrs.append("auto_time") + src_attrs.extend(["@auto_time", "@auto_time"]) + dst_attrs.extend(["auto_time", "data1"]) values = network.get_attribute_values("LINK", src_attrs) network.set_attribute_values("LINK", dst_attrs, values) scenario.publish_network(network) self._node_id_tracker = None - - ##copying auto_time to ul1, so it does not get wiped when transit connectors are created. - if scenario.has_traffic_results and "@auto_time" in scenario.attributes("LINK"): - copy_att(from_attribute_name='timau', - to_attribute_name='ul1', - from_scenario=scenario, - to_scenario=scenario) - return scenario @_m.logbook_trace("Add timed-transfer links", save_arguments=True) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index ae328727a..b808abeda 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -1035,20 +1035,19 @@ def lookup_line(ident): self._log.append({"type": "text", "content": "Import transit lines complete"}) def calc_transit_attributes(self, network): - # for link in network.links(): - # if link.type == 0: # walk only links have FC ==0 - # link.type = 99 - - fares_file_name = FILE_NAMES["FARES"] - special_fare_path = _join(self.source, fares_file_name) - if not os.path.isfile(special_fare_path): - return - + self._log.append({"type": "header", "content": "Calculate derived transit line attributes"}) # ON TRANSIT LINES # Set 3-period headway based on revised headway calculation for line in network.transit_lines(): for period in ["ea", "am", "md", "pm", "ev"]: line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) + self._log.append({"type": "text", "content": "Revised headway calculation complete"}) + + fares_file_name = FILE_NAMES["FARES"] + special_fare_path = _join(self.source, fares_file_name) + if not os.path.isfile(special_fare_path): + self._log.append({"type": "text", "content": "Special fares file %s not found" % fares_file_name}) + return def get_line(line_id): line = network.transit_line(line_id) @@ -1698,7 +1697,7 @@ def log_report(self): error_msg.append("
    ") report.add_html("".join(error_msg)) else: - report.add_html("

    No errors detected during import :-)") + report.add_html("

    No errors detected during import :-)") for item in self._log: if item["type"] == "text": From e1950d0b235cbe42419c262a7bb874e046eb9b5b Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 10 Jan 2024 13:33:18 -0500 Subject: [PATCH 13/43] Updating mode definitions to match use in traffic assignment --- .../emme/toolbox/import/import_network.py | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index b808abeda..cbf84cda0 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -475,18 +475,18 @@ def create_modes(self, network): mode_table = { "AUTO": [("d", "dummy auto")], "AUX_AUTO": [ - ("h", "SOV"), - ("H", "HOV2"), + ("s", "SOV"), + ("h", "HOV2"), ("i", "HOV3+"), - ("I", "TRKL"), - ("s", "TRKM"), - ("S", "TRKH"), - ("v", "SOV TOLL"), - ("V", "HOV2 TOLL"), - ("m", "HOV3+ TOLL"), - ("M", "TRKL TOLL"), - ("t", "TRKM TOLL"), - ("T", "TRKH TOLL"), + ("t", "TRKL"), + ("m", "TRKM"), + ("v", "TRKH"), + ("S", "SOV TOLL"), + ("H", "HOV2 TOLL"), + ("I", "HOV3+ TOLL"), + ("T", "TRKL TOLL"), + ("M", "TRKM TOLL"), + ("V", "TRKH TOLL"), ], "TRANSIT": [ ("b", "BUS" ), # (vehicle type 100, PCE=3.0) @@ -541,20 +541,20 @@ def create_modes(self, network): } modes_gp_lanes = { 0: set([]), - 1: set([network.mode(m_id) for m_id in "dhHiIsSvVmMtT"]), # all modes - 2: set([network.mode(m_id) for m_id in "dhHiIsvVmMt"]), # no heavy truck - 3: set([network.mode(m_id) for m_id in "dhHiIvVmM"]), # no heavy or medium truck - 4: set([network.mode(m_id) for m_id in "dhHivVm"]), # no truck - 5: set([network.mode(m_id) for m_id in "dST"]), # only heavy trucks - 6: set([network.mode(m_id) for m_id in "dsStT"]), # heavy and medium trucks - 7: set([network.mode(m_id) for m_id in "dIsSMtT"]), # all trucks only + 1: set([network.mode(m_id) for m_id in "dvmtshiVMTSHI"]), # all modes + 2: set([network.mode(m_id) for m_id in "dmtshiMTSHI"]), # no heavy truck + 3: set([network.mode(m_id) for m_id in "dtshiTSHI"]), # no heavy or medium truck + 4: set([network.mode(m_id) for m_id in "dshiSHI"]), # no truck + 5: set([network.mode(m_id) for m_id in "dvV"]), # only heavy trucks + 6: set([network.mode(m_id) for m_id in "dvmVM"]), # heavy and medium trucks + 7: set([network.mode(m_id) for m_id in "dvmtVMT"]), # all trucks only (no passenger cars) } - non_toll_modes = set([network.mode(m_id) for m_id in "hHiIsS"]) + non_toll_modes = set([network.mode(m_id) for m_id in "vmtshi"]) self._auto_mode_lookup = { "GP": modes_gp_lanes, "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), - "HOV2": set([network.mode(m_id) for m_id in "dHiVm"]), - "HOV3": set([network.mode(m_id) for m_id in "dim"]), + "HOV2": set([network.mode(m_id) for m_id in "dhiHI"]), + "HOV3": set([network.mode(m_id) for m_id in "diI"]), } def set_auto_modes(self, network, period): From d1f4d80915649d18a3599ca2c5513a0593f04634 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 10 Jan 2024 13:36:04 -0500 Subject: [PATCH 14/43] Switching trrt and trlink to use the csv files instead of tables in geodatabase --- src/main/emme/toolbox/import/import_network.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index cbf84cda0..9f2e8d9f2 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ 
b/src/main/emme/toolbox/import/import_network.py @@ -34,11 +34,11 @@ # - TNED_HwyNodes # - TNED_RailNet # - TNED_RailNodes -# - trrt -# - trlink # - Turns # The following files are also used (in the same directory as the *.gdb) # +# trrt: header data for the transit lines +# trlink: sequence of links (routing) of transit lines # trstop.csv: stop data for the transit lines # mode5tod.csv: global (per-mode) transit cost and perception attributes # timexfer_.csv (optional): table of timed transfer pairs of lines, by period @@ -149,12 +149,12 @@ def page(self):
             <li>TNED_HwyNodes</li>
             <li>TNED_RailNet</li>
             <li>TNED_RailNodes</li>
-            <li>trrt</li>
-            <li>trlink</li>
             <li>Turns</li>
         </ul>
         The following files are also used (in the same directory as the *.gdb):
         <ul>
+            <li>trrt.csv</li>
+            <li>trlink.csv</li>
             <li>trstop.csv</li>
             <li>mode5tod.csv</li>
             <li>timexfer_<period>.csv (optional)</li>
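
Note on the file layout implied by the list above: the transit CSV inputs (trrt.csv, trlink.csv, trstop.csv, mode5tod.csv) are expected to sit beside the *.gdb pointed at by the tool's source path, and the hunk below builds their paths with _join(_dir(self.source), ...). A minimal sketch of that convention, using only the standard library; the helper name transit_input_paths is illustrative and not part of the tool:

import os

def transit_input_paths(source_gdb):
    # source_gdb is e.g. .../input/EMMEOutputs.gdb; the companion CSV
    # inputs are read from the same directory (the gdb's parent).
    base_dir = os.path.dirname(source_gdb)
    names = ["trrt.csv", "trlink.csv", "trstop.csv", "mode5tod.csv"]
    return dict((name, os.path.join(base_dir, name)) for name in names)
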
    • @@ -734,9 +734,11 @@ def create_transit_lines(self, network, attr_map): self._log.append({"type": "header", "content": "Import transit lines"}) fatal_errors = 0 # Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,Midday_Headway,Evening_Headway,EarlyAM_Headway,Night_Headway,Night_Hours,Config,Fare - transit_line_data = gen_utils.DataTableProc("trrt", self.source) + #transit_line_data = gen_utils.DataTableProc("trrt", self.source) + transit_line_data = gen_utils.DataTableProc("trrt", _join(_dir(self.source), "trrt.csv")) # Route_ID,Link_ID,Link_GUID,Direction - transit_link_data = gen_utils.DataTableProc("trlink", self.source) + #transit_link_data = gen_utils.DataTableProc("trlink", self.source) + transit_link_data = gen_utils.DataTableProc("trlink", _join(_dir(self.source), "trlink.csv")) # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,StopName #transit_stop_data = gen_utils.DataTableProc("trstop", self.source) transit_stop_data = gen_utils.DataTableProc("trstop", _join(_dir(self.source), "trstop.csv")) From aa37b970a561df1daabd7f27ada06547149acad2 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 10 Jan 2024 15:25:32 -0500 Subject: [PATCH 15/43] Adding extra check of transit line stop on next link, in case of split link (resulting in adjacent links with same ID) --- .../emme/toolbox/import/import_network.py | 44 +++++++++++-------- 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 9f2e8d9f2..f70cd49e2 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -37,8 +37,8 @@ # - Turns # The following files are also used (in the same directory as the *.gdb) # -# trrt: header data for the transit lines -# trlink: sequence of links (routing) of transit lines +# trrt.csv: header data for the transit lines +# trlink.csv: sequence of links (routing) of transit lines # trstop.csv: stop data for the transit lines # mode5tod.csv: global (per-mode) transit cost and perception attributes # timexfer_.csv (optional): table of timed transfer pairs of lines, by period @@ -961,31 +961,39 @@ def create_transit_lines(self, network, attr_map): if "DUMMY" in stop["StopName"]: continue stop_link_id = stop['Link_GUID'] - node_id = int(stop['Node']) + stop_node_id = int(stop['Node']) while segment.link and segment.link["#hwyseg_guid"].lstrip("-") != stop_link_id: segment = itinerary.next() - if node_id == segment.i_node.number: + if stop_node_id == segment.i_node.number: pass - elif segment.j_node and node_id == segment.j_node.number: + elif segment.j_node and stop_node_id == segment.j_node.number: # if matches the J-node then the stop is on the next segment segment = itinerary.next() else: - if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: - msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( - line_name, stop["Route_ID"], segment, stop_link_id, node_id) + next_segment = None + if segment.j_node: + next_segment = itinerary.next() + if next_segment and next_segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id and \ + stop_node_id == next_segment.j_node.number: + # split link case, where stop is at the end of the next segment + segment = next_segment else: - msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( - line_name, stop["Route_ID"], stop_link_id, node_id) - 
self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - fatal_errors += 1 - # reset iterator to start back from previous segment - itinerary = tline.segments(include_hidden=True) - segment = itinerary.next() - while segment.id != prev_segment.id: + if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: + msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( + line_name, stop["Route_ID"], segment, stop_link_id, stop_node_id) + else: + msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( + line_name, stop["Route_ID"], stop_link_id, stop_node_id) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + fatal_errors += 1 + # reset iterator to start back from previous segment + itinerary = tline.segments(include_hidden=True) segment = itinerary.next() - continue + while segment.id != prev_segment.id: + segment = itinerary.next() + continue segment.allow_boardings = True segment.allow_alightings = True segment.dwell_time = min(tline.default_dwell_time, 99.99) From 1c99edec7b18dc19177f651400d4937d3a4e22bf Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Fri, 12 Jan 2024 11:09:06 -0500 Subject: [PATCH 16/43] Updates to export_data_loader_network.py for compatibility with new TNED data standards. Dropped "I" from leading column names; TOLL, CP, CX, CH 3->5 periods; transit headway 4->5 periods; added link GUID --- .../export/export_data_loader_network.py | 51 ++++++++----------- 1 file changed, 20 insertions(+), 31 deletions(-) diff --git a/src/main/emme/toolbox/export/export_data_loader_network.py b/src/main/emme/toolbox/export/export_data_loader_network.py index c8907bffc..f54fc1c39 100644 --- a/src/main/emme/toolbox/export/export_data_loader_network.py +++ b/src/main/emme/toolbox/export/export_data_loader_network.py @@ -172,6 +172,7 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, # items are ("column name", "attribute name") or ("column name", ("attribute name", default)) hwylink_attrs = [ ("ID", "@tcov_id"), + ("HWYSegGUID", "#hwyseg_guid"), ("Length", "length"), ("Dir", "is_one_way"), ("hwycov-id:1", "@tcov_id"), @@ -205,20 +206,15 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, ("FFC", "type"), ("CLASS", "zero"), ("ASPD", "@speed_adjusted"), - ("IYR", "@year_open_traffic"), - ("IPROJ", "@project_code"), - ("IJUR", "@jurisdiction_type"), - ("IFC", "type"), - ("IHOV", "@hov"), - #("ITRUCK", "@truck_restriction"), - ("ISPD", "@speed_posted"), - ("ITSPD", "zero"), - ("IWAY", "iway"), - ("IMED", "@median"), + ("YR", "@year_open_traffic"), + ("PROJ", "@project_code"), + ("FC", "type"), + ("HOV", "@hov"), + ("SPD", "@speed_posted"), + ("TSPD", "zero"), + ("WAY", "iway"), + ("MED", "@median"), ("COST", "@cost_operating"), - ("ITOLLO", "@toll_md"), - ("ITOLLA", "@toll_am"), - ("ITOLLP", "@toll_pm"), ] directional_attrs = [ ("ABLNO", "@lane_md", "0"), @@ -236,15 +232,6 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, ("ABLLB", "zero", "0"), ("ABGC", "@green_to_cycle_init", "0"), ("ABPLC", "per_lane_capacity", "1900"), - ("ABCPO", "@capacity_link_md", "999999"), - ("ABCPA", "@capacity_link_am", "999999"), - ("ABCPP", "@capacity_link_pm", "999999"), - ("ABCXO", "@capacity_inter_md", "999999"), - ("ABCXA", "@capacity_inter_am", "999999"), - ("ABCXP", "@capacity_inter_pm", "999999"), - ("ABCHO", "@capacity_hourly_op", "0"), - ("ABCHA", 
"@capacity_hourly_am", "0"), - ("ABCHP", "@capacity_hourly_pm", "0"), ("ABTMO", "@time_link_md", "999"), ("ABTMA", "@time_link_am", "999"), ("ABTMP", "@time_link_pm", "999"), @@ -263,13 +250,14 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, hwylink_attrs.append(("relifac", "relifac")) time_period_atts = [ - ("ITOLL2", "@toll"), - ("ITOLL3", "@cost_auto"), - ("ITOLL4", "@cost_med_truck"), - ("ITOLL5", "@cost_hvy_truck"), - ("ITOLL", "toll_hov"), + ("TOLL2", "@toll"), + ("TOLL3", "@cost_auto"), + ("TOLL4", "@cost_med_truck"), + ("TOLL5", "@cost_hvy_truck"), + ("TOLL", "toll_hov"), ("ABCP", "@capacity_link", "999999"), ("ABCX", "@capacity_inter", "999999"), + ("ABCH", "@capacity_hourly", "0"), ("ABTM", "@time_link", "999"), ("ABTX", "@time_inter", "0"), ("ABLN", "@lane", "0"), @@ -502,8 +490,9 @@ def export_transit_results(self, export_path, input_path, transit_emmebank_dict, # Note: Node analysis for transfers is VERY time consuming # this implementation will be replaced when new Emme version is available - trrt_atts = ["Route_ID","Route_Name","Mode","AM_Headway","PM_Headway","OP_Headway","Night_Headway","Night_Hours","Config","Fare"] - trstop_atts = ["Stop_ID","Route_ID","Link_ID","Pass_Count","Milepost","Longitude","Latitude","NearNode","FareZone","StopName"] + trrt_atts = ["Route_ID","Route_Name","Mode","AM_Headway","PM_Headway","Midday_Headway","Evening_Headway","EarlyAM_Headway", + "Evening_Hours", "EarlyAM_Hours", "Config","Fare"] + trstop_atts = ["Stop_ID","Route_ID","Link_ID","Link_GUID","Pass_Count","Milepost","Longitude","Latitude","StopName"] #transit route file trrt_infile = os.path.join(input_path, "trrt.csv") @@ -1017,8 +1006,8 @@ def get_xfer_link(node, timed_xfer_link, is_outgoing=True): elif node["@network_adj"] == 3: orig_node = network.node(node["@network_adj_src"]) # Remove transfer walk links and copy data to source walk link - for link in node.outgoing_links(): - if xfer_mode in link.modes and link.j_node["@network_adj"] == 3: + for link in _chain(node.incoming_links(), node.outgoing_links()): + if xfer_mode in link.modes and link.j_node["@network_adj"] == 3 and link.i_node["@network_adj"] == 3: orig_xfer_link = get_xfer_link(orig_node, link) for attr in link_result_attrs: orig_xfer_link[attr] += link[attr] From f5cae4dada3ba8a54c19cbb18c81039be5ed3142 Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Wed, 17 Jan 2024 12:33:29 -0800 Subject: [PATCH 17/43] Delete import_network.py.bak Cleanup by deleting unnecessary bak file. --- .../emme/toolbox/import/import_network.py.bak | 1836 ----------------- 1 file changed, 1836 deletions(-) delete mode 100644 src/main/emme/toolbox/import/import_network.py.bak diff --git a/src/main/emme/toolbox/import/import_network.py.bak b/src/main/emme/toolbox/import/import_network.py.bak deleted file mode 100644 index a32fdd5d1..000000000 --- a/src/main/emme/toolbox/import/import_network.py.bak +++ /dev/null @@ -1,1836 +0,0 @@ -#////////////////////////////////////////////////////////////////////////////// -#//// /// -#//// Copyright INRO, 2016-2017. /// -#//// Rights to use and modify are granted to the /// -#//// San Diego Association of Governments and partner agencies. /// -#//// This copyright notice must be preserved. /// -#//// /// -#//// import/import_network.py /// -#//// /// -#//// /// -#//// /// -#//// /// -#////////////////////////////////////////////////////////////////////////////// -# -# Imports the network from the input network files. 
-# -# -# Inputs: -# source: path to the location of the input network geodatabase -# traffic_scenario_id: optional scenario to store the imported network from the traffic files only -# transit_scenario_id: optional scenario to store the imported network from the transit files only -# merged_scenario_id: scenario to store the combined traffic and transit data from all network files -# title: the title to use for the imported scenario -# save_data_tables: if checked, create a data table for each reference file for viewing in the Emme Desktop -# data_table_name: prefix to use to identify all data tables -# overwrite: check to overwrite any existing data tables or scenarios with the same ID or name -# emmebank: the Emme database in which to create the scenario. Default is the current open database -# create_time_periods: if True (default), also create per-time period scenarios (required to run assignments) -# -# Files referenced: -# -# *.gdb: A Geodatabase file with the network data for both highway and transit. The following tables are used -# - TNED_HwyNet -# - TNED_HwyNodes -# - TNED_RailNet -# - TNED_RailNodes -# - trrt -# - trlink -# - Turns -# The following files are also used (in the same directory as the *.gdb) -# -# trstop.csv: stop data for the transit lines -# mode5tod.csv: global (per-mode) transit cost and perception attributes -# timexfer_.csv (optional): table of timed transfer pairs of lines, by period -# special_fares.txt (optional): table listing special fares in terms of boarding and incremental in-vehicle costs. -# off_peak_toll_factors.csv (optional): factors to calculate the toll for EA, MD, and EV periods from the OP toll input for specified facilities -# vehicle_class_toll_factors.csv (optional): factors to adjust the toll cost by facility name and class (DA, S2, S3, TRK_L, TRK_M, TRK_H) -# -# -# Script example: -""" - import os - modeller = inro.modeller.Modeller() - main_directory = os.path.dirname(os.path.dirname(modeller.desktop.project.path)) - source_file = os.path.join(main_directory, "input", "EMMEOutputs.gdb") - title = "Base 2012 scenario" - import_network = modeller.tool("sandag.import.import_network") - import_network(source_file, merged_scenario_id=100, title=title, - data_table_name="2012_base", overwrite=True) -""" - - -TOOLBOX_ORDER = 11 - - -import inro.modeller as _m -import inro.emme.datatable as _dt -import inro.emme.network as _network -from inro.emme.core.exception import Error as _NetworkError - -from itertools import izip as _izip -from collections import defaultdict as _defaultdict, OrderedDict -from contextlib import contextmanager as _context -import fiona as _fiona - -from math import ceil as _ceiling -from copy import deepcopy as _copy -import numpy as _np -import heapq as _heapq -import pandas as pd - -import traceback as _traceback -import os - -_join = os.path.join -_dir = os.path.dirname - - -gen_utils = _m.Modeller().module("sandag.utilities.general") -dem_utils = _m.Modeller().module("sandag.utilities.demand") - -FILE_NAMES = { - "FARES": "special_fares.txt", - "TIMEXFER": "timexfer_%s.csv", - "OFF_PEAK": "off_peak_toll_factors.csv", - "VEHICLE_CLASS": "vehicle_class_toll_factors.csv", - "MODE5TOD": "MODE5TOD.csv", -} - - -class ImportNetwork(_m.Tool(), gen_utils.Snapshot): - - source = _m.Attribute(unicode) - scenario_id = _m.Attribute(int) - overwrite = _m.Attribute(bool) - title = _m.Attribute(unicode) - save_data_tables = _m.Attribute(bool) - data_table_name = _m.Attribute(unicode) - create_time_periods = _m.Attribute(bool) - - 
tool_run_msg = "" - - @_m.method(return_type=_m.UnicodeType) - def tool_run_msg_status(self): - return self.tool_run_msg - - def __init__(self): - self._log = [] - self._error = [] - project_dir = _dir(_m.Modeller().desktop.project.path) - self.source = _join(_dir(project_dir), "input") - self.overwrite = False - self.title = "" - self.data_table_name = "" - self.create_time_periods = True - self.attributes = [ - "source", "scenario_id", "overwrite", "title", "save_data_tables", "data_table_name", "create_time_periods" - ] - - def page(self): - if not self.data_table_name: - try: - load_properties = _m.Modeller().tool('sandag.utilities.properties') - props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) - self.data_table_name = props["scenarioYear"] - except: - pass - - pb = _m.ToolPageBuilder(self) - pb.title = "Import network" - pb.description = """ -
-        Create an Emme network from TNED geodatabase (*.gdb) and associated files.
-        <br>
-        <br>
-        The following layers in the gdb are used:
-        <ul>
-            <li>TNED_HwyNet</li>
-            <li>TNED_HwyNodes</li>
-            <li>TNED_RailNet</li>
-            <li>TNED_RailNodes</li>
-            <li>trrt</li>
-            <li>trlink</li>
-            <li>Turns</li>
-        </ul>
-        The following files are also used (in the same directory as the *.gdb):
-        <ul>
-            <li>trstop.csv</li>
-            <li>mode5tod.csv</li>
-            <li>timexfer_<period>.csv (optional)</li>
-            <li>special_fares.txt (optional)</li>
-            <li>off_peak_toll_factors.csv (optional)</li>
-            <li>vehicle_class_toll_factors.csv (optional)</li>
-        </ul>
      - """ - pb.branding_text = "- SANDAG - Import" - - if self.tool_run_msg != "": - pb.tool_run_status(self.tool_run_msg_status) - - pb.add_select_file("source", window_type="directory", file_filter="", - title="Source gdb:",) - - pb.add_text_box("scenario_id", size=6, title="Scenario ID for imported network:") - pb.add_text_box("title", size=80, title="Scenario title:") - pb.add_checkbox("save_data_tables", title=" ", label="Save reference data tables of file data") - pb.add_text_box("data_table_name", size=80, title="Name for data tables:", - note="Prefix name to use for all saved data tables") - pb.add_checkbox("overwrite", title=" ", label="Overwrite existing scenarios and data tables") - pb.add_checkbox("create_time_periods", title=" ", label="Copy base scenario to all time periods and set modes (required for assignments)") - - return pb.render() - - def run(self): - self.tool_run_msg = "" - try: - self.emmebank = _m.Modeller().emmebank - with self.setup(): - self.execute() - run_msg = "Network import complete" - if self._error: - run_msg += " with %s non-fatal errors. See logbook for details" % len(self._error) - self.tool_run_msg = _m.PageBuilder.format_info(run_msg, escape=False) - except Exception as error: - self.tool_run_msg = _m.PageBuilder.format_exception( - error, _traceback.format_exc()) - raise - - def __call__(self, source, scenario_id, - title="", save_data_tables=False, data_table_name="", overwrite=False, - emmebank=None, create_time_periods=True): - - self.source = source - self.scenario_id = scenario_id - self.title = title - self.save_data_tables = save_data_tables - self.data_table_name = data_table_name - self.overwrite = overwrite - if not emmebank: - self.emmebank = _m.Modeller().emmebank - else: - self.emmebank = emmebank - self.create_time_periods = create_time_periods - - with self.setup(): - self.execute() - - return self.emmebank.scenario(scenario_id) - - @_context - def setup(self): - self._log = [] - self._error = [] - fatal_error = False - attributes = OrderedDict([ - ("self", str(self)), - ("source", self.source), - ("scenario_id", self.scenario_id), - ("title", self.title), - ("save_data_tables", self.save_data_tables), - ("data_table_name", self.data_table_name), - ("overwrite", self.overwrite), - ("create_time_periods", self.create_time_periods) - ]) - self._log = [{ - "content": attributes.items(), - "type": "table", "header": ["name", "value"], - "title": "Tool input values" - }] - with _m.logbook_trace("Import network", attributes=attributes) as trace: - gen_utils.log_snapshot("Import network", str(self), attributes) - load_properties = _m.Modeller().tool('sandag.utilities.properties') - self._props = load_properties(_join(_dir(_dir(self.source)), "conf", "sandag_abm.properties")) - try: - yield - except Exception as error: - self._log.append({"type": "text", "content": error}) - trace_text = _traceback.format_exc().replace("\n", "
      ") - self._log.append({"type": "text", "content": trace_text}) - self._error.append(error) - fatal_error = True - raise - finally: - self._props = None - self.log_report() - self._auto_mode_lookup = None - self._transit_mode_lookup = None - if self._error: - if fatal_error: - trace.write("Import network failed (%s errors)" % len(self._error), attributes=attributes) - else: - trace.write("Import network completed (%s non-fatal errors)" % len(self._error), attributes=attributes) - - def execute(self): - attr_map = { - "NODE": OrderedDict([ - ("HNODE", ("@hnode", "BOTH", "EXTRA", "HNODE label from TNED" )), - ("TAP", ("@tap_id", "BOTH", "EXTRA", "TAP number")), - ("PARK", ("@park", "BOTH", "EXTRA", "parking indicator" )), - ("STOPTYPE", ("@stoptype", "BOTH", "EXTRA", "stop type indicator" )), - ("ELEV", ("@elev", "BOTH", "EXTRA", "station/stop elevation in feet")), - ("interchange", ("@interchange", "DERIVED", "EXTRA", "is interchange node")), - ]), - "LINK": OrderedDict([ - ("HWYCOV0_ID",("@tcov_id", "TWO_WAY", "EXTRA", "SANDAG-assigned link ID")), - ("SPHERE", ("@sphere", "HWY_TWO_WAY", "EXTRA", "Jurisdiction sphere of influence")), - ("HWYSegGUID",("#hwyseg_guid", "TWO_WAY", "STRING", "HWYSegGUID")), - ("NM", ("#name", "TWO_WAY", "STRING", "Street name")), - ("FXNM", ("#name_from", "TWO_WAY", "STRING", "Cross street at the FROM end")), - ("TXNM", ("#name_to", "TWO_WAY", "STRING", "Cross street name at the TO end")), - ("DIR", ("@direction_cardinal", "TWO_WAY", "EXTRA", "Link direction")), - ("ASPD", ("@speed_adjusted", "HWY_TWO_WAY", "EXTRA", "Adjusted link speed (miles/hr)")), - ("YR", ("@year_open_traffic", "HWY_TWO_WAY", "EXTRA", "The year the link opened to traffic")), - ("PROJ", ("@project_code", "HWY_TWO_WAY", "EXTRA", "Project number for use with hwyproj.xls")), - ("FC", ("type", "TWO_WAY", "STANDARD", "")), - ("HOV", ("@hov", "TWO_WAY", "EXTRA", "Link operation type")), - ("MINMODE", ("@minmode", "TWO_WAY", "EXTRA", "Transit mode type")), - ("EATRUCK", ("@truck_ea", "HWY_TWO_WAY", "EXTRA", "Early AM truck restriction code ")), - ("AMTRUCK", ("@truck_am", "HWY_TWO_WAY", "EXTRA", "AM Peak truck restriction code ")), - ("MDTRUCK", ("@truck_md", "HWY_TWO_WAY", "EXTRA", "Mid-day truck restriction code ")), - ("PMTRUCK", ("@truck_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak truck restriction code ")), - ("EVTRUCK", ("@truck_ev", "HWY_TWO_WAY", "EXTRA", "Evening truck restriction code ")), - ("TOLLEA", ("@toll_ea", "HWY_TWO_WAY", "EXTRA", "Early AM toll cost (cent)")), - ("TOLLA", ("@toll_am", "HWY_TWO_WAY", "EXTRA", "AM Peak toll cost (cent)")), - ("TOLLMD", ("@toll_md", "HWY_TWO_WAY", "EXTRA", "Mid-day toll cost (cent)")), - ("TOLLP", ("@toll_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak toll cost (cent)")), - ("TOLLEV", ("@toll_ev", "HWY_TWO_WAY", "EXTRA", "Evening toll cost (cent)")), - - ("SPD", ("@speed_posted", "HWY_TWO_WAY", "EXTRA", "Posted speed limit (mph)")), - ("MED", ("@median", "TWO_WAY", "EXTRA", "Median type")), - ("AU", ("@lane_auxiliary", "HWY_ONE_WAY", "EXTRA", "Number of auxiliary lanes")), - ("CNT", ("@traffic_control", "HWY_ONE_WAY", "EXTRA", "Intersection control type")), - ("TL", ("@turn_thru", "HWY_ONE_WAY", "EXTRA", "Intersection approach through lanes")), - ("RL", ("@turn_right", "HWY_ONE_WAY", "EXTRA", "Intersection approach right-turn lanes")), - ("LL", ("@turn_left", "HWY_ONE_WAY", "EXTRA", "Intersection approach left-turn lanes")), - ("GC", ("@green_to_cycle_init", "HWY_ONE_WAY", "EXTRA", "Initial green-to-cycle ratio")), - ("WAY", ("way", "HWY_TWO_WAY", "INTERNAL", 
"")), - ("TRANSIT_MODES", ("transit_modes", "DERIVED", "INTERNAL", "")), - ("@cost_operating", ("@cost_operating", "DERIVED", "EXTRA", "Fuel and maintenance cost")), - ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), - ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), - - ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes",)), - ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), - - ]), - "TRANSIT_LINE": OrderedDict([ - ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), - ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), - ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), - ("Evening_Headway",("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), - ("EarlyAM_Headway",("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), - ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), - ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), - ("MD_Headway_rev", ("@headway_rev_md", "DERIVED", "EXTRA", "Midday revised headway")), - ("EV_Headway_rev", ("@headway_rev_ev", "DERIVED", "EXTRA", "Evening revised headway")), - ("EA_Headway_rev", ("@headway_rev_ea", "DERIVED", "EXTRA", "Early AM revised headway")), - ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), - ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), - ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), - ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), - ("DWELLTIME", ("default_dwell_time" "MODE5TOD", "INTERNAL", "")), - ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), - ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), - ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), - ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), - ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), - ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), - ]), - "TRANSIT_SEGMENT": OrderedDict([ - ("Stop_ID", ("@stop_id", "TRSTOP", "EXTRA", "Stop ID from trcov")), - ("Pass_Count", ("@pass_count", "TRSTOP", "EXTRA", "Number of times this stop is passed")), - ("Milepost", ("@milepost", "TRSTOP", "EXTRA", "Distance from start of line")), - ("StopName", ("#stop_name", "TRSTOP", "STRING", "Name of stop")), - ("@coaster_fare_board", ("@coaster_fare_board", "DERIVED", "EXTRA", "Boarding fare for coaster")), - ("@coaster_fare_inveh", ("@coaster_fare_inveh", "DERIVED", "EXTRA", "Incremental fare for Coaster")), - ]) - } - - time_name = { - "_ea": "Early AM ", "_am": "AM Peak ", "_md": "Mid-day ", "_pm": "PM Peak ", "_ev": "Evening " - } - time_name_dst = ["_ea", "_am", "_md", "_pm", "_ev"] - time_name_src = ["EA", "A", "MD", "P", "EV"] - time_period_attrs = [ - ("CP", "@capacity_link", "mid-link capacity"), - ("CX", "@capacity_inter", "approach capacity"), - ("CH", "@capacity_hourly", "hourly mid-link capacity"), - ("LN", "@lane", "number of lanes"), - ("TM", "@time_link", "link time in minutes"), - ("TX", "@time_inter", "intersection delay time"), - ] - for src_attr, dst_attr, desc_tmplt in time_period_attrs: - for time_s, time_d in zip(time_name_src, time_name_dst): - attr_map["LINK"][src_attr + 
time_s] = \ - (dst_attr + time_d, "HWY_ONE_WAY", "EXTRA", time_name[time_d] + desc_tmplt) - derived_period_attrs = [ - ("@cost_auto", "toll + cost autos"), - ("@cost_hov2", "toll (non-mngd) + cost HOV2"), - ("@cost_hov3", "toll (non-mngd) + cost HOV3+"), - ("@cost_lgt_truck", "toll + cost light trucks"), - ("@cost_med_truck", "toll + cost medium trucks"), - ("@cost_hvy_truck", "toll + cost heavy trucks"), - ("@cycle", "cycle length (minutes)"), - ("@green_to_cycle", "green to cycle ratio"), - ("@sta_reliability", "static reliability") - ] - for attr, desc_tmplt in derived_period_attrs: - for time in time_name_dst: - attr_map["LINK"][attr + time] = \ - (attr + time, "DERIVED", "EXTRA", time_name[time] + desc_tmplt) - - create_scenario = _m.Modeller().tool( - "inro.emme.data.scenario.create_scenario") - - title = self.title - if not title: - existing_scenario = self.emmebank.scenario(self.scenario_id) - if existing_scenario: - title = existing_scenario.title - - scenario = create_scenario(self.scenario_id, title, overwrite=self.overwrite, emmebank=self.emmebank) - scenarios = [scenario] - if self.create_time_periods: - periods=["EA", "AM", "MD", "PM", "EV"] - period_ids = list(enumerate(periods, start=int(self.scenario_id) + 1)) - for ident, period in period_ids: - scenarios.append(create_scenario(ident, "%s - %s assign" % (title, period), - overwrite=self.overwrite, emmebank=self.emmebank)) - # create attributes in scenario - for elem_type, mapping in attr_map.iteritems(): - for name, _tcoved_type, emme_type, desc in mapping.values(): - if emme_type == "EXTRA": - for s in scenarios: - if not s.extra_attribute(name): - xatt = s.create_extra_attribute(elem_type, name) - xatt.description = desc - elif emme_type == "STRING": - for s in scenarios: - if not s.network_field(elem_type, name): - s.create_network_field(elem_type, name, 'STRING', description=desc) - - log_content = [] - for k, v in mapping.iteritems(): - if v[3] == "DERIVED": - k = "--" - log_content.append([k] + list(v)) - self._log.append({ - "content": log_content, - "type": "table", - "header": ["TNED", "Emme", "Source", "Type", "Description"], - "title": "Network %s attributes" % elem_type.lower().replace("_", " "), - "disclosure": True - }) - - network = _network.Network() - for elem_type, mapping in attr_map.iteritems(): - for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): - if emme_type == "STANDARD": - continue - default = "" if emme_type == "STRING" else 0 - network.create_attribute(elem_type, attr, default) - try: - self.create_modes(network) - self.create_road_base(network, attr_map) - self.create_turns(network) - self.calc_traffic_attributes(network) - self.check_zone_access(network, network.mode("d")) - self.create_rail_base(network, attr_map) - self.create_transit_lines(network, attr_map) - self.calc_transit_attributes(network) - finally: - # TAP connectors included in network, fix type setting and renumber node IDs - for link in network.links(): - if link.type <= 0: - link.type = 99 - self.renumber_base_nodes(network) - scenario.publish_network(network, resolve_attributes=True) - - self.set_functions(scenario) - self.check_connectivity(scenario) - - if "modify_network.py" in os.listdir(os.getcwd()): - try: - with _m.logbook_trace("Modify network script"): - import modify_network - reload(modify_network) - modify_network.run(base_scenario) - except ImportError as e: - pass - network = base_scenario.get_network() - network.create_attribute("LINK", "transit_modes") - - if self.create_time_periods: - 
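            # The merged network's mode set is cached on each link as transit_modes; each
            # time-of-day copy then rebuilds its auto modes from the period-specific truck
            # restriction (@truck_xx) and toll (@toll_xx) fields via set_auto_modes before
            # the period scenario is published.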
for link in network.links(): - link.transit_modes = link.modes - for ident, period in period_ids: - self.set_auto_modes(network, period) - scenario = self.emmebank.scenario(ident) - scenario.publish_network(network, resolve_attributes=True) - - def create_modes(self, network): - # combined traffic and transit mode creation - mode_table = { - "AUTO": [("d", "dummy auto")], - "AUX_AUTO": [ - ("h", "SOV"), - ("H", "HOV2"), - ("i", "HOV3+"), - ("I", "TRKL"), - ("s", "TRKM"), - ("S", "TRKH"), - ("v", "SOV TOLL"), - ("V", "HOV2 TOLL"), - ("m", "HOV3+ TOLL"), - ("M", "TRKL TOLL"), - ("t", "TRKM TOLL"), - ("T", "TRKH TOLL"), - ], - "TRANSIT": [ - ("b", "BUS" ), # (vehicle type 100, PCE=3.0) - ("e", "EXP BUS"), # (vehicle type 90 , PCE=3.0) - ("p", "LTDEXP BUS"), # (vehicle type 80 , PCE=3.0) - ("l", "LRT"), # (vehicle type 50) - ("y", "BRT YEL"), # (vehicle type 60 , PCE=3.0) - ("r", "BRT RED"), # (vehicle type 70 , PCE=3.0) - ("c", "CMR"), # (vehicle type 40) - ("o", "TIER1"), # (vehicle type 45) - ], - "AUX_TRANSIT": [ - ("a", "ACCESS", 3), - ("x", "TRANSFER", 3), - ("w", "WALK", 3), - ("u", "ACCESS_WLK", 3), - ("k", "EGRESS_WLK", 3), - ("f", "ACCESS_PNR", 25), - ("g", "EGRESS_PNR", 25), - ("q", "ACCESS_KNR", 25), - ("j", "EGRESS_KNR", 25), - ("Q", "ACCESS_TNC", 25), - ("J", "EGRESS_TNC", 25), - ], - } - for mode_type, modes in mode_table.iteritems(): - for mode_info in modes: - mode = network.create_mode(mode_type, mode_info[0]) - mode.description = mode_info[1] - if len(mode_info) == 3: - mode.speed = mode_info[2] - self._transit_mode_lookup = { - 0: set([]), - 1: set([network.mode(m_id) for m_id in "x"]), # 1 = special transfer walk links between certain nearby stops - 2: set([network.mode(m_id) for m_id in "w"]), # 2 = walk links in the downtown area - 3: set([network.mode(m_id) for m_id in "a"]), # 3 = the special TAP connectors - 400: set([network.mode(m_id) for m_id in "c"]), # 4 = Coaster Rail Line - 500: set([network.mode(m_id) for m_id in "l"]), # 5 = Trolley & Light Rail Transit (LRT) - 600: set([network.mode(m_id) for m_id in "bpeyr"]), # 6 = Yellow Car Bus Rapid Transit (BRT) - 700: set([network.mode(m_id) for m_id in "bpeyr"]), # 7 = Red Car Bus Rapid Transit (BRT) - 800: set([network.mode(m_id) for m_id in "bpe"]), # 8 = Limited Express Bus - 900: set([network.mode(m_id) for m_id in "bpe"]), # 9 = Express Bus - 1000: set([network.mode(m_id) for m_id in "bpe"]), # 10 = Local Bus - 11: set([network.mode(m_id) for m_id in "u"]), # = access walk links - 12: set([network.mode(m_id) for m_id in "k"]), # = egress walk links - 13: set([network.mode(m_id) for m_id in "f"]), # = access PNR links - 14: set([network.mode(m_id) for m_id in "g"]), # = egress PNR links - 15: set([network.mode(m_id) for m_id in "q"]), # = access KNR links - 16: set([network.mode(m_id) for m_id in "j"]), # = egress KNR links - 17: set([network.mode(m_id) for m_id in "Q"]), # = access TNC links - 18: set([network.mode(m_id) for m_id in "J"]), # = egress TNC links - } - modes_gp_lanes = { - 0: set([]), - 1: set([network.mode(m_id) for m_id in "dhHiIsSvVmMtT"]), # all modes - 2: set([network.mode(m_id) for m_id in "dhHiIsvVmMt"]), # no heavy truck - 3: set([network.mode(m_id) for m_id in "dhHiIvVmM"]), # no heavy or medium truck - 4: set([network.mode(m_id) for m_id in "dhHivVm"]), # no truck - 5: set([network.mode(m_id) for m_id in "dST"]), # only heavy trucks - 6: set([network.mode(m_id) for m_id in "dsStT"]), # heavy and medium trucks - 7: set([network.mode(m_id) for m_id in "dIsSMtT"]), # all trucks only - } - 
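        # Example of the lookups built here: truck restriction code 3 ("no heavy or medium
        # truck") maps to modes d, h, H, i, I, v, V, m, M in the GP table; the TOLL table
        # defined below removes the non-toll modes (h, H, i, I, s, S), so the same code on
        # a tolled facility leaves only d, v, V, m, M.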
non_toll_modes = set([network.mode(m_id) for m_id in "hHiIsS"]) - self._auto_mode_lookup = { - "GP": modes_gp_lanes, - "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), - "HOV2": set([network.mode(m_id) for m_id in "dHiVm"]), - "HOV3": set([network.mode(m_id) for m_id in "dim"]), - } - - def set_auto_modes(self, network, period): - # time periods - # need to update the modes from the XTRUCK for their time of day - # Note: only truck types 1, 3, 4, and 7 found in 2012 base network - truck = "@truck_%s" % period.lower() - toll = "@toll_%s" % period.lower() - lookup = self._auto_mode_lookup - for link in network.links(): - auto_modes = set([]) - if link.type == 10: # connector - auto_modes = lookup["GP"][link[truck]] - elif link.type in [11, 12]: - pass # no auto modes, rail only (11) or bus only (12) - elif link["@hov"] == 1: - auto_modes = lookup["GP"][link[truck]] - elif link["@hov"] in [2, 3]: - # managed lanes, free for HOV2 and HOV3+, tolls for SOV - if link[toll] > 0: - auto_modes = lookup["TOLL"][link[truck]] - # special case of I-15 managed lanes base year and 2020, no build - elif link.type == 1 and link["@project_code"] in [41, 42, 486, 373, 711]: - auto_modes = lookup["TOLL"][link[truck]] - elif link.type == 8 or link.type == 9: - auto_modes = lookup["TOLL"][link[truck]] - if link["@hov"] == 2: - auto_modes = auto_modes | lookup["HOV2"] - else: - auto_modes = auto_modes | lookup["HOV3"] - elif link["@hov"] == 4: - auto_modes = lookup["TOLL"][link[truck]] - link.modes = link.transit_modes | auto_modes - - def create_road_base(self, network, attr_map): - self._log.append({"type": "header", "content": "Import roadway base network from TNED_HwyNet %s" % self.source}) - hwy_data = gen_utils.DataTableProc("TNED_HwyNet", self.source) - # TEMP workaround: BN field is string - bn_index = hwy_data._attr_names.index("BN") - hwy_data._values[bn_index] = hwy_data._values[bn_index].astype(int) - - if self.save_data_tables: - hwy_data.save("%s_TNED_HwyNet" % self.data_table_name, self.overwrite) - - is_centroid = lambda arc, node : (arc["FC"] == 10) and (node == "AN") - link_attr_map = {} - for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): - if tcoved_type in ("TWO_WAY", "HWY_TWO_WAY", "ONE_WAY", "HWY_ONE_WAY"): - link_attr_map[field] = (name, tcoved_type.replace("HWY_", ""), emme_type, desc) - - def define_modes(arc): - if arc["FC"] in [11, 12] or arc["ABLNA"] == 0: #or ((arc["HOV"] < 1 or arc["HOV"] > 4) and arc["FC"] != 10): - vehicle_index = int(arc["MINMODE"] / 100)*100 - aux_index = int(arc["MINMODE"] % 100) - return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] - return [network.mode('d')] - - self._create_base_net( - hwy_data, network, mode_callback=define_modes, centroid_callback=is_centroid, link_attr_map=link_attr_map) - - hwy_node_data = gen_utils.DataTableProc("TNED_HwyNodes", self.source) - node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() - if v[1] in ("BOTH", "HWY")] - for record in hwy_node_data: - node = network.node(record["HNODE"]) - if node: - for src, dst in node_attrs: - node[dst] = record[src] - else: - self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) - self._log.append({"type": "text", "content": "Import traffic base network complete"}) - - def create_rail_base(self, network, attr_map): - self._log.append({"type": "header", "content": "Import rail base network from TNED_RailNet %s" % self.source}) - transit_data = 
gen_utils.DataTableProc("TNED_RailNet", self.source) - - if self.save_data_tables: - transit_data.save("%s_TNED_RailNet" % self.data_table_name, self.overwrite) - - link_attr_map = {} - for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): - if tcoved_type in ("TWO_WAY", "RAIL_TWO_WAY", "ONE_WAY", "RAIL_ONE_WAY"): - link_attr_map[field] = (name, tcoved_type.replace("RAIL_", ""), emme_type, desc) - - tier1_modes = set([network.mode(m_id) for m_id in "o"]) - tier1_rail_link_name = self._props["transit.newMode"] - - def define_modes(arc): - if arc["NM"] == tier1_rail_link_name: - return tier1_modes - vehicle_index = int(arc["MINMODE"] / 100)*100 - aux_index = int(arc["MINMODE"] % 100) - return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] - - self._create_base_net( - transit_data, network, mode_callback=define_modes, link_attr_map=link_attr_map) - - transit_node_data = gen_utils.DataTableProc("TNED_RailNodes", self.source) - # Load PARK, elevation, stop type data onto transit nodes - node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() - if v[1] in ("BOTH", "RAIL")] - for record in transit_node_data: - node = network.node(record["HNODE"]) - if node: - for src, dst in node_attrs: - node[dst] = record[src] - else: - self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) - - self._log.append({"type": "text", "content": "Import transit base network complete"}) - - def _create_base_net(self, data, network, link_attr_map, mode_callback, centroid_callback=None): - forward_attr_map = {} - reverse_attr_map = {} - arc_id_name = "HWYCOV0_ID" - arc_guid_name = "HWYSegGUID" - for field, (name, tcoved_type, emme_type, desc) in link_attr_map.iteritems(): - if field in [arc_id_name, arc_guid_name, "DIR"]: - # these attributes are special cases for reverse link - forward_attr_map[field] = name - elif tcoved_type in "TWO_WAY": - forward_attr_map[field] = name - reverse_attr_map[field] = name - elif tcoved_type in "ONE_WAY": - forward_attr_map["AB" + field] = name - reverse_attr_map["BA" + field] = name - - emme_id_name = forward_attr_map[arc_id_name] - emme_guid_name = forward_attr_map[arc_guid_name] - dir_name = forward_attr_map["DIR"] - reverse_dir_map = {1: 3, 3: 1, 2: 4, 4: 2, 0: 0} - new_node_id = max(data.values("AN").max(), data.values("BN").max()) + 1 - - if centroid_callback is None: - centroid_callback = lambda a,n: False - - # Create nodes and links - for arc in data: - if float(arc["AN"]) == 0 or float(arc["BN"]) == 0: - self._log.append({"type": "text", - "content": "Node ID 0 in AN (%s) or BN (%s) for link GUID/ID %s/%s." % - (arc["AN"], arc["BN"], arc[arc_guid_name], arc[arc_id_name])}) - continue - coordinates = arc["geo_coordinates"] - i_node = get_node(network, arc['AN'], coordinates[0], centroid_callback(arc, "AN")) - j_node = get_node(network, arc['BN'], coordinates[-1], centroid_callback(arc, "BN")) - link = network.link(i_node, j_node) - if link: - msg = "Duplicate link between AN %s and BN %s. Link GUID/IDs %s/%s and %s/%s." % \ - (arc["AN"], arc["BN"], link[emme_guid_name], link[emme_id_name], arc[arc_guid_name], arc[arc_id_name]) - self._log.append({"type": "text", "content": msg}) - if link[emme_guid_name] == arc[arc_guid_name]: - self._log.append({"type": "text", "content": "... 
but GUIDs match (not an error)"}) - else: - self._error.append(msg) - else: - modes = mode_callback(arc) - link = network.create_link(i_node, j_node, modes) - link.length = arc["LENGTH"] - if len(coordinates) > 2: - link.vertices = coordinates[1:-1] - for field, attr in forward_attr_map.iteritems(): - link[attr] = arc[field] - if arc["WAY"] == 2 or arc["WAY"] == 0: - reverse_link = network.link(j_node, i_node) - if not reverse_link: - reverse_link = network.create_link(j_node, i_node, modes) - reverse_link.length = link.length - reverse_link.vertices = list(reversed(link.vertices)) - for field, attr in reverse_attr_map.iteritems(): - reverse_link[attr] = arc[field] - reverse_link[emme_id_name] = -1*arc[arc_id_name] - reverse_link[emme_guid_name] = "-" + arc[arc_guid_name] - reverse_link[dir_name] = reverse_dir_map[arc["DIR"]] - - def create_transit_lines(self, network, attr_map): - self._log.append({"type": "header", "content": "Import transit lines"}) - fatal_errors = 0 - # Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,Midday_Headway,Evening_Headway,EarlyAM_Headway,Night_Headway,Night_Hours,Config,Fare - transit_line_data = gen_utils.DataTableProc("trrt", self.source) - # Route_ID,Link_ID,Link_GUID,Direction - transit_link_data = gen_utils.DataTableProc("trlink", self.source) - # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,StopName - #transit_stop_data = gen_utils.DataTableProc("trstop", self.source) - transit_stop_data = gen_utils.DataTableProc("trstop", _join(_dir(self.source), "trstop.csv")) - # From_line,To_line,Board_stop,Wait_time - # Note: Board_stop is not used - # Timed xfer data - periods = ['EA', 'AM', 'MD', 'PM', 'EV'] - timed_xfer_data = {} - for period in periods: - file_path = _join(_dir(self.source), FILE_NAMES["TIMEXFER"] % period) - if os.path.exists(file_path): - timed_xfer_data[period] = gen_utils.DataTableProc("timexfer_"+period, file_path) - else: - timed_xfer_data[period] = [] - - mode_properties = gen_utils.DataTableProc("MODE5TOD", _join(_dir(self.source), FILE_NAMES["MODE5TOD"]), convert_numeric=True) - mode_details = {} - for record in mode_properties: - mode_details[int(record["MODE_ID"])] = record - - if self.save_data_tables: - transit_link_data.save("%s_trlink" % self.data_table_name, self.overwrite) - transit_line_data.save("%s_trrt" % self.data_table_name, self.overwrite) - transit_stop_data.save("%s_trstop" % self.data_table_name, self.overwrite) - mode_properties.save("%s_MODE5TOD" % self.data_table_name, self.overwrite) - - coaster = network.create_transit_vehicle(40, 'c') # 4 coaster - trolley = network.create_transit_vehicle(50, 'l') # 5 sprinter/trolley - brt_yellow = network.create_transit_vehicle(60, 'y') # 6 BRT yellow line (future line) - brt_red = network.create_transit_vehicle(70, 'r') # 7 BRT red line (future line) - premium_bus = network.create_transit_vehicle(80, 'p') # 8 prem express - express_bus = network.create_transit_vehicle(90, 'e') # 9 regular express - local_bus = network.create_transit_vehicle(100, 'b') # 10 local bus - tier1 = network.create_transit_vehicle(45, 'o') # 11 Tier 1 - - brt_yellow.auto_equivalent = 3.0 - brt_red.auto_equivalent = 3.0 - premium_bus.auto_equivalent = 3.0 - express_bus.auto_equivalent = 3.0 - local_bus.auto_equivalent = 3.0 - - # Capacities - for reference / post-assignment analysis - tier1.seated_capacity, tier1.total_capacity = 7 * 142, 7 * 276 - trolley.seated_capacity, trolley.total_capacity = 4 * 64, 4 * 200 - brt_yellow.seated_capacity, brt_yellow.total_capacity 
= 32, 70 - brt_red.seated_capacity, brt_red.total_capacity = 32, 70 - premium_bus.seated_capacity, premium_bus.total_capacity = 32, 70 - express_bus.seated_capacity, express_bus.total_capacity = 32, 70 - local_bus.seated_capacity, local_bus.total_capacity = 32, 70 - - trrt_attrs = [] - mode5tod_attrs = [] - for elem_type in "TRANSIT_LINE", "TRANSIT_SEGMENT": - mapping = attr_map[elem_type] - for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): - if tcoved_type == "TRRT": - trrt_attrs.append((field, attr)) - elif tcoved_type == "MODE5TOD": - mode5tod_attrs.append((field, attr)) - network.create_attribute("TRANSIT_SEGMENT", "milepost") - - # Pre-process transit line (trrt) to know the route names for errors / warnings - transit_line_records = list(transit_line_data) - line_names = {} - for record in transit_line_records: - line_names[int(record["Route_ID"])] = str(record["Route_Name"]) - - links = dict((link["#hwyseg_guid"], link) for link in network.links()) - transit_routes = _defaultdict(lambda: []) - for record in transit_link_data: - line_ref = line_names.get(int(record["Route_ID"]), record["Route_ID"]) - link_id = record["Link_GUID"] - if "-" in record["Direction"]: - link_id = "-" + link_id - link = links.get(link_id) - if not link: - if "-" in record["Direction"]: - reverse_link = links.get("-" + link_id) - else: - reverse_link = links.get(link_id[1:]) - if reverse_link: - link = network.create_link(reverse_link.j_node, reverse_link.i_node, reverse_link.modes) - link.vertices = list(reversed(reverse_link.vertices)) - for attr in network.attributes("LINK"): - if attr not in set(["vertices"]): - link[attr] = reverse_link[attr] - link["@tcov_id"] = -1 * reverse_link["@tcov_id"] - link["#hwyseg_guid"] = link_id - links[link_id] = link - msg = "Transit line %s : Missing reverse link with ID %s (%s) (reverse link created)" % ( - line_ref, record["Link_GUID"], link) - self._log.append({"type": "text", "content": msg}) - self._error.append("Transit route import: " + msg) - link = reverse_link - if not link: - msg = "Transit line %s : No link with GUID %s, routing may not be correct" % ( - line_ref, record["Link_GUID"]) - self._log.append({"type": "text", "content": msg}) - self._error.append("Transit route import: " + msg) - fatal_errors += 1 - continue - - transit_routes[int(record["Route_ID"])].append(link) - - # lookup list of special tier 1 mode route names - tier1_rail_route_names = [str(n) for n in self._props["transit.newMode.route"]] - dummy_links = set([]) - transit_lines = {} - auto_mode = network.mode("d") - for record in transit_line_records: - try: - route = transit_routes[int(record["Route_ID"])] - # Find if name matches one of the names listed in transit.newMode.route and convert to tier 1 rail - is_tier1_rail = False - for name in tier1_rail_route_names: - if str(record["Route_Name"]).startswith(name): - is_tier1_rail = True - break - if is_tier1_rail: - vehicle_type = 45 - mode = network.transit_vehicle(vehicle_type).mode - else: - vehicle_type = int(record["Mode"]) * 10 - mode = network.transit_vehicle(vehicle_type).mode - prev_link = route[0] - itinerary = [prev_link] - for link in route[1:]: - if prev_link.j_node != link.i_node: # filling in the missing gap - msg = "Transit line %s (index %s): Links not adjacent, shortest path interpolation used (%s and %s)" % ( - record["Route_Name"], record["Route_ID"], prev_link["#hwyseg_guid"], link["#hwyseg_guid"]) - log_record = {"type": "text", "content": msg} - self._log.append(log_record) - sub_path = 
find_path(prev_link, link, mode) - itinerary.extend(sub_path) - log_record["content"] = log_record["content"] + " through %s links" % (len(sub_path)) - itinerary.append(link) - prev_link = link - - node_itinerary = [itinerary[0].i_node] + [l.j_node for l in itinerary] - missing_mode = 0 - for link in itinerary: - if mode not in link.modes: - link.modes |= set([mode]) - missing_mode += 1 - if missing_mode: - msg = "Transit line %s (index %s): missing mode added to %s link(s)" % ( - str(record["Route_Name"]), record["Route_ID"], missing_mode) - self._log.append({"type": "text", "content": msg}) - tline = network.create_transit_line( - str(record["Route_Name"]), vehicle_type, node_itinerary) - - for field, attr in trrt_attrs: - tline[attr] = float(record[field]) - if is_tier1_rail: - line_details = mode_details[11] - else: - line_details = mode_details[int(record["Mode"])] - for field, attr in mode5tod_attrs: - tline[attr] = float(line_details[field]) - #"XFERPENTM": "Transfer penalty time: " - #"WTXFERTM": "Transfer perception:" - # NOTE: an additional transfer penalty perception factor of 5.0 is included - # in assignment - tline["@transfer_penalty"] = float(line_details["XFERPENTM"]) * float(line_details["WTXFERTM"]) - tline.headway = tline["@headway_am"] if tline["@headway_am"] > 0 else 999 - tline.layover_time = 5 - - transit_lines[int(record["Route_ID"])] = tline - milepost = 0 - for segment in tline.segments(): - segment.milepost = milepost - milepost += segment.link.length - segment.allow_boardings = False - segment.allow_alightings = False - if auto_mode in segment.link.modes: - # segments on links with auto mode are ft1 = timau - segment.transit_time_func = 1 - else: - # ft2 = ul2 -> copied @trtime (fixed speed) - segment.transit_time_func = 2 - except Exception as error: - msg = "Transit line %s: %s %s" % (record["Route_Name"], type(error), error) - self._log.append({"type": "text", "content": msg}) - trace_text = _traceback.format_exc().replace("\n", "
      ") - self._log.append({"type": "text", "content": trace_text}) - self._error.append("Transit route import: line %s not created" % record["Route_Name"]) - fatal_errors += 1 - for link in dummy_links: - network.delete_link(link.i_node, link.j_node) - - line_stops = _defaultdict(lambda: []) - for record in transit_stop_data: - try: - line_name = line_names[int(record["Route_ID"])] - line_stops[line_name].append(record) - except KeyError: - self._log.append( - {"type": "text", - "content": "Stop %s: could not find transit line by ID %s (link GUID %s)" % ( - record["Stop_ID"], record["Route_ID"], record["Link_GUID"])}) - for stops in line_stops.itervalues(): - stops.sort(key=lambda stop: float(stop["Milepost"])) - - seg_float_attr_map = [] - seg_string_attr_map = [] - for field, (attr, t_type, e_type, desc) in attr_map["TRANSIT_SEGMENT"].iteritems(): - if t_type == "TRSTOP": - if e_type == "STRING": - seg_string_attr_map.append([field, attr]) - else: - seg_float_attr_map.append([field, attr]) - - for line_name, stops in line_stops.iteritems(): - tline = network.transit_line(line_name) - if not tline: - continue - itinerary = tline.segments(include_hidden=True) - segment = prev_segment = itinerary.next() - for stop in stops: - if "DUMMY" in stop["StopName"]: - continue - stop_link_id = stop['Link_GUID'] - node_id = int(stop['Node']) - while segment.link and segment.link["#hwyseg_guid"].lstrip("-") != stop_link_id: - segment = itinerary.next() - - if node_id == segment.i_node.number: - pass - elif segment.j_node and node_id == segment.j_node.number: - # if matches the J-node then the stop is on the next segment - segment = itinerary.next() - else: - if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: - msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( - line_name, stop["Route_ID"], segment, stop_link_id, node_id) - else: - msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( - line_name, stop["Route_ID"], stop_link_id, node_id) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - fatal_errors += 1 - # reset iterator to start back from previous segment - itinerary = tline.segments(include_hidden=True) - segment = itinerary.next() - while segment.id != prev_segment.id: - segment = itinerary.next() - continue - segment.allow_boardings = True - segment.allow_alightings = True - segment.dwell_time = min(tline.default_dwell_time, 99.99) - for field, attr in seg_string_attr_map: - segment[attr] = stop[field] - for field, attr in seg_float_attr_map: - segment[attr] = float(stop[field]) - prev_segment = segment - - def lookup_line(ident): - line = network.transit_line(ident) - if line: - return line.id - line = transit_lines.get(int(ident)) - if line: - return line.id - raise Exception("'%s' is not a route name or route ID" % ident) - - # Normalizing the case of the headers as different examples have been seen - for period, data in timed_xfer_data.iteritems(): - norm_data = [] - for record in data: - norm_record = {} - for key, val in record.iteritems(): - norm_record[key.lower()] = val - norm_data.append(norm_record) - - from_line, to_line, wait_time = [], [], [] - for i, record in enumerate(norm_data, start=2): - try: - from_line.append(lookup_line(record["from_line"])) - to_line.append(lookup_line(record["to_line"])) - wait_time.append(float(record["wait_time"])) - except Exception as error: - msg = "Error processing timexfer_%s.csv on file line %s: %s" % 
(period, i, error) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - fatal_errors += 1 - - timed_xfer = _dt.Data() - timed_xfer.add_attribute(_dt.Attribute("from_line", _np.array(from_line).astype("O"))) - timed_xfer.add_attribute(_dt.Attribute("to_line", _np.array(to_line).astype("O"))) - timed_xfer.add_attribute(_dt.Attribute("wait_time", _np.array(wait_time))) - # Creates and saves the new table - gen_utils.DataTableProc("%s_timed_xfer_%s" % (self.data_table_name, period), data=timed_xfer) - - if fatal_errors > 0: - raise Exception("Import of transit lines: %s fatal errors found" % fatal_errors) - self._log.append({"type": "text", "content": "Import transit lines complete"}) - - def calc_transit_attributes(self, network): - # for link in network.links(): - # if link.type == 0: # walk only links have FC ==0 - # link.type = 99 - - fares_file_name = FILE_NAMES["FARES"] - special_fare_path = _join(self.source, fares_file_name) - if not os.path.isfile(special_fare_path): - return - - # ON TRANSIT LINES - # Set 3-period headway based on revised headway calculation - for line in network.transit_lines(): - for period in ["ea", "am", "md", "pm", "ev"]: - line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) - - def get_line(line_id): - line = network.transit_line(line_id) - if line is None: - raise Exception("%s: line does not exist: %s" % (fares_file_name, line_id)) - return line - - # Special incremental boarding and in-vehicle fares - # to recreate the coaster zone fares - self._log.append({"type": "header", "content": "Apply special_fares to transit lines"}) - with open(special_fare_path) as fare_file: - self._log.append({"type": "text", "content": "Using fare details (for coaster) from %s" % fares_file_name}) - special_fares = None - yaml_installed = True - try: - import yaml - special_fares = yaml.load(fare_file) - self._log.append({"type": "text", "content": yaml.dump(special_fares).replace("\n", "
      ")}) - except ImportError: - yaml_installed = False - except: - pass - if special_fares is None: - try: - import json - special_fares = json.load(fare_file) - self._log.append({"type": "text", "content": json.dumps(special_fares, indent=4).replace("\n", "
      ")}) - except: - pass - if special_fares is None: - msg = "YAML or JSON" if yaml_installed else "JSON (YAML parser not installed)" - raise Exception(fares_file_name + ": file could not be parsed as " + msg) - - - for record in special_fares["boarding_cost"]["base"]: - line = get_line(record["line"]) - line["@fare"] = 0 - for seg in line.segments(): - seg["@coaster_fare_board"] = record["cost"] - for record in special_fares["boarding_cost"].get("stop_increment", []): - line = get_line(record["line"]) - for seg in line.segments(True): - if record["stop"] in seg["#stop_name"]: - seg["@coaster_fare_board"] += record["cost"] - break - for record in special_fares["in_vehicle_cost"]: - line = get_line(record["line"]) - for seg in line.segments(True): - if record["from"] in seg["#stop_name"]: - seg["@coaster_fare_inveh"] = record["cost"] - break - pass_cost_keys = ['day_pass', 'regional_pass'] - pass_costs = [] - for key in pass_cost_keys: - cost = special_fares.get(key) - if cost is None: - raise Exception("key '%s' missing from %s" % (key, fares_file_name)) - pass_costs.append(cost) - pass_values = _dt.Data() - pass_values.add_attribute(_dt.Attribute("pass_type", _np.array(pass_cost_keys).astype("O"))) - pass_values.add_attribute(_dt.Attribute("cost", _np.array(pass_costs).astype("f8"))) - gen_utils.DataTableProc("%s_transit_passes" % self.data_table_name, data=pass_values) - self._log.append({"type": "text", "content": "Apply special_fares to transit lines complete"}) - - def renumber_base_nodes(self, network): - tracker = gen_utils.AvailableNodeIDTracker(network) - nodes = [n for n in network.nodes() if n.number > 999999] - nodes = sorted(nodes, key=lambda x: x.number, reverse=True) - if nodes: - self._log.append({"type": "text", "content": "Renumbered %s nodes" % len(nodes)}) - for n in nodes: - old_number = n.number - n.number = tracker.get_id() - self._log.append({"type": "text", "content": " - renumbered %s to %s " % (old_number, n.number)}) - - def create_turns(self, network): - self._log.append({"type": "header", "content": "Import turns and turn restrictions"}) - self._log.append({"type": "text", "content": "Process turns for turn prohibited by ID"}) - turn_data = gen_utils.DataTableProc("Turns", self.source) - if self.save_data_tables: - turn_data.save("%s_turns" % self.data_table_name, self.overwrite) - # Process turns.csv for prohibited turns penalty - for i, record in enumerate(turn_data): - from_node_id, to_node_id, at_node_id = record["FromNode"], record["ToNode"], record["MidNode"] - at_node = network.node(at_node_id) - if at_node and not at_node.is_intersection: - try: - network.create_intersection(at_node) - except Exception as error: - text = ("record %s turn from %s, at %s, to %s: cannot create intersection" % - (i, from_node_id, at_node_id, to_node_id)) - self._log.append({"type": "text", "content": text}) - trace_text = _traceback.format_exc().replace("\n", "
      ") - self._log.append({"type": "text", "content": trace_text}) - self._error.append(text) - continue - turn = network.turn(from_node_id, at_node_id, to_node_id) - if at_node is None: - text = ("record %s turn from %s, at %s, to %s: at node does not exist" % - (i, from_node_id, at_node_id, to_node_id)) - self._log.append({"type": "text", "content": text}) - self._error.append(text) - elif turn is None: - text = ("record %s turn from %s, at %s, to %s: does not form a turn" % - (i, from_node_id, at_node_id, to_node_id)) - self._log.append({"type": "text", "content": text}) - self._error.append(text) - else: - turn.penalty_func = 0 # prohibit turn - # NOTE: could support penalty value - # turn.penalty_func = 1 - # turn.data1 = float(record["penalty"]) - self._log.append({"type": "text", "content": "Import turns and turn prohibitions complete"}) - - def calc_traffic_attributes(self, network): - self._log.append({"type": "header", "content": "Calculate derived traffic attributes"}) - # "COST": "@cost_operating" - # "ITOLL": "@toll_flag" # ITOLL - Toll + 100 *[0,1] if managed lane (I-15 tolls) - # Note: toll_flag is no longer used - # "ITOLL2": "@toll" # ITOLL2 - Toll - # "ITOLL3": "@cost_auto" # ITOLL3 - Toll + AOC - # "@cost_hov" - # "ITOLL4": "@cost_med_truck" # ITOLL4 - Toll * 1.03 + AOC - # "ITOLL5": "@cost_hvy_truck" # ITOLL5 - Toll * 2.33 + AOC - fatal_errors = 0 - try: - aoc = float(self._props["aoc.fuel"]) + float(self._props["aoc.maintenance"]) - except ValueError: - raise Exception("Error during float conversion for aoc.fuel or aoc.maintenance from sandag_abm.properties file") - scenario_year = int(self._props["scenarioYear"]) - periods = ["EA", "AM", "MD", "PM", "EV"] - time_periods = ["_ea", "_am", "_md", "_pm", "_ev"] - src_time_periods = ["_op", "_am", "_op", "_pm", "_op"] - mode_d = network.mode('d') - - # Calculate upstream and downstream interchange distance - # First, label the intersection nodes as nodes with type 1 links (freeway) and - # type 8 links (freeway-to-freeway ramp) - network.create_attribute("NODE", "is_interchange") - interchange_points = [] - for node in network.nodes(): - adj_links = list(node.incoming_links()) + list(node.outgoing_links()) - has_freeway_links = bool( - [l for l in adj_links - if l.type == 1 and mode_d in l.modes]) - has_ramp_links = bool( - [l for l in adj_links - if l.type == 8 and mode_d in l.modes and not "HOV" in l["#name"]]) - if has_freeway_links and has_ramp_links: - node.is_interchange = True - interchange_points.append(node) - else: - node.is_interchange = False - for node in network.nodes(): - node["@interchange"] = node.is_interchange - - for link in network.links(): - if link.type == 1 and mode_d in link.modes: - link["@intdist_down"] = interchange_distance(link, "DOWNSTREAM") - link["@intdist_up"] = interchange_distance(link, "UPSTREAM") - self._log.append({"type": "text", "content": "Calculate of nearest interchange distance complete"}) - - # Static reliability parameters - # freeway coefficients - freeway_rel = { - "intercept": 0.1078, - "speed>70": 0.01393, - "upstream": 0.011, - "downstream": 0.0005445, - } - # arterial/ramp/other coefficients - road_rel = { - "intercept": 0.0546552, - "lanes": { - 1: 0.0, - 2: 0.0103589, - 3: 0.0361211, - 4: 0.0446958, - 5: 0.0 - }, - "speed": { - "<35": 0, - 35: 0.0075674, - 40: 0.0091012, - 45: 0.0080996, - 50: -0.0022938, - ">50": -0.0046211 - }, - "control": { - 0: 0, # Uncontrolled - 1: 0.0030973, # Signal - 2: -0.0063281, # Stop - 3: -0.0063281, # Stop - 4: 0.0127692, # Other, 
Railway, etc. - } - } - for link in network.links(): - # Change SR125 toll speed to 70MPH - if link["@hov"] == 4 and link.type == 1: - link["@speed_posted"] = 70 - link["@cost_operating"] = link.length * aoc - for time in time_periods: - # add link delay (30 sec=0.5mins) to HOV connectors to discourage travel - if link.type == 8 and (link["@hov"] == 2 or link["@hov"] == 3): - link["@time_link" + time] = link["@time_link" + time] + 0.375 - - # make speed on HOV lanes (70mph) the same as parallel GP lanes (65mph) - # - set speed back to posted speed - increase travel time by (speed_adj/speed_posted) - if link.type == 1 and (link["@hov"] == 2 or link["@hov"] == 3): - speed_adj = link["@speed_adjusted"] - speed_posted = link["@speed_posted"] - if speed_adj>0: - link["@time_link" + time] = (speed_adj/(speed_posted*1.0)) * link["@time_link" + time] - - # Required file - vehicle_class_factor_file = FILE_NAMES["VEHICLE_CLASS"] - facility_factors = _defaultdict(lambda: {}) - facility_factors["DEFAULT_FACTORS"] = { - "ALL": { - "auto": 1.0, - "hov2": 1.0, - "hov3": 1.0, - "lgt_truck": 1.0, - "med_truck": 1.03, - "hvy_truck": 2.03 - }, - "count": 0 - } - if os.path.exists(_join(self.source, vehicle_class_factor_file)): - msg = "Adjusting tolls based on factors from %s" % vehicle_class_factor_file - self._log.append({"type": "text", "content": msg}) - # NOTE: CSV Reader sets the field names to UPPERCASE for consistency - with gen_utils.CSVReader(_join(self.source, vehicle_class_factor_file)) as r: - for row in r: - if "YEAR" in r.fields and int(row["YEAR"]) != scenario_year: # optional year column - continue - name = row["FACILITY_NAME"] - # optional time-of-day entry, default to ALL if no column or blank - fac_time = row.get("TIME_OF_DAY") - if fac_time is None: - fac_time = "ALL" - facility_factors[name][fac_time] = { - "auto": float(row["DA_FACTOR"]), - "hov2": float(row["S2_FACTOR"]), - "hov3": float(row["S3_FACTOR"]), - "lgt_truck": float(row["TRK_L_FACTOR"]), - "med_truck": float(row["TRK_M_FACTOR"]), - "hvy_truck": float(row["TRK_H_FACTOR"]) - } - facility_factors[name]["count"] = 0 - - # validate ToD entry, either list EA, AM, MD, PM and EV, or ALL, but not both - for name, factors in facility_factors.iteritems(): - # default keys should be "ALL" and "count" - if "ALL" in factors: - if len(factors) > 2: - fatal_errors += 1 - msg = ("Individual time periods and 'ALL' (or blank) listed under " - "TIME_OF_DAY column in {} for facility {}").format(vehicle_class_factor_file, name) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - elif set(periods + ["count"]) != set(factors.keys()): - fatal_errors += 1 - msg = ("Missing time periods {} under TIME_OF_DAY column in {} for facility {}").format( - (set(periods) - set(factors.keys())), vehicle_class_factor_file, name) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - - def lookup_link_name(link): - for attr_name in ["#name", "#name_from", "#name_to"]: - for name, _factors in facility_factors.iteritems(): - if name in link[attr_name]: - return _factors - return facility_factors["DEFAULT_FACTORS"] - - def match_facility_factors(link): - factors = lookup_link_name(link) - factors["count"] += 1 - factors = _copy(factors) - del factors["count"] - # @hov = 2 or 3 overrides hov2 and hov3 costs - if link["@hov"] == 2: - for _, time_factors in factors.iteritems(): - time_factors["hov2"] = 0.0 - time_factors["hov3"] = 0.0 - elif link["@hov"] == 3: - for _, time_factors in factors.iteritems(): - 
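                    # HOV3+ facility: vehicles with 3+ occupants are not tolled, so the
                    # hov3 factor is zeroed for every time-of-day entry (hov2 still pays).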
time_factors["hov3"] = 0.0 - return factors - - vehicle_classes = ["auto", "hov2", "hov3", "lgt_truck", "med_truck", "hvy_truck"] - for link in network.links(): - if sum(link["@toll" + time] for time in time_periods) > 0: - factors = match_facility_factors(link) - for time, period in zip(time_periods, periods): - time_factors = factors.get(period, factors.get("ALL")) - for name in vehicle_classes: - link["@cost_" + name + time] = time_factors[name] * link["@toll" + time] + link["@cost_operating"] - else: - for time in time_periods: - for name in vehicle_classes: - link["@cost_" + name + time] = link["@cost_operating"] - for name, class_factors in facility_factors.iteritems(): - msg = "Facility name '%s' matched to %s links." % (name, class_factors["count"]) - self._log.append({"type": "text2", "content": msg}) - - self._log.append({ - "type": "text", - "content": "Calculation and time period expansion of costs, tolls, capacities and times complete"}) - - # calculate static reliability - for link in network.links(): - for time in time_periods: - sta_reliability = "@sta_reliability" + time - # if freeway apply freeway parameters to this link - if link["type"] == 1 and link["@lane" + time] > 0: - high_speed_factor = freeway_rel["speed>70"] if link["@speed_posted"] >= 70 else 0.0 - upstream_factor = freeway_rel["upstream"] * 1 / link["@intdist_up"] - downstream_factor = freeway_rel["downstream"] * 1 / link["@intdist_down"] - link[sta_reliability] = ( - freeway_rel["intercept"] + high_speed_factor + upstream_factor + downstream_factor) - # arterial/ramp/other apply road parameters - elif link["type"] <= 9 and link["@lane" + time] > 0: - lane_factor = road_rel["lanes"].get(link["@lane" + time], 0.0) - speed_bin = int(link["@speed_posted"] / 5) * 5 # truncate to multiple of 5 - if speed_bin < 35: - speed_bin = "<35" - elif speed_bin > 50: - speed_bin = ">50" - speed_factor = road_rel["speed"][speed_bin] - control_bin = min(max(link["@traffic_control"], 0), 4) - control_factor = road_rel["control"][control_bin] - link[sta_reliability] = road_rel["intercept"] + lane_factor + speed_factor + control_factor - else: - link[sta_reliability] = 0.0 - self._log.append({"type": "text", "content": "Calculate of link static reliability factors complete"}) - - # Cycle length matrix - # Intersecting Link - # Approach Link 2 3 4 5 6 7 8 9 - # FC Description - # 2 Prime Arterial 2.5 2 2 2 2 2 2 2 - # 3 Major Arterial 2 2 2 2 2 2 2 2 - # 4 Collector 2 2 1.5 1.5 1.5 1.5 1.5 1.5 - # 5 Local Collector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - # 6 Rural Collector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - # 7 Local Road 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - # 8 Freeway connector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - # 9 Local Ramp 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - - # Volume-delay functions - # fd10: freeway node approach - # fd11: non-intersection node approach - # fd20: cycle length 1.25 - # fd21: cycle length 1.5 - # fd22: cycle length 2.0 - # fd23: cycle length 2.5 - # fd24: cycle length 2.5 and metered ramp - # fd25: freeway node approach AM and PM only - network.create_attribute("LINK", "green_to_cycle") - network.create_attribute("LINK", "cycle") - vdf_cycle_map = {1.25: 20, 1.5: 21, 2.0: 22, 2.5: 23} - for node in network.nodes(): - incoming = list(node.incoming_links()) - outgoing = list(node.outgoing_links()) - is_signal = False - for link in incoming: - if link["@green_to_cycle_init"] > 0: - is_signal = True - break - if is_signal: - lcs = [link.type for link in incoming + outgoing] - min_lc = max(lcs) # Note: minimum class is 
actually the HIGHEST value, - max_lc = min(lcs) # and maximum is the LOWEST - - for link in incoming: - # Metered ramps - if link["@traffic_control"] in [4, 5]: - link["cycle"] = 2.5 - link["green_to_cycle"] = 0.42 - link.volume_delay_func = 24 - # Stops - elif link["@traffic_control"] in [2, 3]: - link["cycle"] = 1.25 - link["green_to_cycle"] = 0.42 - link.volume_delay_func = 20 - elif link["@green_to_cycle_init"] > 0 and is_signal: - if link.type == 2: - c_len = 2.5 if min_lc == 2 else 2.0 - elif link.type == 3: - c_len = 2.0 # Major arterial & anything - elif link.type == 4: - c_len = 1.5 if max_lc > 2 else 2.0 - elif link.type > 4: - if max_lc > 4: - c_len = 1.25 - elif max_lc == 4: - c_len = 1.5 - else: - c_len = 2.0 - if link["@green_to_cycle_init"] > 10: - link["green_to_cycle"] = link["@green_to_cycle_init"] / 100.0 - if link["green_to_cycle"] > 1.0: - link["green_to_cycle"] = 1.0 - link["cycle"] = c_len - link.volume_delay_func = vdf_cycle_map[c_len] - elif link.type == 1: - link.volume_delay_func = 10 # freeway - else: - link.volume_delay_func = 11 # non-controlled approach - self._log.append({"type": "text", "content": "Derive cycle, green_to_cycle, and VDF by approach node complete"}) - - for link in network.links(): - if link.volume_delay_func in [10, 11]: - continue - if link["@traffic_control"] in [4, 5]: - # Ramp meter controlled links are only enabled during the peak periods - for time in ["_am", "_pm"]: - link["@cycle" + time] = link["cycle"] - link["@green_to_cycle" + time] = link["green_to_cycle"] - else: - for time in time_periods: - link["@cycle" + time] = link["cycle"] - link["@green_to_cycle" + time] = link["green_to_cycle"] - self._log.append({"type": "text", "content": "Setting of time period @cycle and @green_to_cycle complete"}) - - network.delete_attribute("LINK", "green_to_cycle") - network.delete_attribute("LINK", "cycle") - network.delete_attribute("NODE", "is_interchange") - self._log.append({"type": "text", "content": "Calculate derived traffic attributes complete"}) - if fatal_errors > 0: - raise Exception("%s fatal errors during calculation of traffic attributes" % fatal_errors) - return - - def check_zone_access(self, network, mode): - # Verify that every centroid has at least one available - # access and egress connector - for centroid in network.centroids(): - access = egress = False - for link in centroid.outgoing_links(): - if mode in link.modes: - if link.j_node.is_intersection: - for turn in link.outgoing_turns(): - if turn.i_node != turn.k_node and turn.penalty_func != 0: - egress = True - else: - egress = True - if not egress: - raise Exception("No egress permitted from zone %s" % centroid.id) - for link in centroid.incoming_links(): - if mode in link.modes: - if link.j_node.is_intersection: - for turn in link.incoming_turns(): - if turn.i_node != turn.k_node and turn.penalty_func != 0: - access = True - else: - access = True - if not access: - raise Exception("No access permitted to zone %s" % centroid.id) - - @_m.logbook_trace("Set database functions (VDF, TPF and TTF)") - def set_functions(self, scenario): - create_function = _m.Modeller().tool( - "inro.emme.data.function.create_function") - set_extra_function_params = _m.Modeller().tool( - "inro.emme.traffic_assignment.set_extra_function_parameters") - emmebank = self.emmebank - for f_id in ["fd10", "fd11", "fd20", "fd21", "fd22", "fd23", "fd24", "fd25", - "fp1", "ft1", "ft2", "ft3", "ft4"]: - function = emmebank.function(f_id) - if function: - emmebank.delete_function(function) - - 
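        # The functions recreated below pair a BPR-style link delay, e.g.
        # ul1 * (1 + 0.24 * ((volau + volad) / ul3) ** 5.5) on freeways and
        # ul1 * (1 + 0.8 * ((volau + volad) / ul3) ** 4) elsewhere, with a signalized
        # approach delay scaled by the smartSignal factors (LC, MA, PA for collector,
        # major and prime/primary arterial approaches) and a reliability term driven by
        # @sta_reliability (el2) and the ATDM factor.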
smartSignalf_CL = self._props["smartSignal.factor.LC"] - smartSignalf_MA = self._props["smartSignal.factor.MA"] - smartSignalf_PA = self._props["smartSignal.factor.PA"] - atdmf = self._props["atdm.factor"] - - reliability_tmplt = ( - "* (1 + el2 + {0}*(".format(atdmf)+ - "( {factor[LOS_C]} * ( put(get(1).min.1.5) - {threshold[LOS_C]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_C]})" - "+ ( {factor[LOS_D]} * ( get(2) - {threshold[LOS_D]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_D]})" - "+ ( {factor[LOS_E]} * ( get(2) - {threshold[LOS_E]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_E]})" - "+ ( {factor[LOS_FL]} * ( get(2) - {threshold[LOS_FL]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_FL]})" - "+ ( {factor[LOS_FH]} * ( get(2) - {threshold[LOS_FH]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_FH]})" - "))") - parameters = { - "freeway": { - "factor": { - "LOS_C": 0.2429, "LOS_D": 0.1705, "LOS_E": -0.2278, "LOS_FL": -0.1983, "LOS_FH": 1.022 - }, - "threshold": { - "LOS_C": 0.7, "LOS_D": 0.8, "LOS_E": 0.9, "LOS_FL": 1.0, "LOS_FH": 1.2 - }, - }, - "road": { # for arterials, ramps, collectors, local roads, etc. - "factor": { - "LOS_C": 0.1561, "LOS_D": 0.0, "LOS_E": 0.0, "LOS_FL": -0.449, "LOS_FH": 0.0 - }, - "threshold": { - "LOS_C": 0.7, "LOS_D": 0.8, "LOS_E": 0.9, "LOS_FL": 1.0, "LOS_FH": 1.2 - }, - } - } - # freeway fd10 - create_function( - "fd10", - "(ul1 * (1.0 + 0.24 * put((volau + volad) / ul3) ** 5.5))" - + reliability_tmplt.format(**parameters["freeway"]), - emmebank=emmebank) - # non-freeway link which is not an intersection approach fd11 - create_function( - "fd11", - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0))" - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd20", # Local collector and lower intersection and stop controlled approaches - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "1.25 / 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))" - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd21", # Collector intersection approaches - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "{0} * 1.5/ 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_CL) - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd22", # Major arterial and major or prime arterial intersection approaches - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "{0} * 2.0 / 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_MA) - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd23", # Primary arterial intersection approaches - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "{0} * 2.5/ 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_PA) - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd24", # Metered ramps - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "2.5/ 2 * (1-el1) ** 2 * (1.0 + 6.0 * ( (volau + volad) / el3 ) ** 2.0))" - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - # freeway fd25 (AM and PM only) - create_function( - "fd25", - "(ul1 * (1.0 + 0.6 * put((volau + volad) / ul3) ** 4))" - + reliability_tmplt.format(**parameters["freeway"]), - emmebank=emmebank) - - set_extra_function_params( - el1="@green_to_cycle", el2="@sta_reliability", el3="@capacity_inter_am", - 
emmebank=emmebank) - - create_function("fp1", "up1", emmebank=emmebank) # fixed cost turns stored in turn data 1 (up1) - - # buses in mixed traffic, use auto time - create_function("ft1", "ul1", emmebank=emmebank) - # fixed speed for separate guideway operations - create_function("ft2", "ul2", emmebank=emmebank) - # special 0-cost segments for prohibition of walk to different stop from centroid - create_function("ft3", "0", emmebank=emmebank) - # fixed guideway systems according to vehicle speed (not used at the moment) - create_function("ft4", "60 * length / speed", emmebank=emmebank) - - @_m.logbook_trace("Traffic zone connectivity check") - def check_connectivity(self, scenario): - modeller = _m.Modeller() - sola_assign = modeller.tool( - "inro.emme.traffic_assignment.sola_traffic_assignment") - set_extra_function_para = modeller.tool( - "inro.emme.traffic_assignment.set_extra_function_parameters") - create_matrix = _m.Modeller().tool( - "inro.emme.data.matrix.create_matrix") - net_calc = gen_utils.NetworkCalculator(scenario) - - emmebank = scenario.emmebank - zone_index = dict(enumerate(scenario.zone_numbers)) - num_processors = dem_utils.parse_num_processors("MAX-1") - - # Note matrix is also created in initialize_matrices - create_matrix("ms1", "zero", "zero", scenario=scenario, overwrite=True) - with gen_utils.temp_matrices(emmebank, "FULL", 1) as (result_matrix,): - result_matrix.name = "TEMP_AUTO_TRAVEL_TIME" - set_extra_function_para( - el1="@green_to_cycle_am", - el2="@sta_reliability_am", - el3="@capacity_inter_am", emmebank=emmebank) - net_calc("ul1", "@time_link_am", "modes=d") - net_calc("ul3", "@capacity_link_am", "modes=d") - net_calc("lanes", "@lane_am", "modes=d") - spec = { - "type": "SOLA_TRAFFIC_ASSIGNMENT", - "background_traffic": None, - "classes": [ - { - "mode": "d", - "demand": 'ms"zero"', - "generalized_cost": None, - "results": { - "od_travel_times": {"shortest_paths": result_matrix.named_id} - } - } - ], - "stopping_criteria": { - "max_iterations": 0, "best_relative_gap": 0.0, - "relative_gap": 0.0, "normalized_gap": 0.0 - }, - "performance_settings": {"number_of_processors": num_processors}, - } - sola_assign(spec, scenario=scenario) - travel_time = result_matrix.get_numpy_data(scenario) - - is_disconnected = (travel_time == 1e20) - disconnected_pairs = is_disconnected.sum() - if disconnected_pairs > 0: - error_msg = "Connectivity error(s) between %s O-D pairs" % disconnected_pairs - self._log.append({"type": "header", "content": error_msg}) - count_disconnects = [] - for axis, term in [(0, "from"), (1, "to")]: - axis_totals = is_disconnected.sum(axis=axis) - for i, v in enumerate(axis_totals): - if v > 0: - count_disconnects.append((zone_index[i], term, v)) - count_disconnects.sort(key=lambda x: x[2], reverse=True) - for z, direction, count in count_disconnects[:50]: - msg ="Zone %s disconnected %s %d other zones" % (z, direction, count) - self._log.append({"type": "text", "content": msg}) - if disconnected_pairs > 50: - self._log.append({"type": "text", "content": "[List truncated]"}) - raise Exception(error_msg) - self._log.append({"type": "header", "content": - "Zone connectivity verified for AM period on SOV toll ('S') mode"}) - scenario.has_traffic_results = False - - def log_report(self): - report = _m.PageBuilder(title="Import network from TNED files report") - try: - if self._error: - report.add_html("
<div>Errors detected during import: %s</div>" % len(self._error))
-                error_msg = ["<ul>"]
-                for error in self._error:
-                    error_msg.append("<li>%s</li>" % error)
-                error_msg.append("</ul>")
-                report.add_html("".join(error_msg))
-            else:
-                report.add_html("<div>No errors detected during import :-)")
-
-            for item in self._log:
-                if item["type"] == "text":
-                    report.add_html("<div>%s</div>" % item["content"])
-                if item["type"] == "text2":
-                    report.add_html("<div>%s</div>" % item["content"])
-                elif item["type"] == "header":
-                    report.add_html("<h3>%s</h3>" % item["content"])
-                elif item["type"] == "table":
-                    table_msg = ["<div><table>", "<h3>%s</h3>" % item["title"]]
-                    if "header" in item:
-                        table_msg.append("<tr>")
-                        for label in item["header"]:
-                            table_msg.append("<th>%s</th>" % label)
-                        table_msg.append("</tr>")
-                    for row in item["content"]:
-                        table_msg.append("<tr>")
-                        for cell in row:
-                            table_msg.append("<td>%s</td>" % cell)
-                        table_msg.append("</tr>")
-                    table_msg.append("</table></div>
      ") - report.add_html("".join(table_msg)) - - except Exception as error: - # no raise during report to avoid masking real error - report.add_html("Error generating report") - report.add_html(unicode(error)) - report.add_html(_traceback.format_exc()) - - _m.logbook_write("Import network report", report.render()) - - -def get_node(network, number, coordinates, is_centroid=False): - node = network.node(number) - if not node: - node = network.create_node(number, is_centroid) - node.x, node.y = coordinates - return node - - -# shortest path interpolation -def find_path(orig_link, dest_link, mode): - visited = set([]) - visited_add = visited.add - back_links = {} - heap = [] - - for link in orig_link.j_node.outgoing_links(): - if mode in link.modes: - back_links[link] = None - _heapq.heappush(heap, (link["length"], link)) - - link_found = False - try: - while not link_found: - link_cost, link = _heapq.heappop(heap) - if link in visited: - continue - visited_add(link) - for outgoing in link.j_node.outgoing_links(): - if mode not in outgoing.modes: - continue - if outgoing in visited: - continue - back_links[outgoing] = link - if outgoing == dest_link: - link_found = True - break - outgoing_cost = link_cost + link["length"] - _heapq.heappush(heap, (outgoing_cost, outgoing)) - except IndexError: - pass # IndexError if heap is empty - if not link_found: - raise NoPathException( - "no path found between links with trcov_id %s and %s (Emme IDs %s and %s)" % ( - orig_link["@tcov_id"], dest_link["@tcov_id"], orig_link, dest_link)) - - prev_link = back_links[dest_link] - route = [] - while prev_link: - route.append(prev_link) - prev_link = back_links[prev_link] - return list(reversed(route)) - - -class NoPathException(Exception): - pass - - -def revised_headway(headway): - # CALCULATE REVISED HEADWAY - # new headway calculation is less aggressive; also only being used for initial wait - # It uses a negative exponential formula to calculate headway - # - if headway <= 10: - rev_headway = headway - else: - rev_headway = headway * (0.275 + 0.788 * _np.exp(-0.011*headway)) - return rev_headway - - -def interchange_distance(orig_link, direction): - visited = set([]) - visited_add = visited.add - back_links = {} - heap = [] - if direction == "DOWNSTREAM": - get_links = lambda l: l.j_node.outgoing_links() - check_far_node = lambda l: l.j_node.is_interchange - elif direction == "UPSTREAM": - get_links = lambda l: l.i_node.incoming_links() - check_far_node = lambda l: l.i_node.is_interchange - # Shortest path search for nearest interchange node along freeway - for link in get_links(orig_link): - _heapq.heappush(heap, (link["length"], link)) - interchange_found = False - try: - while not interchange_found: - link_cost, link = _heapq.heappop(heap) - if link in visited: - continue - visited_add(link) - if check_far_node(link): - interchange_found = True - break - for next_link in get_links(link): - if next_link in visited: - continue - next_cost = link_cost + link["length"] - _heapq.heappush(heap, (next_cost, next_link)) - except IndexError: - # IndexError if heap is empty - # case where start / end of highway, dist = 99 - return 99 - return orig_link["length"] / 2.0 + link_cost From a5e4eb9a6bbc86edf4cad2f7a6b016b66ab3bf9b Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 7 Dec 2023 16:04:27 -0500 Subject: [PATCH 18/43] New import_network from TNED --- .../emme/toolbox/import/import_network.py | 168 +++++++++--------- 1 file changed, 80 insertions(+), 88 deletions(-) diff --git 
a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index f70cd49e2..189604bf3 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -34,12 +34,12 @@ # - TNED_HwyNodes # - TNED_RailNet # - TNED_RailNodes +# - trrt +# - trlink +# - trstop # - Turns # The following files are also used (in the same directory as the *.gdb) # -# trrt.csv: header data for the transit lines -# trlink.csv: sequence of links (routing) of transit lines -# trstop.csv: stop data for the transit lines # mode5tod.csv: global (per-mode) transit cost and perception attributes # timexfer_.csv (optional): table of timed transfer pairs of lines, by period # special_fares.txt (optional): table listing special fares in terms of boarding and incremental in-vehicle costs. @@ -149,13 +149,13 @@ def page(self):
         <li>TNED_HwyNodes</li>
         <li>TNED_RailNet</li>
         <li>TNED_RailNodes</li>
+        <li>trrt</li>
+        <li>trlink</li>
+        <li>trstop</li>
         <li>Turns</li>
         </ul>
         The following files are also used (in the same directory as the *.gdb):
         <ul>
-        <li>trrt.csv</li>
-        <li>trlink.csv</li>
-        <li>trstop.csv</li>
         <li>mode5tod.csv</li>
         <li>timexfer_<period>.csv (optional)</li>
         <li>special_fares.txt (optional)</li>
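
This hunk reads the trrt and trlink route inputs from tables inside the TNED geodatabase rather than from standalone CSV files. A minimal sketch (not part of the patch) of checking that the expected layers are present and peeking at a few trrt records, assuming a placeholder gdb path and that fiona, which the tool imports as _fiona, is backed by a GDAL build that can read file geodatabases:

    import fiona

    # Placeholder path; the tool gets this from its "source" input.
    source_gdb = r"C:\abm_run\input\TNED_network.gdb"

    # Layers the importer expects after this change (per the list above).
    expected = set(["TNED_HwyNet", "TNED_HwyNodes", "TNED_RailNet",
                    "TNED_RailNodes", "trrt", "trlink", "Turns"])
    missing = expected - set(fiona.listlayers(source_gdb))
    if missing:
        raise Exception("gdb is missing expected layers: %s" % ", ".join(sorted(missing)))

    # Print the first few route headers from the trrt layer
    # (fields per the comment in create_transit_lines: Route_ID, Route_Name, Mode, ...).
    with fiona.open(source_gdb, layer="trrt") as trrt:
        for i, feature in enumerate(trrt):
            props = feature["properties"]
            print("%s  %s" % (props.get("Route_ID"), props.get("Route_Name")))
            if i >= 4:
                break

Checking the layer list up front fails faster, and with a clearer message, than letting the importer error out partway through a run.
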
    • @@ -311,32 +311,28 @@ def execute(self): ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), - ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes")), ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), ]), "TRANSIT_LINE": OrderedDict([ - ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), - ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), - ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), - ("Evening_Headway",("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), - ("EarlyAM_Headway",("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), - ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), - ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), - ("MD_Headway_rev", ("@headway_rev_md", "DERIVED", "EXTRA", "Midday revised headway")), - ("EV_Headway_rev", ("@headway_rev_ev", "DERIVED", "EXTRA", "Evening revised headway")), - ("EA_Headway_rev", ("@headway_rev_ea", "DERIVED", "EXTRA", "Early AM revised headway")), - ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), - ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), - ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), - ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), + ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), + ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), + ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), + ("Evening_Headway", ("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), + ("EarlyAM_Headway", ("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), + ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), + ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), + ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), + ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), + ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), + ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), ("DWELLTIME", ("default_dwell_time", "MODE5TOD", "INTERNAL", "")), - ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), - ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), - ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), - ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), - ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), - ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), + ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), + ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), + ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), + ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), + ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), + ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route 
name)")), ]), "TRANSIT_SEGMENT": OrderedDict([ ("Stop_ID", ("@stop_id", "TRSTOP", "EXTRA", "Stop ID from trcov")), @@ -475,18 +471,18 @@ def create_modes(self, network): mode_table = { "AUTO": [("d", "dummy auto")], "AUX_AUTO": [ - ("s", "SOV"), - ("h", "HOV2"), + ("h", "SOV"), + ("H", "HOV2"), ("i", "HOV3+"), - ("t", "TRKL"), - ("m", "TRKM"), - ("v", "TRKH"), - ("S", "SOV TOLL"), - ("H", "HOV2 TOLL"), - ("I", "HOV3+ TOLL"), - ("T", "TRKL TOLL"), - ("M", "TRKM TOLL"), - ("V", "TRKH TOLL"), + ("I", "TRKL"), + ("s", "TRKM"), + ("S", "TRKH"), + ("v", "SOV TOLL"), + ("V", "HOV2 TOLL"), + ("m", "HOV3+ TOLL"), + ("M", "TRKL TOLL"), + ("t", "TRKM TOLL"), + ("T", "TRKH TOLL"), ], "TRANSIT": [ ("b", "BUS" ), # (vehicle type 100, PCE=3.0) @@ -541,20 +537,20 @@ def create_modes(self, network): } modes_gp_lanes = { 0: set([]), - 1: set([network.mode(m_id) for m_id in "dvmtshiVMTSHI"]), # all modes - 2: set([network.mode(m_id) for m_id in "dmtshiMTSHI"]), # no heavy truck - 3: set([network.mode(m_id) for m_id in "dtshiTSHI"]), # no heavy or medium truck - 4: set([network.mode(m_id) for m_id in "dshiSHI"]), # no truck - 5: set([network.mode(m_id) for m_id in "dvV"]), # only heavy trucks - 6: set([network.mode(m_id) for m_id in "dvmVM"]), # heavy and medium trucks - 7: set([network.mode(m_id) for m_id in "dvmtVMT"]), # all trucks only (no passenger cars) + 1: set([network.mode(m_id) for m_id in "dhHiIsSvVmMtT"]), # all modes + 2: set([network.mode(m_id) for m_id in "dhHiIsvVmMt"]), # no heavy truck + 3: set([network.mode(m_id) for m_id in "dhHiIvVmM"]), # no heavy or medium truck + 4: set([network.mode(m_id) for m_id in "dhHivVm"]), # no truck + 5: set([network.mode(m_id) for m_id in "dST"]), # only heavy trucks + 6: set([network.mode(m_id) for m_id in "dsStT"]), # heavy and medium trucks + 7: set([network.mode(m_id) for m_id in "dIsSMtT"]), # all trucks only } - non_toll_modes = set([network.mode(m_id) for m_id in "vmtshi"]) + non_toll_modes = set([network.mode(m_id) for m_id in "hHiIsS"]) self._auto_mode_lookup = { "GP": modes_gp_lanes, "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), - "HOV2": set([network.mode(m_id) for m_id in "dhiHI"]), - "HOV3": set([network.mode(m_id) for m_id in "diI"]), + "HOV2": set([network.mode(m_id) for m_id in "dHiVm"]), + "HOV3": set([network.mode(m_id) for m_id in "dim"]), } def set_auto_modes(self, network, period): @@ -562,7 +558,6 @@ def set_auto_modes(self, network, period): # need to update the modes from the XTRUCK for their time of day # Note: only truck types 1, 3, 4, and 7 found in 2012 base network truck = "@truck_%s" % period.lower() - toll = "@toll_%s" % period.lower() lookup = self._auto_mode_lookup for link in network.links(): auto_modes = set([]) @@ -574,7 +569,7 @@ def set_auto_modes(self, network, period): auto_modes = lookup["GP"][link[truck]] elif link["@hov"] in [2, 3]: # managed lanes, free for HOV2 and HOV3+, tolls for SOV - if link[toll] > 0: + if link["@toll_ea"] + link["@toll_am"] + link["@toll_md"] + link["@toll_pm"] + link["@toll_ev"] > 0: auto_modes = lookup["TOLL"][link[truck]] # special case of I-15 managed lanes base year and 2020, no build elif link.type == 1 and link["@project_code"] in [41, 42, 486, 373, 711]: @@ -734,11 +729,9 @@ def create_transit_lines(self, network, attr_map): self._log.append({"type": "header", "content": "Import transit lines"}) fatal_errors = 0 # Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,Midday_Headway,Evening_Headway,EarlyAM_Headway,Night_Headway,Night_Hours,Config,Fare - 
#transit_line_data = gen_utils.DataTableProc("trrt", self.source) - transit_line_data = gen_utils.DataTableProc("trrt", _join(_dir(self.source), "trrt.csv")) + transit_line_data = gen_utils.DataTableProc("trrt", self.source) # Route_ID,Link_ID,Link_GUID,Direction - #transit_link_data = gen_utils.DataTableProc("trlink", self.source) - transit_link_data = gen_utils.DataTableProc("trlink", _join(_dir(self.source), "trlink.csv")) + transit_link_data = gen_utils.DataTableProc("trlink", self.source) # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,StopName #transit_stop_data = gen_utils.DataTableProc("trstop", self.source) transit_stop_data = gen_utils.DataTableProc("trstop", _join(_dir(self.source), "trstop.csv")) @@ -917,7 +910,7 @@ def create_transit_lines(self, network, attr_map): # segments on links with auto mode are ft1 = timau segment.transit_time_func = 1 else: - # ft2 = ul2 -> copied @trtime (fixed speed) + # ft2 = ul2 -> copied @trtime_link_XX (fixed speed) segment.transit_time_func = 2 except Exception as error: msg = "Transit line %s: %s %s" % (record["Route_Name"], type(error), error) @@ -961,39 +954,31 @@ def create_transit_lines(self, network, attr_map): if "DUMMY" in stop["StopName"]: continue stop_link_id = stop['Link_GUID'] - stop_node_id = int(stop['Node']) + node_id = int(stop['Node']) while segment.link and segment.link["#hwyseg_guid"].lstrip("-") != stop_link_id: segment = itinerary.next() if stop_node_id == segment.i_node.number: pass - elif segment.j_node and stop_node_id == segment.j_node.number: + elif segment.j_node and node_id == segment.j_node.number: # if matches the J-node then the stop is on the next segment segment = itinerary.next() else: - next_segment = None - if segment.j_node: - next_segment = itinerary.next() - if next_segment and next_segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id and \ - stop_node_id == next_segment.j_node.number: - # split link case, where stop is at the end of the next segment - segment = next_segment + if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: + msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( + line_name, stop["Route_ID"], segment, stop_link_id, node_id) else: - if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: - msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( - line_name, stop["Route_ID"], segment, stop_link_id, stop_node_id) - else: - msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( - line_name, stop["Route_ID"], stop_link_id, stop_node_id) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - fatal_errors += 1 - # reset iterator to start back from previous segment - itinerary = tline.segments(include_hidden=True) + msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( + line_name, stop["Route_ID"], stop_link_id, node_id) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + fatal_errors += 1 + # reset iterator to start back from previous segment + itinerary = tline.segments(include_hidden=True) + segment = itinerary.next() + while segment.id != prev_segment.id: segment = itinerary.next() - while segment.id != prev_segment.id: - segment = itinerary.next() - continue + continue segment.allow_boardings = True segment.allow_alightings = True segment.dwell_time = min(tline.default_dwell_time, 
99.99) @@ -1045,18 +1030,13 @@ def lookup_line(ident): self._log.append({"type": "text", "content": "Import transit lines complete"}) def calc_transit_attributes(self, network): - self._log.append({"type": "header", "content": "Calculate derived transit line attributes"}) - # ON TRANSIT LINES - # Set 3-period headway based on revised headway calculation - for line in network.transit_lines(): - for period in ["ea", "am", "md", "pm", "ev"]: - line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) - self._log.append({"type": "text", "content": "Revised headway calculation complete"}) + # for link in network.links(): + # if link.type == 0: # walk only links have FC ==0 + # link.type = 99 fares_file_name = FILE_NAMES["FARES"] special_fare_path = _join(self.source, fares_file_name) if not os.path.isfile(special_fare_path): - self._log.append({"type": "text", "content": "Special fares file %s not found" % fares_file_name}) return def get_line(line_id): @@ -1123,7 +1103,8 @@ def get_line(line_id): self._log.append({"type": "text", "content": "Apply special_fares to transit lines complete"}) def renumber_base_nodes(self, network): - tracker = gen_utils.AvailableNodeIDTracker(network) + # TODO: log node renumberings + tracker = AvailableNodeIDTracker(network) nodes = [n for n in network.nodes() if n.number > 999999] nodes = sorted(nodes, key=lambda x: x.number, reverse=True) if nodes: @@ -1707,7 +1688,7 @@ def log_report(self): error_msg.append("
    ") report.add_html("".join(error_msg)) else: - report.add_html("

    No errors detected during import :-)") + report.add_html("

    No errors detected during import :-)") for item in self._log: if item["type"] == "text": @@ -1793,6 +1774,17 @@ def find_path(orig_link, dest_link, mode): return list(reversed(route)) +class AvailableNodeIDTracker(object): + def __init__(self, network, start=999999): + self._network = network + self._node_id = start + + def get_id(self): + while self._network.node(self._node_id): + self._node_id -= 1 + return self._node_id + + class NoPathException(Exception): pass From 50969f432db72a00d995fde07465e255d22a38f3 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 7 Dec 2023 16:06:59 -0500 Subject: [PATCH 19/43] Changed @lane_restriction to @hov, and removed @truck_restriction (now per time period) --- .../export/export_data_loader_network.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/src/main/emme/toolbox/export/export_data_loader_network.py b/src/main/emme/toolbox/export/export_data_loader_network.py index f54fc1c39..3c77ebfec 100644 --- a/src/main/emme/toolbox/export/export_data_loader_network.py +++ b/src/main/emme/toolbox/export/export_data_loader_network.py @@ -206,14 +206,16 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, ("FFC", "type"), ("CLASS", "zero"), ("ASPD", "@speed_adjusted"), - ("YR", "@year_open_traffic"), - ("PROJ", "@project_code"), - ("FC", "type"), - ("HOV", "@hov"), - ("SPD", "@speed_posted"), - ("TSPD", "zero"), - ("WAY", "iway"), - ("MED", "@median"), + ("IYR", "@year_open_traffic"), + ("IPROJ", "@project_code"), + ("IJUR", "@jurisdiction_type"), + ("IFC", "type"), + ("IHOV", "@hov"), + #("ITRUCK", "@truck_restriction"), + ("ISPD", "@speed_posted"), + ("ITSPD", "zero"), + ("IWAY", "iway"), + ("IMED", "@median"), ("COST", "@cost_operating"), ] directional_attrs = [ From 2e5011a521d706d57fec2a2913e07a93eda53934 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Mon, 11 Dec 2023 13:44:58 -0500 Subject: [PATCH 20/43] Updating master run to have compatible call to new Import network (for TNED); Import network: correcting mode setting for tolls to be by time of day period --- src/main/emme/toolbox/import/import_network.py | 3 ++- src/main/emme/toolbox/master_run.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 189604bf3..e359c5a68 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -558,6 +558,7 @@ def set_auto_modes(self, network, period): # need to update the modes from the XTRUCK for their time of day # Note: only truck types 1, 3, 4, and 7 found in 2012 base network truck = "@truck_%s" % period.lower() + toll = "@toll_%s" % period.lower() lookup = self._auto_mode_lookup for link in network.links(): auto_modes = set([]) @@ -569,7 +570,7 @@ def set_auto_modes(self, network, period): auto_modes = lookup["GP"][link[truck]] elif link["@hov"] in [2, 3]: # managed lanes, free for HOV2 and HOV3+, tolls for SOV - if link["@toll_ea"] + link["@toll_am"] + link["@toll_md"] + link["@toll_pm"] + link["@toll_ev"] > 0: + if link[toll] > 0: auto_modes = lookup["TOLL"][link[truck]] # special case of I-15 managed lanes base year and 2020, no build elif link.type == 1 and link["@project_code"] in [41, 42, 486, 373, 711]: diff --git a/src/main/emme/toolbox/master_run.py b/src/main/emme/toolbox/master_run.py index 7908045fb..c3f607e42 100644 --- a/src/main/emme/toolbox/master_run.py +++ b/src/main/emme/toolbox/master_run.py 
@@ -86,6 +86,7 @@ import pyodbc import win32com.client as win32 import shutil +import glob import multiprocessing @@ -490,7 +491,7 @@ def __call__(self, main_directory, scenario_id, scenario_title, emmebank_title, self.complete_work(scenarioYear, input_dir, output_dir, mgraFile, "walkMgraEquivMinutes.csv") if not skipBuildNetwork: - source_gdb = _glob.glob(os.path.join(input_dir, "*.gdb")) + source_gdb = glog.glob(os.path.join(input_dir, "*.gdb")) if len(source_gdb) > 1: raise Exception("Multiple *.gdb files found in input directory") if len(source_gdb) < 1: From 8316515bc856b383b2ca4401e5f6ba02ce3b215f Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 14 Dec 2023 14:55:05 -0500 Subject: [PATCH 21/43] Updated comments in import_network to reference trstop.csv; used already imported glob as _glob in master_run --- src/main/emme/toolbox/import/import_network.py | 4 ++-- src/main/emme/toolbox/master_run.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index e359c5a68..ea35628aa 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -36,10 +36,10 @@ # - TNED_RailNodes # - trrt # - trlink -# - trstop # - Turns # The following files are also used (in the same directory as the *.gdb) # +# trstop.csv: stop data for the transit lines # mode5tod.csv: global (per-mode) transit cost and perception attributes # timexfer_.csv (optional): table of timed transfer pairs of lines, by period # special_fares.txt (optional): table listing special fares in terms of boarding and incremental in-vehicle costs. @@ -151,11 +151,11 @@ def page(self):
         <li>TNED_RailNodes</li>
         <li>trrt</li>
         <li>trlink</li>
-        <li>trstop</li>
         <li>Turns</li>
         </ul>
         The following files are also used (in the same directory as the *.gdb):
         <ul>
+        <li>trstop.csv</li>
         <li>mode5tod.csv</li>
         <li>timexfer_<period>.csv (optional)</li>
         <li>special_fares.txt (optional)</li>
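
With this change the stop data comes from trstop.csv alongside the geodatabase rather than from a trstop table inside it. A short sketch (not part of the patch) of loading that file with pandas, which the tool already imports, and checking a few of the columns named in the importer's comments; the input directory is a placeholder:

    import os
    import pandas as pd

    # Placeholder; in the tool this is the directory that contains the *.gdb.
    input_dir = r"C:\abm_run\input"

    # Stop attributes (the importer's comment lists Stop_ID, Route_ID, Link_ID,
    # Pass_Count, Milepost, Longitude, Latitude, HwyNode, TrnNode, StopName).
    trstop = pd.read_csv(os.path.join(input_dir, "trstop.csv"))

    required = ["Stop_ID", "Route_ID", "Link_ID", "Pass_Count", "Milepost", "StopName"]
    missing = [name for name in required if name not in trstop.columns]
    if missing:
        raise Exception("trstop.csv is missing expected columns: %s" % ", ".join(missing))

    print("%d stop records on %d routes" % (len(trstop), trstop["Route_ID"].nunique()))
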
    • diff --git a/src/main/emme/toolbox/master_run.py b/src/main/emme/toolbox/master_run.py index c3f607e42..084e7fac7 100644 --- a/src/main/emme/toolbox/master_run.py +++ b/src/main/emme/toolbox/master_run.py @@ -86,7 +86,6 @@ import pyodbc import win32com.client as win32 import shutil -import glob import multiprocessing @@ -491,7 +490,7 @@ def __call__(self, main_directory, scenario_id, scenario_title, emmebank_title, self.complete_work(scenarioYear, input_dir, output_dir, mgraFile, "walkMgraEquivMinutes.csv") if not skipBuildNetwork: - source_gdb = glog.glob(os.path.join(input_dir, "*.gdb")) + source_gdb = _glog.glob(os.path.join(input_dir, "*.gdb")) if len(source_gdb) > 1: raise Exception("Multiple *.gdb files found in input directory") if len(source_gdb) < 1: From 7f0d1fed6f54ff99f13bf4cb9b39c058315e9f43 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 14 Dec 2023 16:01:02 -0500 Subject: [PATCH 22/43] Adding back in TMO -> @trtime, removing references to @trtime_link_

      , now using @trtime for all time periods --- src/main/emme/toolbox/import/import_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index ea35628aa..8dc5bdcb3 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -311,6 +311,7 @@ def execute(self): ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), + ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes",)), ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), ]), @@ -911,7 +912,7 @@ def create_transit_lines(self, network, attr_map): # segments on links with auto mode are ft1 = timau segment.transit_time_func = 1 else: - # ft2 = ul2 -> copied @trtime_link_XX (fixed speed) + # ft2 = ul2 -> copied @trtime (fixed speed) segment.transit_time_func = 2 except Exception as error: msg = "Transit line %s: %s %s" % (record["Route_Name"], type(error), error) From f26f10c520f0c5417c8bb6f2e96455bcc41bc566 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Thu, 14 Dec 2023 16:09:37 -0500 Subject: [PATCH 23/43] Fixed typo glog to glob --- src/main/emme/toolbox/master_run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/emme/toolbox/master_run.py b/src/main/emme/toolbox/master_run.py index 084e7fac7..7908045fb 100644 --- a/src/main/emme/toolbox/master_run.py +++ b/src/main/emme/toolbox/master_run.py @@ -490,7 +490,7 @@ def __call__(self, main_directory, scenario_id, scenario_title, emmebank_title, self.complete_work(scenarioYear, input_dir, output_dir, mgraFile, "walkMgraEquivMinutes.csv") if not skipBuildNetwork: - source_gdb = _glog.glob(os.path.join(input_dir, "*.gdb")) + source_gdb = _glob.glob(os.path.join(input_dir, "*.gdb")) if len(source_gdb) > 1: raise Exception("Multiple *.gdb files found in input directory") if len(source_gdb) < 1: From 75d7e1a9d9d1d542854755cb0947aae29feee9d4 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Tue, 19 Dec 2023 11:45:53 -0500 Subject: [PATCH 24/43] Adjusting Available node ID logic in build_transit_scenario to match import_network: counting down from 999999 to find available node ID. Centralized implementation in general.py --- .../toolbox/assignment/build_transit_scenario.py | 8 ++++++++ src/main/emme/toolbox/import/import_network.py | 14 +------------- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/src/main/emme/toolbox/assignment/build_transit_scenario.py b/src/main/emme/toolbox/assignment/build_transit_scenario.py index 82949e324..d3c05a62d 100644 --- a/src/main/emme/toolbox/assignment/build_transit_scenario.py +++ b/src/main/emme/toolbox/assignment/build_transit_scenario.py @@ -340,6 +340,14 @@ def __call__(self, period, base_scenario, transit_emmebank, scenario_id, scenari scenario.publish_network(network) self._node_id_tracker = None + + ##copying auto_time to ul1, so it does not get wiped when transit connectors are created. 
+ if scenario.has_traffic_results and "@auto_time" in scenario.attributes("LINK"): + copy_att(from_attribute_name='timau', + to_attribute_name='ul1', + from_scenario=scenario, + to_scenario=scenario) + return scenario @_m.logbook_trace("Add timed-transfer links", save_arguments=True) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 8dc5bdcb3..ef15af7a3 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -1105,8 +1105,7 @@ def get_line(line_id): self._log.append({"type": "text", "content": "Apply special_fares to transit lines complete"}) def renumber_base_nodes(self, network): - # TODO: log node renumberings - tracker = AvailableNodeIDTracker(network) + tracker = gen_utils.AvailableNodeIDTracker(network) nodes = [n for n in network.nodes() if n.number > 999999] nodes = sorted(nodes, key=lambda x: x.number, reverse=True) if nodes: @@ -1776,17 +1775,6 @@ def find_path(orig_link, dest_link, mode): return list(reversed(route)) -class AvailableNodeIDTracker(object): - def __init__(self, network, start=999999): - self._network = network - self._node_id = start - - def get_id(self): - while self._network.node(self._node_id): - self._node_id -= 1 - return self._node_id - - class NoPathException(Exception): pass From 326ae829b84ef30deb0ffe17ac899dc0e73ee729 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Tue, 19 Dec 2023 14:23:30 -0500 Subject: [PATCH 25/43] Assignment: split off-peak (op) times to ea, md and pm; added back calculation of revised_headway to import network --- .../emme/toolbox/import/import_network.py | 45 +++++++++++-------- 1 file changed, 27 insertions(+), 18 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index ef15af7a3..2bfdf0825 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -316,24 +316,27 @@ def execute(self): ]), "TRANSIT_LINE": OrderedDict([ - ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), - ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), - ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), - ("Evening_Headway", ("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), - ("EarlyAM_Headway", ("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), - ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), - ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), - ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), - ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), - ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), - ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), - ("DWELLTIME", ("default_dwell_time", "MODE5TOD", "INTERNAL", "")), - ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), - ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), - ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), - ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), - ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), - ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), + ("AM_Headway", 
("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), + ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), + ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), + ("Evening_Headway",("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), + ("EarlyAM_Headway",("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), + ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), + ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), + ("MD_Headway_rev", ("@headway_rev_md", "DERIVED", "EXTRA", "Midday revised headway")), + ("EV_Headway_rev", ("@headway_rev_ev", "DERIVED", "EXTRA", "Evening revised headway")), + ("EA_Headway_rev", ("@headway_rev_ea", "DERIVED", "EXTRA", "Early AM revised headway")), + ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), + ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), + ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), + ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), + ("DWELLTIME", ("default_dwell_time" "MODE5TOD", "INTERNAL", "")), + ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), + ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), + ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), + ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), + ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), + ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), ]), "TRANSIT_SEGMENT": OrderedDict([ ("Stop_ID", ("@stop_id", "TRSTOP", "EXTRA", "Stop ID from trcov")), @@ -1041,6 +1044,12 @@ def calc_transit_attributes(self, network): if not os.path.isfile(special_fare_path): return + # ON TRANSIT LINES + # Set 3-period headway based on revised headway calculation + for line in network.transit_lines(): + for period in ["ea", "am", "md", "pm", "ev"]: + line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) + def get_line(line_id): line = network.transit_line(line_id) if line is None: From d16c5074d85927e822471893f6e3dc9bfd8640d7 Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Tue, 19 Dec 2023 16:01:30 -0800 Subject: [PATCH 26/43] Typos under import_network.py: - extra comma in line 314 - missing comma in line 333 --- .../emme/toolbox/import/import_network.py | 4 +- .../emme/toolbox/import/import_network.py.bak | 1836 +++++++++++++++++ 2 files changed, 1838 insertions(+), 2 deletions(-) create mode 100644 src/main/emme/toolbox/import/import_network.py.bak diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 2bfdf0825..a14dcb956 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -311,7 +311,7 @@ def execute(self): ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), - ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes",)), + ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes")), ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), ]), @@ -330,7 +330,7 @@ def execute(self): 
("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), - ("DWELLTIME", ("default_dwell_time" "MODE5TOD", "INTERNAL", "")), + ("DWELLTIME", ("default_dwell_time", "MODE5TOD", "INTERNAL", "")), ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), diff --git a/src/main/emme/toolbox/import/import_network.py.bak b/src/main/emme/toolbox/import/import_network.py.bak new file mode 100644 index 000000000..a32fdd5d1 --- /dev/null +++ b/src/main/emme/toolbox/import/import_network.py.bak @@ -0,0 +1,1836 @@ +#////////////////////////////////////////////////////////////////////////////// +#//// /// +#//// Copyright INRO, 2016-2017. /// +#//// Rights to use and modify are granted to the /// +#//// San Diego Association of Governments and partner agencies. /// +#//// This copyright notice must be preserved. /// +#//// /// +#//// import/import_network.py /// +#//// /// +#//// /// +#//// /// +#//// /// +#////////////////////////////////////////////////////////////////////////////// +# +# Imports the network from the input network files. +# +# +# Inputs: +# source: path to the location of the input network geodatabase +# traffic_scenario_id: optional scenario to store the imported network from the traffic files only +# transit_scenario_id: optional scenario to store the imported network from the transit files only +# merged_scenario_id: scenario to store the combined traffic and transit data from all network files +# title: the title to use for the imported scenario +# save_data_tables: if checked, create a data table for each reference file for viewing in the Emme Desktop +# data_table_name: prefix to use to identify all data tables +# overwrite: check to overwrite any existing data tables or scenarios with the same ID or name +# emmebank: the Emme database in which to create the scenario. Default is the current open database +# create_time_periods: if True (default), also create per-time period scenarios (required to run assignments) +# +# Files referenced: +# +# *.gdb: A Geodatabase file with the network data for both highway and transit. The following tables are used +# - TNED_HwyNet +# - TNED_HwyNodes +# - TNED_RailNet +# - TNED_RailNodes +# - trrt +# - trlink +# - Turns +# The following files are also used (in the same directory as the *.gdb) +# +# trstop.csv: stop data for the transit lines +# mode5tod.csv: global (per-mode) transit cost and perception attributes +# timexfer_.csv (optional): table of timed transfer pairs of lines, by period +# special_fares.txt (optional): table listing special fares in terms of boarding and incremental in-vehicle costs. 
+# off_peak_toll_factors.csv (optional): factors to calculate the toll for EA, MD, and EV periods from the OP toll input for specified facilities +# vehicle_class_toll_factors.csv (optional): factors to adjust the toll cost by facility name and class (DA, S2, S3, TRK_L, TRK_M, TRK_H) +# +# +# Script example: +""" + import os + modeller = inro.modeller.Modeller() + main_directory = os.path.dirname(os.path.dirname(modeller.desktop.project.path)) + source_file = os.path.join(main_directory, "input", "EMMEOutputs.gdb") + title = "Base 2012 scenario" + import_network = modeller.tool("sandag.import.import_network") + import_network(source_file, merged_scenario_id=100, title=title, + data_table_name="2012_base", overwrite=True) +""" + + +TOOLBOX_ORDER = 11 + + +import inro.modeller as _m +import inro.emme.datatable as _dt +import inro.emme.network as _network +from inro.emme.core.exception import Error as _NetworkError + +from itertools import izip as _izip +from collections import defaultdict as _defaultdict, OrderedDict +from contextlib import contextmanager as _context +import fiona as _fiona + +from math import ceil as _ceiling +from copy import deepcopy as _copy +import numpy as _np +import heapq as _heapq +import pandas as pd + +import traceback as _traceback +import os + +_join = os.path.join +_dir = os.path.dirname + + +gen_utils = _m.Modeller().module("sandag.utilities.general") +dem_utils = _m.Modeller().module("sandag.utilities.demand") + +FILE_NAMES = { + "FARES": "special_fares.txt", + "TIMEXFER": "timexfer_%s.csv", + "OFF_PEAK": "off_peak_toll_factors.csv", + "VEHICLE_CLASS": "vehicle_class_toll_factors.csv", + "MODE5TOD": "MODE5TOD.csv", +} + + +class ImportNetwork(_m.Tool(), gen_utils.Snapshot): + + source = _m.Attribute(unicode) + scenario_id = _m.Attribute(int) + overwrite = _m.Attribute(bool) + title = _m.Attribute(unicode) + save_data_tables = _m.Attribute(bool) + data_table_name = _m.Attribute(unicode) + create_time_periods = _m.Attribute(bool) + + tool_run_msg = "" + + @_m.method(return_type=_m.UnicodeType) + def tool_run_msg_status(self): + return self.tool_run_msg + + def __init__(self): + self._log = [] + self._error = [] + project_dir = _dir(_m.Modeller().desktop.project.path) + self.source = _join(_dir(project_dir), "input") + self.overwrite = False + self.title = "" + self.data_table_name = "" + self.create_time_periods = True + self.attributes = [ + "source", "scenario_id", "overwrite", "title", "save_data_tables", "data_table_name", "create_time_periods" + ] + + def page(self): + if not self.data_table_name: + try: + load_properties = _m.Modeller().tool('sandag.utilities.properties') + props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) + self.data_table_name = props["scenarioYear"] + except: + pass + + pb = _m.ToolPageBuilder(self) + pb.title = "Import network" + pb.description = """ +

+        Create an Emme network from TNED geodatabase (*.gdb) and associated files.
+        <br>
+        The following layers in the gdb are used:
+        <ul>
+        <li>TNED_HwyNet</li>
+        <li>TNED_HwyNodes</li>
+        <li>TNED_RailNet</li>
+        <li>TNED_RailNodes</li>
+        <li>trrt</li>
+        <li>trlink</li>
+        <li>Turns</li>
+        </ul>
+        The following files are also used (in the same directory as the *.gdb):
+        <ul>
+        <li>trstop.csv</li>
+        <li>mode5tod.csv</li>
+        <li>timexfer_<period>.csv (optional)</li>
+        <li>special_fares.txt (optional)</li>
+        <li>off_peak_toll_factors.csv (optional)</li>
+        <li>vehicle_class_toll_factors.csv (optional)</li>
+        </ul>
+        </div>
      + """ + pb.branding_text = "- SANDAG - Import" + + if self.tool_run_msg != "": + pb.tool_run_status(self.tool_run_msg_status) + + pb.add_select_file("source", window_type="directory", file_filter="", + title="Source gdb:",) + + pb.add_text_box("scenario_id", size=6, title="Scenario ID for imported network:") + pb.add_text_box("title", size=80, title="Scenario title:") + pb.add_checkbox("save_data_tables", title=" ", label="Save reference data tables of file data") + pb.add_text_box("data_table_name", size=80, title="Name for data tables:", + note="Prefix name to use for all saved data tables") + pb.add_checkbox("overwrite", title=" ", label="Overwrite existing scenarios and data tables") + pb.add_checkbox("create_time_periods", title=" ", label="Copy base scenario to all time periods and set modes (required for assignments)") + + return pb.render() + + def run(self): + self.tool_run_msg = "" + try: + self.emmebank = _m.Modeller().emmebank + with self.setup(): + self.execute() + run_msg = "Network import complete" + if self._error: + run_msg += " with %s non-fatal errors. See logbook for details" % len(self._error) + self.tool_run_msg = _m.PageBuilder.format_info(run_msg, escape=False) + except Exception as error: + self.tool_run_msg = _m.PageBuilder.format_exception( + error, _traceback.format_exc()) + raise + + def __call__(self, source, scenario_id, + title="", save_data_tables=False, data_table_name="", overwrite=False, + emmebank=None, create_time_periods=True): + + self.source = source + self.scenario_id = scenario_id + self.title = title + self.save_data_tables = save_data_tables + self.data_table_name = data_table_name + self.overwrite = overwrite + if not emmebank: + self.emmebank = _m.Modeller().emmebank + else: + self.emmebank = emmebank + self.create_time_periods = create_time_periods + + with self.setup(): + self.execute() + + return self.emmebank.scenario(scenario_id) + + @_context + def setup(self): + self._log = [] + self._error = [] + fatal_error = False + attributes = OrderedDict([ + ("self", str(self)), + ("source", self.source), + ("scenario_id", self.scenario_id), + ("title", self.title), + ("save_data_tables", self.save_data_tables), + ("data_table_name", self.data_table_name), + ("overwrite", self.overwrite), + ("create_time_periods", self.create_time_periods) + ]) + self._log = [{ + "content": attributes.items(), + "type": "table", "header": ["name", "value"], + "title": "Tool input values" + }] + with _m.logbook_trace("Import network", attributes=attributes) as trace: + gen_utils.log_snapshot("Import network", str(self), attributes) + load_properties = _m.Modeller().tool('sandag.utilities.properties') + self._props = load_properties(_join(_dir(_dir(self.source)), "conf", "sandag_abm.properties")) + try: + yield + except Exception as error: + self._log.append({"type": "text", "content": error}) + trace_text = _traceback.format_exc().replace("\n", "
      ") + self._log.append({"type": "text", "content": trace_text}) + self._error.append(error) + fatal_error = True + raise + finally: + self._props = None + self.log_report() + self._auto_mode_lookup = None + self._transit_mode_lookup = None + if self._error: + if fatal_error: + trace.write("Import network failed (%s errors)" % len(self._error), attributes=attributes) + else: + trace.write("Import network completed (%s non-fatal errors)" % len(self._error), attributes=attributes) + + def execute(self): + attr_map = { + "NODE": OrderedDict([ + ("HNODE", ("@hnode", "BOTH", "EXTRA", "HNODE label from TNED" )), + ("TAP", ("@tap_id", "BOTH", "EXTRA", "TAP number")), + ("PARK", ("@park", "BOTH", "EXTRA", "parking indicator" )), + ("STOPTYPE", ("@stoptype", "BOTH", "EXTRA", "stop type indicator" )), + ("ELEV", ("@elev", "BOTH", "EXTRA", "station/stop elevation in feet")), + ("interchange", ("@interchange", "DERIVED", "EXTRA", "is interchange node")), + ]), + "LINK": OrderedDict([ + ("HWYCOV0_ID",("@tcov_id", "TWO_WAY", "EXTRA", "SANDAG-assigned link ID")), + ("SPHERE", ("@sphere", "HWY_TWO_WAY", "EXTRA", "Jurisdiction sphere of influence")), + ("HWYSegGUID",("#hwyseg_guid", "TWO_WAY", "STRING", "HWYSegGUID")), + ("NM", ("#name", "TWO_WAY", "STRING", "Street name")), + ("FXNM", ("#name_from", "TWO_WAY", "STRING", "Cross street at the FROM end")), + ("TXNM", ("#name_to", "TWO_WAY", "STRING", "Cross street name at the TO end")), + ("DIR", ("@direction_cardinal", "TWO_WAY", "EXTRA", "Link direction")), + ("ASPD", ("@speed_adjusted", "HWY_TWO_WAY", "EXTRA", "Adjusted link speed (miles/hr)")), + ("YR", ("@year_open_traffic", "HWY_TWO_WAY", "EXTRA", "The year the link opened to traffic")), + ("PROJ", ("@project_code", "HWY_TWO_WAY", "EXTRA", "Project number for use with hwyproj.xls")), + ("FC", ("type", "TWO_WAY", "STANDARD", "")), + ("HOV", ("@hov", "TWO_WAY", "EXTRA", "Link operation type")), + ("MINMODE", ("@minmode", "TWO_WAY", "EXTRA", "Transit mode type")), + ("EATRUCK", ("@truck_ea", "HWY_TWO_WAY", "EXTRA", "Early AM truck restriction code ")), + ("AMTRUCK", ("@truck_am", "HWY_TWO_WAY", "EXTRA", "AM Peak truck restriction code ")), + ("MDTRUCK", ("@truck_md", "HWY_TWO_WAY", "EXTRA", "Mid-day truck restriction code ")), + ("PMTRUCK", ("@truck_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak truck restriction code ")), + ("EVTRUCK", ("@truck_ev", "HWY_TWO_WAY", "EXTRA", "Evening truck restriction code ")), + ("TOLLEA", ("@toll_ea", "HWY_TWO_WAY", "EXTRA", "Early AM toll cost (cent)")), + ("TOLLA", ("@toll_am", "HWY_TWO_WAY", "EXTRA", "AM Peak toll cost (cent)")), + ("TOLLMD", ("@toll_md", "HWY_TWO_WAY", "EXTRA", "Mid-day toll cost (cent)")), + ("TOLLP", ("@toll_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak toll cost (cent)")), + ("TOLLEV", ("@toll_ev", "HWY_TWO_WAY", "EXTRA", "Evening toll cost (cent)")), + + ("SPD", ("@speed_posted", "HWY_TWO_WAY", "EXTRA", "Posted speed limit (mph)")), + ("MED", ("@median", "TWO_WAY", "EXTRA", "Median type")), + ("AU", ("@lane_auxiliary", "HWY_ONE_WAY", "EXTRA", "Number of auxiliary lanes")), + ("CNT", ("@traffic_control", "HWY_ONE_WAY", "EXTRA", "Intersection control type")), + ("TL", ("@turn_thru", "HWY_ONE_WAY", "EXTRA", "Intersection approach through lanes")), + ("RL", ("@turn_right", "HWY_ONE_WAY", "EXTRA", "Intersection approach right-turn lanes")), + ("LL", ("@turn_left", "HWY_ONE_WAY", "EXTRA", "Intersection approach left-turn lanes")), + ("GC", ("@green_to_cycle_init", "HWY_ONE_WAY", "EXTRA", "Initial green-to-cycle ratio")), + ("WAY", ("way", "HWY_TWO_WAY", "INTERNAL", 
"")), + ("TRANSIT_MODES", ("transit_modes", "DERIVED", "INTERNAL", "")), + ("@cost_operating", ("@cost_operating", "DERIVED", "EXTRA", "Fuel and maintenance cost")), + ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), + ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), + + ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes",)), + ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), + + ]), + "TRANSIT_LINE": OrderedDict([ + ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), + ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), + ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), + ("Evening_Headway",("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), + ("EarlyAM_Headway",("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), + ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), + ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), + ("MD_Headway_rev", ("@headway_rev_md", "DERIVED", "EXTRA", "Midday revised headway")), + ("EV_Headway_rev", ("@headway_rev_ev", "DERIVED", "EXTRA", "Evening revised headway")), + ("EA_Headway_rev", ("@headway_rev_ea", "DERIVED", "EXTRA", "Early AM revised headway")), + ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), + ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), + ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), + ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), + ("DWELLTIME", ("default_dwell_time" "MODE5TOD", "INTERNAL", "")), + ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), + ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), + ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), + ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), + ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), + ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), + ]), + "TRANSIT_SEGMENT": OrderedDict([ + ("Stop_ID", ("@stop_id", "TRSTOP", "EXTRA", "Stop ID from trcov")), + ("Pass_Count", ("@pass_count", "TRSTOP", "EXTRA", "Number of times this stop is passed")), + ("Milepost", ("@milepost", "TRSTOP", "EXTRA", "Distance from start of line")), + ("StopName", ("#stop_name", "TRSTOP", "STRING", "Name of stop")), + ("@coaster_fare_board", ("@coaster_fare_board", "DERIVED", "EXTRA", "Boarding fare for coaster")), + ("@coaster_fare_inveh", ("@coaster_fare_inveh", "DERIVED", "EXTRA", "Incremental fare for Coaster")), + ]) + } + + time_name = { + "_ea": "Early AM ", "_am": "AM Peak ", "_md": "Mid-day ", "_pm": "PM Peak ", "_ev": "Evening " + } + time_name_dst = ["_ea", "_am", "_md", "_pm", "_ev"] + time_name_src = ["EA", "A", "MD", "P", "EV"] + time_period_attrs = [ + ("CP", "@capacity_link", "mid-link capacity"), + ("CX", "@capacity_inter", "approach capacity"), + ("CH", "@capacity_hourly", "hourly mid-link capacity"), + ("LN", "@lane", "number of lanes"), + ("TM", "@time_link", "link time in minutes"), + ("TX", "@time_inter", "intersection delay time"), + ] + for src_attr, dst_attr, desc_tmplt in time_period_attrs: + for time_s, time_d in zip(time_name_src, time_name_dst): + attr_map["LINK"][src_attr + 
time_s] = \ + (dst_attr + time_d, "HWY_ONE_WAY", "EXTRA", time_name[time_d] + desc_tmplt) + derived_period_attrs = [ + ("@cost_auto", "toll + cost autos"), + ("@cost_hov2", "toll (non-mngd) + cost HOV2"), + ("@cost_hov3", "toll (non-mngd) + cost HOV3+"), + ("@cost_lgt_truck", "toll + cost light trucks"), + ("@cost_med_truck", "toll + cost medium trucks"), + ("@cost_hvy_truck", "toll + cost heavy trucks"), + ("@cycle", "cycle length (minutes)"), + ("@green_to_cycle", "green to cycle ratio"), + ("@sta_reliability", "static reliability") + ] + for attr, desc_tmplt in derived_period_attrs: + for time in time_name_dst: + attr_map["LINK"][attr + time] = \ + (attr + time, "DERIVED", "EXTRA", time_name[time] + desc_tmplt) + + create_scenario = _m.Modeller().tool( + "inro.emme.data.scenario.create_scenario") + + title = self.title + if not title: + existing_scenario = self.emmebank.scenario(self.scenario_id) + if existing_scenario: + title = existing_scenario.title + + scenario = create_scenario(self.scenario_id, title, overwrite=self.overwrite, emmebank=self.emmebank) + scenarios = [scenario] + if self.create_time_periods: + periods=["EA", "AM", "MD", "PM", "EV"] + period_ids = list(enumerate(periods, start=int(self.scenario_id) + 1)) + for ident, period in period_ids: + scenarios.append(create_scenario(ident, "%s - %s assign" % (title, period), + overwrite=self.overwrite, emmebank=self.emmebank)) + # create attributes in scenario + for elem_type, mapping in attr_map.iteritems(): + for name, _tcoved_type, emme_type, desc in mapping.values(): + if emme_type == "EXTRA": + for s in scenarios: + if not s.extra_attribute(name): + xatt = s.create_extra_attribute(elem_type, name) + xatt.description = desc + elif emme_type == "STRING": + for s in scenarios: + if not s.network_field(elem_type, name): + s.create_network_field(elem_type, name, 'STRING', description=desc) + + log_content = [] + for k, v in mapping.iteritems(): + if v[3] == "DERIVED": + k = "--" + log_content.append([k] + list(v)) + self._log.append({ + "content": log_content, + "type": "table", + "header": ["TNED", "Emme", "Source", "Type", "Description"], + "title": "Network %s attributes" % elem_type.lower().replace("_", " "), + "disclosure": True + }) + + network = _network.Network() + for elem_type, mapping in attr_map.iteritems(): + for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): + if emme_type == "STANDARD": + continue + default = "" if emme_type == "STRING" else 0 + network.create_attribute(elem_type, attr, default) + try: + self.create_modes(network) + self.create_road_base(network, attr_map) + self.create_turns(network) + self.calc_traffic_attributes(network) + self.check_zone_access(network, network.mode("d")) + self.create_rail_base(network, attr_map) + self.create_transit_lines(network, attr_map) + self.calc_transit_attributes(network) + finally: + # TAP connectors included in network, fix type setting and renumber node IDs + for link in network.links(): + if link.type <= 0: + link.type = 99 + self.renumber_base_nodes(network) + scenario.publish_network(network, resolve_attributes=True) + + self.set_functions(scenario) + self.check_connectivity(scenario) + + if "modify_network.py" in os.listdir(os.getcwd()): + try: + with _m.logbook_trace("Modify network script"): + import modify_network + reload(modify_network) + modify_network.run(base_scenario) + except ImportError as e: + pass + network = base_scenario.get_network() + network.create_attribute("LINK", "transit_modes") + + if self.create_time_periods: + 
for link in network.links(): + link.transit_modes = link.modes + for ident, period in period_ids: + self.set_auto_modes(network, period) + scenario = self.emmebank.scenario(ident) + scenario.publish_network(network, resolve_attributes=True) + + def create_modes(self, network): + # combined traffic and transit mode creation + mode_table = { + "AUTO": [("d", "dummy auto")], + "AUX_AUTO": [ + ("h", "SOV"), + ("H", "HOV2"), + ("i", "HOV3+"), + ("I", "TRKL"), + ("s", "TRKM"), + ("S", "TRKH"), + ("v", "SOV TOLL"), + ("V", "HOV2 TOLL"), + ("m", "HOV3+ TOLL"), + ("M", "TRKL TOLL"), + ("t", "TRKM TOLL"), + ("T", "TRKH TOLL"), + ], + "TRANSIT": [ + ("b", "BUS" ), # (vehicle type 100, PCE=3.0) + ("e", "EXP BUS"), # (vehicle type 90 , PCE=3.0) + ("p", "LTDEXP BUS"), # (vehicle type 80 , PCE=3.0) + ("l", "LRT"), # (vehicle type 50) + ("y", "BRT YEL"), # (vehicle type 60 , PCE=3.0) + ("r", "BRT RED"), # (vehicle type 70 , PCE=3.0) + ("c", "CMR"), # (vehicle type 40) + ("o", "TIER1"), # (vehicle type 45) + ], + "AUX_TRANSIT": [ + ("a", "ACCESS", 3), + ("x", "TRANSFER", 3), + ("w", "WALK", 3), + ("u", "ACCESS_WLK", 3), + ("k", "EGRESS_WLK", 3), + ("f", "ACCESS_PNR", 25), + ("g", "EGRESS_PNR", 25), + ("q", "ACCESS_KNR", 25), + ("j", "EGRESS_KNR", 25), + ("Q", "ACCESS_TNC", 25), + ("J", "EGRESS_TNC", 25), + ], + } + for mode_type, modes in mode_table.iteritems(): + for mode_info in modes: + mode = network.create_mode(mode_type, mode_info[0]) + mode.description = mode_info[1] + if len(mode_info) == 3: + mode.speed = mode_info[2] + self._transit_mode_lookup = { + 0: set([]), + 1: set([network.mode(m_id) for m_id in "x"]), # 1 = special transfer walk links between certain nearby stops + 2: set([network.mode(m_id) for m_id in "w"]), # 2 = walk links in the downtown area + 3: set([network.mode(m_id) for m_id in "a"]), # 3 = the special TAP connectors + 400: set([network.mode(m_id) for m_id in "c"]), # 4 = Coaster Rail Line + 500: set([network.mode(m_id) for m_id in "l"]), # 5 = Trolley & Light Rail Transit (LRT) + 600: set([network.mode(m_id) for m_id in "bpeyr"]), # 6 = Yellow Car Bus Rapid Transit (BRT) + 700: set([network.mode(m_id) for m_id in "bpeyr"]), # 7 = Red Car Bus Rapid Transit (BRT) + 800: set([network.mode(m_id) for m_id in "bpe"]), # 8 = Limited Express Bus + 900: set([network.mode(m_id) for m_id in "bpe"]), # 9 = Express Bus + 1000: set([network.mode(m_id) for m_id in "bpe"]), # 10 = Local Bus + 11: set([network.mode(m_id) for m_id in "u"]), # = access walk links + 12: set([network.mode(m_id) for m_id in "k"]), # = egress walk links + 13: set([network.mode(m_id) for m_id in "f"]), # = access PNR links + 14: set([network.mode(m_id) for m_id in "g"]), # = egress PNR links + 15: set([network.mode(m_id) for m_id in "q"]), # = access KNR links + 16: set([network.mode(m_id) for m_id in "j"]), # = egress KNR links + 17: set([network.mode(m_id) for m_id in "Q"]), # = access TNC links + 18: set([network.mode(m_id) for m_id in "J"]), # = egress TNC links + } + modes_gp_lanes = { + 0: set([]), + 1: set([network.mode(m_id) for m_id in "dhHiIsSvVmMtT"]), # all modes + 2: set([network.mode(m_id) for m_id in "dhHiIsvVmMt"]), # no heavy truck + 3: set([network.mode(m_id) for m_id in "dhHiIvVmM"]), # no heavy or medium truck + 4: set([network.mode(m_id) for m_id in "dhHivVm"]), # no truck + 5: set([network.mode(m_id) for m_id in "dST"]), # only heavy trucks + 6: set([network.mode(m_id) for m_id in "dsStT"]), # heavy and medium trucks + 7: set([network.mode(m_id) for m_id in "dIsSMtT"]), # all trucks only + } + 
non_toll_modes = set([network.mode(m_id) for m_id in "hHiIsS"]) + self._auto_mode_lookup = { + "GP": modes_gp_lanes, + "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), + "HOV2": set([network.mode(m_id) for m_id in "dHiVm"]), + "HOV3": set([network.mode(m_id) for m_id in "dim"]), + } + + def set_auto_modes(self, network, period): + # time periods + # need to update the modes from the XTRUCK for their time of day + # Note: only truck types 1, 3, 4, and 7 found in 2012 base network + truck = "@truck_%s" % period.lower() + toll = "@toll_%s" % period.lower() + lookup = self._auto_mode_lookup + for link in network.links(): + auto_modes = set([]) + if link.type == 10: # connector + auto_modes = lookup["GP"][link[truck]] + elif link.type in [11, 12]: + pass # no auto modes, rail only (11) or bus only (12) + elif link["@hov"] == 1: + auto_modes = lookup["GP"][link[truck]] + elif link["@hov"] in [2, 3]: + # managed lanes, free for HOV2 and HOV3+, tolls for SOV + if link[toll] > 0: + auto_modes = lookup["TOLL"][link[truck]] + # special case of I-15 managed lanes base year and 2020, no build + elif link.type == 1 and link["@project_code"] in [41, 42, 486, 373, 711]: + auto_modes = lookup["TOLL"][link[truck]] + elif link.type == 8 or link.type == 9: + auto_modes = lookup["TOLL"][link[truck]] + if link["@hov"] == 2: + auto_modes = auto_modes | lookup["HOV2"] + else: + auto_modes = auto_modes | lookup["HOV3"] + elif link["@hov"] == 4: + auto_modes = lookup["TOLL"][link[truck]] + link.modes = link.transit_modes | auto_modes + + def create_road_base(self, network, attr_map): + self._log.append({"type": "header", "content": "Import roadway base network from TNED_HwyNet %s" % self.source}) + hwy_data = gen_utils.DataTableProc("TNED_HwyNet", self.source) + # TEMP workaround: BN field is string + bn_index = hwy_data._attr_names.index("BN") + hwy_data._values[bn_index] = hwy_data._values[bn_index].astype(int) + + if self.save_data_tables: + hwy_data.save("%s_TNED_HwyNet" % self.data_table_name, self.overwrite) + + is_centroid = lambda arc, node : (arc["FC"] == 10) and (node == "AN") + link_attr_map = {} + for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): + if tcoved_type in ("TWO_WAY", "HWY_TWO_WAY", "ONE_WAY", "HWY_ONE_WAY"): + link_attr_map[field] = (name, tcoved_type.replace("HWY_", ""), emme_type, desc) + + def define_modes(arc): + if arc["FC"] in [11, 12] or arc["ABLNA"] == 0: #or ((arc["HOV"] < 1 or arc["HOV"] > 4) and arc["FC"] != 10): + vehicle_index = int(arc["MINMODE"] / 100)*100 + aux_index = int(arc["MINMODE"] % 100) + return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] + return [network.mode('d')] + + self._create_base_net( + hwy_data, network, mode_callback=define_modes, centroid_callback=is_centroid, link_attr_map=link_attr_map) + + hwy_node_data = gen_utils.DataTableProc("TNED_HwyNodes", self.source) + node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() + if v[1] in ("BOTH", "HWY")] + for record in hwy_node_data: + node = network.node(record["HNODE"]) + if node: + for src, dst in node_attrs: + node[dst] = record[src] + else: + self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) + self._log.append({"type": "text", "content": "Import traffic base network complete"}) + + def create_rail_base(self, network, attr_map): + self._log.append({"type": "header", "content": "Import rail base network from TNED_RailNet %s" % self.source}) + transit_data = 
gen_utils.DataTableProc("TNED_RailNet", self.source) + + if self.save_data_tables: + transit_data.save("%s_TNED_RailNet" % self.data_table_name, self.overwrite) + + link_attr_map = {} + for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): + if tcoved_type in ("TWO_WAY", "RAIL_TWO_WAY", "ONE_WAY", "RAIL_ONE_WAY"): + link_attr_map[field] = (name, tcoved_type.replace("RAIL_", ""), emme_type, desc) + + tier1_modes = set([network.mode(m_id) for m_id in "o"]) + tier1_rail_link_name = self._props["transit.newMode"] + + def define_modes(arc): + if arc["NM"] == tier1_rail_link_name: + return tier1_modes + vehicle_index = int(arc["MINMODE"] / 100)*100 + aux_index = int(arc["MINMODE"] % 100) + return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] + + self._create_base_net( + transit_data, network, mode_callback=define_modes, link_attr_map=link_attr_map) + + transit_node_data = gen_utils.DataTableProc("TNED_RailNodes", self.source) + # Load PARK, elevation, stop type data onto transit nodes + node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() + if v[1] in ("BOTH", "RAIL")] + for record in transit_node_data: + node = network.node(record["HNODE"]) + if node: + for src, dst in node_attrs: + node[dst] = record[src] + else: + self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) + + self._log.append({"type": "text", "content": "Import transit base network complete"}) + + def _create_base_net(self, data, network, link_attr_map, mode_callback, centroid_callback=None): + forward_attr_map = {} + reverse_attr_map = {} + arc_id_name = "HWYCOV0_ID" + arc_guid_name = "HWYSegGUID" + for field, (name, tcoved_type, emme_type, desc) in link_attr_map.iteritems(): + if field in [arc_id_name, arc_guid_name, "DIR"]: + # these attributes are special cases for reverse link + forward_attr_map[field] = name + elif tcoved_type in "TWO_WAY": + forward_attr_map[field] = name + reverse_attr_map[field] = name + elif tcoved_type in "ONE_WAY": + forward_attr_map["AB" + field] = name + reverse_attr_map["BA" + field] = name + + emme_id_name = forward_attr_map[arc_id_name] + emme_guid_name = forward_attr_map[arc_guid_name] + dir_name = forward_attr_map["DIR"] + reverse_dir_map = {1: 3, 3: 1, 2: 4, 4: 2, 0: 0} + new_node_id = max(data.values("AN").max(), data.values("BN").max()) + 1 + + if centroid_callback is None: + centroid_callback = lambda a,n: False + + # Create nodes and links + for arc in data: + if float(arc["AN"]) == 0 or float(arc["BN"]) == 0: + self._log.append({"type": "text", + "content": "Node ID 0 in AN (%s) or BN (%s) for link GUID/ID %s/%s." % + (arc["AN"], arc["BN"], arc[arc_guid_name], arc[arc_id_name])}) + continue + coordinates = arc["geo_coordinates"] + i_node = get_node(network, arc['AN'], coordinates[0], centroid_callback(arc, "AN")) + j_node = get_node(network, arc['BN'], coordinates[-1], centroid_callback(arc, "BN")) + link = network.link(i_node, j_node) + if link: + msg = "Duplicate link between AN %s and BN %s. Link GUID/IDs %s/%s and %s/%s." % \ + (arc["AN"], arc["BN"], link[emme_guid_name], link[emme_id_name], arc[arc_guid_name], arc[arc_id_name]) + self._log.append({"type": "text", "content": msg}) + if link[emme_guid_name] == arc[arc_guid_name]: + self._log.append({"type": "text", "content": "... 
but GUIDs match (not an error)"}) + else: + self._error.append(msg) + else: + modes = mode_callback(arc) + link = network.create_link(i_node, j_node, modes) + link.length = arc["LENGTH"] + if len(coordinates) > 2: + link.vertices = coordinates[1:-1] + for field, attr in forward_attr_map.iteritems(): + link[attr] = arc[field] + if arc["WAY"] == 2 or arc["WAY"] == 0: + reverse_link = network.link(j_node, i_node) + if not reverse_link: + reverse_link = network.create_link(j_node, i_node, modes) + reverse_link.length = link.length + reverse_link.vertices = list(reversed(link.vertices)) + for field, attr in reverse_attr_map.iteritems(): + reverse_link[attr] = arc[field] + reverse_link[emme_id_name] = -1*arc[arc_id_name] + reverse_link[emme_guid_name] = "-" + arc[arc_guid_name] + reverse_link[dir_name] = reverse_dir_map[arc["DIR"]] + + def create_transit_lines(self, network, attr_map): + self._log.append({"type": "header", "content": "Import transit lines"}) + fatal_errors = 0 + # Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,Midday_Headway,Evening_Headway,EarlyAM_Headway,Night_Headway,Night_Hours,Config,Fare + transit_line_data = gen_utils.DataTableProc("trrt", self.source) + # Route_ID,Link_ID,Link_GUID,Direction + transit_link_data = gen_utils.DataTableProc("trlink", self.source) + # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,StopName + #transit_stop_data = gen_utils.DataTableProc("trstop", self.source) + transit_stop_data = gen_utils.DataTableProc("trstop", _join(_dir(self.source), "trstop.csv")) + # From_line,To_line,Board_stop,Wait_time + # Note: Board_stop is not used + # Timed xfer data + periods = ['EA', 'AM', 'MD', 'PM', 'EV'] + timed_xfer_data = {} + for period in periods: + file_path = _join(_dir(self.source), FILE_NAMES["TIMEXFER"] % period) + if os.path.exists(file_path): + timed_xfer_data[period] = gen_utils.DataTableProc("timexfer_"+period, file_path) + else: + timed_xfer_data[period] = [] + + mode_properties = gen_utils.DataTableProc("MODE5TOD", _join(_dir(self.source), FILE_NAMES["MODE5TOD"]), convert_numeric=True) + mode_details = {} + for record in mode_properties: + mode_details[int(record["MODE_ID"])] = record + + if self.save_data_tables: + transit_link_data.save("%s_trlink" % self.data_table_name, self.overwrite) + transit_line_data.save("%s_trrt" % self.data_table_name, self.overwrite) + transit_stop_data.save("%s_trstop" % self.data_table_name, self.overwrite) + mode_properties.save("%s_MODE5TOD" % self.data_table_name, self.overwrite) + + coaster = network.create_transit_vehicle(40, 'c') # 4 coaster + trolley = network.create_transit_vehicle(50, 'l') # 5 sprinter/trolley + brt_yellow = network.create_transit_vehicle(60, 'y') # 6 BRT yellow line (future line) + brt_red = network.create_transit_vehicle(70, 'r') # 7 BRT red line (future line) + premium_bus = network.create_transit_vehicle(80, 'p') # 8 prem express + express_bus = network.create_transit_vehicle(90, 'e') # 9 regular express + local_bus = network.create_transit_vehicle(100, 'b') # 10 local bus + tier1 = network.create_transit_vehicle(45, 'o') # 11 Tier 1 + + brt_yellow.auto_equivalent = 3.0 + brt_red.auto_equivalent = 3.0 + premium_bus.auto_equivalent = 3.0 + express_bus.auto_equivalent = 3.0 + local_bus.auto_equivalent = 3.0 + + # Capacities - for reference / post-assignment analysis + tier1.seated_capacity, tier1.total_capacity = 7 * 142, 7 * 276 + trolley.seated_capacity, trolley.total_capacity = 4 * 64, 4 * 200 + brt_yellow.seated_capacity, brt_yellow.total_capacity 
= 32, 70 + brt_red.seated_capacity, brt_red.total_capacity = 32, 70 + premium_bus.seated_capacity, premium_bus.total_capacity = 32, 70 + express_bus.seated_capacity, express_bus.total_capacity = 32, 70 + local_bus.seated_capacity, local_bus.total_capacity = 32, 70 + + trrt_attrs = [] + mode5tod_attrs = [] + for elem_type in "TRANSIT_LINE", "TRANSIT_SEGMENT": + mapping = attr_map[elem_type] + for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): + if tcoved_type == "TRRT": + trrt_attrs.append((field, attr)) + elif tcoved_type == "MODE5TOD": + mode5tod_attrs.append((field, attr)) + network.create_attribute("TRANSIT_SEGMENT", "milepost") + + # Pre-process transit line (trrt) to know the route names for errors / warnings + transit_line_records = list(transit_line_data) + line_names = {} + for record in transit_line_records: + line_names[int(record["Route_ID"])] = str(record["Route_Name"]) + + links = dict((link["#hwyseg_guid"], link) for link in network.links()) + transit_routes = _defaultdict(lambda: []) + for record in transit_link_data: + line_ref = line_names.get(int(record["Route_ID"]), record["Route_ID"]) + link_id = record["Link_GUID"] + if "-" in record["Direction"]: + link_id = "-" + link_id + link = links.get(link_id) + if not link: + if "-" in record["Direction"]: + reverse_link = links.get("-" + link_id) + else: + reverse_link = links.get(link_id[1:]) + if reverse_link: + link = network.create_link(reverse_link.j_node, reverse_link.i_node, reverse_link.modes) + link.vertices = list(reversed(reverse_link.vertices)) + for attr in network.attributes("LINK"): + if attr not in set(["vertices"]): + link[attr] = reverse_link[attr] + link["@tcov_id"] = -1 * reverse_link["@tcov_id"] + link["#hwyseg_guid"] = link_id + links[link_id] = link + msg = "Transit line %s : Missing reverse link with ID %s (%s) (reverse link created)" % ( + line_ref, record["Link_GUID"], link) + self._log.append({"type": "text", "content": msg}) + self._error.append("Transit route import: " + msg) + link = reverse_link + if not link: + msg = "Transit line %s : No link with GUID %s, routing may not be correct" % ( + line_ref, record["Link_GUID"]) + self._log.append({"type": "text", "content": msg}) + self._error.append("Transit route import: " + msg) + fatal_errors += 1 + continue + + transit_routes[int(record["Route_ID"])].append(link) + + # lookup list of special tier 1 mode route names + tier1_rail_route_names = [str(n) for n in self._props["transit.newMode.route"]] + dummy_links = set([]) + transit_lines = {} + auto_mode = network.mode("d") + for record in transit_line_records: + try: + route = transit_routes[int(record["Route_ID"])] + # Find if name matches one of the names listed in transit.newMode.route and convert to tier 1 rail + is_tier1_rail = False + for name in tier1_rail_route_names: + if str(record["Route_Name"]).startswith(name): + is_tier1_rail = True + break + if is_tier1_rail: + vehicle_type = 45 + mode = network.transit_vehicle(vehicle_type).mode + else: + vehicle_type = int(record["Mode"]) * 10 + mode = network.transit_vehicle(vehicle_type).mode + prev_link = route[0] + itinerary = [prev_link] + for link in route[1:]: + if prev_link.j_node != link.i_node: # filling in the missing gap + msg = "Transit line %s (index %s): Links not adjacent, shortest path interpolation used (%s and %s)" % ( + record["Route_Name"], record["Route_ID"], prev_link["#hwyseg_guid"], link["#hwyseg_guid"]) + log_record = {"type": "text", "content": msg} + self._log.append(log_record) + sub_path = 
find_path(prev_link, link, mode) + itinerary.extend(sub_path) + log_record["content"] = log_record["content"] + " through %s links" % (len(sub_path)) + itinerary.append(link) + prev_link = link + + node_itinerary = [itinerary[0].i_node] + [l.j_node for l in itinerary] + missing_mode = 0 + for link in itinerary: + if mode not in link.modes: + link.modes |= set([mode]) + missing_mode += 1 + if missing_mode: + msg = "Transit line %s (index %s): missing mode added to %s link(s)" % ( + str(record["Route_Name"]), record["Route_ID"], missing_mode) + self._log.append({"type": "text", "content": msg}) + tline = network.create_transit_line( + str(record["Route_Name"]), vehicle_type, node_itinerary) + + for field, attr in trrt_attrs: + tline[attr] = float(record[field]) + if is_tier1_rail: + line_details = mode_details[11] + else: + line_details = mode_details[int(record["Mode"])] + for field, attr in mode5tod_attrs: + tline[attr] = float(line_details[field]) + #"XFERPENTM": "Transfer penalty time: " + #"WTXFERTM": "Transfer perception:" + # NOTE: an additional transfer penalty perception factor of 5.0 is included + # in assignment + tline["@transfer_penalty"] = float(line_details["XFERPENTM"]) * float(line_details["WTXFERTM"]) + tline.headway = tline["@headway_am"] if tline["@headway_am"] > 0 else 999 + tline.layover_time = 5 + + transit_lines[int(record["Route_ID"])] = tline + milepost = 0 + for segment in tline.segments(): + segment.milepost = milepost + milepost += segment.link.length + segment.allow_boardings = False + segment.allow_alightings = False + if auto_mode in segment.link.modes: + # segments on links with auto mode are ft1 = timau + segment.transit_time_func = 1 + else: + # ft2 = ul2 -> copied @trtime (fixed speed) + segment.transit_time_func = 2 + except Exception as error: + msg = "Transit line %s: %s %s" % (record["Route_Name"], type(error), error) + self._log.append({"type": "text", "content": msg}) + trace_text = _traceback.format_exc().replace("\n", "
      ") + self._log.append({"type": "text", "content": trace_text}) + self._error.append("Transit route import: line %s not created" % record["Route_Name"]) + fatal_errors += 1 + for link in dummy_links: + network.delete_link(link.i_node, link.j_node) + + line_stops = _defaultdict(lambda: []) + for record in transit_stop_data: + try: + line_name = line_names[int(record["Route_ID"])] + line_stops[line_name].append(record) + except KeyError: + self._log.append( + {"type": "text", + "content": "Stop %s: could not find transit line by ID %s (link GUID %s)" % ( + record["Stop_ID"], record["Route_ID"], record["Link_GUID"])}) + for stops in line_stops.itervalues(): + stops.sort(key=lambda stop: float(stop["Milepost"])) + + seg_float_attr_map = [] + seg_string_attr_map = [] + for field, (attr, t_type, e_type, desc) in attr_map["TRANSIT_SEGMENT"].iteritems(): + if t_type == "TRSTOP": + if e_type == "STRING": + seg_string_attr_map.append([field, attr]) + else: + seg_float_attr_map.append([field, attr]) + + for line_name, stops in line_stops.iteritems(): + tline = network.transit_line(line_name) + if not tline: + continue + itinerary = tline.segments(include_hidden=True) + segment = prev_segment = itinerary.next() + for stop in stops: + if "DUMMY" in stop["StopName"]: + continue + stop_link_id = stop['Link_GUID'] + node_id = int(stop['Node']) + while segment.link and segment.link["#hwyseg_guid"].lstrip("-") != stop_link_id: + segment = itinerary.next() + + if node_id == segment.i_node.number: + pass + elif segment.j_node and node_id == segment.j_node.number: + # if matches the J-node then the stop is on the next segment + segment = itinerary.next() + else: + if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: + msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( + line_name, stop["Route_ID"], segment, stop_link_id, node_id) + else: + msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( + line_name, stop["Route_ID"], stop_link_id, node_id) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + fatal_errors += 1 + # reset iterator to start back from previous segment + itinerary = tline.segments(include_hidden=True) + segment = itinerary.next() + while segment.id != prev_segment.id: + segment = itinerary.next() + continue + segment.allow_boardings = True + segment.allow_alightings = True + segment.dwell_time = min(tline.default_dwell_time, 99.99) + for field, attr in seg_string_attr_map: + segment[attr] = stop[field] + for field, attr in seg_float_attr_map: + segment[attr] = float(stop[field]) + prev_segment = segment + + def lookup_line(ident): + line = network.transit_line(ident) + if line: + return line.id + line = transit_lines.get(int(ident)) + if line: + return line.id + raise Exception("'%s' is not a route name or route ID" % ident) + + # Normalizing the case of the headers as different examples have been seen + for period, data in timed_xfer_data.iteritems(): + norm_data = [] + for record in data: + norm_record = {} + for key, val in record.iteritems(): + norm_record[key.lower()] = val + norm_data.append(norm_record) + + from_line, to_line, wait_time = [], [], [] + for i, record in enumerate(norm_data, start=2): + try: + from_line.append(lookup_line(record["from_line"])) + to_line.append(lookup_line(record["to_line"])) + wait_time.append(float(record["wait_time"])) + except Exception as error: + msg = "Error processing timexfer_%s.csv on file line %s: %s" % 
(period, i, error) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + fatal_errors += 1 + + timed_xfer = _dt.Data() + timed_xfer.add_attribute(_dt.Attribute("from_line", _np.array(from_line).astype("O"))) + timed_xfer.add_attribute(_dt.Attribute("to_line", _np.array(to_line).astype("O"))) + timed_xfer.add_attribute(_dt.Attribute("wait_time", _np.array(wait_time))) + # Creates and saves the new table + gen_utils.DataTableProc("%s_timed_xfer_%s" % (self.data_table_name, period), data=timed_xfer) + + if fatal_errors > 0: + raise Exception("Import of transit lines: %s fatal errors found" % fatal_errors) + self._log.append({"type": "text", "content": "Import transit lines complete"}) + + def calc_transit_attributes(self, network): + # for link in network.links(): + # if link.type == 0: # walk only links have FC ==0 + # link.type = 99 + + fares_file_name = FILE_NAMES["FARES"] + special_fare_path = _join(self.source, fares_file_name) + if not os.path.isfile(special_fare_path): + return + + # ON TRANSIT LINES + # Set 3-period headway based on revised headway calculation + for line in network.transit_lines(): + for period in ["ea", "am", "md", "pm", "ev"]: + line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) + + def get_line(line_id): + line = network.transit_line(line_id) + if line is None: + raise Exception("%s: line does not exist: %s" % (fares_file_name, line_id)) + return line + + # Special incremental boarding and in-vehicle fares + # to recreate the coaster zone fares + self._log.append({"type": "header", "content": "Apply special_fares to transit lines"}) + with open(special_fare_path) as fare_file: + self._log.append({"type": "text", "content": "Using fare details (for coaster) from %s" % fares_file_name}) + special_fares = None + yaml_installed = True + try: + import yaml + special_fares = yaml.load(fare_file) + self._log.append({"type": "text", "content": yaml.dump(special_fares).replace("\n", "
      ")}) + except ImportError: + yaml_installed = False + except: + pass + if special_fares is None: + try: + import json + special_fares = json.load(fare_file) + self._log.append({"type": "text", "content": json.dumps(special_fares, indent=4).replace("\n", "
      ")}) + except: + pass + if special_fares is None: + msg = "YAML or JSON" if yaml_installed else "JSON (YAML parser not installed)" + raise Exception(fares_file_name + ": file could not be parsed as " + msg) + + + for record in special_fares["boarding_cost"]["base"]: + line = get_line(record["line"]) + line["@fare"] = 0 + for seg in line.segments(): + seg["@coaster_fare_board"] = record["cost"] + for record in special_fares["boarding_cost"].get("stop_increment", []): + line = get_line(record["line"]) + for seg in line.segments(True): + if record["stop"] in seg["#stop_name"]: + seg["@coaster_fare_board"] += record["cost"] + break + for record in special_fares["in_vehicle_cost"]: + line = get_line(record["line"]) + for seg in line.segments(True): + if record["from"] in seg["#stop_name"]: + seg["@coaster_fare_inveh"] = record["cost"] + break + pass_cost_keys = ['day_pass', 'regional_pass'] + pass_costs = [] + for key in pass_cost_keys: + cost = special_fares.get(key) + if cost is None: + raise Exception("key '%s' missing from %s" % (key, fares_file_name)) + pass_costs.append(cost) + pass_values = _dt.Data() + pass_values.add_attribute(_dt.Attribute("pass_type", _np.array(pass_cost_keys).astype("O"))) + pass_values.add_attribute(_dt.Attribute("cost", _np.array(pass_costs).astype("f8"))) + gen_utils.DataTableProc("%s_transit_passes" % self.data_table_name, data=pass_values) + self._log.append({"type": "text", "content": "Apply special_fares to transit lines complete"}) + + def renumber_base_nodes(self, network): + tracker = gen_utils.AvailableNodeIDTracker(network) + nodes = [n for n in network.nodes() if n.number > 999999] + nodes = sorted(nodes, key=lambda x: x.number, reverse=True) + if nodes: + self._log.append({"type": "text", "content": "Renumbered %s nodes" % len(nodes)}) + for n in nodes: + old_number = n.number + n.number = tracker.get_id() + self._log.append({"type": "text", "content": " - renumbered %s to %s " % (old_number, n.number)}) + + def create_turns(self, network): + self._log.append({"type": "header", "content": "Import turns and turn restrictions"}) + self._log.append({"type": "text", "content": "Process turns for turn prohibited by ID"}) + turn_data = gen_utils.DataTableProc("Turns", self.source) + if self.save_data_tables: + turn_data.save("%s_turns" % self.data_table_name, self.overwrite) + # Process turns.csv for prohibited turns penalty + for i, record in enumerate(turn_data): + from_node_id, to_node_id, at_node_id = record["FromNode"], record["ToNode"], record["MidNode"] + at_node = network.node(at_node_id) + if at_node and not at_node.is_intersection: + try: + network.create_intersection(at_node) + except Exception as error: + text = ("record %s turn from %s, at %s, to %s: cannot create intersection" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + trace_text = _traceback.format_exc().replace("\n", "
      ") + self._log.append({"type": "text", "content": trace_text}) + self._error.append(text) + continue + turn = network.turn(from_node_id, at_node_id, to_node_id) + if at_node is None: + text = ("record %s turn from %s, at %s, to %s: at node does not exist" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + self._error.append(text) + elif turn is None: + text = ("record %s turn from %s, at %s, to %s: does not form a turn" % + (i, from_node_id, at_node_id, to_node_id)) + self._log.append({"type": "text", "content": text}) + self._error.append(text) + else: + turn.penalty_func = 0 # prohibit turn + # NOTE: could support penalty value + # turn.penalty_func = 1 + # turn.data1 = float(record["penalty"]) + self._log.append({"type": "text", "content": "Import turns and turn prohibitions complete"}) + + def calc_traffic_attributes(self, network): + self._log.append({"type": "header", "content": "Calculate derived traffic attributes"}) + # "COST": "@cost_operating" + # "ITOLL": "@toll_flag" # ITOLL - Toll + 100 *[0,1] if managed lane (I-15 tolls) + # Note: toll_flag is no longer used + # "ITOLL2": "@toll" # ITOLL2 - Toll + # "ITOLL3": "@cost_auto" # ITOLL3 - Toll + AOC + # "@cost_hov" + # "ITOLL4": "@cost_med_truck" # ITOLL4 - Toll * 1.03 + AOC + # "ITOLL5": "@cost_hvy_truck" # ITOLL5 - Toll * 2.33 + AOC + fatal_errors = 0 + try: + aoc = float(self._props["aoc.fuel"]) + float(self._props["aoc.maintenance"]) + except ValueError: + raise Exception("Error during float conversion for aoc.fuel or aoc.maintenance from sandag_abm.properties file") + scenario_year = int(self._props["scenarioYear"]) + periods = ["EA", "AM", "MD", "PM", "EV"] + time_periods = ["_ea", "_am", "_md", "_pm", "_ev"] + src_time_periods = ["_op", "_am", "_op", "_pm", "_op"] + mode_d = network.mode('d') + + # Calculate upstream and downstream interchange distance + # First, label the intersection nodes as nodes with type 1 links (freeway) and + # type 8 links (freeway-to-freeway ramp) + network.create_attribute("NODE", "is_interchange") + interchange_points = [] + for node in network.nodes(): + adj_links = list(node.incoming_links()) + list(node.outgoing_links()) + has_freeway_links = bool( + [l for l in adj_links + if l.type == 1 and mode_d in l.modes]) + has_ramp_links = bool( + [l for l in adj_links + if l.type == 8 and mode_d in l.modes and not "HOV" in l["#name"]]) + if has_freeway_links and has_ramp_links: + node.is_interchange = True + interchange_points.append(node) + else: + node.is_interchange = False + for node in network.nodes(): + node["@interchange"] = node.is_interchange + + for link in network.links(): + if link.type == 1 and mode_d in link.modes: + link["@intdist_down"] = interchange_distance(link, "DOWNSTREAM") + link["@intdist_up"] = interchange_distance(link, "UPSTREAM") + self._log.append({"type": "text", "content": "Calculate of nearest interchange distance complete"}) + + # Static reliability parameters + # freeway coefficients + freeway_rel = { + "intercept": 0.1078, + "speed>70": 0.01393, + "upstream": 0.011, + "downstream": 0.0005445, + } + # arterial/ramp/other coefficients + road_rel = { + "intercept": 0.0546552, + "lanes": { + 1: 0.0, + 2: 0.0103589, + 3: 0.0361211, + 4: 0.0446958, + 5: 0.0 + }, + "speed": { + "<35": 0, + 35: 0.0075674, + 40: 0.0091012, + 45: 0.0080996, + 50: -0.0022938, + ">50": -0.0046211 + }, + "control": { + 0: 0, # Uncontrolled + 1: 0.0030973, # Signal + 2: -0.0063281, # Stop + 3: -0.0063281, # Stop + 4: 0.0127692, # Other, 
Railway, etc. + } + } + for link in network.links(): + # Change SR125 toll speed to 70MPH + if link["@hov"] == 4 and link.type == 1: + link["@speed_posted"] = 70 + link["@cost_operating"] = link.length * aoc + for time in time_periods: + # add link delay (30 sec=0.5mins) to HOV connectors to discourage travel + if link.type == 8 and (link["@hov"] == 2 or link["@hov"] == 3): + link["@time_link" + time] = link["@time_link" + time] + 0.375 + + # make speed on HOV lanes (70mph) the same as parallel GP lanes (65mph) + # - set speed back to posted speed - increase travel time by (speed_adj/speed_posted) + if link.type == 1 and (link["@hov"] == 2 or link["@hov"] == 3): + speed_adj = link["@speed_adjusted"] + speed_posted = link["@speed_posted"] + if speed_adj>0: + link["@time_link" + time] = (speed_adj/(speed_posted*1.0)) * link["@time_link" + time] + + # Required file + vehicle_class_factor_file = FILE_NAMES["VEHICLE_CLASS"] + facility_factors = _defaultdict(lambda: {}) + facility_factors["DEFAULT_FACTORS"] = { + "ALL": { + "auto": 1.0, + "hov2": 1.0, + "hov3": 1.0, + "lgt_truck": 1.0, + "med_truck": 1.03, + "hvy_truck": 2.03 + }, + "count": 0 + } + if os.path.exists(_join(self.source, vehicle_class_factor_file)): + msg = "Adjusting tolls based on factors from %s" % vehicle_class_factor_file + self._log.append({"type": "text", "content": msg}) + # NOTE: CSV Reader sets the field names to UPPERCASE for consistency + with gen_utils.CSVReader(_join(self.source, vehicle_class_factor_file)) as r: + for row in r: + if "YEAR" in r.fields and int(row["YEAR"]) != scenario_year: # optional year column + continue + name = row["FACILITY_NAME"] + # optional time-of-day entry, default to ALL if no column or blank + fac_time = row.get("TIME_OF_DAY") + if fac_time is None: + fac_time = "ALL" + facility_factors[name][fac_time] = { + "auto": float(row["DA_FACTOR"]), + "hov2": float(row["S2_FACTOR"]), + "hov3": float(row["S3_FACTOR"]), + "lgt_truck": float(row["TRK_L_FACTOR"]), + "med_truck": float(row["TRK_M_FACTOR"]), + "hvy_truck": float(row["TRK_H_FACTOR"]) + } + facility_factors[name]["count"] = 0 + + # validate ToD entry, either list EA, AM, MD, PM and EV, or ALL, but not both + for name, factors in facility_factors.iteritems(): + # default keys should be "ALL" and "count" + if "ALL" in factors: + if len(factors) > 2: + fatal_errors += 1 + msg = ("Individual time periods and 'ALL' (or blank) listed under " + "TIME_OF_DAY column in {} for facility {}").format(vehicle_class_factor_file, name) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + elif set(periods + ["count"]) != set(factors.keys()): + fatal_errors += 1 + msg = ("Missing time periods {} under TIME_OF_DAY column in {} for facility {}").format( + (set(periods) - set(factors.keys())), vehicle_class_factor_file, name) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + + def lookup_link_name(link): + for attr_name in ["#name", "#name_from", "#name_to"]: + for name, _factors in facility_factors.iteritems(): + if name in link[attr_name]: + return _factors + return facility_factors["DEFAULT_FACTORS"] + + def match_facility_factors(link): + factors = lookup_link_name(link) + factors["count"] += 1 + factors = _copy(factors) + del factors["count"] + # @hov = 2 or 3 overrides hov2 and hov3 costs + if link["@hov"] == 2: + for _, time_factors in factors.iteritems(): + time_factors["hov2"] = 0.0 + time_factors["hov3"] = 0.0 + elif link["@hov"] == 3: + for _, time_factors in factors.iteritems(): + 
time_factors["hov3"] = 0.0 + return factors + + vehicle_classes = ["auto", "hov2", "hov3", "lgt_truck", "med_truck", "hvy_truck"] + for link in network.links(): + if sum(link["@toll" + time] for time in time_periods) > 0: + factors = match_facility_factors(link) + for time, period in zip(time_periods, periods): + time_factors = factors.get(period, factors.get("ALL")) + for name in vehicle_classes: + link["@cost_" + name + time] = time_factors[name] * link["@toll" + time] + link["@cost_operating"] + else: + for time in time_periods: + for name in vehicle_classes: + link["@cost_" + name + time] = link["@cost_operating"] + for name, class_factors in facility_factors.iteritems(): + msg = "Facility name '%s' matched to %s links." % (name, class_factors["count"]) + self._log.append({"type": "text2", "content": msg}) + + self._log.append({ + "type": "text", + "content": "Calculation and time period expansion of costs, tolls, capacities and times complete"}) + + # calculate static reliability + for link in network.links(): + for time in time_periods: + sta_reliability = "@sta_reliability" + time + # if freeway apply freeway parameters to this link + if link["type"] == 1 and link["@lane" + time] > 0: + high_speed_factor = freeway_rel["speed>70"] if link["@speed_posted"] >= 70 else 0.0 + upstream_factor = freeway_rel["upstream"] * 1 / link["@intdist_up"] + downstream_factor = freeway_rel["downstream"] * 1 / link["@intdist_down"] + link[sta_reliability] = ( + freeway_rel["intercept"] + high_speed_factor + upstream_factor + downstream_factor) + # arterial/ramp/other apply road parameters + elif link["type"] <= 9 and link["@lane" + time] > 0: + lane_factor = road_rel["lanes"].get(link["@lane" + time], 0.0) + speed_bin = int(link["@speed_posted"] / 5) * 5 # truncate to multiple of 5 + if speed_bin < 35: + speed_bin = "<35" + elif speed_bin > 50: + speed_bin = ">50" + speed_factor = road_rel["speed"][speed_bin] + control_bin = min(max(link["@traffic_control"], 0), 4) + control_factor = road_rel["control"][control_bin] + link[sta_reliability] = road_rel["intercept"] + lane_factor + speed_factor + control_factor + else: + link[sta_reliability] = 0.0 + self._log.append({"type": "text", "content": "Calculate of link static reliability factors complete"}) + + # Cycle length matrix + # Intersecting Link + # Approach Link 2 3 4 5 6 7 8 9 + # FC Description + # 2 Prime Arterial 2.5 2 2 2 2 2 2 2 + # 3 Major Arterial 2 2 2 2 2 2 2 2 + # 4 Collector 2 2 1.5 1.5 1.5 1.5 1.5 1.5 + # 5 Local Collector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + # 6 Rural Collector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + # 7 Local Road 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + # 8 Freeway connector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + # 9 Local Ramp 2 2 1.5 1.25 1.25 1.25 1.25 1.25 + + # Volume-delay functions + # fd10: freeway node approach + # fd11: non-intersection node approach + # fd20: cycle length 1.25 + # fd21: cycle length 1.5 + # fd22: cycle length 2.0 + # fd23: cycle length 2.5 + # fd24: cycle length 2.5 and metered ramp + # fd25: freeway node approach AM and PM only + network.create_attribute("LINK", "green_to_cycle") + network.create_attribute("LINK", "cycle") + vdf_cycle_map = {1.25: 20, 1.5: 21, 2.0: 22, 2.5: 23} + for node in network.nodes(): + incoming = list(node.incoming_links()) + outgoing = list(node.outgoing_links()) + is_signal = False + for link in incoming: + if link["@green_to_cycle_init"] > 0: + is_signal = True + break + if is_signal: + lcs = [link.type for link in incoming + outgoing] + min_lc = max(lcs) # Note: minimum class is 
actually the HIGHEST value, + max_lc = min(lcs) # and maximum is the LOWEST + + for link in incoming: + # Metered ramps + if link["@traffic_control"] in [4, 5]: + link["cycle"] = 2.5 + link["green_to_cycle"] = 0.42 + link.volume_delay_func = 24 + # Stops + elif link["@traffic_control"] in [2, 3]: + link["cycle"] = 1.25 + link["green_to_cycle"] = 0.42 + link.volume_delay_func = 20 + elif link["@green_to_cycle_init"] > 0 and is_signal: + if link.type == 2: + c_len = 2.5 if min_lc == 2 else 2.0 + elif link.type == 3: + c_len = 2.0 # Major arterial & anything + elif link.type == 4: + c_len = 1.5 if max_lc > 2 else 2.0 + elif link.type > 4: + if max_lc > 4: + c_len = 1.25 + elif max_lc == 4: + c_len = 1.5 + else: + c_len = 2.0 + if link["@green_to_cycle_init"] > 10: + link["green_to_cycle"] = link["@green_to_cycle_init"] / 100.0 + if link["green_to_cycle"] > 1.0: + link["green_to_cycle"] = 1.0 + link["cycle"] = c_len + link.volume_delay_func = vdf_cycle_map[c_len] + elif link.type == 1: + link.volume_delay_func = 10 # freeway + else: + link.volume_delay_func = 11 # non-controlled approach + self._log.append({"type": "text", "content": "Derive cycle, green_to_cycle, and VDF by approach node complete"}) + + for link in network.links(): + if link.volume_delay_func in [10, 11]: + continue + if link["@traffic_control"] in [4, 5]: + # Ramp meter controlled links are only enabled during the peak periods + for time in ["_am", "_pm"]: + link["@cycle" + time] = link["cycle"] + link["@green_to_cycle" + time] = link["green_to_cycle"] + else: + for time in time_periods: + link["@cycle" + time] = link["cycle"] + link["@green_to_cycle" + time] = link["green_to_cycle"] + self._log.append({"type": "text", "content": "Setting of time period @cycle and @green_to_cycle complete"}) + + network.delete_attribute("LINK", "green_to_cycle") + network.delete_attribute("LINK", "cycle") + network.delete_attribute("NODE", "is_interchange") + self._log.append({"type": "text", "content": "Calculate derived traffic attributes complete"}) + if fatal_errors > 0: + raise Exception("%s fatal errors during calculation of traffic attributes" % fatal_errors) + return + + def check_zone_access(self, network, mode): + # Verify that every centroid has at least one available + # access and egress connector + for centroid in network.centroids(): + access = egress = False + for link in centroid.outgoing_links(): + if mode in link.modes: + if link.j_node.is_intersection: + for turn in link.outgoing_turns(): + if turn.i_node != turn.k_node and turn.penalty_func != 0: + egress = True + else: + egress = True + if not egress: + raise Exception("No egress permitted from zone %s" % centroid.id) + for link in centroid.incoming_links(): + if mode in link.modes: + if link.j_node.is_intersection: + for turn in link.incoming_turns(): + if turn.i_node != turn.k_node and turn.penalty_func != 0: + access = True + else: + access = True + if not access: + raise Exception("No access permitted to zone %s" % centroid.id) + + @_m.logbook_trace("Set database functions (VDF, TPF and TTF)") + def set_functions(self, scenario): + create_function = _m.Modeller().tool( + "inro.emme.data.function.create_function") + set_extra_function_params = _m.Modeller().tool( + "inro.emme.traffic_assignment.set_extra_function_parameters") + emmebank = self.emmebank + for f_id in ["fd10", "fd11", "fd20", "fd21", "fd22", "fd23", "fd24", "fd25", + "fp1", "ft1", "ft2", "ft3", "ft4"]: + function = emmebank.function(f_id) + if function: + emmebank.delete_function(function) + + 
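        # Note on the functions recreated below: each volume-delay function is a BPR-style
        # term on ul1 (free-flow time), plus a signal/meter delay term on el1 (green-to-cycle
        # ratio) and el3 (intersection approach capacity) for controlled approaches, all
        # multiplied by a reliability factor (1 + el2 + atdm.factor * LOS-threshold terms in
        # the v/c ratio), where el2 = @sta_reliability. For example, at v/c = 1.0 the base
        # freeway curve fd10 evaluates to ul1 * (1.0 + 0.24 * 1.0 ** 5.5) = 1.24 * ul1
        # before the reliability multiplier is applied.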
smartSignalf_CL = self._props["smartSignal.factor.LC"] + smartSignalf_MA = self._props["smartSignal.factor.MA"] + smartSignalf_PA = self._props["smartSignal.factor.PA"] + atdmf = self._props["atdm.factor"] + + reliability_tmplt = ( + "* (1 + el2 + {0}*(".format(atdmf)+ + "( {factor[LOS_C]} * ( put(get(1).min.1.5) - {threshold[LOS_C]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_C]})" + "+ ( {factor[LOS_D]} * ( get(2) - {threshold[LOS_D]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_D]})" + "+ ( {factor[LOS_E]} * ( get(2) - {threshold[LOS_E]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_E]})" + "+ ( {factor[LOS_FL]} * ( get(2) - {threshold[LOS_FL]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_FL]})" + "+ ( {factor[LOS_FH]} * ( get(2) - {threshold[LOS_FH]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_FH]})" + "))") + parameters = { + "freeway": { + "factor": { + "LOS_C": 0.2429, "LOS_D": 0.1705, "LOS_E": -0.2278, "LOS_FL": -0.1983, "LOS_FH": 1.022 + }, + "threshold": { + "LOS_C": 0.7, "LOS_D": 0.8, "LOS_E": 0.9, "LOS_FL": 1.0, "LOS_FH": 1.2 + }, + }, + "road": { # for arterials, ramps, collectors, local roads, etc. + "factor": { + "LOS_C": 0.1561, "LOS_D": 0.0, "LOS_E": 0.0, "LOS_FL": -0.449, "LOS_FH": 0.0 + }, + "threshold": { + "LOS_C": 0.7, "LOS_D": 0.8, "LOS_E": 0.9, "LOS_FL": 1.0, "LOS_FH": 1.2 + }, + } + } + # freeway fd10 + create_function( + "fd10", + "(ul1 * (1.0 + 0.24 * put((volau + volad) / ul3) ** 5.5))" + + reliability_tmplt.format(**parameters["freeway"]), + emmebank=emmebank) + # non-freeway link which is not an intersection approach fd11 + create_function( + "fd11", + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0))" + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd20", # Local collector and lower intersection and stop controlled approaches + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "1.25 / 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))" + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd21", # Collector intersection approaches + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "{0} * 1.5/ 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_CL) + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd22", # Major arterial and major or prime arterial intersection approaches + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "{0} * 2.0 / 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_MA) + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd23", # Primary arterial intersection approaches + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "{0} * 2.5/ 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_PA) + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + create_function( + "fd24", # Metered ramps + "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" + "2.5/ 2 * (1-el1) ** 2 * (1.0 + 6.0 * ( (volau + volad) / el3 ) ** 2.0))" + + reliability_tmplt.format(**parameters["road"]), + emmebank=emmebank) + # freeway fd25 (AM and PM only) + create_function( + "fd25", + "(ul1 * (1.0 + 0.6 * put((volau + volad) / ul3) ** 4))" + + reliability_tmplt.format(**parameters["freeway"]), + emmebank=emmebank) + + set_extra_function_params( + el1="@green_to_cycle", el2="@sta_reliability", el3="@capacity_inter_am", + 
emmebank=emmebank) + + create_function("fp1", "up1", emmebank=emmebank) # fixed cost turns stored in turn data 1 (up1) + + # buses in mixed traffic, use auto time + create_function("ft1", "ul1", emmebank=emmebank) + # fixed speed for separate guideway operations + create_function("ft2", "ul2", emmebank=emmebank) + # special 0-cost segments for prohibition of walk to different stop from centroid + create_function("ft3", "0", emmebank=emmebank) + # fixed guideway systems according to vehicle speed (not used at the moment) + create_function("ft4", "60 * length / speed", emmebank=emmebank) + + @_m.logbook_trace("Traffic zone connectivity check") + def check_connectivity(self, scenario): + modeller = _m.Modeller() + sola_assign = modeller.tool( + "inro.emme.traffic_assignment.sola_traffic_assignment") + set_extra_function_para = modeller.tool( + "inro.emme.traffic_assignment.set_extra_function_parameters") + create_matrix = _m.Modeller().tool( + "inro.emme.data.matrix.create_matrix") + net_calc = gen_utils.NetworkCalculator(scenario) + + emmebank = scenario.emmebank + zone_index = dict(enumerate(scenario.zone_numbers)) + num_processors = dem_utils.parse_num_processors("MAX-1") + + # Note matrix is also created in initialize_matrices + create_matrix("ms1", "zero", "zero", scenario=scenario, overwrite=True) + with gen_utils.temp_matrices(emmebank, "FULL", 1) as (result_matrix,): + result_matrix.name = "TEMP_AUTO_TRAVEL_TIME" + set_extra_function_para( + el1="@green_to_cycle_am", + el2="@sta_reliability_am", + el3="@capacity_inter_am", emmebank=emmebank) + net_calc("ul1", "@time_link_am", "modes=d") + net_calc("ul3", "@capacity_link_am", "modes=d") + net_calc("lanes", "@lane_am", "modes=d") + spec = { + "type": "SOLA_TRAFFIC_ASSIGNMENT", + "background_traffic": None, + "classes": [ + { + "mode": "d", + "demand": 'ms"zero"', + "generalized_cost": None, + "results": { + "od_travel_times": {"shortest_paths": result_matrix.named_id} + } + } + ], + "stopping_criteria": { + "max_iterations": 0, "best_relative_gap": 0.0, + "relative_gap": 0.0, "normalized_gap": 0.0 + }, + "performance_settings": {"number_of_processors": num_processors}, + } + sola_assign(spec, scenario=scenario) + travel_time = result_matrix.get_numpy_data(scenario) + + is_disconnected = (travel_time == 1e20) + disconnected_pairs = is_disconnected.sum() + if disconnected_pairs > 0: + error_msg = "Connectivity error(s) between %s O-D pairs" % disconnected_pairs + self._log.append({"type": "header", "content": error_msg}) + count_disconnects = [] + for axis, term in [(0, "from"), (1, "to")]: + axis_totals = is_disconnected.sum(axis=axis) + for i, v in enumerate(axis_totals): + if v > 0: + count_disconnects.append((zone_index[i], term, v)) + count_disconnects.sort(key=lambda x: x[2], reverse=True) + for z, direction, count in count_disconnects[:50]: + msg ="Zone %s disconnected %s %d other zones" % (z, direction, count) + self._log.append({"type": "text", "content": msg}) + if disconnected_pairs > 50: + self._log.append({"type": "text", "content": "[List truncated]"}) + raise Exception(error_msg) + self._log.append({"type": "header", "content": + "Zone connectivity verified for AM period on SOV toll ('S') mode"}) + scenario.has_traffic_results = False + + def log_report(self): + report = _m.PageBuilder(title="Import network from TNED files report") + try: + if self._error: + report.add_html("
<div>Errors detected during import: %s</div>" % len(self._error))
+                error_msg = ["<ul>"]
+                for error in self._error:
+                    error_msg.append("<li>%s</li>" % error)
+                error_msg.append("</ul>")
+                report.add_html("".join(error_msg))
+            else:
+                report.add_html("No errors detected during import :-)")
+
+            for item in self._log:
+                if item["type"] == "text":
+                    report.add_html("<div>%s</div>" % item["content"])
+                if item["type"] == "text2":
+                    report.add_html("<div style='margin-left:20px'>%s</div>" % item["content"])
+                elif item["type"] == "header":
+                    report.add_html("<h3>%s</h3>" % item["content"])
+                elif item["type"] == "table":
+                    table_msg = ["<div><table>", "<h3>%s</h3>" % item["title"]]
+                    if "header" in item:
+                        table_msg.append("<tr>")
+                        for label in item["header"]:
+                            table_msg.append("<th>%s</th>" % label)
+                        table_msg.append("</tr>")
+                    for row in item["content"]:
+                        table_msg.append("<tr>")
+                        for cell in row:
+                            table_msg.append("<td>%s</td>" % cell)
+                        table_msg.append("</tr>")
+                    table_msg.append("</table></div>
      ") + report.add_html("".join(table_msg)) + + except Exception as error: + # no raise during report to avoid masking real error + report.add_html("Error generating report") + report.add_html(unicode(error)) + report.add_html(_traceback.format_exc()) + + _m.logbook_write("Import network report", report.render()) + + +def get_node(network, number, coordinates, is_centroid=False): + node = network.node(number) + if not node: + node = network.create_node(number, is_centroid) + node.x, node.y = coordinates + return node + + +# shortest path interpolation +def find_path(orig_link, dest_link, mode): + visited = set([]) + visited_add = visited.add + back_links = {} + heap = [] + + for link in orig_link.j_node.outgoing_links(): + if mode in link.modes: + back_links[link] = None + _heapq.heappush(heap, (link["length"], link)) + + link_found = False + try: + while not link_found: + link_cost, link = _heapq.heappop(heap) + if link in visited: + continue + visited_add(link) + for outgoing in link.j_node.outgoing_links(): + if mode not in outgoing.modes: + continue + if outgoing in visited: + continue + back_links[outgoing] = link + if outgoing == dest_link: + link_found = True + break + outgoing_cost = link_cost + link["length"] + _heapq.heappush(heap, (outgoing_cost, outgoing)) + except IndexError: + pass # IndexError if heap is empty + if not link_found: + raise NoPathException( + "no path found between links with trcov_id %s and %s (Emme IDs %s and %s)" % ( + orig_link["@tcov_id"], dest_link["@tcov_id"], orig_link, dest_link)) + + prev_link = back_links[dest_link] + route = [] + while prev_link: + route.append(prev_link) + prev_link = back_links[prev_link] + return list(reversed(route)) + + +class NoPathException(Exception): + pass + + +def revised_headway(headway): + # CALCULATE REVISED HEADWAY + # new headway calculation is less aggressive; also only being used for initial wait + # It uses a negative exponential formula to calculate headway + # + if headway <= 10: + rev_headway = headway + else: + rev_headway = headway * (0.275 + 0.788 * _np.exp(-0.011*headway)) + return rev_headway + + +def interchange_distance(orig_link, direction): + visited = set([]) + visited_add = visited.add + back_links = {} + heap = [] + if direction == "DOWNSTREAM": + get_links = lambda l: l.j_node.outgoing_links() + check_far_node = lambda l: l.j_node.is_interchange + elif direction == "UPSTREAM": + get_links = lambda l: l.i_node.incoming_links() + check_far_node = lambda l: l.i_node.is_interchange + # Shortest path search for nearest interchange node along freeway + for link in get_links(orig_link): + _heapq.heappush(heap, (link["length"], link)) + interchange_found = False + try: + while not interchange_found: + link_cost, link = _heapq.heappop(heap) + if link in visited: + continue + visited_add(link) + if check_far_node(link): + interchange_found = True + break + for next_link in get_links(link): + if next_link in visited: + continue + next_cost = link_cost + link["length"] + _heapq.heappush(heap, (next_cost, next_link)) + except IndexError: + # IndexError if heap is empty + # case where start / end of highway, dist = 99 + return 99 + return orig_link["length"] / 2.0 + link_cost From 486a9ba5af256675d0f1e83017fc10aa9c02c7cd Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 20 Dec 2023 16:20:22 -0500 Subject: [PATCH 27/43] Fixing skipping of @headway_rev calculation in case the special fares file was missing; also centralized the attribute auto_time copying to the same in-memory network object --- 
.../assignment/build_transit_scenario.py | 8 -------- .../emme/toolbox/import/import_network.py | 19 +++++++++---------- 2 files changed, 9 insertions(+), 18 deletions(-) diff --git a/src/main/emme/toolbox/assignment/build_transit_scenario.py b/src/main/emme/toolbox/assignment/build_transit_scenario.py index d3c05a62d..82949e324 100644 --- a/src/main/emme/toolbox/assignment/build_transit_scenario.py +++ b/src/main/emme/toolbox/assignment/build_transit_scenario.py @@ -340,14 +340,6 @@ def __call__(self, period, base_scenario, transit_emmebank, scenario_id, scenari scenario.publish_network(network) self._node_id_tracker = None - - ##copying auto_time to ul1, so it does not get wiped when transit connectors are created. - if scenario.has_traffic_results and "@auto_time" in scenario.attributes("LINK"): - copy_att(from_attribute_name='timau', - to_attribute_name='ul1', - from_scenario=scenario, - to_scenario=scenario) - return scenario @_m.logbook_trace("Add timed-transfer links", save_arguments=True) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index a14dcb956..57dafc066 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -1035,20 +1035,19 @@ def lookup_line(ident): self._log.append({"type": "text", "content": "Import transit lines complete"}) def calc_transit_attributes(self, network): - # for link in network.links(): - # if link.type == 0: # walk only links have FC ==0 - # link.type = 99 - - fares_file_name = FILE_NAMES["FARES"] - special_fare_path = _join(self.source, fares_file_name) - if not os.path.isfile(special_fare_path): - return - + self._log.append({"type": "header", "content": "Calculate derived transit line attributes"}) # ON TRANSIT LINES # Set 3-period headway based on revised headway calculation for line in network.transit_lines(): for period in ["ea", "am", "md", "pm", "ev"]: line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) + self._log.append({"type": "text", "content": "Revised headway calculation complete"}) + + fares_file_name = FILE_NAMES["FARES"] + special_fare_path = _join(self.source, fares_file_name) + if not os.path.isfile(special_fare_path): + self._log.append({"type": "text", "content": "Special fares file %s not found" % fares_file_name}) + return def get_line(line_id): line = network.transit_line(line_id) @@ -1698,7 +1697,7 @@ def log_report(self): error_msg.append("
    ") report.add_html("".join(error_msg)) else: - report.add_html("

    No errors detected during import :-)") + report.add_html("

    No errors detected during import :-)") for item in self._log: if item["type"] == "text": From 646cfcd6dc116f22485951c5ae1f60ccea17351d Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 10 Jan 2024 13:33:18 -0500 Subject: [PATCH 28/43] Updating mode definitions to match use in traffic assignment --- .../emme/toolbox/import/import_network.py | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 57dafc066..d2918ac0a 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -475,18 +475,18 @@ def create_modes(self, network): mode_table = { "AUTO": [("d", "dummy auto")], "AUX_AUTO": [ - ("h", "SOV"), - ("H", "HOV2"), + ("s", "SOV"), + ("h", "HOV2"), ("i", "HOV3+"), - ("I", "TRKL"), - ("s", "TRKM"), - ("S", "TRKH"), - ("v", "SOV TOLL"), - ("V", "HOV2 TOLL"), - ("m", "HOV3+ TOLL"), - ("M", "TRKL TOLL"), - ("t", "TRKM TOLL"), - ("T", "TRKH TOLL"), + ("t", "TRKL"), + ("m", "TRKM"), + ("v", "TRKH"), + ("S", "SOV TOLL"), + ("H", "HOV2 TOLL"), + ("I", "HOV3+ TOLL"), + ("T", "TRKL TOLL"), + ("M", "TRKM TOLL"), + ("V", "TRKH TOLL"), ], "TRANSIT": [ ("b", "BUS" ), # (vehicle type 100, PCE=3.0) @@ -541,20 +541,20 @@ def create_modes(self, network): } modes_gp_lanes = { 0: set([]), - 1: set([network.mode(m_id) for m_id in "dhHiIsSvVmMtT"]), # all modes - 2: set([network.mode(m_id) for m_id in "dhHiIsvVmMt"]), # no heavy truck - 3: set([network.mode(m_id) for m_id in "dhHiIvVmM"]), # no heavy or medium truck - 4: set([network.mode(m_id) for m_id in "dhHivVm"]), # no truck - 5: set([network.mode(m_id) for m_id in "dST"]), # only heavy trucks - 6: set([network.mode(m_id) for m_id in "dsStT"]), # heavy and medium trucks - 7: set([network.mode(m_id) for m_id in "dIsSMtT"]), # all trucks only + 1: set([network.mode(m_id) for m_id in "dvmtshiVMTSHI"]), # all modes + 2: set([network.mode(m_id) for m_id in "dmtshiMTSHI"]), # no heavy truck + 3: set([network.mode(m_id) for m_id in "dtshiTSHI"]), # no heavy or medium truck + 4: set([network.mode(m_id) for m_id in "dshiSHI"]), # no truck + 5: set([network.mode(m_id) for m_id in "dvV"]), # only heavy trucks + 6: set([network.mode(m_id) for m_id in "dvmVM"]), # heavy and medium trucks + 7: set([network.mode(m_id) for m_id in "dvmtVMT"]), # all trucks only (no passenger cars) } - non_toll_modes = set([network.mode(m_id) for m_id in "hHiIsS"]) + non_toll_modes = set([network.mode(m_id) for m_id in "vmtshi"]) self._auto_mode_lookup = { "GP": modes_gp_lanes, "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), - "HOV2": set([network.mode(m_id) for m_id in "dHiVm"]), - "HOV3": set([network.mode(m_id) for m_id in "dim"]), + "HOV2": set([network.mode(m_id) for m_id in "dhiHI"]), + "HOV3": set([network.mode(m_id) for m_id in "diI"]), } def set_auto_modes(self, network, period): From ef123df3cd914ffa735710956964e6149544ad32 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 10 Jan 2024 13:36:04 -0500 Subject: [PATCH 29/43] Switching trrt and trlink to use the csv files instead of tables in geodatabase --- src/main/emme/toolbox/import/import_network.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index d2918ac0a..925aec50e 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ 
b/src/main/emme/toolbox/import/import_network.py @@ -34,11 +34,11 @@ # - TNED_HwyNodes # - TNED_RailNet # - TNED_RailNodes -# - trrt -# - trlink # - Turns # The following files are also used (in the same directory as the *.gdb) # +# trrt: header data for the transit lines +# trlink: sequence of links (routing) of transit lines # trstop.csv: stop data for the transit lines # mode5tod.csv: global (per-mode) transit cost and perception attributes # timexfer_.csv (optional): table of timed transfer pairs of lines, by period @@ -149,12 +149,12 @@ def page(self):
       • TNED_HwyNodes
       • TNED_RailNet
       • TNED_RailNodes
-      • trrt
-      • trlink
       • Turns
     The following files are also used (in the same directory as the *.gdb):
+      • trrt.csv
+      • trlink.csv
       • trstop.csv
       • mode5tod.csv
       • timexfer_<period>.csv (optional)
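For context on this change: the trrt.csv and trlink.csv inputs are resolved relative to the directory that contains the source geodatabase, mirroring the _join(_dir(self.source), ...) pattern in the hunk below. A minimal standalone sketch of that path handling, assuming a hypothetical source value; DataTableProc itself belongs to the SANDAG general utilities and is not reimplemented here:

    import os

    _join = os.path.join
    _dir = os.path.dirname

    # Hypothetical source value: the geodatabase path passed to the import tool.
    source = _join("abm_run", "input", "EMMEOutputs.gdb")

    # trrt.csv and trlink.csv sit next to the .gdb rather than inside it, so the
    # lookup drops the geodatabase name and joins the CSV file name instead.
    trrt_path = _join(_dir(source), "trrt.csv")
    trlink_path = _join(_dir(source), "trlink.csv")

    print(trrt_path)    # abm_run/input/trrt.csv (separator depends on OS)
    print(trlink_path)  # abm_run/input/trlink.csv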
    • @@ -734,9 +734,11 @@ def create_transit_lines(self, network, attr_map): self._log.append({"type": "header", "content": "Import transit lines"}) fatal_errors = 0 # Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,Midday_Headway,Evening_Headway,EarlyAM_Headway,Night_Headway,Night_Hours,Config,Fare - transit_line_data = gen_utils.DataTableProc("trrt", self.source) + #transit_line_data = gen_utils.DataTableProc("trrt", self.source) + transit_line_data = gen_utils.DataTableProc("trrt", _join(_dir(self.source), "trrt.csv")) # Route_ID,Link_ID,Link_GUID,Direction - transit_link_data = gen_utils.DataTableProc("trlink", self.source) + #transit_link_data = gen_utils.DataTableProc("trlink", self.source) + transit_link_data = gen_utils.DataTableProc("trlink", _join(_dir(self.source), "trlink.csv")) # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,StopName #transit_stop_data = gen_utils.DataTableProc("trstop", self.source) transit_stop_data = gen_utils.DataTableProc("trstop", _join(_dir(self.source), "trstop.csv")) From d0be0e154aa3b4263d1758a8d3cf41d9044af0a7 Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Wed, 10 Jan 2024 15:25:32 -0500 Subject: [PATCH 30/43] Adding extra check of transit line stop on next link, in case of split link (resulting in adjacent links with same ID) --- .../emme/toolbox/import/import_network.py | 42 +++++++++++-------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index 925aec50e..f70cd49e2 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -37,8 +37,8 @@ # - Turns # The following files are also used (in the same directory as the *.gdb) # -# trrt: header data for the transit lines -# trlink: sequence of links (routing) of transit lines +# trrt.csv: header data for the transit lines +# trlink.csv: sequence of links (routing) of transit lines # trstop.csv: stop data for the transit lines # mode5tod.csv: global (per-mode) transit cost and perception attributes # timexfer_.csv (optional): table of timed transfer pairs of lines, by period @@ -961,31 +961,39 @@ def create_transit_lines(self, network, attr_map): if "DUMMY" in stop["StopName"]: continue stop_link_id = stop['Link_GUID'] - node_id = int(stop['Node']) + stop_node_id = int(stop['Node']) while segment.link and segment.link["#hwyseg_guid"].lstrip("-") != stop_link_id: segment = itinerary.next() if stop_node_id == segment.i_node.number: pass - elif segment.j_node and node_id == segment.j_node.number: + elif segment.j_node and stop_node_id == segment.j_node.number: # if matches the J-node then the stop is on the next segment segment = itinerary.next() else: - if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: - msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( - line_name, stop["Route_ID"], segment, stop_link_id, node_id) + next_segment = None + if segment.j_node: + next_segment = itinerary.next() + if next_segment and next_segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id and \ + stop_node_id == next_segment.j_node.number: + # split link case, where stop is at the end of the next segment + segment = next_segment else: - msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( - line_name, stop["Route_ID"], stop_link_id, node_id) - self._log.append({"type": "text", "content": msg}) - 
self._error.append(msg) - fatal_errors += 1 - # reset iterator to start back from previous segment - itinerary = tline.segments(include_hidden=True) - segment = itinerary.next() - while segment.id != prev_segment.id: + if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: + msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( + line_name, stop["Route_ID"], segment, stop_link_id, stop_node_id) + else: + msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( + line_name, stop["Route_ID"], stop_link_id, stop_node_id) + self._log.append({"type": "text", "content": msg}) + self._error.append(msg) + fatal_errors += 1 + # reset iterator to start back from previous segment + itinerary = tline.segments(include_hidden=True) segment = itinerary.next() - continue + while segment.id != prev_segment.id: + segment = itinerary.next() + continue segment.allow_boardings = True segment.allow_alightings = True segment.dwell_time = min(tline.default_dwell_time, 99.99) From 7220ccb62f33f492d480f22a88328fb01caa1cdb Mon Sep 17 00:00:00 2001 From: Kevin Bragg Date: Fri, 12 Jan 2024 11:09:06 -0500 Subject: [PATCH 31/43] Updates to export_data_loader_network.py for compatibility with new TNED data standards. Dropped "I" from leading column names; TOLL, CP, CX, CH 3->5 periods; transit headway 4->5 periods; added link GUID --- .../export/export_data_loader_network.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/src/main/emme/toolbox/export/export_data_loader_network.py b/src/main/emme/toolbox/export/export_data_loader_network.py index 3c77ebfec..f54fc1c39 100644 --- a/src/main/emme/toolbox/export/export_data_loader_network.py +++ b/src/main/emme/toolbox/export/export_data_loader_network.py @@ -206,16 +206,14 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, ("FFC", "type"), ("CLASS", "zero"), ("ASPD", "@speed_adjusted"), - ("IYR", "@year_open_traffic"), - ("IPROJ", "@project_code"), - ("IJUR", "@jurisdiction_type"), - ("IFC", "type"), - ("IHOV", "@hov"), - #("ITRUCK", "@truck_restriction"), - ("ISPD", "@speed_posted"), - ("ITSPD", "zero"), - ("IWAY", "iway"), - ("IMED", "@median"), + ("YR", "@year_open_traffic"), + ("PROJ", "@project_code"), + ("FC", "type"), + ("HOV", "@hov"), + ("SPD", "@speed_posted"), + ("TSPD", "zero"), + ("WAY", "iway"), + ("MED", "@median"), ("COST", "@cost_operating"), ] directional_attrs = [ From 6f22a94006b2eb7f29331ea548124e3bf0a023a5 Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Wed, 17 Jan 2024 12:33:29 -0800 Subject: [PATCH 32/43] Delete import_network.py.bak Cleanup by deleting unnecessary bak file. --- .../emme/toolbox/import/import_network.py.bak | 1836 ----------------- 1 file changed, 1836 deletions(-) delete mode 100644 src/main/emme/toolbox/import/import_network.py.bak diff --git a/src/main/emme/toolbox/import/import_network.py.bak b/src/main/emme/toolbox/import/import_network.py.bak deleted file mode 100644 index a32fdd5d1..000000000 --- a/src/main/emme/toolbox/import/import_network.py.bak +++ /dev/null @@ -1,1836 +0,0 @@ -#////////////////////////////////////////////////////////////////////////////// -#//// /// -#//// Copyright INRO, 2016-2017. /// -#//// Rights to use and modify are granted to the /// -#//// San Diego Association of Governments and partner agencies. /// -#//// This copyright notice must be preserved. 
/// -#//// /// -#//// import/import_network.py /// -#//// /// -#//// /// -#//// /// -#//// /// -#////////////////////////////////////////////////////////////////////////////// -# -# Imports the network from the input network files. -# -# -# Inputs: -# source: path to the location of the input network geodatabase -# traffic_scenario_id: optional scenario to store the imported network from the traffic files only -# transit_scenario_id: optional scenario to store the imported network from the transit files only -# merged_scenario_id: scenario to store the combined traffic and transit data from all network files -# title: the title to use for the imported scenario -# save_data_tables: if checked, create a data table for each reference file for viewing in the Emme Desktop -# data_table_name: prefix to use to identify all data tables -# overwrite: check to overwrite any existing data tables or scenarios with the same ID or name -# emmebank: the Emme database in which to create the scenario. Default is the current open database -# create_time_periods: if True (default), also create per-time period scenarios (required to run assignments) -# -# Files referenced: -# -# *.gdb: A Geodatabase file with the network data for both highway and transit. The following tables are used -# - TNED_HwyNet -# - TNED_HwyNodes -# - TNED_RailNet -# - TNED_RailNodes -# - trrt -# - trlink -# - Turns -# The following files are also used (in the same directory as the *.gdb) -# -# trstop.csv: stop data for the transit lines -# mode5tod.csv: global (per-mode) transit cost and perception attributes -# timexfer_.csv (optional): table of timed transfer pairs of lines, by period -# special_fares.txt (optional): table listing special fares in terms of boarding and incremental in-vehicle costs. 
-# off_peak_toll_factors.csv (optional): factors to calculate the toll for EA, MD, and EV periods from the OP toll input for specified facilities -# vehicle_class_toll_factors.csv (optional): factors to adjust the toll cost by facility name and class (DA, S2, S3, TRK_L, TRK_M, TRK_H) -# -# -# Script example: -""" - import os - modeller = inro.modeller.Modeller() - main_directory = os.path.dirname(os.path.dirname(modeller.desktop.project.path)) - source_file = os.path.join(main_directory, "input", "EMMEOutputs.gdb") - title = "Base 2012 scenario" - import_network = modeller.tool("sandag.import.import_network") - import_network(source_file, merged_scenario_id=100, title=title, - data_table_name="2012_base", overwrite=True) -""" - - -TOOLBOX_ORDER = 11 - - -import inro.modeller as _m -import inro.emme.datatable as _dt -import inro.emme.network as _network -from inro.emme.core.exception import Error as _NetworkError - -from itertools import izip as _izip -from collections import defaultdict as _defaultdict, OrderedDict -from contextlib import contextmanager as _context -import fiona as _fiona - -from math import ceil as _ceiling -from copy import deepcopy as _copy -import numpy as _np -import heapq as _heapq -import pandas as pd - -import traceback as _traceback -import os - -_join = os.path.join -_dir = os.path.dirname - - -gen_utils = _m.Modeller().module("sandag.utilities.general") -dem_utils = _m.Modeller().module("sandag.utilities.demand") - -FILE_NAMES = { - "FARES": "special_fares.txt", - "TIMEXFER": "timexfer_%s.csv", - "OFF_PEAK": "off_peak_toll_factors.csv", - "VEHICLE_CLASS": "vehicle_class_toll_factors.csv", - "MODE5TOD": "MODE5TOD.csv", -} - - -class ImportNetwork(_m.Tool(), gen_utils.Snapshot): - - source = _m.Attribute(unicode) - scenario_id = _m.Attribute(int) - overwrite = _m.Attribute(bool) - title = _m.Attribute(unicode) - save_data_tables = _m.Attribute(bool) - data_table_name = _m.Attribute(unicode) - create_time_periods = _m.Attribute(bool) - - tool_run_msg = "" - - @_m.method(return_type=_m.UnicodeType) - def tool_run_msg_status(self): - return self.tool_run_msg - - def __init__(self): - self._log = [] - self._error = [] - project_dir = _dir(_m.Modeller().desktop.project.path) - self.source = _join(_dir(project_dir), "input") - self.overwrite = False - self.title = "" - self.data_table_name = "" - self.create_time_periods = True - self.attributes = [ - "source", "scenario_id", "overwrite", "title", "save_data_tables", "data_table_name", "create_time_periods" - ] - - def page(self): - if not self.data_table_name: - try: - load_properties = _m.Modeller().tool('sandag.utilities.properties') - props = load_properties(_join(_dir(self.source), "conf", "sandag_abm.properties")) - self.data_table_name = props["scenarioYear"] - except: - pass - - pb = _m.ToolPageBuilder(self) - pb.title = "Import network" - pb.description = """ -
-            Create an Emme network from TNED geodatabase (*.gdb) and associated files.
-            The following layers in the gdb are used:
-              • TNED_HwyNet
-              • TNED_HwyNodes
-              • TNED_RailNet
-              • TNED_RailNodes
-              • trrt
-              • trlink
-              • Turns
-            The following files are also used (in the same directory as the *.gdb):
-              • trstop.csv
-              • mode5tod.csv
-              • timexfer_<period>.csv (optional)
-              • special_fares.txt (optional)
-              • off_peak_toll_factors.csv (optional)
-              • vehicle_class_toll_factors.csv (optional)
      - """ - pb.branding_text = "- SANDAG - Import" - - if self.tool_run_msg != "": - pb.tool_run_status(self.tool_run_msg_status) - - pb.add_select_file("source", window_type="directory", file_filter="", - title="Source gdb:",) - - pb.add_text_box("scenario_id", size=6, title="Scenario ID for imported network:") - pb.add_text_box("title", size=80, title="Scenario title:") - pb.add_checkbox("save_data_tables", title=" ", label="Save reference data tables of file data") - pb.add_text_box("data_table_name", size=80, title="Name for data tables:", - note="Prefix name to use for all saved data tables") - pb.add_checkbox("overwrite", title=" ", label="Overwrite existing scenarios and data tables") - pb.add_checkbox("create_time_periods", title=" ", label="Copy base scenario to all time periods and set modes (required for assignments)") - - return pb.render() - - def run(self): - self.tool_run_msg = "" - try: - self.emmebank = _m.Modeller().emmebank - with self.setup(): - self.execute() - run_msg = "Network import complete" - if self._error: - run_msg += " with %s non-fatal errors. See logbook for details" % len(self._error) - self.tool_run_msg = _m.PageBuilder.format_info(run_msg, escape=False) - except Exception as error: - self.tool_run_msg = _m.PageBuilder.format_exception( - error, _traceback.format_exc()) - raise - - def __call__(self, source, scenario_id, - title="", save_data_tables=False, data_table_name="", overwrite=False, - emmebank=None, create_time_periods=True): - - self.source = source - self.scenario_id = scenario_id - self.title = title - self.save_data_tables = save_data_tables - self.data_table_name = data_table_name - self.overwrite = overwrite - if not emmebank: - self.emmebank = _m.Modeller().emmebank - else: - self.emmebank = emmebank - self.create_time_periods = create_time_periods - - with self.setup(): - self.execute() - - return self.emmebank.scenario(scenario_id) - - @_context - def setup(self): - self._log = [] - self._error = [] - fatal_error = False - attributes = OrderedDict([ - ("self", str(self)), - ("source", self.source), - ("scenario_id", self.scenario_id), - ("title", self.title), - ("save_data_tables", self.save_data_tables), - ("data_table_name", self.data_table_name), - ("overwrite", self.overwrite), - ("create_time_periods", self.create_time_periods) - ]) - self._log = [{ - "content": attributes.items(), - "type": "table", "header": ["name", "value"], - "title": "Tool input values" - }] - with _m.logbook_trace("Import network", attributes=attributes) as trace: - gen_utils.log_snapshot("Import network", str(self), attributes) - load_properties = _m.Modeller().tool('sandag.utilities.properties') - self._props = load_properties(_join(_dir(_dir(self.source)), "conf", "sandag_abm.properties")) - try: - yield - except Exception as error: - self._log.append({"type": "text", "content": error}) - trace_text = _traceback.format_exc().replace("\n", "
      ") - self._log.append({"type": "text", "content": trace_text}) - self._error.append(error) - fatal_error = True - raise - finally: - self._props = None - self.log_report() - self._auto_mode_lookup = None - self._transit_mode_lookup = None - if self._error: - if fatal_error: - trace.write("Import network failed (%s errors)" % len(self._error), attributes=attributes) - else: - trace.write("Import network completed (%s non-fatal errors)" % len(self._error), attributes=attributes) - - def execute(self): - attr_map = { - "NODE": OrderedDict([ - ("HNODE", ("@hnode", "BOTH", "EXTRA", "HNODE label from TNED" )), - ("TAP", ("@tap_id", "BOTH", "EXTRA", "TAP number")), - ("PARK", ("@park", "BOTH", "EXTRA", "parking indicator" )), - ("STOPTYPE", ("@stoptype", "BOTH", "EXTRA", "stop type indicator" )), - ("ELEV", ("@elev", "BOTH", "EXTRA", "station/stop elevation in feet")), - ("interchange", ("@interchange", "DERIVED", "EXTRA", "is interchange node")), - ]), - "LINK": OrderedDict([ - ("HWYCOV0_ID",("@tcov_id", "TWO_WAY", "EXTRA", "SANDAG-assigned link ID")), - ("SPHERE", ("@sphere", "HWY_TWO_WAY", "EXTRA", "Jurisdiction sphere of influence")), - ("HWYSegGUID",("#hwyseg_guid", "TWO_WAY", "STRING", "HWYSegGUID")), - ("NM", ("#name", "TWO_WAY", "STRING", "Street name")), - ("FXNM", ("#name_from", "TWO_WAY", "STRING", "Cross street at the FROM end")), - ("TXNM", ("#name_to", "TWO_WAY", "STRING", "Cross street name at the TO end")), - ("DIR", ("@direction_cardinal", "TWO_WAY", "EXTRA", "Link direction")), - ("ASPD", ("@speed_adjusted", "HWY_TWO_WAY", "EXTRA", "Adjusted link speed (miles/hr)")), - ("YR", ("@year_open_traffic", "HWY_TWO_WAY", "EXTRA", "The year the link opened to traffic")), - ("PROJ", ("@project_code", "HWY_TWO_WAY", "EXTRA", "Project number for use with hwyproj.xls")), - ("FC", ("type", "TWO_WAY", "STANDARD", "")), - ("HOV", ("@hov", "TWO_WAY", "EXTRA", "Link operation type")), - ("MINMODE", ("@minmode", "TWO_WAY", "EXTRA", "Transit mode type")), - ("EATRUCK", ("@truck_ea", "HWY_TWO_WAY", "EXTRA", "Early AM truck restriction code ")), - ("AMTRUCK", ("@truck_am", "HWY_TWO_WAY", "EXTRA", "AM Peak truck restriction code ")), - ("MDTRUCK", ("@truck_md", "HWY_TWO_WAY", "EXTRA", "Mid-day truck restriction code ")), - ("PMTRUCK", ("@truck_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak truck restriction code ")), - ("EVTRUCK", ("@truck_ev", "HWY_TWO_WAY", "EXTRA", "Evening truck restriction code ")), - ("TOLLEA", ("@toll_ea", "HWY_TWO_WAY", "EXTRA", "Early AM toll cost (cent)")), - ("TOLLA", ("@toll_am", "HWY_TWO_WAY", "EXTRA", "AM Peak toll cost (cent)")), - ("TOLLMD", ("@toll_md", "HWY_TWO_WAY", "EXTRA", "Mid-day toll cost (cent)")), - ("TOLLP", ("@toll_pm", "HWY_TWO_WAY", "EXTRA", "PM Peak toll cost (cent)")), - ("TOLLEV", ("@toll_ev", "HWY_TWO_WAY", "EXTRA", "Evening toll cost (cent)")), - - ("SPD", ("@speed_posted", "HWY_TWO_WAY", "EXTRA", "Posted speed limit (mph)")), - ("MED", ("@median", "TWO_WAY", "EXTRA", "Median type")), - ("AU", ("@lane_auxiliary", "HWY_ONE_WAY", "EXTRA", "Number of auxiliary lanes")), - ("CNT", ("@traffic_control", "HWY_ONE_WAY", "EXTRA", "Intersection control type")), - ("TL", ("@turn_thru", "HWY_ONE_WAY", "EXTRA", "Intersection approach through lanes")), - ("RL", ("@turn_right", "HWY_ONE_WAY", "EXTRA", "Intersection approach right-turn lanes")), - ("LL", ("@turn_left", "HWY_ONE_WAY", "EXTRA", "Intersection approach left-turn lanes")), - ("GC", ("@green_to_cycle_init", "HWY_ONE_WAY", "EXTRA", "Initial green-to-cycle ratio")), - ("WAY", ("way", "HWY_TWO_WAY", "INTERNAL", 
"")), - ("TRANSIT_MODES", ("transit_modes", "DERIVED", "INTERNAL", "")), - ("@cost_operating", ("@cost_operating", "DERIVED", "EXTRA", "Fuel and maintenance cost")), - ("INTDIST_UP", ("@intdist_up", "DERIVED", "EXTRA", "Upstream major intersection distance")), - ("INTDIST_DOWN", ("@intdist_down", "DERIVED", "EXTRA", "Downstream major intersection distance")), - - ("TMO", ("@trtime", "RAIL_TWO_WAY", "EXTRA", "link time in minutes",)), - ("OSPD", ("@speed_observed", "RAIL_TWO_WAY", "EXTRA", "Observed speed")), - - ]), - "TRANSIT_LINE": OrderedDict([ - ("AM_Headway", ("@headway_am", "TRRT", "EXTRA", "AM Peak actual headway")), - ("PM_Headway", ("@headway_pm", "TRRT", "EXTRA", "PM Peak actual headway")), - ("Midday_Headway", ("@headway_md", "TRRT", "EXTRA", "Midday actual headway")), - ("Evening_Headway",("@headway_ev", "TRRT", "EXTRA", "Evening actual headway")), - ("EarlyAM_Headway",("@headway_ea", "TRRT", "EXTRA", "Early AM actual headway")), - ("AM_Headway_rev", ("@headway_rev_am", "DERIVED", "EXTRA", "AM Peak revised headway")), - ("PM_Headway_rev", ("@headway_rev_pm", "DERIVED", "EXTRA", "PM Peak revised headway")), - ("MD_Headway_rev", ("@headway_rev_md", "DERIVED", "EXTRA", "Midday revised headway")), - ("EV_Headway_rev", ("@headway_rev_ev", "DERIVED", "EXTRA", "Evening revised headway")), - ("EA_Headway_rev", ("@headway_rev_ea", "DERIVED", "EXTRA", "Early AM revised headway")), - ("WT_IVTPK", ("@vehicle_per_pk", "MODE5TOD", "EXTRA", "Peak in-vehicle perception factor")), - ("WT_IVTOP", ("@vehicle_per_op", "MODE5TOD", "EXTRA", "Off-Peak in-vehicle perception factor")), - ("WT_FAREPK", ("@fare_per_pk", "MODE5TOD", "EXTRA", "Peak fare perception factor")), - ("WT_FAREOP", ("@fare_per_op", "MODE5TOD", "EXTRA", "Off-Peak fare perception factor")), - ("DWELLTIME", ("default_dwell_time" "MODE5TOD", "INTERNAL", "")), - ("Fare", ("@fare", "TRRT", "EXTRA", "Boarding fare ($)")), - ("@transfer_penalty",("@transfer_penalty","DERIVED", "EXTRA", "Transfer penalty (min)")), - ("Route_ID", ("@route_id", "TRRT", "EXTRA", "Transit line internal ID")), - ("EarlyAM_Hours", ("@hours_ea", "TRRT", "EXTRA", "Early AM hours")), - ("Evening_Hours", ("@hours_ev", "TRRT", "EXTRA", "Evening hours")), - ("Config", ("@config", "TRRT", "EXTRA", "Config ID (same as route name)")), - ]), - "TRANSIT_SEGMENT": OrderedDict([ - ("Stop_ID", ("@stop_id", "TRSTOP", "EXTRA", "Stop ID from trcov")), - ("Pass_Count", ("@pass_count", "TRSTOP", "EXTRA", "Number of times this stop is passed")), - ("Milepost", ("@milepost", "TRSTOP", "EXTRA", "Distance from start of line")), - ("StopName", ("#stop_name", "TRSTOP", "STRING", "Name of stop")), - ("@coaster_fare_board", ("@coaster_fare_board", "DERIVED", "EXTRA", "Boarding fare for coaster")), - ("@coaster_fare_inveh", ("@coaster_fare_inveh", "DERIVED", "EXTRA", "Incremental fare for Coaster")), - ]) - } - - time_name = { - "_ea": "Early AM ", "_am": "AM Peak ", "_md": "Mid-day ", "_pm": "PM Peak ", "_ev": "Evening " - } - time_name_dst = ["_ea", "_am", "_md", "_pm", "_ev"] - time_name_src = ["EA", "A", "MD", "P", "EV"] - time_period_attrs = [ - ("CP", "@capacity_link", "mid-link capacity"), - ("CX", "@capacity_inter", "approach capacity"), - ("CH", "@capacity_hourly", "hourly mid-link capacity"), - ("LN", "@lane", "number of lanes"), - ("TM", "@time_link", "link time in minutes"), - ("TX", "@time_inter", "intersection delay time"), - ] - for src_attr, dst_attr, desc_tmplt in time_period_attrs: - for time_s, time_d in zip(time_name_src, time_name_dst): - attr_map["LINK"][src_attr + 
time_s] = \ - (dst_attr + time_d, "HWY_ONE_WAY", "EXTRA", time_name[time_d] + desc_tmplt) - derived_period_attrs = [ - ("@cost_auto", "toll + cost autos"), - ("@cost_hov2", "toll (non-mngd) + cost HOV2"), - ("@cost_hov3", "toll (non-mngd) + cost HOV3+"), - ("@cost_lgt_truck", "toll + cost light trucks"), - ("@cost_med_truck", "toll + cost medium trucks"), - ("@cost_hvy_truck", "toll + cost heavy trucks"), - ("@cycle", "cycle length (minutes)"), - ("@green_to_cycle", "green to cycle ratio"), - ("@sta_reliability", "static reliability") - ] - for attr, desc_tmplt in derived_period_attrs: - for time in time_name_dst: - attr_map["LINK"][attr + time] = \ - (attr + time, "DERIVED", "EXTRA", time_name[time] + desc_tmplt) - - create_scenario = _m.Modeller().tool( - "inro.emme.data.scenario.create_scenario") - - title = self.title - if not title: - existing_scenario = self.emmebank.scenario(self.scenario_id) - if existing_scenario: - title = existing_scenario.title - - scenario = create_scenario(self.scenario_id, title, overwrite=self.overwrite, emmebank=self.emmebank) - scenarios = [scenario] - if self.create_time_periods: - periods=["EA", "AM", "MD", "PM", "EV"] - period_ids = list(enumerate(periods, start=int(self.scenario_id) + 1)) - for ident, period in period_ids: - scenarios.append(create_scenario(ident, "%s - %s assign" % (title, period), - overwrite=self.overwrite, emmebank=self.emmebank)) - # create attributes in scenario - for elem_type, mapping in attr_map.iteritems(): - for name, _tcoved_type, emme_type, desc in mapping.values(): - if emme_type == "EXTRA": - for s in scenarios: - if not s.extra_attribute(name): - xatt = s.create_extra_attribute(elem_type, name) - xatt.description = desc - elif emme_type == "STRING": - for s in scenarios: - if not s.network_field(elem_type, name): - s.create_network_field(elem_type, name, 'STRING', description=desc) - - log_content = [] - for k, v in mapping.iteritems(): - if v[3] == "DERIVED": - k = "--" - log_content.append([k] + list(v)) - self._log.append({ - "content": log_content, - "type": "table", - "header": ["TNED", "Emme", "Source", "Type", "Description"], - "title": "Network %s attributes" % elem_type.lower().replace("_", " "), - "disclosure": True - }) - - network = _network.Network() - for elem_type, mapping in attr_map.iteritems(): - for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): - if emme_type == "STANDARD": - continue - default = "" if emme_type == "STRING" else 0 - network.create_attribute(elem_type, attr, default) - try: - self.create_modes(network) - self.create_road_base(network, attr_map) - self.create_turns(network) - self.calc_traffic_attributes(network) - self.check_zone_access(network, network.mode("d")) - self.create_rail_base(network, attr_map) - self.create_transit_lines(network, attr_map) - self.calc_transit_attributes(network) - finally: - # TAP connectors included in network, fix type setting and renumber node IDs - for link in network.links(): - if link.type <= 0: - link.type = 99 - self.renumber_base_nodes(network) - scenario.publish_network(network, resolve_attributes=True) - - self.set_functions(scenario) - self.check_connectivity(scenario) - - if "modify_network.py" in os.listdir(os.getcwd()): - try: - with _m.logbook_trace("Modify network script"): - import modify_network - reload(modify_network) - modify_network.run(base_scenario) - except ImportError as e: - pass - network = base_scenario.get_network() - network.create_attribute("LINK", "transit_modes") - - if self.create_time_periods: - 
for link in network.links(): - link.transit_modes = link.modes - for ident, period in period_ids: - self.set_auto_modes(network, period) - scenario = self.emmebank.scenario(ident) - scenario.publish_network(network, resolve_attributes=True) - - def create_modes(self, network): - # combined traffic and transit mode creation - mode_table = { - "AUTO": [("d", "dummy auto")], - "AUX_AUTO": [ - ("h", "SOV"), - ("H", "HOV2"), - ("i", "HOV3+"), - ("I", "TRKL"), - ("s", "TRKM"), - ("S", "TRKH"), - ("v", "SOV TOLL"), - ("V", "HOV2 TOLL"), - ("m", "HOV3+ TOLL"), - ("M", "TRKL TOLL"), - ("t", "TRKM TOLL"), - ("T", "TRKH TOLL"), - ], - "TRANSIT": [ - ("b", "BUS" ), # (vehicle type 100, PCE=3.0) - ("e", "EXP BUS"), # (vehicle type 90 , PCE=3.0) - ("p", "LTDEXP BUS"), # (vehicle type 80 , PCE=3.0) - ("l", "LRT"), # (vehicle type 50) - ("y", "BRT YEL"), # (vehicle type 60 , PCE=3.0) - ("r", "BRT RED"), # (vehicle type 70 , PCE=3.0) - ("c", "CMR"), # (vehicle type 40) - ("o", "TIER1"), # (vehicle type 45) - ], - "AUX_TRANSIT": [ - ("a", "ACCESS", 3), - ("x", "TRANSFER", 3), - ("w", "WALK", 3), - ("u", "ACCESS_WLK", 3), - ("k", "EGRESS_WLK", 3), - ("f", "ACCESS_PNR", 25), - ("g", "EGRESS_PNR", 25), - ("q", "ACCESS_KNR", 25), - ("j", "EGRESS_KNR", 25), - ("Q", "ACCESS_TNC", 25), - ("J", "EGRESS_TNC", 25), - ], - } - for mode_type, modes in mode_table.iteritems(): - for mode_info in modes: - mode = network.create_mode(mode_type, mode_info[0]) - mode.description = mode_info[1] - if len(mode_info) == 3: - mode.speed = mode_info[2] - self._transit_mode_lookup = { - 0: set([]), - 1: set([network.mode(m_id) for m_id in "x"]), # 1 = special transfer walk links between certain nearby stops - 2: set([network.mode(m_id) for m_id in "w"]), # 2 = walk links in the downtown area - 3: set([network.mode(m_id) for m_id in "a"]), # 3 = the special TAP connectors - 400: set([network.mode(m_id) for m_id in "c"]), # 4 = Coaster Rail Line - 500: set([network.mode(m_id) for m_id in "l"]), # 5 = Trolley & Light Rail Transit (LRT) - 600: set([network.mode(m_id) for m_id in "bpeyr"]), # 6 = Yellow Car Bus Rapid Transit (BRT) - 700: set([network.mode(m_id) for m_id in "bpeyr"]), # 7 = Red Car Bus Rapid Transit (BRT) - 800: set([network.mode(m_id) for m_id in "bpe"]), # 8 = Limited Express Bus - 900: set([network.mode(m_id) for m_id in "bpe"]), # 9 = Express Bus - 1000: set([network.mode(m_id) for m_id in "bpe"]), # 10 = Local Bus - 11: set([network.mode(m_id) for m_id in "u"]), # = access walk links - 12: set([network.mode(m_id) for m_id in "k"]), # = egress walk links - 13: set([network.mode(m_id) for m_id in "f"]), # = access PNR links - 14: set([network.mode(m_id) for m_id in "g"]), # = egress PNR links - 15: set([network.mode(m_id) for m_id in "q"]), # = access KNR links - 16: set([network.mode(m_id) for m_id in "j"]), # = egress KNR links - 17: set([network.mode(m_id) for m_id in "Q"]), # = access TNC links - 18: set([network.mode(m_id) for m_id in "J"]), # = egress TNC links - } - modes_gp_lanes = { - 0: set([]), - 1: set([network.mode(m_id) for m_id in "dhHiIsSvVmMtT"]), # all modes - 2: set([network.mode(m_id) for m_id in "dhHiIsvVmMt"]), # no heavy truck - 3: set([network.mode(m_id) for m_id in "dhHiIvVmM"]), # no heavy or medium truck - 4: set([network.mode(m_id) for m_id in "dhHivVm"]), # no truck - 5: set([network.mode(m_id) for m_id in "dST"]), # only heavy trucks - 6: set([network.mode(m_id) for m_id in "dsStT"]), # heavy and medium trucks - 7: set([network.mode(m_id) for m_id in "dIsSMtT"]), # all trucks only - } - 
non_toll_modes = set([network.mode(m_id) for m_id in "hHiIsS"]) - self._auto_mode_lookup = { - "GP": modes_gp_lanes, - "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), - "HOV2": set([network.mode(m_id) for m_id in "dHiVm"]), - "HOV3": set([network.mode(m_id) for m_id in "dim"]), - } - - def set_auto_modes(self, network, period): - # time periods - # need to update the modes from the XTRUCK for their time of day - # Note: only truck types 1, 3, 4, and 7 found in 2012 base network - truck = "@truck_%s" % period.lower() - toll = "@toll_%s" % period.lower() - lookup = self._auto_mode_lookup - for link in network.links(): - auto_modes = set([]) - if link.type == 10: # connector - auto_modes = lookup["GP"][link[truck]] - elif link.type in [11, 12]: - pass # no auto modes, rail only (11) or bus only (12) - elif link["@hov"] == 1: - auto_modes = lookup["GP"][link[truck]] - elif link["@hov"] in [2, 3]: - # managed lanes, free for HOV2 and HOV3+, tolls for SOV - if link[toll] > 0: - auto_modes = lookup["TOLL"][link[truck]] - # special case of I-15 managed lanes base year and 2020, no build - elif link.type == 1 and link["@project_code"] in [41, 42, 486, 373, 711]: - auto_modes = lookup["TOLL"][link[truck]] - elif link.type == 8 or link.type == 9: - auto_modes = lookup["TOLL"][link[truck]] - if link["@hov"] == 2: - auto_modes = auto_modes | lookup["HOV2"] - else: - auto_modes = auto_modes | lookup["HOV3"] - elif link["@hov"] == 4: - auto_modes = lookup["TOLL"][link[truck]] - link.modes = link.transit_modes | auto_modes - - def create_road_base(self, network, attr_map): - self._log.append({"type": "header", "content": "Import roadway base network from TNED_HwyNet %s" % self.source}) - hwy_data = gen_utils.DataTableProc("TNED_HwyNet", self.source) - # TEMP workaround: BN field is string - bn_index = hwy_data._attr_names.index("BN") - hwy_data._values[bn_index] = hwy_data._values[bn_index].astype(int) - - if self.save_data_tables: - hwy_data.save("%s_TNED_HwyNet" % self.data_table_name, self.overwrite) - - is_centroid = lambda arc, node : (arc["FC"] == 10) and (node == "AN") - link_attr_map = {} - for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): - if tcoved_type in ("TWO_WAY", "HWY_TWO_WAY", "ONE_WAY", "HWY_ONE_WAY"): - link_attr_map[field] = (name, tcoved_type.replace("HWY_", ""), emme_type, desc) - - def define_modes(arc): - if arc["FC"] in [11, 12] or arc["ABLNA"] == 0: #or ((arc["HOV"] < 1 or arc["HOV"] > 4) and arc["FC"] != 10): - vehicle_index = int(arc["MINMODE"] / 100)*100 - aux_index = int(arc["MINMODE"] % 100) - return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] - return [network.mode('d')] - - self._create_base_net( - hwy_data, network, mode_callback=define_modes, centroid_callback=is_centroid, link_attr_map=link_attr_map) - - hwy_node_data = gen_utils.DataTableProc("TNED_HwyNodes", self.source) - node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() - if v[1] in ("BOTH", "HWY")] - for record in hwy_node_data: - node = network.node(record["HNODE"]) - if node: - for src, dst in node_attrs: - node[dst] = record[src] - else: - self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) - self._log.append({"type": "text", "content": "Import traffic base network complete"}) - - def create_rail_base(self, network, attr_map): - self._log.append({"type": "header", "content": "Import rail base network from TNED_RailNet %s" % self.source}) - transit_data = 
gen_utils.DataTableProc("TNED_RailNet", self.source) - - if self.save_data_tables: - transit_data.save("%s_TNED_RailNet" % self.data_table_name, self.overwrite) - - link_attr_map = {} - for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems(): - if tcoved_type in ("TWO_WAY", "RAIL_TWO_WAY", "ONE_WAY", "RAIL_ONE_WAY"): - link_attr_map[field] = (name, tcoved_type.replace("RAIL_", ""), emme_type, desc) - - tier1_modes = set([network.mode(m_id) for m_id in "o"]) - tier1_rail_link_name = self._props["transit.newMode"] - - def define_modes(arc): - if arc["NM"] == tier1_rail_link_name: - return tier1_modes - vehicle_index = int(arc["MINMODE"] / 100)*100 - aux_index = int(arc["MINMODE"] % 100) - return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index] - - self._create_base_net( - transit_data, network, mode_callback=define_modes, link_attr_map=link_attr_map) - - transit_node_data = gen_utils.DataTableProc("TNED_RailNodes", self.source) - # Load PARK, elevation, stop type data onto transit nodes - node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() - if v[1] in ("BOTH", "RAIL")] - for record in transit_node_data: - node = network.node(record["HNODE"]) - if node: - for src, dst in node_attrs: - node[dst] = record[src] - else: - self._log.append({"type": "text", "content": "Cannot find node %s" % record["HNODE"]}) - - self._log.append({"type": "text", "content": "Import transit base network complete"}) - - def _create_base_net(self, data, network, link_attr_map, mode_callback, centroid_callback=None): - forward_attr_map = {} - reverse_attr_map = {} - arc_id_name = "HWYCOV0_ID" - arc_guid_name = "HWYSegGUID" - for field, (name, tcoved_type, emme_type, desc) in link_attr_map.iteritems(): - if field in [arc_id_name, arc_guid_name, "DIR"]: - # these attributes are special cases for reverse link - forward_attr_map[field] = name - elif tcoved_type in "TWO_WAY": - forward_attr_map[field] = name - reverse_attr_map[field] = name - elif tcoved_type in "ONE_WAY": - forward_attr_map["AB" + field] = name - reverse_attr_map["BA" + field] = name - - emme_id_name = forward_attr_map[arc_id_name] - emme_guid_name = forward_attr_map[arc_guid_name] - dir_name = forward_attr_map["DIR"] - reverse_dir_map = {1: 3, 3: 1, 2: 4, 4: 2, 0: 0} - new_node_id = max(data.values("AN").max(), data.values("BN").max()) + 1 - - if centroid_callback is None: - centroid_callback = lambda a,n: False - - # Create nodes and links - for arc in data: - if float(arc["AN"]) == 0 or float(arc["BN"]) == 0: - self._log.append({"type": "text", - "content": "Node ID 0 in AN (%s) or BN (%s) for link GUID/ID %s/%s." % - (arc["AN"], arc["BN"], arc[arc_guid_name], arc[arc_id_name])}) - continue - coordinates = arc["geo_coordinates"] - i_node = get_node(network, arc['AN'], coordinates[0], centroid_callback(arc, "AN")) - j_node = get_node(network, arc['BN'], coordinates[-1], centroid_callback(arc, "BN")) - link = network.link(i_node, j_node) - if link: - msg = "Duplicate link between AN %s and BN %s. Link GUID/IDs %s/%s and %s/%s." % \ - (arc["AN"], arc["BN"], link[emme_guid_name], link[emme_id_name], arc[arc_guid_name], arc[arc_id_name]) - self._log.append({"type": "text", "content": msg}) - if link[emme_guid_name] == arc[arc_guid_name]: - self._log.append({"type": "text", "content": "... 
but GUIDs match (not an error)"}) - else: - self._error.append(msg) - else: - modes = mode_callback(arc) - link = network.create_link(i_node, j_node, modes) - link.length = arc["LENGTH"] - if len(coordinates) > 2: - link.vertices = coordinates[1:-1] - for field, attr in forward_attr_map.iteritems(): - link[attr] = arc[field] - if arc["WAY"] == 2 or arc["WAY"] == 0: - reverse_link = network.link(j_node, i_node) - if not reverse_link: - reverse_link = network.create_link(j_node, i_node, modes) - reverse_link.length = link.length - reverse_link.vertices = list(reversed(link.vertices)) - for field, attr in reverse_attr_map.iteritems(): - reverse_link[attr] = arc[field] - reverse_link[emme_id_name] = -1*arc[arc_id_name] - reverse_link[emme_guid_name] = "-" + arc[arc_guid_name] - reverse_link[dir_name] = reverse_dir_map[arc["DIR"]] - - def create_transit_lines(self, network, attr_map): - self._log.append({"type": "header", "content": "Import transit lines"}) - fatal_errors = 0 - # Route_ID,Route_Name,Mode,AM_Headway,PM_Headway,Midday_Headway,Evening_Headway,EarlyAM_Headway,Night_Headway,Night_Hours,Config,Fare - transit_line_data = gen_utils.DataTableProc("trrt", self.source) - # Route_ID,Link_ID,Link_GUID,Direction - transit_link_data = gen_utils.DataTableProc("trlink", self.source) - # Stop_ID,Route_ID,Link_ID,Pass_Count,Milepost,Longitude, Latitude,HwyNode,TrnNode,StopName - #transit_stop_data = gen_utils.DataTableProc("trstop", self.source) - transit_stop_data = gen_utils.DataTableProc("trstop", _join(_dir(self.source), "trstop.csv")) - # From_line,To_line,Board_stop,Wait_time - # Note: Board_stop is not used - # Timed xfer data - periods = ['EA', 'AM', 'MD', 'PM', 'EV'] - timed_xfer_data = {} - for period in periods: - file_path = _join(_dir(self.source), FILE_NAMES["TIMEXFER"] % period) - if os.path.exists(file_path): - timed_xfer_data[period] = gen_utils.DataTableProc("timexfer_"+period, file_path) - else: - timed_xfer_data[period] = [] - - mode_properties = gen_utils.DataTableProc("MODE5TOD", _join(_dir(self.source), FILE_NAMES["MODE5TOD"]), convert_numeric=True) - mode_details = {} - for record in mode_properties: - mode_details[int(record["MODE_ID"])] = record - - if self.save_data_tables: - transit_link_data.save("%s_trlink" % self.data_table_name, self.overwrite) - transit_line_data.save("%s_trrt" % self.data_table_name, self.overwrite) - transit_stop_data.save("%s_trstop" % self.data_table_name, self.overwrite) - mode_properties.save("%s_MODE5TOD" % self.data_table_name, self.overwrite) - - coaster = network.create_transit_vehicle(40, 'c') # 4 coaster - trolley = network.create_transit_vehicle(50, 'l') # 5 sprinter/trolley - brt_yellow = network.create_transit_vehicle(60, 'y') # 6 BRT yellow line (future line) - brt_red = network.create_transit_vehicle(70, 'r') # 7 BRT red line (future line) - premium_bus = network.create_transit_vehicle(80, 'p') # 8 prem express - express_bus = network.create_transit_vehicle(90, 'e') # 9 regular express - local_bus = network.create_transit_vehicle(100, 'b') # 10 local bus - tier1 = network.create_transit_vehicle(45, 'o') # 11 Tier 1 - - brt_yellow.auto_equivalent = 3.0 - brt_red.auto_equivalent = 3.0 - premium_bus.auto_equivalent = 3.0 - express_bus.auto_equivalent = 3.0 - local_bus.auto_equivalent = 3.0 - - # Capacities - for reference / post-assignment analysis - tier1.seated_capacity, tier1.total_capacity = 7 * 142, 7 * 276 - trolley.seated_capacity, trolley.total_capacity = 4 * 64, 4 * 200 - brt_yellow.seated_capacity, brt_yellow.total_capacity 
= 32, 70 - brt_red.seated_capacity, brt_red.total_capacity = 32, 70 - premium_bus.seated_capacity, premium_bus.total_capacity = 32, 70 - express_bus.seated_capacity, express_bus.total_capacity = 32, 70 - local_bus.seated_capacity, local_bus.total_capacity = 32, 70 - - trrt_attrs = [] - mode5tod_attrs = [] - for elem_type in "TRANSIT_LINE", "TRANSIT_SEGMENT": - mapping = attr_map[elem_type] - for field, (attr, tcoved_type, emme_type, desc) in mapping.iteritems(): - if tcoved_type == "TRRT": - trrt_attrs.append((field, attr)) - elif tcoved_type == "MODE5TOD": - mode5tod_attrs.append((field, attr)) - network.create_attribute("TRANSIT_SEGMENT", "milepost") - - # Pre-process transit line (trrt) to know the route names for errors / warnings - transit_line_records = list(transit_line_data) - line_names = {} - for record in transit_line_records: - line_names[int(record["Route_ID"])] = str(record["Route_Name"]) - - links = dict((link["#hwyseg_guid"], link) for link in network.links()) - transit_routes = _defaultdict(lambda: []) - for record in transit_link_data: - line_ref = line_names.get(int(record["Route_ID"]), record["Route_ID"]) - link_id = record["Link_GUID"] - if "-" in record["Direction"]: - link_id = "-" + link_id - link = links.get(link_id) - if not link: - if "-" in record["Direction"]: - reverse_link = links.get("-" + link_id) - else: - reverse_link = links.get(link_id[1:]) - if reverse_link: - link = network.create_link(reverse_link.j_node, reverse_link.i_node, reverse_link.modes) - link.vertices = list(reversed(reverse_link.vertices)) - for attr in network.attributes("LINK"): - if attr not in set(["vertices"]): - link[attr] = reverse_link[attr] - link["@tcov_id"] = -1 * reverse_link["@tcov_id"] - link["#hwyseg_guid"] = link_id - links[link_id] = link - msg = "Transit line %s : Missing reverse link with ID %s (%s) (reverse link created)" % ( - line_ref, record["Link_GUID"], link) - self._log.append({"type": "text", "content": msg}) - self._error.append("Transit route import: " + msg) - link = reverse_link - if not link: - msg = "Transit line %s : No link with GUID %s, routing may not be correct" % ( - line_ref, record["Link_GUID"]) - self._log.append({"type": "text", "content": msg}) - self._error.append("Transit route import: " + msg) - fatal_errors += 1 - continue - - transit_routes[int(record["Route_ID"])].append(link) - - # lookup list of special tier 1 mode route names - tier1_rail_route_names = [str(n) for n in self._props["transit.newMode.route"]] - dummy_links = set([]) - transit_lines = {} - auto_mode = network.mode("d") - for record in transit_line_records: - try: - route = transit_routes[int(record["Route_ID"])] - # Find if name matches one of the names listed in transit.newMode.route and convert to tier 1 rail - is_tier1_rail = False - for name in tier1_rail_route_names: - if str(record["Route_Name"]).startswith(name): - is_tier1_rail = True - break - if is_tier1_rail: - vehicle_type = 45 - mode = network.transit_vehicle(vehicle_type).mode - else: - vehicle_type = int(record["Mode"]) * 10 - mode = network.transit_vehicle(vehicle_type).mode - prev_link = route[0] - itinerary = [prev_link] - for link in route[1:]: - if prev_link.j_node != link.i_node: # filling in the missing gap - msg = "Transit line %s (index %s): Links not adjacent, shortest path interpolation used (%s and %s)" % ( - record["Route_Name"], record["Route_ID"], prev_link["#hwyseg_guid"], link["#hwyseg_guid"]) - log_record = {"type": "text", "content": msg} - self._log.append(log_record) - sub_path = 
find_path(prev_link, link, mode) - itinerary.extend(sub_path) - log_record["content"] = log_record["content"] + " through %s links" % (len(sub_path)) - itinerary.append(link) - prev_link = link - - node_itinerary = [itinerary[0].i_node] + [l.j_node for l in itinerary] - missing_mode = 0 - for link in itinerary: - if mode not in link.modes: - link.modes |= set([mode]) - missing_mode += 1 - if missing_mode: - msg = "Transit line %s (index %s): missing mode added to %s link(s)" % ( - str(record["Route_Name"]), record["Route_ID"], missing_mode) - self._log.append({"type": "text", "content": msg}) - tline = network.create_transit_line( - str(record["Route_Name"]), vehicle_type, node_itinerary) - - for field, attr in trrt_attrs: - tline[attr] = float(record[field]) - if is_tier1_rail: - line_details = mode_details[11] - else: - line_details = mode_details[int(record["Mode"])] - for field, attr in mode5tod_attrs: - tline[attr] = float(line_details[field]) - #"XFERPENTM": "Transfer penalty time: " - #"WTXFERTM": "Transfer perception:" - # NOTE: an additional transfer penalty perception factor of 5.0 is included - # in assignment - tline["@transfer_penalty"] = float(line_details["XFERPENTM"]) * float(line_details["WTXFERTM"]) - tline.headway = tline["@headway_am"] if tline["@headway_am"] > 0 else 999 - tline.layover_time = 5 - - transit_lines[int(record["Route_ID"])] = tline - milepost = 0 - for segment in tline.segments(): - segment.milepost = milepost - milepost += segment.link.length - segment.allow_boardings = False - segment.allow_alightings = False - if auto_mode in segment.link.modes: - # segments on links with auto mode are ft1 = timau - segment.transit_time_func = 1 - else: - # ft2 = ul2 -> copied @trtime (fixed speed) - segment.transit_time_func = 2 - except Exception as error: - msg = "Transit line %s: %s %s" % (record["Route_Name"], type(error), error) - self._log.append({"type": "text", "content": msg}) - trace_text = _traceback.format_exc().replace("\n", "
      ") - self._log.append({"type": "text", "content": trace_text}) - self._error.append("Transit route import: line %s not created" % record["Route_Name"]) - fatal_errors += 1 - for link in dummy_links: - network.delete_link(link.i_node, link.j_node) - - line_stops = _defaultdict(lambda: []) - for record in transit_stop_data: - try: - line_name = line_names[int(record["Route_ID"])] - line_stops[line_name].append(record) - except KeyError: - self._log.append( - {"type": "text", - "content": "Stop %s: could not find transit line by ID %s (link GUID %s)" % ( - record["Stop_ID"], record["Route_ID"], record["Link_GUID"])}) - for stops in line_stops.itervalues(): - stops.sort(key=lambda stop: float(stop["Milepost"])) - - seg_float_attr_map = [] - seg_string_attr_map = [] - for field, (attr, t_type, e_type, desc) in attr_map["TRANSIT_SEGMENT"].iteritems(): - if t_type == "TRSTOP": - if e_type == "STRING": - seg_string_attr_map.append([field, attr]) - else: - seg_float_attr_map.append([field, attr]) - - for line_name, stops in line_stops.iteritems(): - tline = network.transit_line(line_name) - if not tline: - continue - itinerary = tline.segments(include_hidden=True) - segment = prev_segment = itinerary.next() - for stop in stops: - if "DUMMY" in stop["StopName"]: - continue - stop_link_id = stop['Link_GUID'] - node_id = int(stop['Node']) - while segment.link and segment.link["#hwyseg_guid"].lstrip("-") != stop_link_id: - segment = itinerary.next() - - if node_id == segment.i_node.number: - pass - elif segment.j_node and node_id == segment.j_node.number: - # if matches the J-node then the stop is on the next segment - segment = itinerary.next() - else: - if segment.link and segment.link["#hwyseg_guid"].lstrip("-") == stop_link_id: - msg = "Transit line %s (index %s): found GUID %s (segment %s) but node ID %s does not match I or J node" % ( - line_name, stop["Route_ID"], segment, stop_link_id, node_id) - else: - msg = "Transit line %s (index %s): did not found GUID %s for stop node ID %s" % ( - line_name, stop["Route_ID"], stop_link_id, node_id) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - fatal_errors += 1 - # reset iterator to start back from previous segment - itinerary = tline.segments(include_hidden=True) - segment = itinerary.next() - while segment.id != prev_segment.id: - segment = itinerary.next() - continue - segment.allow_boardings = True - segment.allow_alightings = True - segment.dwell_time = min(tline.default_dwell_time, 99.99) - for field, attr in seg_string_attr_map: - segment[attr] = stop[field] - for field, attr in seg_float_attr_map: - segment[attr] = float(stop[field]) - prev_segment = segment - - def lookup_line(ident): - line = network.transit_line(ident) - if line: - return line.id - line = transit_lines.get(int(ident)) - if line: - return line.id - raise Exception("'%s' is not a route name or route ID" % ident) - - # Normalizing the case of the headers as different examples have been seen - for period, data in timed_xfer_data.iteritems(): - norm_data = [] - for record in data: - norm_record = {} - for key, val in record.iteritems(): - norm_record[key.lower()] = val - norm_data.append(norm_record) - - from_line, to_line, wait_time = [], [], [] - for i, record in enumerate(norm_data, start=2): - try: - from_line.append(lookup_line(record["from_line"])) - to_line.append(lookup_line(record["to_line"])) - wait_time.append(float(record["wait_time"])) - except Exception as error: - msg = "Error processing timexfer_%s.csv on file line %s: %s" % 
(period, i, error) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - fatal_errors += 1 - - timed_xfer = _dt.Data() - timed_xfer.add_attribute(_dt.Attribute("from_line", _np.array(from_line).astype("O"))) - timed_xfer.add_attribute(_dt.Attribute("to_line", _np.array(to_line).astype("O"))) - timed_xfer.add_attribute(_dt.Attribute("wait_time", _np.array(wait_time))) - # Creates and saves the new table - gen_utils.DataTableProc("%s_timed_xfer_%s" % (self.data_table_name, period), data=timed_xfer) - - if fatal_errors > 0: - raise Exception("Import of transit lines: %s fatal errors found" % fatal_errors) - self._log.append({"type": "text", "content": "Import transit lines complete"}) - - def calc_transit_attributes(self, network): - # for link in network.links(): - # if link.type == 0: # walk only links have FC ==0 - # link.type = 99 - - fares_file_name = FILE_NAMES["FARES"] - special_fare_path = _join(self.source, fares_file_name) - if not os.path.isfile(special_fare_path): - return - - # ON TRANSIT LINES - # Set 3-period headway based on revised headway calculation - for line in network.transit_lines(): - for period in ["ea", "am", "md", "pm", "ev"]: - line["@headway_rev_" + period] = revised_headway(line["@headway_" + period]) - - def get_line(line_id): - line = network.transit_line(line_id) - if line is None: - raise Exception("%s: line does not exist: %s" % (fares_file_name, line_id)) - return line - - # Special incremental boarding and in-vehicle fares - # to recreate the coaster zone fares - self._log.append({"type": "header", "content": "Apply special_fares to transit lines"}) - with open(special_fare_path) as fare_file: - self._log.append({"type": "text", "content": "Using fare details (for coaster) from %s" % fares_file_name}) - special_fares = None - yaml_installed = True - try: - import yaml - special_fares = yaml.load(fare_file) - self._log.append({"type": "text", "content": yaml.dump(special_fares).replace("\n", "
      ")}) - except ImportError: - yaml_installed = False - except: - pass - if special_fares is None: - try: - import json - special_fares = json.load(fare_file) - self._log.append({"type": "text", "content": json.dumps(special_fares, indent=4).replace("\n", "
      ")}) - except: - pass - if special_fares is None: - msg = "YAML or JSON" if yaml_installed else "JSON (YAML parser not installed)" - raise Exception(fares_file_name + ": file could not be parsed as " + msg) - - - for record in special_fares["boarding_cost"]["base"]: - line = get_line(record["line"]) - line["@fare"] = 0 - for seg in line.segments(): - seg["@coaster_fare_board"] = record["cost"] - for record in special_fares["boarding_cost"].get("stop_increment", []): - line = get_line(record["line"]) - for seg in line.segments(True): - if record["stop"] in seg["#stop_name"]: - seg["@coaster_fare_board"] += record["cost"] - break - for record in special_fares["in_vehicle_cost"]: - line = get_line(record["line"]) - for seg in line.segments(True): - if record["from"] in seg["#stop_name"]: - seg["@coaster_fare_inveh"] = record["cost"] - break - pass_cost_keys = ['day_pass', 'regional_pass'] - pass_costs = [] - for key in pass_cost_keys: - cost = special_fares.get(key) - if cost is None: - raise Exception("key '%s' missing from %s" % (key, fares_file_name)) - pass_costs.append(cost) - pass_values = _dt.Data() - pass_values.add_attribute(_dt.Attribute("pass_type", _np.array(pass_cost_keys).astype("O"))) - pass_values.add_attribute(_dt.Attribute("cost", _np.array(pass_costs).astype("f8"))) - gen_utils.DataTableProc("%s_transit_passes" % self.data_table_name, data=pass_values) - self._log.append({"type": "text", "content": "Apply special_fares to transit lines complete"}) - - def renumber_base_nodes(self, network): - tracker = gen_utils.AvailableNodeIDTracker(network) - nodes = [n for n in network.nodes() if n.number > 999999] - nodes = sorted(nodes, key=lambda x: x.number, reverse=True) - if nodes: - self._log.append({"type": "text", "content": "Renumbered %s nodes" % len(nodes)}) - for n in nodes: - old_number = n.number - n.number = tracker.get_id() - self._log.append({"type": "text", "content": " - renumbered %s to %s " % (old_number, n.number)}) - - def create_turns(self, network): - self._log.append({"type": "header", "content": "Import turns and turn restrictions"}) - self._log.append({"type": "text", "content": "Process turns for turn prohibited by ID"}) - turn_data = gen_utils.DataTableProc("Turns", self.source) - if self.save_data_tables: - turn_data.save("%s_turns" % self.data_table_name, self.overwrite) - # Process turns.csv for prohibited turns penalty - for i, record in enumerate(turn_data): - from_node_id, to_node_id, at_node_id = record["FromNode"], record["ToNode"], record["MidNode"] - at_node = network.node(at_node_id) - if at_node and not at_node.is_intersection: - try: - network.create_intersection(at_node) - except Exception as error: - text = ("record %s turn from %s, at %s, to %s: cannot create intersection" % - (i, from_node_id, at_node_id, to_node_id)) - self._log.append({"type": "text", "content": text}) - trace_text = _traceback.format_exc().replace("\n", "
      ") - self._log.append({"type": "text", "content": trace_text}) - self._error.append(text) - continue - turn = network.turn(from_node_id, at_node_id, to_node_id) - if at_node is None: - text = ("record %s turn from %s, at %s, to %s: at node does not exist" % - (i, from_node_id, at_node_id, to_node_id)) - self._log.append({"type": "text", "content": text}) - self._error.append(text) - elif turn is None: - text = ("record %s turn from %s, at %s, to %s: does not form a turn" % - (i, from_node_id, at_node_id, to_node_id)) - self._log.append({"type": "text", "content": text}) - self._error.append(text) - else: - turn.penalty_func = 0 # prohibit turn - # NOTE: could support penalty value - # turn.penalty_func = 1 - # turn.data1 = float(record["penalty"]) - self._log.append({"type": "text", "content": "Import turns and turn prohibitions complete"}) - - def calc_traffic_attributes(self, network): - self._log.append({"type": "header", "content": "Calculate derived traffic attributes"}) - # "COST": "@cost_operating" - # "ITOLL": "@toll_flag" # ITOLL - Toll + 100 *[0,1] if managed lane (I-15 tolls) - # Note: toll_flag is no longer used - # "ITOLL2": "@toll" # ITOLL2 - Toll - # "ITOLL3": "@cost_auto" # ITOLL3 - Toll + AOC - # "@cost_hov" - # "ITOLL4": "@cost_med_truck" # ITOLL4 - Toll * 1.03 + AOC - # "ITOLL5": "@cost_hvy_truck" # ITOLL5 - Toll * 2.33 + AOC - fatal_errors = 0 - try: - aoc = float(self._props["aoc.fuel"]) + float(self._props["aoc.maintenance"]) - except ValueError: - raise Exception("Error during float conversion for aoc.fuel or aoc.maintenance from sandag_abm.properties file") - scenario_year = int(self._props["scenarioYear"]) - periods = ["EA", "AM", "MD", "PM", "EV"] - time_periods = ["_ea", "_am", "_md", "_pm", "_ev"] - src_time_periods = ["_op", "_am", "_op", "_pm", "_op"] - mode_d = network.mode('d') - - # Calculate upstream and downstream interchange distance - # First, label the intersection nodes as nodes with type 1 links (freeway) and - # type 8 links (freeway-to-freeway ramp) - network.create_attribute("NODE", "is_interchange") - interchange_points = [] - for node in network.nodes(): - adj_links = list(node.incoming_links()) + list(node.outgoing_links()) - has_freeway_links = bool( - [l for l in adj_links - if l.type == 1 and mode_d in l.modes]) - has_ramp_links = bool( - [l for l in adj_links - if l.type == 8 and mode_d in l.modes and not "HOV" in l["#name"]]) - if has_freeway_links and has_ramp_links: - node.is_interchange = True - interchange_points.append(node) - else: - node.is_interchange = False - for node in network.nodes(): - node["@interchange"] = node.is_interchange - - for link in network.links(): - if link.type == 1 and mode_d in link.modes: - link["@intdist_down"] = interchange_distance(link, "DOWNSTREAM") - link["@intdist_up"] = interchange_distance(link, "UPSTREAM") - self._log.append({"type": "text", "content": "Calculate of nearest interchange distance complete"}) - - # Static reliability parameters - # freeway coefficients - freeway_rel = { - "intercept": 0.1078, - "speed>70": 0.01393, - "upstream": 0.011, - "downstream": 0.0005445, - } - # arterial/ramp/other coefficients - road_rel = { - "intercept": 0.0546552, - "lanes": { - 1: 0.0, - 2: 0.0103589, - 3: 0.0361211, - 4: 0.0446958, - 5: 0.0 - }, - "speed": { - "<35": 0, - 35: 0.0075674, - 40: 0.0091012, - 45: 0.0080996, - 50: -0.0022938, - ">50": -0.0046211 - }, - "control": { - 0: 0, # Uncontrolled - 1: 0.0030973, # Signal - 2: -0.0063281, # Stop - 3: -0.0063281, # Stop - 4: 0.0127692, # Other, 
Railway, etc. - } - } - for link in network.links(): - # Change SR125 toll speed to 70MPH - if link["@hov"] == 4 and link.type == 1: - link["@speed_posted"] = 70 - link["@cost_operating"] = link.length * aoc - for time in time_periods: - # add link delay (30 sec=0.5mins) to HOV connectors to discourage travel - if link.type == 8 and (link["@hov"] == 2 or link["@hov"] == 3): - link["@time_link" + time] = link["@time_link" + time] + 0.375 - - # make speed on HOV lanes (70mph) the same as parallel GP lanes (65mph) - # - set speed back to posted speed - increase travel time by (speed_adj/speed_posted) - if link.type == 1 and (link["@hov"] == 2 or link["@hov"] == 3): - speed_adj = link["@speed_adjusted"] - speed_posted = link["@speed_posted"] - if speed_adj>0: - link["@time_link" + time] = (speed_adj/(speed_posted*1.0)) * link["@time_link" + time] - - # Required file - vehicle_class_factor_file = FILE_NAMES["VEHICLE_CLASS"] - facility_factors = _defaultdict(lambda: {}) - facility_factors["DEFAULT_FACTORS"] = { - "ALL": { - "auto": 1.0, - "hov2": 1.0, - "hov3": 1.0, - "lgt_truck": 1.0, - "med_truck": 1.03, - "hvy_truck": 2.03 - }, - "count": 0 - } - if os.path.exists(_join(self.source, vehicle_class_factor_file)): - msg = "Adjusting tolls based on factors from %s" % vehicle_class_factor_file - self._log.append({"type": "text", "content": msg}) - # NOTE: CSV Reader sets the field names to UPPERCASE for consistency - with gen_utils.CSVReader(_join(self.source, vehicle_class_factor_file)) as r: - for row in r: - if "YEAR" in r.fields and int(row["YEAR"]) != scenario_year: # optional year column - continue - name = row["FACILITY_NAME"] - # optional time-of-day entry, default to ALL if no column or blank - fac_time = row.get("TIME_OF_DAY") - if fac_time is None: - fac_time = "ALL" - facility_factors[name][fac_time] = { - "auto": float(row["DA_FACTOR"]), - "hov2": float(row["S2_FACTOR"]), - "hov3": float(row["S3_FACTOR"]), - "lgt_truck": float(row["TRK_L_FACTOR"]), - "med_truck": float(row["TRK_M_FACTOR"]), - "hvy_truck": float(row["TRK_H_FACTOR"]) - } - facility_factors[name]["count"] = 0 - - # validate ToD entry, either list EA, AM, MD, PM and EV, or ALL, but not both - for name, factors in facility_factors.iteritems(): - # default keys should be "ALL" and "count" - if "ALL" in factors: - if len(factors) > 2: - fatal_errors += 1 - msg = ("Individual time periods and 'ALL' (or blank) listed under " - "TIME_OF_DAY column in {} for facility {}").format(vehicle_class_factor_file, name) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - elif set(periods + ["count"]) != set(factors.keys()): - fatal_errors += 1 - msg = ("Missing time periods {} under TIME_OF_DAY column in {} for facility {}").format( - (set(periods) - set(factors.keys())), vehicle_class_factor_file, name) - self._log.append({"type": "text", "content": msg}) - self._error.append(msg) - - def lookup_link_name(link): - for attr_name in ["#name", "#name_from", "#name_to"]: - for name, _factors in facility_factors.iteritems(): - if name in link[attr_name]: - return _factors - return facility_factors["DEFAULT_FACTORS"] - - def match_facility_factors(link): - factors = lookup_link_name(link) - factors["count"] += 1 - factors = _copy(factors) - del factors["count"] - # @hov = 2 or 3 overrides hov2 and hov3 costs - if link["@hov"] == 2: - for _, time_factors in factors.iteritems(): - time_factors["hov2"] = 0.0 - time_factors["hov3"] = 0.0 - elif link["@hov"] == 3: - for _, time_factors in factors.iteritems(): - 
time_factors["hov3"] = 0.0 - return factors - - vehicle_classes = ["auto", "hov2", "hov3", "lgt_truck", "med_truck", "hvy_truck"] - for link in network.links(): - if sum(link["@toll" + time] for time in time_periods) > 0: - factors = match_facility_factors(link) - for time, period in zip(time_periods, periods): - time_factors = factors.get(period, factors.get("ALL")) - for name in vehicle_classes: - link["@cost_" + name + time] = time_factors[name] * link["@toll" + time] + link["@cost_operating"] - else: - for time in time_periods: - for name in vehicle_classes: - link["@cost_" + name + time] = link["@cost_operating"] - for name, class_factors in facility_factors.iteritems(): - msg = "Facility name '%s' matched to %s links." % (name, class_factors["count"]) - self._log.append({"type": "text2", "content": msg}) - - self._log.append({ - "type": "text", - "content": "Calculation and time period expansion of costs, tolls, capacities and times complete"}) - - # calculate static reliability - for link in network.links(): - for time in time_periods: - sta_reliability = "@sta_reliability" + time - # if freeway apply freeway parameters to this link - if link["type"] == 1 and link["@lane" + time] > 0: - high_speed_factor = freeway_rel["speed>70"] if link["@speed_posted"] >= 70 else 0.0 - upstream_factor = freeway_rel["upstream"] * 1 / link["@intdist_up"] - downstream_factor = freeway_rel["downstream"] * 1 / link["@intdist_down"] - link[sta_reliability] = ( - freeway_rel["intercept"] + high_speed_factor + upstream_factor + downstream_factor) - # arterial/ramp/other apply road parameters - elif link["type"] <= 9 and link["@lane" + time] > 0: - lane_factor = road_rel["lanes"].get(link["@lane" + time], 0.0) - speed_bin = int(link["@speed_posted"] / 5) * 5 # truncate to multiple of 5 - if speed_bin < 35: - speed_bin = "<35" - elif speed_bin > 50: - speed_bin = ">50" - speed_factor = road_rel["speed"][speed_bin] - control_bin = min(max(link["@traffic_control"], 0), 4) - control_factor = road_rel["control"][control_bin] - link[sta_reliability] = road_rel["intercept"] + lane_factor + speed_factor + control_factor - else: - link[sta_reliability] = 0.0 - self._log.append({"type": "text", "content": "Calculate of link static reliability factors complete"}) - - # Cycle length matrix - # Intersecting Link - # Approach Link 2 3 4 5 6 7 8 9 - # FC Description - # 2 Prime Arterial 2.5 2 2 2 2 2 2 2 - # 3 Major Arterial 2 2 2 2 2 2 2 2 - # 4 Collector 2 2 1.5 1.5 1.5 1.5 1.5 1.5 - # 5 Local Collector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - # 6 Rural Collector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - # 7 Local Road 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - # 8 Freeway connector 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - # 9 Local Ramp 2 2 1.5 1.25 1.25 1.25 1.25 1.25 - - # Volume-delay functions - # fd10: freeway node approach - # fd11: non-intersection node approach - # fd20: cycle length 1.25 - # fd21: cycle length 1.5 - # fd22: cycle length 2.0 - # fd23: cycle length 2.5 - # fd24: cycle length 2.5 and metered ramp - # fd25: freeway node approach AM and PM only - network.create_attribute("LINK", "green_to_cycle") - network.create_attribute("LINK", "cycle") - vdf_cycle_map = {1.25: 20, 1.5: 21, 2.0: 22, 2.5: 23} - for node in network.nodes(): - incoming = list(node.incoming_links()) - outgoing = list(node.outgoing_links()) - is_signal = False - for link in incoming: - if link["@green_to_cycle_init"] > 0: - is_signal = True - break - if is_signal: - lcs = [link.type for link in incoming + outgoing] - min_lc = max(lcs) # Note: minimum class is 
actually the HIGHEST value, - max_lc = min(lcs) # and maximum is the LOWEST - - for link in incoming: - # Metered ramps - if link["@traffic_control"] in [4, 5]: - link["cycle"] = 2.5 - link["green_to_cycle"] = 0.42 - link.volume_delay_func = 24 - # Stops - elif link["@traffic_control"] in [2, 3]: - link["cycle"] = 1.25 - link["green_to_cycle"] = 0.42 - link.volume_delay_func = 20 - elif link["@green_to_cycle_init"] > 0 and is_signal: - if link.type == 2: - c_len = 2.5 if min_lc == 2 else 2.0 - elif link.type == 3: - c_len = 2.0 # Major arterial & anything - elif link.type == 4: - c_len = 1.5 if max_lc > 2 else 2.0 - elif link.type > 4: - if max_lc > 4: - c_len = 1.25 - elif max_lc == 4: - c_len = 1.5 - else: - c_len = 2.0 - if link["@green_to_cycle_init"] > 10: - link["green_to_cycle"] = link["@green_to_cycle_init"] / 100.0 - if link["green_to_cycle"] > 1.0: - link["green_to_cycle"] = 1.0 - link["cycle"] = c_len - link.volume_delay_func = vdf_cycle_map[c_len] - elif link.type == 1: - link.volume_delay_func = 10 # freeway - else: - link.volume_delay_func = 11 # non-controlled approach - self._log.append({"type": "text", "content": "Derive cycle, green_to_cycle, and VDF by approach node complete"}) - - for link in network.links(): - if link.volume_delay_func in [10, 11]: - continue - if link["@traffic_control"] in [4, 5]: - # Ramp meter controlled links are only enabled during the peak periods - for time in ["_am", "_pm"]: - link["@cycle" + time] = link["cycle"] - link["@green_to_cycle" + time] = link["green_to_cycle"] - else: - for time in time_periods: - link["@cycle" + time] = link["cycle"] - link["@green_to_cycle" + time] = link["green_to_cycle"] - self._log.append({"type": "text", "content": "Setting of time period @cycle and @green_to_cycle complete"}) - - network.delete_attribute("LINK", "green_to_cycle") - network.delete_attribute("LINK", "cycle") - network.delete_attribute("NODE", "is_interchange") - self._log.append({"type": "text", "content": "Calculate derived traffic attributes complete"}) - if fatal_errors > 0: - raise Exception("%s fatal errors during calculation of traffic attributes" % fatal_errors) - return - - def check_zone_access(self, network, mode): - # Verify that every centroid has at least one available - # access and egress connector - for centroid in network.centroids(): - access = egress = False - for link in centroid.outgoing_links(): - if mode in link.modes: - if link.j_node.is_intersection: - for turn in link.outgoing_turns(): - if turn.i_node != turn.k_node and turn.penalty_func != 0: - egress = True - else: - egress = True - if not egress: - raise Exception("No egress permitted from zone %s" % centroid.id) - for link in centroid.incoming_links(): - if mode in link.modes: - if link.j_node.is_intersection: - for turn in link.incoming_turns(): - if turn.i_node != turn.k_node and turn.penalty_func != 0: - access = True - else: - access = True - if not access: - raise Exception("No access permitted to zone %s" % centroid.id) - - @_m.logbook_trace("Set database functions (VDF, TPF and TTF)") - def set_functions(self, scenario): - create_function = _m.Modeller().tool( - "inro.emme.data.function.create_function") - set_extra_function_params = _m.Modeller().tool( - "inro.emme.traffic_assignment.set_extra_function_parameters") - emmebank = self.emmebank - for f_id in ["fd10", "fd11", "fd20", "fd21", "fd22", "fd23", "fd24", "fd25", - "fp1", "ft1", "ft2", "ft3", "ft4"]: - function = emmebank.function(f_id) - if function: - emmebank.delete_function(function) - - 
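As an aside for readers following the signal-approach logic above, here is a minimal standalone sketch (not part of the patch; the function name and argument names are illustrative only) of how the cycle length table and the fd20-fd23 mapping translate a signalized approach link into an Emme volume-delay function number:

    # Illustration of the cycle-length -> VDF mapping described in the comment table above.
    # Per the convention noted in the code, "minimum" class (min_lc) is the HIGHEST numeric
    # functional class among links at the node and "maximum" class (max_lc) is the LOWEST.
    vdf_cycle_map = {1.25: 20, 1.5: 21, 2.0: 22, 2.5: 23}

    def approach_vdf(link_type, min_lc, max_lc):
        """Return (cycle length in minutes, fd## number) for a signalized approach link."""
        if link_type == 2:                                   # Prime Arterial
            c_len = 2.5 if min_lc == 2 else 2.0
        elif link_type == 3:                                 # Major Arterial
            c_len = 2.0
        elif link_type == 4:                                 # Collector
            c_len = 1.5 if max_lc > 2 else 2.0
        else:                                                # Local Collector and below
            c_len = 1.25 if max_lc > 4 else (1.5 if max_lc == 4 else 2.0)
        return c_len, vdf_cycle_map[c_len]

    # e.g. a Collector approach meeting only collectors and local roads gets a 1.5 minute
    # cycle and volume-delay function fd21:
    assert approach_vdf(4, min_lc=7, max_lc=4) == (1.5, 21)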
smartSignalf_CL = self._props["smartSignal.factor.LC"] - smartSignalf_MA = self._props["smartSignal.factor.MA"] - smartSignalf_PA = self._props["smartSignal.factor.PA"] - atdmf = self._props["atdm.factor"] - - reliability_tmplt = ( - "* (1 + el2 + {0}*(".format(atdmf)+ - "( {factor[LOS_C]} * ( put(get(1).min.1.5) - {threshold[LOS_C]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_C]})" - "+ ( {factor[LOS_D]} * ( get(2) - {threshold[LOS_D]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_D]})" - "+ ( {factor[LOS_E]} * ( get(2) - {threshold[LOS_E]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_E]})" - "+ ( {factor[LOS_FL]} * ( get(2) - {threshold[LOS_FL]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_FL]})" - "+ ( {factor[LOS_FH]} * ( get(2) - {threshold[LOS_FH]} + 0.01 ) ) * (get(1) .gt. {threshold[LOS_FH]})" - "))") - parameters = { - "freeway": { - "factor": { - "LOS_C": 0.2429, "LOS_D": 0.1705, "LOS_E": -0.2278, "LOS_FL": -0.1983, "LOS_FH": 1.022 - }, - "threshold": { - "LOS_C": 0.7, "LOS_D": 0.8, "LOS_E": 0.9, "LOS_FL": 1.0, "LOS_FH": 1.2 - }, - }, - "road": { # for arterials, ramps, collectors, local roads, etc. - "factor": { - "LOS_C": 0.1561, "LOS_D": 0.0, "LOS_E": 0.0, "LOS_FL": -0.449, "LOS_FH": 0.0 - }, - "threshold": { - "LOS_C": 0.7, "LOS_D": 0.8, "LOS_E": 0.9, "LOS_FL": 1.0, "LOS_FH": 1.2 - }, - } - } - # freeway fd10 - create_function( - "fd10", - "(ul1 * (1.0 + 0.24 * put((volau + volad) / ul3) ** 5.5))" - + reliability_tmplt.format(**parameters["freeway"]), - emmebank=emmebank) - # non-freeway link which is not an intersection approach fd11 - create_function( - "fd11", - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0))" - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd20", # Local collector and lower intersection and stop controlled approaches - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "1.25 / 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))" - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd21", # Collector intersection approaches - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "{0} * 1.5/ 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_CL) - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd22", # Major arterial and major or prime arterial intersection approaches - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "{0} * 2.0 / 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_MA) - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd23", # Primary arterial intersection approaches - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "{0} * 2.5/ 2 * (1-el1) ** 2 * (1.0 + 4.5 * ( (volau + volad) / el3 ) ** 2.0))".format(smartSignalf_PA) - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - create_function( - "fd24", # Metered ramps - "(ul1 * (1.0 + 0.8 * put((volau + volad) / ul3) ** 4.0) +" - "2.5/ 2 * (1-el1) ** 2 * (1.0 + 6.0 * ( (volau + volad) / el3 ) ** 2.0))" - + reliability_tmplt.format(**parameters["road"]), - emmebank=emmebank) - # freeway fd25 (AM and PM only) - create_function( - "fd25", - "(ul1 * (1.0 + 0.6 * put((volau + volad) / ul3) ** 4))" - + reliability_tmplt.format(**parameters["freeway"]), - emmebank=emmebank) - - set_extra_function_params( - el1="@green_to_cycle", el2="@sta_reliability", el3="@capacity_inter_am", - 
emmebank=emmebank) - - create_function("fp1", "up1", emmebank=emmebank) # fixed cost turns stored in turn data 1 (up1) - - # buses in mixed traffic, use auto time - create_function("ft1", "ul1", emmebank=emmebank) - # fixed speed for separate guideway operations - create_function("ft2", "ul2", emmebank=emmebank) - # special 0-cost segments for prohibition of walk to different stop from centroid - create_function("ft3", "0", emmebank=emmebank) - # fixed guideway systems according to vehicle speed (not used at the moment) - create_function("ft4", "60 * length / speed", emmebank=emmebank) - - @_m.logbook_trace("Traffic zone connectivity check") - def check_connectivity(self, scenario): - modeller = _m.Modeller() - sola_assign = modeller.tool( - "inro.emme.traffic_assignment.sola_traffic_assignment") - set_extra_function_para = modeller.tool( - "inro.emme.traffic_assignment.set_extra_function_parameters") - create_matrix = _m.Modeller().tool( - "inro.emme.data.matrix.create_matrix") - net_calc = gen_utils.NetworkCalculator(scenario) - - emmebank = scenario.emmebank - zone_index = dict(enumerate(scenario.zone_numbers)) - num_processors = dem_utils.parse_num_processors("MAX-1") - - # Note matrix is also created in initialize_matrices - create_matrix("ms1", "zero", "zero", scenario=scenario, overwrite=True) - with gen_utils.temp_matrices(emmebank, "FULL", 1) as (result_matrix,): - result_matrix.name = "TEMP_AUTO_TRAVEL_TIME" - set_extra_function_para( - el1="@green_to_cycle_am", - el2="@sta_reliability_am", - el3="@capacity_inter_am", emmebank=emmebank) - net_calc("ul1", "@time_link_am", "modes=d") - net_calc("ul3", "@capacity_link_am", "modes=d") - net_calc("lanes", "@lane_am", "modes=d") - spec = { - "type": "SOLA_TRAFFIC_ASSIGNMENT", - "background_traffic": None, - "classes": [ - { - "mode": "d", - "demand": 'ms"zero"', - "generalized_cost": None, - "results": { - "od_travel_times": {"shortest_paths": result_matrix.named_id} - } - } - ], - "stopping_criteria": { - "max_iterations": 0, "best_relative_gap": 0.0, - "relative_gap": 0.0, "normalized_gap": 0.0 - }, - "performance_settings": {"number_of_processors": num_processors}, - } - sola_assign(spec, scenario=scenario) - travel_time = result_matrix.get_numpy_data(scenario) - - is_disconnected = (travel_time == 1e20) - disconnected_pairs = is_disconnected.sum() - if disconnected_pairs > 0: - error_msg = "Connectivity error(s) between %s O-D pairs" % disconnected_pairs - self._log.append({"type": "header", "content": error_msg}) - count_disconnects = [] - for axis, term in [(0, "from"), (1, "to")]: - axis_totals = is_disconnected.sum(axis=axis) - for i, v in enumerate(axis_totals): - if v > 0: - count_disconnects.append((zone_index[i], term, v)) - count_disconnects.sort(key=lambda x: x[2], reverse=True) - for z, direction, count in count_disconnects[:50]: - msg ="Zone %s disconnected %s %d other zones" % (z, direction, count) - self._log.append({"type": "text", "content": msg}) - if disconnected_pairs > 50: - self._log.append({"type": "text", "content": "[List truncated]"}) - raise Exception(error_msg) - self._log.append({"type": "header", "content": - "Zone connectivity verified for AM period on SOV toll ('S') mode"}) - scenario.has_traffic_results = False - - def log_report(self): - report = _m.PageBuilder(title="Import network from TNED files report") - try: - if self._error: - report.add_html("
<div style='margin-left:10px'>Errors detected during import: %s</div>" % len(self._error)) - error_msg = ["<ul style='margin-left:10px'>"] - for error in self._error: - error_msg.append("<li>%s</li>" % error) - error_msg.append("</ul>") - report.add_html("".join(error_msg)) - else: - report.add_html("No errors detected during import :-)") - - for item in self._log: - if item["type"] == "text": - report.add_html("<div style='margin-left:10px'>%s</div>" % item["content"]) - if item["type"] == "text2": - report.add_html("<div style='margin-left:30px'>%s</div>" % item["content"]) - elif item["type"] == "header": - report.add_html("<h3 style='margin-left:10px'>%s</h3>" % item["content"]) - elif item["type"] == "table": - table_msg = ["<div style='margin-left:20px'><table>", "<h3>%s</h3>" % item["title"]] - if "header" in item: - table_msg.append("<tr>") - for label in item["header"]: - table_msg.append("<th>%s</th>" % label) - table_msg.append("</tr>") - for row in item["content"]: - table_msg.append("<tr>") - for cell in row: - table_msg.append("<td>%s</td>" % cell) - table_msg.append("</tr>") - table_msg.append("</table></div>
      ") - report.add_html("".join(table_msg)) - - except Exception as error: - # no raise during report to avoid masking real error - report.add_html("Error generating report") - report.add_html(unicode(error)) - report.add_html(_traceback.format_exc()) - - _m.logbook_write("Import network report", report.render()) - - -def get_node(network, number, coordinates, is_centroid=False): - node = network.node(number) - if not node: - node = network.create_node(number, is_centroid) - node.x, node.y = coordinates - return node - - -# shortest path interpolation -def find_path(orig_link, dest_link, mode): - visited = set([]) - visited_add = visited.add - back_links = {} - heap = [] - - for link in orig_link.j_node.outgoing_links(): - if mode in link.modes: - back_links[link] = None - _heapq.heappush(heap, (link["length"], link)) - - link_found = False - try: - while not link_found: - link_cost, link = _heapq.heappop(heap) - if link in visited: - continue - visited_add(link) - for outgoing in link.j_node.outgoing_links(): - if mode not in outgoing.modes: - continue - if outgoing in visited: - continue - back_links[outgoing] = link - if outgoing == dest_link: - link_found = True - break - outgoing_cost = link_cost + link["length"] - _heapq.heappush(heap, (outgoing_cost, outgoing)) - except IndexError: - pass # IndexError if heap is empty - if not link_found: - raise NoPathException( - "no path found between links with trcov_id %s and %s (Emme IDs %s and %s)" % ( - orig_link["@tcov_id"], dest_link["@tcov_id"], orig_link, dest_link)) - - prev_link = back_links[dest_link] - route = [] - while prev_link: - route.append(prev_link) - prev_link = back_links[prev_link] - return list(reversed(route)) - - -class NoPathException(Exception): - pass - - -def revised_headway(headway): - # CALCULATE REVISED HEADWAY - # new headway calculation is less aggressive; also only being used for initial wait - # It uses a negative exponential formula to calculate headway - # - if headway <= 10: - rev_headway = headway - else: - rev_headway = headway * (0.275 + 0.788 * _np.exp(-0.011*headway)) - return rev_headway - - -def interchange_distance(orig_link, direction): - visited = set([]) - visited_add = visited.add - back_links = {} - heap = [] - if direction == "DOWNSTREAM": - get_links = lambda l: l.j_node.outgoing_links() - check_far_node = lambda l: l.j_node.is_interchange - elif direction == "UPSTREAM": - get_links = lambda l: l.i_node.incoming_links() - check_far_node = lambda l: l.i_node.is_interchange - # Shortest path search for nearest interchange node along freeway - for link in get_links(orig_link): - _heapq.heappush(heap, (link["length"], link)) - interchange_found = False - try: - while not interchange_found: - link_cost, link = _heapq.heappop(heap) - if link in visited: - continue - visited_add(link) - if check_far_node(link): - interchange_found = True - break - for next_link in get_links(link): - if next_link in visited: - continue - next_cost = link_cost + link["length"] - _heapq.heappush(heap, (next_cost, next_link)) - except IndexError: - # IndexError if heap is empty - # case where start / end of highway, dist = 99 - return 99 - return orig_link["length"] / 2.0 + link_cost From 95e05ecc12fcdf687318b46a239a6d12d7274368 Mon Sep 17 00:00:00 2001 From: Michael Wehrmeyer Date: Tue, 13 Feb 2024 10:28:14 -0800 Subject: [PATCH 33/43] [TNED] resident skim py update The stops lat and long fields were getting dividided by 1,000,000 perhaps due to a remnant process from either TransCAD or TCOV and that was causing 
the stops file to have incorrect stop IDs, x-coord and y-coords. Those erroneous values caused the output/skims/maz_stop_walk.csv file to have 9999 for all records and resulted in no transit trips for the TNED run. --- src/asim/scripts/resident/2zoneSkim.py | 49 ++++++++++++++++++-------- 1 file changed, 35 insertions(+), 14 deletions(-) diff --git a/src/asim/scripts/resident/2zoneSkim.py b/src/asim/scripts/resident/2zoneSkim.py index ac58604d7..386268411 100644 --- a/src/asim/scripts/resident/2zoneSkim.py +++ b/src/asim/scripts/resident/2zoneSkim.py @@ -42,6 +42,9 @@ nodes['X'] = nodes.geometry.x nodes['Y'] = nodes.geometry.y +# TEST: nodes +# nodes.to_csv(r"C:/abm_runs/sar/2022_ABM3_tned/src/asim/scripts/resident/nodes.csv") + links = gpd.read_file(sf) links = links.to_crs(epsg=2230) @@ -59,33 +62,48 @@ centroids['Y'] = nodes[nodes[parms['maz_shape_maz_id']]!=0].Y centroids['MAZ'] = nodes[nodes[parms['maz_shape_maz_id']]!=0].MGRA centroids['MAZ_centroid_id'] = nodes[nodes[parms['maz_shape_maz_id']]!=0].index + centroids = pd.merge(centroids, maz_closest_network_node_id, left_on='MAZ_centroid_id', right_on=parms['mmms']["mmms_link_ref_id"], how='left') centroids = centroids.rename(columns={parms['mmms']["mmms_link_nref_id"]:'network_node_id'}) centroids["network_node_x"] = nodes["X"].loc[centroids["network_node_id"]].tolist() centroids["network_node_y"] = nodes["Y"].loc[centroids["network_node_id"]].tolist() +# TEST: centroids +# centroids.to_csv(r"C:/abm_runs/sar/2022_ABM3_tned/src/asim/scripts/resident/centroids.csv") + # %% ## read transit stop and route file (KK) ============ stops = pd.read_csv(os.path.join(model_inputs, parms['stop_attributes']['file'])) routes = pd.read_csv(os.path.join(model_inputs, parms['route_attributes']['file'])) routes = routes.filter(['Route_ID','Route_Name', 'Mode']) +# TEST: stops 1 +# stops.to_csv(r"C:/abm_runs/sar/2022_ABM3_tned/src/asim/scripts/resident/stops_1.csv") + # add mode from route file & convert lat.long to stateplane(KK)===== -stops = stops.merge(routes, left_on='Route_ID', right_on='Route_ID') # +stops = stops.merge(routes, left_on='Route_ID', right_on='Route_ID') # + +# TEST: stops 2 +# stops.to_csv(r"C:/abm_runs/sar/2022_ABM3_tned/src/asim/scripts/resident/stops_2.csv") -stops.rename(columns={' Latitude': 'Latitude'}, inplace=True) -stops["Longitude1"] = stops["Longitude"]/1000000 -stops["Latitude1"] = stops["Latitude"]/1000000 +# stops.rename(columns={' Latitude': 'Latitude'}, inplace=True) +# stops["Longitude1"] = stops["Longitude"]/1000000 +# stops["Latitude1"] = stops["Latitude"]/1000000 -gpd_stops = gpd.GeoDataFrame(stops, geometry = gpd.points_from_xy(stops.Longitude1, stops.Latitude1)) -gpd_stops = gpd_stops.set_crs('epsg:4326') -gpd_stops = gpd_stops.to_crs(epsg=2230) +# TEST: stops 3 +# stops.to_csv(r"C:/abm_runs/sar/2022_ABM3_tned/src/asim/scripts/resident/stops_3.csv") + +gpd_stops = gpd.GeoDataFrame(stops, geometry = gpd.points_from_xy(stops.Longitude, stops.Latitude, crs='epsg:4326')) +gpd_stops = gpd_stops.to_crs('epsg:2230') + +# TEST: stops 4 +# gpd_stops.to_file(r"C:/abm_runs/sar/2022_ABM3_tned/src/asim/scripts/resident/gpd_stops.shp") pd.set_option('display.float_format', lambda x: '%.9f' % x) -gpd_stops['Longitude'] = gpd_stops['geometry'].x -gpd_stops['Latitude'] = gpd_stops['geometry'].y +gpd_stops['Longitude'] = gpd_stops['geometry'].x +gpd_stops['Latitude'] = gpd_stops['geometry'].y stops["network_node_id"] = net.get_node_ids(gpd_stops['Longitude'], gpd_stops['Latitude']) stops["network_node_x"] = 
nodes["X"].loc[stops["network_node_id"]].tolist() @@ -95,6 +113,9 @@ np.where((stops['Mode']==4) | (stops['Mode']==5) | (stops['Mode']==8) | (stops['Mode']==9), 'E', np.where((stops['Mode']==6) | (stops['Mode']==7),'B','N'))) +# TEST: stops 4 +# stops.to_csv(r"C:/abm_runs/sar/2022_ABM3_tned/src/asim/scripts/resident/stops_4.csv") + # %% # MAZ-to-MAZ Walk print(f"{datetime.now().strftime('%H:%M:%S')} Build MAZ to MAZ Walk Table...") @@ -154,8 +175,8 @@ maz_to_stop_cost["DISTANCE"] = maz_to_stop_cost.eval("(((OMAZ_NODE_X-DSTOP_NODE_X)**2 + (OMAZ_NODE_Y-DSTOP_NODE_Y)**2)**0.5) / 5280.0") # B: Future BRT, E: regular express, premium express, sprinter\trolley, and coaster bus, L: Local bus, N: None. There should be no Ns -maz_to_stop_walk_cost = maz_to_stop_cost[(maz_to_stop_cost["DISTANCE"] <= max_maz_local_bus_stop_walk_dist_feet / 5280.0) & (maz_to_stop_cost['MODE'] == 'L') | - (maz_to_stop_cost["DISTANCE"] <= max_maz_premium_transit_stop_walk_dist_feet / 5280.0) & (maz_to_stop_cost['MODE'] == 'E') | +maz_to_stop_walk_cost = maz_to_stop_cost[(maz_to_stop_cost["DISTANCE"] <= max_maz_local_bus_stop_walk_dist_feet / 5280.0) & (maz_to_stop_cost['MODE'] == 'L') | + (maz_to_stop_cost["DISTANCE"] <= max_maz_premium_transit_stop_walk_dist_feet / 5280.0) & (maz_to_stop_cost['MODE'] == 'E') | (maz_to_stop_cost["DISTANCE"] <= max_maz_future_BRT_stop_walk_dist_feet / 5280.0) & (maz_to_stop_cost['MODE'] == 'B')].copy() print(f"{datetime.now().strftime('%H:%M:%S')} Get Shortest Path Length...") @@ -165,8 +186,8 @@ print(f"{datetime.now().strftime('%H:%M:%S')} Remove Maz Stop Pairs Beyond Max Walk Distance...") -maz_to_stop_walk_cost_out = maz_to_stop_walk_cost[(maz_to_stop_walk_cost["DISTANCE"] <= max_maz_local_bus_stop_walk_dist_feet / 5280.0) & (maz_to_stop_walk_cost['MODE'] == 'L') | - (maz_to_stop_walk_cost["DISTANCE"] <= max_maz_premium_transit_stop_walk_dist_feet / 5280.0) & (maz_to_stop_walk_cost['MODE'] == 'E') | +maz_to_stop_walk_cost_out = maz_to_stop_walk_cost[(maz_to_stop_walk_cost["DISTANCE"] <= max_maz_local_bus_stop_walk_dist_feet / 5280.0) & (maz_to_stop_walk_cost['MODE'] == 'L') | + (maz_to_stop_walk_cost["DISTANCE"] <= max_maz_premium_transit_stop_walk_dist_feet / 5280.0) & (maz_to_stop_walk_cost['MODE'] == 'E') | (maz_to_stop_walk_cost["DISTANCE"] <= max_maz_future_BRT_stop_walk_dist_feet / 5280.0) & (maz_to_stop_walk_cost['MODE'] == 'B')].copy() @@ -192,7 +213,7 @@ maz_stop_walk['DISTWALK'].fillna(9999, inplace = True) maz_stop_walk.rename({'MAZ': 'maz', 'DISTWALK': 'walk_dist_' + output}, axis='columns', inplace=True) maz_stop_walk0 = maz_stop_walk0.merge(maz_stop_walk, left_on='maz', right_on='maz') - + maz_stop_walk0.sort_values(by=['maz'], inplace=True) print(f"{datetime.now().strftime('%H:%M:%S')} Write Results...") maz_stop_walk0.to_csv(path + '/output/skims/' + "maz_stop_walk.csv", index=False) From be4b21e185dd9e60cda3e73bb8ca1db1db096efe Mon Sep 17 00:00:00 2001 From: Michael Wehrmeyer Date: Tue, 13 Feb 2024 10:39:13 -0800 Subject: [PATCH 34/43] [TNED] update attr names toolbox export network The file was still referencing attributes names that have been changed under TNED. Made the appropriate changes so that it is compatible with TNED files. 
--- .../toolbox/export/export_data_loader_network.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/main/emme/toolbox/export/export_data_loader_network.py b/src/main/emme/toolbox/export/export_data_loader_network.py index f54fc1c39..798606f0f 100644 --- a/src/main/emme/toolbox/export/export_data_loader_network.py +++ b/src/main/emme/toolbox/export/export_data_loader_network.py @@ -210,9 +210,14 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, ("PROJ", "@project_code"), ("FC", "type"), ("HOV", "@hov"), + ("EATRUCK", "@truck_ea"), + ("AMTRUCK", "@truck_am"), + ("MDTRUCK", "@truck_md"), + ("PMTRUCK", "@truck_pm"), + ("EVTRUCK", "@truck_ev"), ("SPD", "@speed_posted"), ("TSPD", "zero"), - ("WAY", "iway"), + ("WAY", "way"), ("MED", "@median"), ("COST", "@cost_operating"), ] @@ -322,7 +327,7 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, network.set_attribute_values("LINK", dst_attrs, values) # add in and calculate additional columns new_attrs = [ - ("zero", 0), ("is_one_way", 0), ("iway", 2), ("length_feet", 0), + ("zero", 0), ("is_one_way", 0), ("way", 2), ("length_feet", 0), ("toll_hov", 0), ("per_lane_capacity", 1900), ("progression_factor", 1.0), ("alpha1", 0.8), ("beta1", 4.0), ("alpha2", 4.5), ("beta2", 2.0), ("relifac", 1.0), @@ -336,7 +341,7 @@ def export_traffic_attribute(self, base_scenario, export_path, traffic_emmebank, network.create_attribute("LINK", "hov3_total_gencost" + period, 0) for link in network.links(): link.is_one_way = 1 if link.reverse_link else 0 - link.iway = 2 if link.reverse_link else 1 + link.way = 2 if link.reverse_link else 1 link.length_feet = link.length * 5280 for period in periods: link["toll_hov" + period] = link["@cost_hov2" + period] - link["@cost_operating"] @@ -492,7 +497,7 @@ def export_transit_results(self, export_path, input_path, transit_emmebank_dict, trrt_atts = ["Route_ID","Route_Name","Mode","AM_Headway","PM_Headway","Midday_Headway","Evening_Headway","EarlyAM_Headway", "Evening_Hours", "EarlyAM_Hours", "Config","Fare"] - trstop_atts = ["Stop_ID","Route_ID","Link_ID","Link_GUID","Pass_Count","Milepost","Longitude","Latitude","StopName"] + trstop_atts = ["Stop_ID","Route_ID","Link_ID","Link_GUID","Pass_Count","Milepost","Longitude","Latitude","NearNode","StopName"] #transit route file trrt_infile = os.path.join(input_path, "trrt.csv") @@ -507,7 +512,7 @@ def export_transit_results(self, export_path, input_path, transit_emmebank_dict, #transit stop file trstop_infile = os.path.join(input_path, "trstop.csv") trstop = pd.read_csv(trstop_infile) - trstop = trstop.rename(columns={"HwyNode":"NearNode"}) + trstop = trstop.rename(columns={"Node":"NearNode"}) trstop = trstop.rename(columns=lambda x:x.strip()) trstop_out = trstop[trstop_atts] trstop_outfile = os.path.join(export_path, "trstop.csv") From ea022d16373249ddf7ef7bff74444b60926615a1 Mon Sep 17 00:00:00 2001 From: Michael Wehrmeyer Date: Tue, 13 Feb 2024 10:40:43 -0800 Subject: [PATCH 35/43] update fare file name path MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit From Alexander Berryhill-Williams: I found the cause of the error. 
The “source” parameter of import_network.py was modified to point to input\EMMEOutputs.gdb, but the code was still expecting it to point to the input directory when loading the special fares file here: ABM/src/main/emme/toolbox/import/import_network.py at ABM3_import_TNED · SANDAG/ABM (github.com) I modified this line as “special_fare_path = _join(_dir(self.source), fares_file_name)” and ran a test of the setup and initialization step, which completed transit initialization without errors. --- .../emme/toolbox/import/import_network.py | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py index f70cd49e2..96c85e900 100644 --- a/src/main/emme/toolbox/import/import_network.py +++ b/src/main/emme/toolbox/import/import_network.py @@ -28,7 +28,7 @@ # create_time_periods: if True (default), also create per-time period scenarios (required to run assignments) # # Files referenced: -# +# # *.gdb: A Geodatabase file with the network data for both highway and transit. The following tables are used # - TNED_HwyNet # - TNED_HwyNodes @@ -36,7 +36,7 @@ # - TNED_RailNodes # - Turns # The following files are also used (in the same directory as the *.gdb) -# +# # trrt.csv: header data for the transit lines # trlink.csv: sequence of links (routing) of transit lines # trstop.csv: stop data for the transit lines @@ -272,7 +272,7 @@ def execute(self): ("ELEV", ("@elev", "BOTH", "EXTRA", "station/stop elevation in feet")), ("interchange", ("@interchange", "DERIVED", "EXTRA", "is interchange node")), ]), - "LINK": OrderedDict([ + "LINK": OrderedDict([ ("HWYCOV0_ID",("@tcov_id", "TWO_WAY", "EXTRA", "SANDAG-assigned link ID")), ("SPHERE", ("@sphere", "HWY_TWO_WAY", "EXTRA", "Jurisdiction sphere of influence")), ("HWYSegGUID",("#hwyseg_guid", "TWO_WAY", "STRING", "HWYSegGUID")), @@ -396,7 +396,7 @@ def execute(self): periods=["EA", "AM", "MD", "PM", "EV"] period_ids = list(enumerate(periods, start=int(self.scenario_id) + 1)) for ident, period in period_ids: - scenarios.append(create_scenario(ident, "%s - %s assign" % (title, period), + scenarios.append(create_scenario(ident, "%s - %s assign" % (title, period), overwrite=self.overwrite, emmebank=self.emmebank)) # create attributes in scenario for elem_type, mapping in attr_map.iteritems(): @@ -546,12 +546,12 @@ def create_modes(self, network): 3: set([network.mode(m_id) for m_id in "dtshiTSHI"]), # no heavy or medium truck 4: set([network.mode(m_id) for m_id in "dshiSHI"]), # no truck 5: set([network.mode(m_id) for m_id in "dvV"]), # only heavy trucks - 6: set([network.mode(m_id) for m_id in "dvmVM"]), # heavy and medium trucks + 6: set([network.mode(m_id) for m_id in "dvmVM"]), # heavy and medium trucks 7: set([network.mode(m_id) for m_id in "dvmtVMT"]), # all trucks only (no passenger cars) } non_toll_modes = set([network.mode(m_id) for m_id in "vmtshi"]) self._auto_mode_lookup = { - "GP": modes_gp_lanes, + "GP": modes_gp_lanes, "TOLL": dict((k, v - non_toll_modes) for k, v in modes_gp_lanes.iteritems()), "HOV2": set([network.mode(m_id) for m_id in "dhiHI"]), "HOV3": set([network.mode(m_id) for m_id in "diI"]), @@ -598,7 +598,7 @@ def create_road_base(self, network, attr_map): if self.save_data_tables: hwy_data.save("%s_TNED_HwyNet" % self.data_table_name, self.overwrite) - + is_centroid = lambda arc, node : (arc["FC"] == 10) and (node == "AN") link_attr_map = {} for field, (name, tcoved_type, emme_type, desc) in 
attr_map["LINK"].iteritems(): @@ -616,7 +616,7 @@ def define_modes(arc): hwy_data, network, mode_callback=define_modes, centroid_callback=is_centroid, link_attr_map=link_attr_map) hwy_node_data = gen_utils.DataTableProc("TNED_HwyNodes", self.source) - node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() + node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() if v[1] in ("BOTH", "HWY")] for record in hwy_node_data: node = network.node(record["HNODE"]) @@ -654,7 +654,7 @@ def define_modes(arc): transit_node_data = gen_utils.DataTableProc("TNED_RailNodes", self.source) # Load PARK, elevation, stop type data onto transit nodes - node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() + node_attrs = [(k, v[0]) for k, v in attr_map["NODE"].iteritems() if v[1] in ("BOTH", "RAIL")] for record in transit_node_data: node = network.node(record["HNODE"]) @@ -720,7 +720,7 @@ def _create_base_net(self, data, network, link_attr_map, mode_callback, centroid link[attr] = arc[field] if arc["WAY"] == 2 or arc["WAY"] == 0: reverse_link = network.link(j_node, i_node) - if not reverse_link: + if not reverse_link: reverse_link = network.create_link(j_node, i_node, modes) reverse_link.length = link.length reverse_link.vertices = list(reversed(link.vertices)) @@ -810,7 +810,7 @@ def create_transit_lines(self, network, attr_map): transit_routes = _defaultdict(lambda: []) for record in transit_link_data: line_ref = line_names.get(int(record["Route_ID"]), record["Route_ID"]) - link_id = record["Link_GUID"] + link_id = record["Link_GUID"] if "-" in record["Direction"]: link_id = "-" + link_id link = links.get(link_id) @@ -1054,7 +1054,7 @@ def calc_transit_attributes(self, network): self._log.append({"type": "text", "content": "Revised headway calculation complete"}) fares_file_name = FILE_NAMES["FARES"] - special_fare_path = _join(self.source, fares_file_name) + special_fare_path = _join(_dir(self.source), fares_file_name) if not os.path.isfile(special_fare_path): self._log.append({"type": "text", "content": "Special fares file %s not found" % fares_file_name}) return @@ -1066,7 +1066,7 @@ def get_line(line_id): return line # Special incremental boarding and in-vehicle fares - # to recreate the coaster zone fares + # to recreate the coaster zone fares self._log.append({"type": "header", "content": "Apply special_fares to transit lines"}) with open(special_fare_path) as fare_file: self._log.append({"type": "text", "content": "Using fare details (for coaster) from %s" % fares_file_name}) @@ -1155,7 +1155,7 @@ def create_turns(self, network): self._error.append(text) continue turn = network.turn(from_node_id, at_node_id, to_node_id) - if at_node is None: + if at_node is None: text = ("record %s turn from %s, at %s, to %s: at node does not exist" % (i, from_node_id, at_node_id, to_node_id)) self._log.append({"type": "text", "content": text}) @@ -1365,7 +1365,7 @@ def match_facility_factors(link): self._log.append({"type": "text2", "content": msg}) self._log.append({ - "type": "text", + "type": "text", "content": "Calculation and time period expansion of costs, tolls, capacities and times complete"}) # calculate static reliability From c85c1eb14ddc04212bbdc07702a364d87af0d8e8 Mon Sep 17 00:00:00 2001 From: Michael Wehrmeyer Date: Tue, 13 Feb 2024 10:47:20 -0800 Subject: [PATCH 36/43] [TNED] remove ood script call {Cundo} Commented out the very first line because it was pointing to the incorrect/old path of the hwyShapeExport.py script located within the python/dataExporter directory. 
That version of the file is no longer needed. --- src/main/python/dataExporter/serialRun.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/python/dataExporter/serialRun.py b/src/main/python/dataExporter/serialRun.py index 0d73804c4..5819293f9 100644 --- a/src/main/python/dataExporter/serialRun.py +++ b/src/main/python/dataExporter/serialRun.py @@ -1,4 +1,4 @@ -from hwyShapeExport import export_highway_shape +#from hwyShapeExport import export_highway_shape from skimAppender import SkimAppender from abmScenario import ScenarioData, LandUse, SyntheticPopulation, TourLists, TripLists import os @@ -164,7 +164,7 @@ def export_data(fp): #print("Writing: Highway Load Shape File") #export_highway_shape(scenarioPath).to_file( # os.path.join(reportPath, "hwyLoad.shp")) - + if __name__ == '__main__': targets = sys.argv[1:] export_data(targets[0]) From 1fcdcfd8efbfb2a957f0962c08feb3079f9068ec Mon Sep 17 00:00:00 2001 From: Michael Wehrmeyer Date: Tue, 13 Feb 2024 10:51:51 -0800 Subject: [PATCH 37/43] [TNED] hwyshape exporter update attrs The file was still referencing attributes names that have been changed under TNED. Made the appropriate changes so that it is compatible with TNED files. --- .../python/dataExporter/hwyShapeExport.py | 85 ++++++++++++------- 1 file changed, 53 insertions(+), 32 deletions(-) diff --git a/src/main/python/dataExporter/hwyShapeExport.py b/src/main/python/dataExporter/hwyShapeExport.py index df1df8d9b..b409d9258 100644 --- a/src/main/python/dataExporter/hwyShapeExport.py +++ b/src/main/python/dataExporter/hwyShapeExport.py @@ -1,3 +1,4 @@ +import sys import geopandas import numpy as np import os @@ -23,12 +24,16 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "COJUR", # count jurisdiction code "COSTAT", # count station number "COLOC", # count location code - "IFC", # initial functional class - "IHOV", # link operation type - "ITRUCK", # truck restriction code - "ISPD", # posted speed limit - "IWAY", # one or two way operations - "IMED", # median type + "FC", # initial functional class + "HOV", # link operation type + "EATRUCK", # truck restriction code - Early AM + "AMTRUCK", # truck restriction code - AM Peak + "MDTRUCK", # truck restriction code - Midday + "PMTRUCK", # truck restriction code - PM Peak + "EVTRUCK", # truck restriction code - Evening + "SPD", # posted speed limit + "WAY", # one or two way operations + "MED", # median type "AN", # A node number "FXNM", # cross street name at from end of link "BN", # B node number @@ -54,6 +59,11 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "ABPRELOAD_EV", # preloaded bus flow - to-from - Evening "BAPRELOAD_EV", # preloaded bus flow - from-to - Evening "geometry"]) # WKT geometry + + # temporary so that the sensitivity summary on data lake works + # the sensitivity summary on data lake uses IFC (from TCOV) rather than FC (from TNED) + hwy_tcad['IFC'] = hwy_tcad['FC'] + hwy_tcad.to_csv(os.path.join(scenario_path, "report", "hwyTcad.csv"), index=False) # read in loaded highway network for each time period for tod in ["EA", "AM", "MD", "PM", "EV"]: @@ -200,17 +210,17 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: left_on="ID", right_on="ID1_" + tod) - # create string description of [IFC] field - conditions = [hwy_tcad["IFC"] == 1, - hwy_tcad["IFC"] == 2, - hwy_tcad["IFC"] == 3, - hwy_tcad["IFC"] == 4, - hwy_tcad["IFC"] == 5, - hwy_tcad["IFC"] == 6, - hwy_tcad["IFC"] == 7, - hwy_tcad["IFC"] == 8, - hwy_tcad["IFC"] 
== 9, - hwy_tcad["IFC"] == 10] + # create string description of [FC] field + conditions = [hwy_tcad["FC"] == 1, + hwy_tcad["FC"] == 2, + hwy_tcad["FC"] == 3, + hwy_tcad["FC"] == 4, + hwy_tcad["FC"] == 5, + hwy_tcad["FC"] == 6, + hwy_tcad["FC"] == 7, + hwy_tcad["FC"] == 8, + hwy_tcad["FC"] == 9, + hwy_tcad["FC"] == 10] choices = ["Freeway", "Prime Arterial", @@ -223,7 +233,7 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "Local Ramp", "Zone Connector"] - hwy_tcad["IFC_Desc"] = np.select(conditions, choices, default="") + hwy_tcad["FC_Desc"] = np.select(conditions, choices, default="") # calculate aggregate flows hwy_tcad["AB_Flow_SOV"] = hwy_tcad[["AB_Flow_SOV_EA", @@ -336,13 +346,17 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "COJUR", "COSTAT", "COLOC", - "IFC", - "IFC_Desc", - "IHOV", - "ITRUCK", - "ISPD", - "IWAY", - "IMED", + "FC", + "FC_Desc", + "HOV", + "EATRUCK", + "AMTRUCK", + "MDTRUCK", + "PMTRUCK", + "EVTRUCK", + "SPD", + "WAY", + "MED", "AN", "FXNM", "BN", @@ -427,13 +441,17 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "COJUR": "count_jur", "COSTAT": "count_stat", "COLOC": "count_loc", - "IFC": "ifc", - "IFC_Desc": "ifc_desc", - "IHOV": "ihov", - "ITRUCK": "itruck", - "ISPD": "post_speed", - "IWAY": "iway", - "IMED": "imed", + "FC": "fc", + "FC_Desc": "fc_desc", + "HOV": "hov", + "EATRUCK": "truck_ea", + "AMTRUCK": "truck_am", + "MDTRUCK": "truck_md", + "PMTRUCK": "truck_pm", + "EVTRUCK": "truck_ev", + "SPD": "post_speed", + "WAY": "way", + "MED": "med", "AN": "from_node", "FXNM": "from_nm", "BN": "to_node", @@ -521,3 +539,6 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: crs=2230) return hwy_tcad + +scenario_path = sys.argv[1] +export_highway_shape(scenario_path).to_file(os.path.join(scenario_path, "report", "hwyLoad.shp")) \ No newline at end of file From 7fbf89ead53788c82c29c54a2580d967010c4ea5 Mon Sep 17 00:00:00 2001 From: Michael Wehrmeyer Date: Tue, 13 Feb 2024 11:10:59 -0800 Subject: [PATCH 38/43] resident batch file fails if res model fails [Cundo] So that the entire model crashes if the resident model crashes. What's been happening is that the resident model has been crashing but has continued to next steps and iterations --- src/main/resources/runSandagAbm_ActivitySimResident.cmd | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/resources/runSandagAbm_ActivitySimResident.cmd b/src/main/resources/runSandagAbm_ActivitySimResident.cmd index f3d4e7065..e983b1af5 100644 --- a/src/main/resources/runSandagAbm_ActivitySimResident.cmd +++ b/src/main/resources/runSandagAbm_ActivitySimResident.cmd @@ -77,6 +77,7 @@ CD .. :: Run simulation.py %PYTHON3% src/asim/simulation.py -s settings_mp.yaml -c src/asim/configs/resident -c src/asim/configs/common -d input -d output/skims -o output/resident +if ERRORLEVEL 1 exit 2 :::::::::::::::::::::: CD /d %ANACONDA2_DIR%\Scripts From fa47d868ab882bf90389d38c3702db6bcadd3fe8 Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Wed, 14 Feb 2024 17:26:07 -0800 Subject: [PATCH 39/43] Update hwyShapeExport.py Previous commit (1fcdcfd) updated the py script under the python/dataExporter directory, but that location is no longer valid. 
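Assuming this commit mirrors the changes from commit 1fcdcfd (which added the module-level scenario_path = sys.argv[1] call and the to_file(...) export at the bottom of the script), the exporter is now meant to run as a standalone step rather than being imported by serialRun.py. A hypothetical invocation, with the interpreter and scenario folder as placeholders, would be:

    python src/main/python/hwyShapeExport.py T:\ABM_runs\my_scenario

which would write the loaded highway shapefile to T:\ABM_runs\my_scenario\report\hwyLoad.shp, with hwyTcad.csv alongside it.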
--- src/main/python/hwyShapeExport.py | 81 +++++++++++++++++++------------ 1 file changed, 49 insertions(+), 32 deletions(-) diff --git a/src/main/python/hwyShapeExport.py b/src/main/python/hwyShapeExport.py index 0eafed646..b409d9258 100644 --- a/src/main/python/hwyShapeExport.py +++ b/src/main/python/hwyShapeExport.py @@ -24,12 +24,16 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "COJUR", # count jurisdiction code "COSTAT", # count station number "COLOC", # count location code - "IFC", # initial functional class - "IHOV", # link operation type - "ITRUCK", # truck restriction code - "ISPD", # posted speed limit - "IWAY", # one or two way operations - "IMED", # median type + "FC", # initial functional class + "HOV", # link operation type + "EATRUCK", # truck restriction code - Early AM + "AMTRUCK", # truck restriction code - AM Peak + "MDTRUCK", # truck restriction code - Midday + "PMTRUCK", # truck restriction code - PM Peak + "EVTRUCK", # truck restriction code - Evening + "SPD", # posted speed limit + "WAY", # one or two way operations + "MED", # median type "AN", # A node number "FXNM", # cross street name at from end of link "BN", # B node number @@ -55,6 +59,11 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "ABPRELOAD_EV", # preloaded bus flow - to-from - Evening "BAPRELOAD_EV", # preloaded bus flow - from-to - Evening "geometry"]) # WKT geometry + + # temporary so that the sensitivity summary on data lake works + # the sensitivity summary on data lake uses IFC (from TCOV) rather than FC (from TNED) + hwy_tcad['IFC'] = hwy_tcad['FC'] + hwy_tcad.to_csv(os.path.join(scenario_path, "report", "hwyTcad.csv"), index=False) # read in loaded highway network for each time period for tod in ["EA", "AM", "MD", "PM", "EV"]: @@ -201,17 +210,17 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: left_on="ID", right_on="ID1_" + tod) - # create string description of [IFC] field - conditions = [hwy_tcad["IFC"] == 1, - hwy_tcad["IFC"] == 2, - hwy_tcad["IFC"] == 3, - hwy_tcad["IFC"] == 4, - hwy_tcad["IFC"] == 5, - hwy_tcad["IFC"] == 6, - hwy_tcad["IFC"] == 7, - hwy_tcad["IFC"] == 8, - hwy_tcad["IFC"] == 9, - hwy_tcad["IFC"] == 10] + # create string description of [FC] field + conditions = [hwy_tcad["FC"] == 1, + hwy_tcad["FC"] == 2, + hwy_tcad["FC"] == 3, + hwy_tcad["FC"] == 4, + hwy_tcad["FC"] == 5, + hwy_tcad["FC"] == 6, + hwy_tcad["FC"] == 7, + hwy_tcad["FC"] == 8, + hwy_tcad["FC"] == 9, + hwy_tcad["FC"] == 10] choices = ["Freeway", "Prime Arterial", @@ -224,7 +233,7 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "Local Ramp", "Zone Connector"] - hwy_tcad["IFC_Desc"] = np.select(conditions, choices, default="") + hwy_tcad["FC_Desc"] = np.select(conditions, choices, default="") # calculate aggregate flows hwy_tcad["AB_Flow_SOV"] = hwy_tcad[["AB_Flow_SOV_EA", @@ -337,13 +346,17 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "COJUR", "COSTAT", "COLOC", - "IFC", - "IFC_Desc", - "IHOV", - "ITRUCK", - "ISPD", - "IWAY", - "IMED", + "FC", + "FC_Desc", + "HOV", + "EATRUCK", + "AMTRUCK", + "MDTRUCK", + "PMTRUCK", + "EVTRUCK", + "SPD", + "WAY", + "MED", "AN", "FXNM", "BN", @@ -428,13 +441,17 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame: "COJUR": "count_jur", "COSTAT": "count_stat", "COLOC": "count_loc", - "IFC": "ifc", - "IFC_Desc": "ifc_desc", - "IHOV": "ihov", - "ITRUCK": "itruck", - "ISPD": "post_speed", - "IWAY": "iway", - "IMED": "imed", + 
"FC": "fc", + "FC_Desc": "fc_desc", + "HOV": "hov", + "EATRUCK": "truck_ea", + "AMTRUCK": "truck_am", + "MDTRUCK": "truck_md", + "PMTRUCK": "truck_pm", + "EVTRUCK": "truck_ev", + "SPD": "post_speed", + "WAY": "way", + "MED": "med", "AN": "from_node", "FXNM": "from_nm", "BN": "to_node", From 8c04ff88a05e549fb9f967e1102832bfd1f11c5a Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Thu, 15 Feb 2024 11:07:33 -0800 Subject: [PATCH 40/43] Remove TAP reference from properties file Deleted "tap" from active.node.fieldnames and active.node.columns and commented out active.maxdist.walk.tap, active.maxdist.micromobility.tap, and active.maxdist.microtransit.tap --- src/main/resources/sandag_abm.properties | 10 +- src/main/resources/sandag_abm.properties.bak | 1162 ++++++++++++++++++ 2 files changed, 1167 insertions(+), 5 deletions(-) create mode 100644 src/main/resources/sandag_abm.properties.bak diff --git a/src/main/resources/sandag_abm.properties b/src/main/resources/sandag_abm.properties index 57b8eb938..44b97a887 100644 --- a/src/main/resources/sandag_abm.properties +++ b/src/main/resources/sandag_abm.properties @@ -1090,8 +1090,8 @@ cvm.share.heavy = 0 ################################################################# active.node.file = %project.folder%/input/SANDAG_Bike_NODE.dbf active.node.id = NodeLev_ID -active.node.fieldnames = mgra,taz,x,y,tap,signalized -active.node.columns = MGRA,TAZ,XCOORD,YCOORD,TAP,Signal +active.node.fieldnames = mgra,taz,x,y,signalized +active.node.columns = MGRA,TAZ,XCOORD,YCOORD,Signal active.edge.file = %project.folder%/input/SANDAG_Bike_NET.dbf active.edge.anode = A active.edge.bnode = B @@ -1117,11 +1117,11 @@ active.sample.random.scale.link = 0.7 active.sample.random.seeded = true active.sample.maxcost = 998 active.maxdist.walk.mgra = 3.0 -active.maxdist.walk.tap = 1.0 +#active.maxdist.walk.tap = 1.0 active.maxdist.micromobility.mgra = 3.0 -active.maxdist.micromobility.tap = 1.0 +#active.maxdist.micromobility.tap = 1.0 active.maxdist.microtransit.mgra = 3.0 -active.maxdist.microtransit.tap = 3.0 +#active.maxdist.microtransit.tap = 3.0 active.output.bike = %project.folder%/output/ active.output.walk = %project.folder%/output/ active.coef.nonscenic = 0.3 diff --git a/src/main/resources/sandag_abm.properties.bak b/src/main/resources/sandag_abm.properties.bak new file mode 100644 index 000000000..57b8eb938 --- /dev/null +++ b/src/main/resources/sandag_abm.properties.bak @@ -0,0 +1,1162 @@ +##SANDAG ABM Properties +##Software Version +#version = version_14_3_0 +## geography ID +#geographyID = 1 +############################################################################################################################################################################## +## +## CLUSTER PROPERTIES: MODIFY WHEN CHANGING CLUSTER CONFIGURATION OR MOVING TO NEW CLUSTER. 
+## +############################################################################################################################################################################## +RunModel.MatrixServerAddress = localhost +RunModel.MatrixServerPort = 1191 +RunModel.HouseholdServerAddress = localhost +RunModel.HouseholdServerPort = 1129 +# +############################################################################################################################################################################## +## +## RUN PROPERTIES: MODEL COMPONENT SWITCHES +## +############################################################################################################################################################################## +##set sample rates +sample_rates = 0.25,0.5,1.0 +# +##highway assignment convergence criteria +convergence = 0.0005 +RunModel.useLocalDrive = false +RunModel.skipInitialization = false +RunModel.deleteAllMatrices = false +RunModel.skip4Ds = false +RunModel.skipInputChecker = true +RunModel.skipCopyWarmupTripTables = false +RunModel.skipCopyBikeLogsum = false +RunModel.skipShadowPricing = false +RunModel.skipBikeLogsums = true +RunModel.skipCopyWalkImpedance = true +RunModel.skipWalkLogsums = false +RunModel.skipBuildNetwork = false +RunModel.startFromIteration = 1 +RunModel.skipHighwayAssignment = false,false,false +RunModel.skipTransitSkimming = false,false,false +RunMode.skipTransitConnector = false +RunModel.skipTransponderExport = false,false,false +RunModel.skipABMPreprocessing = false,false,false +RunModel.skipABMResident = false,false,false +RunModel.skipABMAirport = false,false,false +RunModel.skipABMVisitor = false,false,false +RunModel.skipABMXborderWait = false +RunModel.skipABMXborder = false,false,false +RunModel.skipCTM = false,false,false +RunModel.skipEI = false,false,false +RunModel.skipExternalExternal = true,true,true +RunModel.skipTruck = false,false,false +RunModel.skipTripTableCreation = false,false,false +RunModel.skipFinalHighwayAssignment = false +RunModel.skipFinalHighwayAssignmentStochastic = true +RunModel.skipFinalTransitAssignment = false +RunModel.collapseOnOffByRoute = false +RunModel.skipLUZSkimCreation = true +RunModel.skipVisualizer = true +RunModel.skipDataExport = false +RunModel.skipDataLoadRequest = true +RunModel.skipDeleteIntermediateFiles = true +RunModel.MatrixPrecision = 0.0005 +# minimual space (MB) on C drive +RunModel.minSpaceOnC = 400 + +TNC.totalThreads = 10 + +############################################################################################################################################################################## +## +## LOGGING PROPERTIES: USE FOR TRACING HOUSEHOLDS OR AGENTS THROUGH SIMULATION. +## +## Note that the way that logging works right now, the trace zones also have to be valid transit stops or the code will crash. Check the skims to make sure they exist. 
+## Turn off trace debugging in routine model runs to speed things up (comment out Debug.Trace.HouseholdIdList) +## +############################################################################################################################################################################## +## Resident models +#Trace = false +##Trace.otaz = 1638 +##Trace.dtaz = 2447 +#Trace.otaz = +#Trace.dtaz = +#Seek = false +#Process.Debug.HHs.Only = false +Debug.Trace.HouseholdIdList = +# +## Internal-External models +#internalExternal.seek = false +#internalExternal.trace = 50 +# +## Cross-Border models +#crossBorder.seek = false +## trace by tourId +#crossBorder.trace = 12 +# +## Visitor models +#visitor.seek = false +##trace by tourId +##visitor.trace = 742 +#visitor.trace = 742 +# +## Special event models +#specialEvent.seek = false +#specialEvent.trace = 5855 +# +## Trace TransCAD trip table creation by TAZ (to/from); only applies to SD resident model +#tripTable.trace = 4384 +# +#RunModel.LogResults = true +# +############################################################################################################################################################################## +## PATH PROPERTIES: MODIFY AS NEEDED WHEN COPY RELEASE TO A LOCAL RUN FOLDER +############################################################################################################################################################################## +Project.Directory = %project.folder%/ +generic.path = %project.folder%/input/ +scenario.path = %project.folder%/ +skims.path = %project.folder%/output/skims/ +uec.path = %project.folder%/uec/ +report.path = %project.folder%/report/ +# +## Visitor model is run using Java 7 Fork\Join Framework. Parallelism controls number of simultaneous threads. Can increase if more processors. +## 5 threads provided optimum runtimes on a 6 core, 24 thread machine with 128GB of RAM. +#visitor.run.concurrent = true +#visitor.concurrent.parallelism = 5 +# +############################################################################################################################################################################## +## +## SCENARIO PROPERTIES: MODIFY WHEN RUNNING NEW SCENARIO, IF NECESSARY +## +############################################################################################################################################################################## +## MGRA data file: this token is referred to in many UECs in additon to submodels like truck +mgra.socec.file = input/mgra15_based_input${year}.csv +## scenario year +scenarioYear = ${year} +# +## Auto operating costs: these tokens are referred to in many UECs +aoc.fuel = ${aoc.fuel} +aoc.maintenance = ${aoc.maintenance} +# +## Cross border model is run using Java 7 Fork\Join Framework. Parallelism controls number of simultaneous threads. Can increase if more processors. +#crossBorder.run.concurrent = true +#crossBorder.concurrent.parallelism = 8 +# +## Cross border model settings: Number of tours, share of tours that are SENTRI. 
+## Cross border model is run using the Java 7 Fork/Join Framework. Parallelism controls the number of simultaneous threads; increase it if more processors are available.
+#crossBorder.run.concurrent = true
+#crossBorder.concurrent.parallelism = 8
+#
+## Cross border model settings: Number of tours, share of tours that are SENTRI.
+#crossBorder.tours = 120700
+#crossBorder.sentriShare = 0.44
+#
+## Visitor model settings: occupancy rates for hotels, households and share of each that are business visitors
+#visitor.hotel.occupancyRate = 0.7
+#visitor.household.occupancyRate = 0.018
+#visitor.hotel.businessPercent = 0.3
+#visitor.household.businessPercent = 0.04
+#
+## Airport model settings: enplanements, connecting passengers, average party size, MGRA that the airport is in
+#airport.SAN.enplanements = 13727381
+#airport.SAN.connecting = 808619
+#airport.SAN.annualizationFactor = 365
+#airport.SAN.averageSize = 1.7
+#airport.SAN.airportMgra = 11249
+#
+#airport.CBX.enplanements = 984428
+#airport.CBX.connecting = 0
+#airport.CBX.annualizationFactor = 365
+#airport.CBX.averageSize = 2.2
+#airport.CBX.airportMgra = 9350
+#
+# Truck model settings:
+
+truck.DFyear = 2016,2019,2020,2022,2023,2025,2026,2029,2030,2032,2035,2040,2050
+truck.luOverRide = "False"
+truck.FFyear = ${year}
+#
+## Destination zones for the transponder accessibility calculator
+transponder.destinations = 4027,2563,2258
+#traffic.sla_limit = 3
+#
+## Number of zones where 4996 is the default, but may be changed by Land Use Converter Tool zone split
+#zones.count = 4947
+#
+#############################################################################################
+## EMERGING MOBILITY SECTION: MODIFY WHEN CHANGING AV, TNC, AND MICROMOBILITY ASSUMPTIONS
+##-------------------------------------------------------------------------------------------
+## AV Mobility Scenario Parameters
+##-------------------------------------------------------------------------------------------
+## AV.Share: the share of vehicles assumed to be AVs in the vehicle fleet; Auto ownership ASCs will be calibrated for different levels of AV penetration
+## AV.ProbabilityBoost: the increased probability (multiplicative) for using AVs for tours, based on autos to drivers. The highest this should go is 1.2
+## AV.IVTFactor: the auto in-vehicle time factor to apply to AVs
+## AV.ParkingCostFactor: The auto parking cost factor to apply to AVs, assuming some AVs are sent to remote locations or home
+## AV.CostPerMileFactor: The auto cost per mile factor to apply to AVs, assuming AVs are more efficient in terms of fuel consumption than human-driven vehicles
+## AV.TerminalTimeFactor: The factor to apply to terminal time for AVs, assuming AVs offer curbside passenger pickup/dropoff
+## TNC.shared.IVTFactor: The factor to apply to in-vehicle time for shared TNC mode, reflecting out-direction travel for pickup/dropoff of other passengers
+#
+#Mobility.AV.Share = 0
+#Mobility.AV.ProbabilityBoost.AutosLTDrivers = 1.2
+#Mobility.AV.ProbabilityBoost.AutosGEDrivers = 1.1
+#Mobility.AV.IVTFactor = 0.75
+#Mobility.AV.ParkingCostFactor = 0.5
+Mobility.AV.CostPerMileFactor = 0.7
+#Mobility.AV.TerminalTimeFactor = 0.65
+#Mobility.AV.MinimumAgeDriveAlone = 13
+#Mobility.TNC.shared.IVTFactor = 1.25
+#crossBorder.avShare = 0.0
+#
+##-------------------------------------------------------------------------------------------
+## Taxi and TNC cost and wait time parameters
+##-------------------------------------------------------------------------------------------
+## 3 modes: taxi, TNC - single, and TNC - shared
+## baseFare: Initial fare
+## costPerMile: The cost per mile
+## costPerMinute: The cost per minute
+## costMinimum: The minimum cost (for TNC modes only)
+##
+## Wait times are drawn from a distribution by area type (emp+hh)/sq. miles
+## The mean and standard deviation are given for each area type range
+## The ranges are configurable, set by WaitTimeDistribution.EndPopEmpPerSqMi
+#
+#taxi.baseFare = 1.78
+#taxi.costPerMile = 1.87
+#taxi.costPerMinute = 0.08
+#
+#TNC.single.baseFare = 1.78
+#TNC.single.costPerMile = 1.08
+#TNC.single.costPerMinute = 0.19
+#TNC.single.costMinimum = 5.84
+#
+## use lower costs - these are synthesized, need real prices
+#TNC.shared.baseFare = 1.78
+#TNC.shared.costPerMile = 0.36
+#TNC.shared.costPerMinute = 0.06
+#TNC.shared.costMinimum = 2.43
+#
+##Note: the following comma-separated value properties cannot have spaces between them, or else the RuntimeConfiguration.py code won't work
+#TNC.single.waitTime.mean = 10.3,8.5,8.4,6.3,4.7
+#TNC.single.waitTime.sd = 4.1,4.1,4.1,4.1,4.1
+#
+#TNC.shared.waitTime.mean = 15.0,15.0,11.0,8.0,7.0
+#TNC.shared.waitTime.sd = 4.1,4.1,4.1,4.1,4.1
+#
+#Taxi.waitTime.mean = 26.5,17.3,13.3,9.5,5.5
+#Taxi.waitTime.sd = 6.4,6.4,6.4,6.4,6.4
+#
+#WaitTimeDistribution.EndPopEmpPerSqMi = 500,2000,5000,15000,9999999999
+#
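To make the wait-time inputs above concrete: the EndPopEmpPerSqMi values are upper bounds of area-type bins on (employment + households) per square mile, and each mode has one mean/standard-deviation pair per bin. The sketch below is illustrative only; drawing from a normal distribution truncated at zero is an assumption about the draw, not a statement of what the Java code does:

# Illustrative only: look up the wait-time distribution for a zone's density bin
# and draw a non-negative wait time. The actual draw in CT-RAMP may differ.
import random

end_density = [500, 2000, 5000, 15000, 9999999999]   # WaitTimeDistribution.EndPopEmpPerSqMi
tnc_single_mean = [10.3, 8.5, 8.4, 6.3, 4.7]          # TNC.single.waitTime.mean
tnc_single_sd = [4.1, 4.1, 4.1, 4.1, 4.1]             # TNC.single.waitTime.sd

def wait_time_minutes(pop_emp_per_sqmi, means, sds, rng=random):
    # pick the first bin whose upper bound exceeds the zone's (emp+hh) density
    for i, upper in enumerate(end_density):
        if pop_emp_per_sqmi < upper:
            break
    return max(0.0, rng.gauss(means[i], sds[i]))      # truncation at zero is an assumption

print(wait_time_minutes(3200, tnc_single_mean, tnc_single_sd))
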
+##-------------------------------------------------------------------------------------------
+## Taxi and TNC vehicle trip conversion factors
+##-------------------------------------------------------------------------------------------
+## The following properties split the taxi, TNC-single, and TNC-shared person trips into vehicle trips by occupancy, which are then added to the rest of the vehicle trips prior to assignment.
+#
+Taxi.da.share = 0.0
+Taxi.s2.share = 0.9
+Taxi.s3.share = 0.1
+Taxi.passengersPerVehicle = 1.1
+#
+TNC.single.da.share = 0.0
+TNC.single.s2.share = 0.8
+TNC.single.s3.share = 0.2
+TNC.single.passengersPerVehicle = 1.2
+#
+TNC.shared.da.share = 0.0
+TNC.shared.s2.share = 0.3
+TNC.shared.s3.share = 0.7
+TNC.shared.passengersPerVehicle = 2.0
+#
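A minimal sketch of the arithmetic these factors describe: person trips for each ride-hail mode are divided by passengersPerVehicle to get vehicle trips, which are then split across the DA/S2/S3 occupancy classes using the share properties (illustrative only; the actual trip-table code is not shown here):

# Illustrative arithmetic for converting ride-hail person trips to vehicle trips
# by occupancy class, using the shares and passengersPerVehicle values above.
def to_vehicle_trips(person_trips, passengers_per_vehicle, da_share, s2_share, s3_share):
    vehicle_trips = person_trips / passengers_per_vehicle
    return {
        "DA": vehicle_trips * da_share,
        "S2": vehicle_trips * s2_share,
        "S3": vehicle_trips * s3_share,
    }

# e.g. 1000 TNC-single person trips with the values above:
print(to_vehicle_trips(1000, passengers_per_vehicle=1.2, da_share=0.0, s2_share=0.8, s3_share=0.2))
# -> roughly {'DA': 0.0, 'S2': 666.7, 'S3': 166.7}
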
+##-------------------------------------------------------------------------------------------
+## Maas Routing Model Properties
+##-------------------------------------------------------------------------------------------
+Maas.RoutingModel.maxDistanceForPickup = 5
+Maas.RoutingModel.maxDiversionTimeForPickup = 5
+Maas.RoutingModel.minutesPerSimulationPeriod = 5
+Maas.RoutingModel.maxPassengers = 6
+Maas.RoutingModel.maxWalkDistance = 0.15
+Maas.RoutingModel.vehicletrip.output.file = output/TNCTrips.csv
+Maas.RoutingModel.vehicletrip.output.matrix = output/TNCVehicleTrips
+
+Maas.RoutingModel.routeIntrazonal = false
+#NULL,DRIVEALONE,SHARED2,SHARED3,WALK,BIKE,WALK_SET,PNR_SET,KNR_SET,TNC_SET,TAXI,TNC_SINGLE,TNC_SHARED,SCHBUS
+Maas.RoutingModel.Modes = 0,0,0,0,0,0,0,0,0,0,1,1,1,0
+Maas.RoutingModel.SharedEligible = 0,0,0,0,0,0,0,0,0,0,0,0,1,0
+Maas.RoutingModel.maxDistanceBeforeRefuel = 300
+Maas.RoutingModel.timeRequiredForRefuel = 15
+
+Maas.AVAllocationModel.vehicletrip.output.file = output/householdAVTrips.csv
+Maas.AVAllocationModel.vehicletrip.output.matrix = output/emptyAVTrips
+
+Maas.AVAllocation.uec.file = AutonomousVehicleAllocationChoice.xls
+Maas.AVAllocation.data.page = 0
+Maas.AVAllocation.vehiclechoice.model.page = 1
+Maas.AVAllocation.parkingchoice.model.page = 2
+Maas.AVAllocation.triputility.model.page = 3
+Mobility.AV.RemoteParkingCostPerHour = ${Mobility.AV.RemoteParkingCostPerHour}
+#
+## END--EMERGING MOBILITY SECTION
+#############################################################################################
+# Transit PCE VEH Conversion cliu 8/19/20
+#####################################################################################
+transit.bus.pceveh = 3.0
+##############################################################################################################################################################################
+##
+## CORE MODEL RUN PROPERTIES: CONTROL STEPS RUN IN CORE MODEL
+##
+##############################################################################################################################################################################
+Model.Random.Seed = 1
+#
+#RunModel.Clear.MatrixMgr.At.Start = false
+#
+## Set to true to read the accessibilities from an input file instead of calculating them prior to running CTRAMP
+#acc.read.input.file = false
+#
+## Setting shadow price files to null will reset prices to 0. If running a new land-use scenario, set the files to null and set maximum iterations to 20.
+## Then copy the shadow price output files to the input directory and set maximum iterations to 1 for any subsequent runs with the same land-use file.
+#UsualWorkLocationChoice.ShadowPrice.Input.File = input/ShadowPricingOutput_work_9.csv
+#UsualSchoolLocationChoice.ShadowPrice.Input.File = input/ShadowPricingOutput_school_9.csv
+#uwsl.ShadowPricing.Work.MaximumIterations = 1
+#uwsl.ShadowPricing.School.MaximumIterations = 1
+#uwsl.ShadowPricing.OutputFile = output/ShadowPricingOutput.csv
+#
+#uwsl.run.workLocChoice = true
+#uwsl.run.schoolLocChoice = true
+#uwsl.write.results = true
+#
+#uwsl.use.new.soa = false
+#nmdc.use.new.soa = false
+#slc.use.new.soa = false
+#
+## properties for distributed time coefficients
+#distributedTimeCoefficients = true
+#
+#timeDistribution.mean.work = 1.0
+#timeDistribution.standardDeviation.work = 0.7
+#timeDistribution.mean.nonWork = 1.0
+#timeDistribution.standardDeviation.nonWork = 0.6
+#
+#timeDistribution.randomSeed = 2301832
+#
+## value of time thresholds for skimming, assignment, mode choice UECs and trip tables ($/hr).
+#valueOfTime.threshold.low = 8.81
+#valueOfTime.threshold.med = 18.0
+#
+#
+## save tour mode choice utilities and probabilities (for debugging purposes)
+#TourModeChoice.Save.UtilsAndProbs = true
+#
+## packet size for distributing households, DO NOT change
+#distributed.task.packet.size = 200
+#
+##RunModel.RestartWithHhServer = uwsl
+#RunModel.RestartWithHhServer = none
+##RunModel.RestartWithHhServer = ao
+##RunModel.RestartWithHhServer = stf
+#
+## Model Component run flags; Wu's note: not functional yet
+#RunModel.PreAutoOwnership = true
+#RunModel.UsualWorkAndSchoolLocationChoice = true
+#RunModel.AutoOwnership = true
+#RunModel.TransponderChoice = true
+#RunModel.FreeParking = true
+#RunModel.CoordinatedDailyActivityPattern = true
+#RunModel.IndividualMandatoryTourFrequency = true
+#RunModel.MandatoryTourModeChoice = true
+#RunModel.MandatoryTourDepartureTimeAndDuration = true
+#RunModel.SchoolEscortModel = true
+#RunModel.JointTourFrequency = true
+#RunModel.JointTourLocationChoice = true
+#RunModel.JointTourDepartureTimeAndDuration = true
+#RunModel.JointTourModeChoice = true
+#RunModel.IndividualNonMandatoryTourFrequency = true
+#RunModel.IndividualNonMandatoryTourLocationChoice = true
+#RunModel.IndividualNonMandatoryTourDepartureTimeAndDuration = true
+#RunModel.IndividualNonMandatoryTourModeChoice = true
+#RunModel.AtWorkSubTourFrequency = true
+#RunModel.AtWorkSubTourLocationChoice = true
+#RunModel.AtWorkSubTourDepartureTimeAndDuration = true
+#RunModel.AtWorkSubTourModeChoice = true
+#RunModel.StopFrequency = true
+#RunModel.StopLocation = true
+#
+##############################################################################################################################################################################
+##
+## INPUT PROPERTIES
+##
+##############################################################################################################################################################################
+##PopSyn Inputs
+PopulationSynthesizer.InputToCTRAMP.HouseholdFile = input/households.csv
+PopulationSynthesizer.InputToCTRAMP.PersonFile = input/persons.csv
+PopulationSynthesizer.OccupCodes = input/pecas_occ_occsoc_acs.csv
+PopulationSynthesizer.IndustryCodes = input/activity_code_indcen_acs.csv
+##
+## The military industry ranges are used to recode military occupation. This is
+## necessary because military workers identify themselves as having non-military occupations.
+## The models need to be consistent with PECAS, where all military workers are in
+## the military occupation category 56.
+#PopulationSynthesizer.MilitaryIndustryRange = 9670,9870
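A minimal illustration of the recode described above: an industry code inside PopulationSynthesizer.MilitaryIndustryRange forces the military occupation category 56. The function and column names are hypothetical, not the actual CT-RAMP schema:

# Illustrative recode: industry codes inside the military range force occupation 56.
MILITARY_INDUSTRY_RANGE = (9670, 9870)   # PopulationSynthesizer.MilitaryIndustryRange
MILITARY_OCCUPATION = 56

def recode_occupation(industry_code, occupation_code):
    lo, hi = MILITARY_INDUSTRY_RANGE
    if lo <= industry_code <= hi:
        return MILITARY_OCCUPATION
    return occupation_code

print(recode_occupation(9770, 43))   # -> 56
print(recode_occupation(5170, 43))   # -> 43
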
+#
+## auxiliary inputs; these are scenario-specific
+#taz.driveaccess.taps.file = input/accessam.csv
+#tap.ptype.file = input/tap.ptype
+#taz.parkingtype.file = input/zone.park
+#taz.terminal.time.file = input/zone.term
+#maz.tap.tapLines = output/tapLines.csv
+#
+## transit stop attribute file
+#transit.stop.file = input/trstop.csv
+#
+##############################################################################################################################################################################
+##
+## OUTPUT PROPERTIES
+##
+##############################################################################################################################################################################
+#Results.WriteDataToFiles = true
+Results.HouseholdDataFile = output/resident/final_households.csv
+Results.PersonDataFile = output/resident/final_persons.csv
+#Results.IndivTourDataFile = output/indivTourData.csv
+#Results.JointTourDataFile = output/jointTourData.csv
+Results.TripDataFile = output/resident/final_trips.csv
+#Results.JointTripDataFile = output/jointTripData.csv
+#Results.WriteDataToDatabase = false
+#Results.HouseholdTable = household_data
+#Results.PersonTable = person_data
+#Results.IndivTourTable = indiv_tour_data
+#Results.JointTourTable = joint_tour_data
+#Results.IndivTripTable = indiv_trip_data
+#Results.JointTripTable = joint_trip_data
+#Results.AutoTripMatrix = output/autoTrips
+#Results.TranTripMatrix = output/tranTrips
+#Results.NMotTripMatrix = output/nmotTrips
+#Results.OthrTripMatrix = output/othrTrips
+#Results.PNRFile = output/PNRByTAP_Vehicles.csv
+#Results.CBDFile = output/CBDByMGRA_Vehicles.csv
+Results.MatrixType = OMX
+#Results.segmentByTransponderOwnership = true
+#
+#
+#Results.AutoOwnership = output/aoResults.csv
+#read.pre.ao.results = false
+#read.pre.ao.filename = output/aoResults_pre.csv
+#
+#Results.UsualWorkAndSchoolLocationChoice = output/wsLocResults.csv
+#read.uwsl.results = false
+#read.uwsl.filename = output/wsLocResults_1.csv
+#
+#############################################################################################################################################################################
+#
+# CORE MODEL UECS
+#
+#############################################################################################################################################################################
+# UECs for calculating accessibilities
+#acc.uec.file = %project.folder%/uec/Accessibilities.xls
+#acc.data.page = 0
+#acc.sov.offpeak.page = 1
+#acc.sov.peak.page = 2
+#acc.hov.offpeak.page = 3
+#acc.hov.peak.page = 4
+#acc.maas.offpeak.page = 5
+#acc.maas.peak.page = 6
+#acc.nonmotorized.page = 7
+#acc.constants.page = 8
+#acc.sizeTerm.page = 9
+#acc.schoolSizeTerm.page = 10
+#acc.workerSizeTerm.page = 11
+#acc.dcUtility.uec.file = %project.folder%/uec/Accessibilities_DC.xls
+#acc.dcUtility.data.page = 0
+#acc.dcUtility.page = 1
+#
+## accessibility file location
+#acc.output.file = input/accessibilities.csv
+#
+##UECs for calculating destination choice based land use accessibilities
+#lu.acc.dcUtility.uec.file = %project.folder%/uec/Accessibilities_LU_DC.xls
+#lu.acc.dcUtility.data.page = 0
+#lu.acc.dcUtility.page = 1
+#lu.accessibility.alts.file = Acc_LU_alts.csv
+#
+## land use accessibility file locations
+#lu.acc.output.file = output/luAccessibilities.csv
+#lu.acc.mc.logsums.output.file = output/luLogsums.csv
+#
+## set either or both averaging methods to be used to write
LU accessibilities files +## also requires command line parameter "-luAcc true" and acc.read.input.file = false +#lu.acc.simple.averaging.method = true +#lu.acc.logit.averaging.method = true +# +#accessibility.alts.file = Acc_alts.csv +# +##UEC for Mandatory accessibilities +#acc.mandatory.uec.file = %project.folder%/uec/MandatoryAccess.xls +#acc.mandatory.data.page = 0 +#acc.mandatory.auto.page = 1 +#acc.mandatory.autoLogsum.page = 2 +#acc.mandatory.bestWalkTransit.page = 3 +#acc.mandatory.bestDriveTransit.page = 4 +#acc.mandatory.transitLogsum.page = 5 +# +## UECs for auto ownership model +#ao.uec.file = AutoOwnership.xls +#ao.data.page = 0 +#ao.model.page = 1 +# +## UECs for Mandatory tour destination choice model +#uwsl.dc.uec.file = TourDestinationChoice2020.xls +#uwsl.dc2.uec.file = TourDestinationChoice2.xls +#uwsl.soa.uec.file = DestinationChoiceAlternativeSample.xls +#uwsl.soa.alts.file = DestinationChoiceAlternatives.csv +#uwsl.work.soa.SampleSize = 30 +#uwsl.school.soa.SampleSize = 30 +# +## The UEC file for work purposes includes TAZ Size in the expressions +#work.soa.uec.file = TourDcSoaDistance.xls +#work.soa.uec.data = 0 +#work.soa.uec.model = 1 +# +## The UEC file for school purposes does not include TAZ Size in the expressions +## so that the utilities can be stored as exponentiated distance utility matrices +## for univ, hs, gs, and ps, and then multiplied by the various school segment +## size terms for each of these 4 groups of school segments. +#univ.soa.uec.file = TourDcSoaDistanceNoSchoolSize.xls +#univ.soa.uec.data = 0 +#univ.soa.uec.model = 1 +# +#hs.soa.uec.file = TourDcSoaDistanceNoSchoolSize.xls +#hs.soa.uec.data = 0 +#hs.soa.uec.model = 2 +# +#gs.soa.uec.file = TourDcSoaDistanceNoSchoolSize.xls +#gs.soa.uec.data = 0 +#gs.soa.uec.model = 3 +# +#ps.soa.uec.file = TourDcSoaDistanceNoSchoolSize.xls +#ps.soa.uec.data = 0 +#ps.soa.uec.model = 4 +# +##UECs for transponder ownership model +#tc.choice.avgtts.file = output/transponderModelAccessibilities.csv +#tc.uec.file = TransponderOwnership.xls +#tc.data.page = 0 +#tc.model.page = 1 +#tc.everyone.owns = 0 +# +# +##UECs for parking provision model +#fp.uec.file = ParkingProvision.xls +#fp.data.page = 0 +#fp.model.page = 1 +# +##UEC for telecommute model +#te.uec.file = Telecommute.xls +#te.data.page = 0 +#te.model.page = 1 +# +# +##UECs for CDAP model +#cdap.uec.file = CoordinatedDailyActivityPattern.xls +#cdap.data.page = 0 +#cdap.one.person.page = 1 +#cdap.two.person.page = 2 +#cdap.three.person.page = 3 +#cdap.all.person.page = 4 +#cdap.joint.page = 5 +# +##UECs for individual mandatory tour frequency model +#imtf.uec.file = MandatoryTourFrequency.xls +#imtf.data.page = 0 +#imtf.model.page = 1 +# +##UECs for Non-Mandatory tour destination sampling +#nonSchool.soa.uec.file = TourDcSoaDistance.xls +#escort.soa.uec.data = 0 +#escort.soa.uec.model = 2 +#other.nonman.soa.uec.data = 0 +#other.nonman.soa.uec.model = 3 +#atwork.soa.uec.data = 0 +#atwork.soa.uec.model = 4 +# +soa.taz.dist.alts.file = SoaTazDistAlts.csv +# +#nmdc.dist.alts.file = NonMandatoryTlcAlternatives.csv +#nmdc.soa.alts.file = DestinationChoiceAlternatives.csv +#nmdc.soa.SampleSize = 30 +# +##UECs for Non-Mandatory tour destination choice model +#nmdc.uec.file2 = TourDestinationChoice2.xls +#nmdc.uec.file = TourDestinationChoice2020.xls +#nmdc.data.page = 0 +#nmdc.escort.model.page = 7 +#nmdc.shop.model.page = 8 +#nmdc.maint.model.page = 9 +#nmdc.eat.model.page = 10 +#nmdc.visit.model.page = 11 +#nmdc.discr.model.page = 12 +#nmdc.atwork.model.page = 
13 +# +## following properties use tod sampling instead of logsums +#nmdc.SampleTODPeriod = true +#nmdc.SampleTODPeriod.file = input/Non_Mand_Tours_ArrDep_Distbn.csv +# +##UECs for Non-Mandatory tour destination sampling +#nmdc.soa.uec.file = DestinationChoiceAlternativeSample.xls +#nmdc.soa.data.page = 0 +#nmdc.soa.escort.model.page = 6 +#nmdc.soa.shop.model.page = 7 +#nmdc.soa.maint.model.page = 7 +#nmdc.soa.eat.model.page = 7 +#nmdc.soa.visit.model.page = 7 +#nmdc.soa.discr.model.page = 7 +#nmdc.soa.atwork.model.page = 8 +# +##UECs for School Escorting Model +#school.escort.uec.filename = SchoolEscorting.xls +#school.escort.alts.file = SchoolEscortingAlts.csv +#school.escort.data.sheet = 0 +#school.escort.outbound.model.sheet = 1 +#school.escort.inbound.conditonal.model.sheet = 2 +#school.escort.outbound.conditonal.model.sheet = 3 +#school.escort.RNG.offset = 384571483 +# +##UECs for tour mode choice model +#tourModeChoice.uec.file = TourModeChoice.xls +#tourModeChoice.maint.model.page = 4 +#tourModeChoice.discr.model.page = 5 +#tourModeChoice.atwork.model.page = 6 +# +## utility coefficients by tour purpose (work, univ, school, maintenance, discretionary, work-based). These are at tour level. +#tour.utility.ivt.coeffs = -0.016,-0.016,-0.01,-0.017,-0.015,-0.032 +#tour.utility.income.coeffs = -0.625,-0.262,-0.262,-0.262,-0.262,-0.262 +#tour.utility.income.exponents = 0.6,0.5,0.5,0.5,0.5,0.5 +# +##UECs for tour TOD choice model +#departTime.uec.file = TourDepartureAndDuration.xls +#departTime.data.page = 0 +#departTime.work.page = 1 +#departTime.univ.page = 2 +#departTime.school.page = 3 +#departTime.escort.page = 4 +#departTime.shop.page = 5 +#departTime.maint.page = 6 +#departTime.eat.page = 7 +#departTime.visit.page = 8 +#departTime.discr.page = 9 +#departTime.atwork.page = 10 +#departTime.alts.file = DepartureTimeAndDurationAlternatives.csv +# +##UECs for joint tour frequency choice model +#jtfcp.uec.file = JointTourFrequency.xls +#jtfcp.alternatives.file = JointAlternatives.csv +#jtfcp.data.page = 0 +#jtfcp.freq.comp.page = 1 +#jtfcp.participate.page = 2 +# +##UECs for individual non-mandatory tour frequency model +#inmtf.uec.file = NonMandatoryIndividualTourFrequency.xls +#inmtf.FrequencyExtension.ProbabilityFile = IndividualNonMandatoryTourFrequencyExtensionProbabilities_p1.csv +#IndividualNonMandatoryTourFrequency.AlternativesList.InputFile = IndividualNonMandatoryTourFrequencyAlternatives.csv +#inmtf.data.page = 0 +#inmtf.perstype1.page = 1 +#inmtf.perstype2.page = 2 +#inmtf.perstype3.page = 3 +#inmtf.perstype4.page = 4 +#inmtf.perstype5.page = 5 +#inmtf.perstype6.page = 6 +#inmtf.perstype7.page = 7 +#inmtf.perstype8.page = 8 +# +##UECs for at work subtour frequency model +#awtf.uec.file = AtWorkSubtourFrequency.xls +#awtf.data.page = 0 +#awtf.model.page = 1 +# +##UECs for stop frequency model +#stf.uec.file = StopFrequency.xls +#stf.purposeLookup.proportions = StopPurposeLookupProportions.csv +#stf.data.page = 0 +#stf.work.page = 1 +#stf.univ.page = 2 +#stf.school.page = 3 +#stf.escort.page = 4 +#stf.shop.page = 5 +#stf.maint.page = 6 +#stf.eat.page = 7 +#stf.visit.page = 8 +#stf.discr.page = 9 +#stf.subtour.page = 10 +# +##UECs for stop location choice model +#slc.uec.file = StopLocationChoice.xls +#slc.uec.data.page = 0 +#slc.mandatory.uec.model.page = 1 +#slc.maintenance.uec.model.page = 2 +#slc.discretionary.uec.model.page = 3 +#slc.alts.file = SlcAlternatives.csv +# +#slc.soa.uec.file = SlcSoaSize.xls +#slc.soa.alts.file = DestinationChoiceAlternatives.csv +# 
+#auto.slc.soa.distance.uec.file = SlcSoaDistanceUtility.xls +#auto.slc.soa.distance.data.page = 0 +#auto.slc.soa.distance.model.page = 1 +# +#slc.soa.size.uec.file = SlcSoaSize.xls +#slc.soa.size.uec.data.page = 0 +#slc.soa.size.uec.model.page = 1 +# +#stop.depart.arrive.proportions = StopDepartArriveProportions.csv +# +##UECs for trip mode choice model +#tripModeChoice.uec.file = TripModeChoice.xls +# +## utility coefficients by tour purpose (work, univ, school, maintenance, discretionary, work-based). These are at trip level. +#trip.utility.ivt.coeffs = -0.032,-0.032,-0.02,-0.034,-0.03,-0.064 +#trip.utility.income.coeffs = -1.25,-0.524,-0.524,-0.524,-0.524,-0.524 +#trip.utility.income.exponents = 0.6,0.5,0.5,0.5,0.5,0.5 +# +# +##UECs for parking location choice model +#plc.uec.file = ParkLocationChoice.xls +#plc.uec.data.page = 0 +#plc.uec.model.page = 1 +# +#plc.alts.corresp.file = ParkLocationAlts.csv +#plc.alts.file = ParkLocationSampleAlts.csv +# +#mgra.avg.cost.output.file = output/mgraParkingCost.csv +# +#mgra.avg.cost.dist.coeff.work = -8.6 +#mgra.avg.cost.dist.coeff.other = -4.9 +# +#park.cost.reimb.mean = -0.05 +#park.cost.reimb.std.dev = 0.54 +# +##UECs for best transit path finding +#utility.bestTransitPath.uec.file = BestTransitPathUtility.xls +#utility.bestTransitPath.data.page = 0 +#utility.bestTransitPath.tapToTap.page = 1 +#utility.bestTransitPath.walkAccess.page = 2 +#utility.bestTransitPath.driveAccess.page = 3 +#utility.bestTransitPath.walkEgress.page = 4 +#utility.bestTransitPath.driveEgress.page = 5 +#utility.bestTransitPath.driveAccDisutility.page = 6 +#utility.bestTransitPath.driveEgrDisutility.page = 7 +#utility.bestTransitPath.skim.sets = 3 +#utility.bestTransitPath.alts = 4 +#utility.bestTransitPath.maxPathsPerSkimSetForLogsum = 1,1,1 +#utility.bestTransitPath.nesting.coeff = 0.24 +# +##UECs for auto skimming +#skims.auto.uec.file = AutoSkims.xls +#skims.auto.data.page = 0 +#skims.auto.ea.page = 1 +#skims.auto.am.page = 2 +#skims.auto.md.page = 3 +#skims.auto.pm.page = 4 +#skims.auto.ev.page = 5 +# +##UECs for TAZ distances +taz.distance.uec.file = tazDistance.xls +taz.distance.data.page = 0 +taz.od.distance.ea.page = 1 +taz.od.distance.am.page = 2 +taz.od.distance.md.page = 3 +taz.od.distance.pm.page = 4 +taz.od.distance.ev.page = 5 +# +##UECs for TAZ times +taz.od.time.ea.page = 6 +taz.od.time.am.page = 7 +taz.od.time.md.page = 8 +taz.od.time.pm.page = 9 +taz.od.time.ev.page = 10 +# +# +##UECs for walk-transit-walk skimming +#skim.walk.transit.walk.uec.file = WalkTransitWalkSkims.xls +#skim.walk.transit.walk.data.page = 0 +#skim.walk.transit.walk.skim.page = 1 +#skim.walk.transit.walk.skims = 13 +# +##UECs for walk-transit-drive skimming +#skim.walk.transit.drive.uec.file = WalkTransitDriveSkims.xls +#skim.walk.transit.drive.data.page = 0 +#skim.walk.transit.drive.skim.page = 1 +#skim.walk.transit.drive.skims = 13 +# +##UECs for drive-transit-walk skimming +#skim.drive.transit.walk.uec.file = DriveTransitWalkSkims.xls +#skim.drive.transit.walk.data.page = 0 +#skim.drive.transit.walk.skim.page = 1 +#skim.drive.transit.walk.skims = 13 +# +# +###################################################################################### +## IE Model Settings (run as part of CT-RAMP) +###################################################################################### +# +#RunModel.InternalExternal = true +# +#ie.uec.file = InternalExternalTripChoice.xls +#ie.data.page = 0 +#ie.model.page = 1 +#ie.logsum.distance.coeff = -0.05 +#external.tazs = 
1,2,3,4,5,6,7,8,9,10,11,12 +# +# +#internalExternal.dc.uec.file = InternalExternalDestinationChoice.xls +#internalExternal.dc.uec.data.page = 0 +#internalExternal.dc.uec.model.page = 1 +#internalExternal.dc.uec.alts.file = InternalExternalDestinationChoiceAlternatives.csv +# +#internalExternal.tour.tod.file = input/internalExternal_tourTOD.csv +# +#internalExternal.trip.mc.uec.file = InternalExternalTripModeChoice.xls +#internalExternal.trip.mc.data.page = 0 +#internalExternal.trip.mc.model.page = 1 +# +#internalExternal.trip.output.file = output/internalExternalTrips.csv +# +#internalExternal.results.autoTripMatrix = output/autoInternalExternalTrips +#internalExternal.results.nMotTripMatrix = output/nmotInternalExternalTrips +#internalExternal.results.tranTripMatrix = output/tranInternalExternalTrips +#internalExternal.results.othrTripMatrix = output/othrInternalExternalTrips +# +###################################################################################### +## Cross-Border Model Settings +###################################################################################### +#crossBorder.purpose.nonsentri.file = input/crossBorder_tourPurpose_nonSENTRI.csv +#crossBorder.purpose.sentri.file = input/crossBorder_tourPurpose_SENTRI.csv +# +#crossBorder.tour.tod.file = input/crossBorder_tourEntryAndReturn.csv +# +#crossBorder.dc.soa.uec.file = CrossBorderDestinationChoiceSample.xls +#crossBorder.dc.soa.data.page = 0 +#crossBorder.dc.soa.model.page = 1 +#crossBorder.dc.soa.size.page = 2 +#crossborder.dc.soa.alts.file = CrossBorderDestinationChoiceSoaAlternatives2020.csv +# +#crossBorder.dc.uec.file = CrossBorderDestinationChoice2020.xls +#crossBorder.dc.data.page = 0 +#crossBorder.dc.model.page = 1 +#crossborder.dc.alts.file = CrossBorderDestinationChoiceAlternatives.csv +# +#crossBorder.dc.colonia.file = input/crossBorder_supercolonia.csv +#crossBorder.dc.colonia.distance.parameter = -0.19 +#crossBorder.dc.soa.sampleRate = 30 +# +##crossBorder.tour.mc.uec.file = CrossBorderTourModeChoice.xls +#crossBorder.tour.mc.uec.file = CrossBorderTourModeChoice2020.xls +#crossBorder.tour.mc.data.page = 0 +#crossBorder.tour.mc.mandatory.model.page = 1 +#crossBorder.tour.mc.nonmandatory.model.page = 2 +#crossBorder.poe.waittime.file = input/crossBorder_pointOfEntryWaitTime.csv +# +#crossBorder.trip.mc.uec.file = CrossBorderTripModeChoice.xls +#crossBorder.trip.mc.data.page = 0 +#crossBorder.trip.mc.model.page = 1 +# +#crossBorder.stop.frequency.file = input/crossBorder_stopFrequency.csv +#crossBorder.stop.purpose.file = input/crossBorder_stopPurpose.csv +# +#crossBorder.slc.soa.uec.file = CrossBorderStopLocationChoiceSample.xls +#crossBorder.slc.soa.data.page = 0 +#crossBorder.slc.soa.model.page = 1 +#crossBorder.slc.soa.alts.file = SoaTazDistAlts.csv +# +#crossBorder.slc.uec.file = CrossBorderStopLocationChoice.xls +#crossBorder.slc.data.page = 0 +#crossBorder.slc.model.page = 1 +# +#crossBorder.stop.outbound.duration.file = input/crossBorder_outboundStopDuration.csv +#crossBorder.stop.inbound.duration.file = input/crossBorder_inboundStopDuration.csv +# +#crossBorder.tour.output.file = output/crossBorderTours.csv +crossBorder.trip.output.file = output/crossborder/final_trips.csv +# +#crossBorder.results.autoTripMatrix = output/autoCrossBorderTrips +#crossBorder.results.nMotTripMatrix = output/nmotCrossBorderTrips +#crossBorder.results.tranTripMatrix = output/tranCrossBorderTrips +#crossBorder.results.othrTripMatrix = output/othrCrossBorderTrips +# 
+###################################################################################### +## Visitor Model Settings +###################################################################################### +#visitor.business.tour.file = input/visitor_businessFrequency.csv +#visitor.personal.tour.file = input/visitor_personalFrequency.csv +# +#visitor.partySize.file = input/visitor_partySize.csv +#visitor.autoAvailable.file = input/visitor_autoAvailable.csv +#visitor.income.file = input/visitor_income.csv +# +#visitor.dc.soa.uec.file = VisitorDestinationChoiceSample.xls +#visitor.dc.soa.data.page = 0 +#visitor.dc.soa.work.page = 1 +#visitor.dc.soa.recreate.page = 2 +#visitor.dc.soa.dining.page = 3 +#visitor.dc.soa.size.page = 4 +#visitor.dc.soa.alts.file = SoaTazDistAlts.csv +# +#visitor.dc.uec.file = VisitorDestinationChoice.xls +#visitor.dc.data.page = 0 +#visitor.dc.work.page = 1 +#visitor.dc.recreate.page = 2 +#visitor.dc.dining.page = 3 +# +#visitor.tour.tod.file = input/visitor_tourTOD.csv +# +#visitor.mc.uec.file = VisitorTourModeChoice.xls +#visitor.mc.data.page = 0 +#visitor.mc.model.page = 1 +# +#visitor.stop.frequency.file = input/visitor_stopFrequency.csv +#visitor.stop.purpose.file = input/visitor_stopPurpose.csv +#visitor.stop.outbound.duration.file = input/visitor_outboundStopDuration.csv +#visitor.stop.inbound.duration.file = input/visitor_inboundStopDuration.csv +# +#visitor.slc.soa.uec.file = VisitorStopLocationChoiceSample.xls +#visitor.slc.soa.data.page = 0 +#visitor.slc.soa.model.page = 1 +# +#visitor.slc.uec.file = VisitorStopLocationChoice.xls +#visitor.slc.data.page = 0 +#visitor.slc.model.page = 1 +# +#visitor.trip.mc.uec.file = VisitorTripModeChoice.xls +#visitor.trip.mc.data.page = 0 +#visitor.trip.mc.model.page = 1 +# +#visitor.micromobility.uec.file = VisitorMicromobilityChoice.xls +#visitor.micromobility.data.page = 0 +#visitor.micromobility.model.page = 1 +# +# +# +# +#visitor.tour.output.file = output/visitorTours.csv +visitor.trip.output.file = output/visitor/final_trips.csv +# +#visitor.results.autoTripMatrix = output/autoVisitorTrips +#visitor.results.nMotTripMatrix = output/nmotVisitorTrips +#visitor.results.tranTripMatrix = output/tranVisitorTrips +#visitor.results.othrTripMatrix = output/othrVisitorTrips +# +# +## These settings are for building an estimation file, not used for main visitor model code +#visitor.uec.file = VisitorSize.xls +#visitor.uec.data.page = 0 +#visitor.uec.sizeTerms.page = 1 +# +###################################################################################### +## SAN Airport Model Settings +###################################################################################### +#airport.SAN.purpose.file = input/airport_purpose.SAN.csv +#airport.SAN.size.file = input/airport_party.SAN.csv +#airport.SAN.duration.file = input/airport_nights.SAN.csv +#airport.SAN.income.file = input/airport_income.SAN.csv +#airport.SAN.departureTime.file = input/airport_departure.SAN.csv +#airport.SAN.arrivalTime.file = input/airport_arrival.SAN.csv +airport.SAN.output.file = output/airport.SAN/final_trips.csv +# +#airport.SAN.dc.uec.file = AirportDestinationChoice.SAN.xls +#airport.SAN.dc.data.page = 0 +#airport.SAN.dc.size.page = 5 +#airport.SAN.dc.segment1.page = 1 +#airport.SAN.dc.segment2.page = 2 +#airport.SAN.dc.segment3.page = 3 +#airport.SAN.dc.segment4.page = 4 +# +#airport.SAN.mc.uec.file = AirportModeChoice.SAN2016.xls +#airport.SAN.mc.data.page = 0 +#airport.SAN.mc.da.page = 1 +#airport.SAN.mc.s2.page = 2 +#airport.SAN.mc.s3.page = 3 
+#airport.SAN.mc.transit.page = 4 +#airport.SAN.mc.accessMode.page = 5 +# +#airport.SAN.externalStationFile = uec/InternalExternalDestinationChoiceAlternatives.csv +# +#airport.SAN.results.autoTripMatrix = output/autoAirportTrips.SAN +#airport.SAN.results.nMotTripMatrix = output/nmotAirportTrips.SAN +#airport.SAN.results.tranTripMatrix = output/tranAirportTrips.SAN +#airport.SAN.results.othrTripMatrix = output/othrAirportTrips.SAN +# +###################################################################################### +## CBX Airport Model Settings +###################################################################################### +#airport.CBX.purpose.file = input/airport_purpose.CBX.csv +#airport.CBX.size.file = input/airport_party.CBX.csv +#airport.CBX.duration.file = input/airport_nights.CBX.csv +#airport.CBX.income.file = input/airport_income.CBX.csv +#airport.CBX.departureTime.file = input/airport_departure.CBX.csv +#airport.CBX.arrivalTime.file = input/airport_arrival.CBX.csv +airport.CBX.output.file = output/airport.CBX/final_trips.csv +RunModel.skipTransitConnector = true +RunModel.skipExternal = false,false,false +SavedFrom = Emme Modeller properties writer Process ID 51972 +SavedLast = Sep-07-2023 07:59:49 +active.coef.dwrongwy = ${active.coef.dwrongwy} +active.coef.dartne2 = ${active.coef.dartne2} +TNC.single.baseFare = ${TNC.single.baseFare} +TNC.shared.costMinimum = ${TNC.shared.costMinimum} +TNC.shared.costPerMinute = ${TNC.shared.costPerMinute} +active.micromobility.fixedCost = ${active.micromobility.fixedCost} +active.micromobility.variableCost = ${active.micromobility.variableCost} +airport.CBX.airportMgra = ${airport.CBX.airportMgra} +TNC.single.costPerMile = ${TNC.single.costPerMile} +active.ebike.ownership = ${active.ebike.ownership} +taxi.costPerMinute = ${taxi.costPerMinute} +airport.SAN.connecting = ${airport.SAN.connecting} +atdm.factor = ${atdm.factor} +active.maxdist.bike.taz = ${active.maxdist.bike.taz} +TNC.single.costMinimum = ${TNC.single.costMinimum} +airport.CBX.connecting = ${airport.CBX.connecting} +active.coef.distcla0 = ${active.coef.distcla0} +active.coef.distcla3 = ${active.coef.distcla3} +smartSignal.factor.MA = ${smartSignal.factor.MA} +airport.CBX.enplanements = ${airport.CBX.enplanements} +active.maxdist.bike.mgra = ${active.maxdist.bike.mgra} +TNC.single.costPerMinute = ${TNC.single.costPerMinute} +Mobility.AV.Share = ${Mobility.AV.Share} +active.bike.minutes.per.mile = ${active.bike.minutes.per.mile} +TNC.shared.costPerMile = ${TNC.shared.costPerMile} +smartSignal.factor.PA = ${smartSignal.factor.PA} +airport.SAN.airportMgra = ${airport.SAN.airportMgra} +active.coef.dbikblvd = ${active.coef.dbikblvd} +crossBorder.tours = ${crossBorder.tours} +tc.everyone.owns = ${tc.everyone.owns} +crossBorder.sentriShare = ${crossBorder.sentriShare} +active.coef.dcyctrac = ${active.coef.dcyctrac} +TNC.shared.baseFare = ${TNC.shared.baseFare} +airport.SAN.enplanements = ${airport.SAN.enplanements} +taxi.baseFare = ${taxi.baseFare} +active.coef.distcla1 = ${active.coef.distcla1} +active.microtransit.fixedCost = ${active.microtransit.fixedCost} +taxi.costPerMile = ${taxi.costPerMile} +active.coef.distcla2 = ${active.coef.distcla2} + +# +##################################################################################### +# Smart Signal Properties wsu 8/22/18 +##################################################################################### +smartSignal.factor.LC = ${smartSignal.factor.LC} +crossborder.dc.soa.alts.file = ${crossborder.dc.soa.alts.file} 
+visualizer.reference.path = ${visualizer.reference.path}
+nmdc.uec.file = ${nmdc.uec.file}
+airport.SAN.mc.uec.file = ${airport.SAN.mc.uec.file}
+crossBorder.dc.uec.file = ${crossBorder.dc.uec.file}
+crossBorder.tour.mc.uec.file = ${crossBorder.tour.mc.uec.file}
+uwsl.dc.uec.file = ${uwsl.dc.uec.file}
+
+#
+path.choice.uec.spreadsheet = %project.folder%/uec/BikeTripPathChoice.xls
+path.choice.uec.model.sheet = 1
+path.choice.uec.data.sheet = 0
+path.choice.max.path.count = 200
+btpc.alts.file = bike_path_alts.csv
+active.logsum.matrix.file.bike.taz = bikeTazLogsum.csv
+active.logsum.matrix.file.bike.mgra = bikeMgraLogsum.csv
+active.logsum.matrix.file.walk.mgra = walkMgraEquivMinutes.csv
+#active.logsum.matrix.file.walk.mgratap = walkMgraTapEquivMinutes.csv
+
+active.bike.write.derived.network = true
+active.bike.derived.network.edges = derivedBikeEdges.csv
+active.bike.derived.network.nodes = derivedBikeNodes.csv
+active.bike.derived.network.traversals = derivedBikeTraversals.csv
+
+active.assignment.file.bike = bikeAssignmentResults.csv
+active.micromobility.file.walk.mgra = microMgraEquivMinutes.csv
+#active.micromobility.file.walk.mgratap = microMgraTapEquivMinutes.csv
+
+AtTransitConsistency.xThreshold = 1.0
+AtTransitConsistency.yThreshold = 1.0
+
+#####################################################################################
+# Commercial Vehicle Model Settings
+#####################################################################################
+# scale factor applied in CVM trip generation; also used when importing the demand to factor it accordingly
+cvm.scale_factor = 1
+# scale factors by vehicle class (light, medium, and heavy) and time of day (ea,am,md,pm,ev) - used to boost CVM demand
+#light vehicles
+cvm.scale_light = 1,2,3.5,2,1
+#medium vehicles
+cvm.scale_medium = 1,1,1,1,1
+#heavy vehicles
+cvm.scale_heavy = 1,1,1,1,1
+# CVM vehicle shares: the portion of the CVM vehicle trips in each class that is assigned to light-heavy trucks.
+# Share values should be between 0 and 1: 0 means none of the trips go to light-heavy trucks and 1 means all of them do.
+cvm.share.light = 0.04 +cvm.share.medium = 0.64 +cvm.share.heavy = 0 + +# +################################################################# +# Active Transportation Model Settings +# updated 4/2/2014 wsu +################################################################# +active.node.file = %project.folder%/input/SANDAG_Bike_NODE.dbf +active.node.id = NodeLev_ID +active.node.fieldnames = mgra,taz,x,y,tap,signalized +active.node.columns = MGRA,TAZ,XCOORD,YCOORD,TAP,Signal +active.edge.file = %project.folder%/input/SANDAG_Bike_NET.dbf +active.edge.anode = A +active.edge.bnode = B +active.edge.directional = false +active.edge.fieldnames = functionalClass,distance,gain,bikeClass,lanes,cycleTrack,bikeBlvd,roadsegid +active.edge.columns.ab = Func_Class,Distance,AB_Gain,ABBikeClas,AB_Lanes,Bike2Sep,Bike3Blvd,ROADSEGID +active.edge.columns.ba = Func_Class,Distance,BA_Gain,BABikeClas,BA_Lanes,Bike2Sep,Bike3Blvd,ROADSEGID +active.edge.centroid.field = functionalClass +active.edge.centroid.value = 10 +active.edge.autospermitted.field = functionalClass +active.edge.autospermitted.values = 1,2,3,4,5,6,7 +# distance bins for control of path sampling +active.sample.distance.breaks = 99 +# minimum path sizes of alternative lists for each distance bin +active.sample.pathsizes = 2 +# minimum count of samples for each distance bin +active.sample.count.min = 10 +# maximum count of samples for each distance bin +active.sample.count.max = 100 +# scale of random cost for each sampling iteration where random cost = cost + scale * unif(0,1) * distance +active.sample.random.scale.coef = 0.5 +active.sample.random.scale.link = 0.7 +active.sample.random.seeded = true +active.sample.maxcost = 998 +active.maxdist.walk.mgra = 3.0 +active.maxdist.walk.tap = 1.0 +active.maxdist.micromobility.mgra = 3.0 +active.maxdist.micromobility.tap = 1.0 +active.maxdist.microtransit.mgra = 3.0 +active.maxdist.microtransit.tap = 3.0 +active.output.bike = %project.folder%/output/ +active.output.walk = %project.folder%/output/ +active.coef.nonscenic = 0.3 +active.coef.gain = 0.015 +active.coef.turn = 0.083 +active.coef.signals = 0.04 +active.coef.unlfrma = 0.36 +active.coef.unlfrmi = 0.15 +active.coef.untoma = 0.48 +active.coef.untomi = 0.1 +active.coef.gain.walk = 0.034 + +active.walk.minutes.per.mile = 20 + +active.micromobility.speed = 15 +active.micromobility.rentalTime = 1 +active.micromobility.constant = 60 +active.micromobility.vot = 15 + +micromobility.uec.file = MicromobilityChoice.xls +micromobility.data.page = 0 +micromobility.model.page = 1 + +active.microtransit.speed = 17 +active.microtransit.variableCost = 0.0 +active.microtransit.waitTime = 4.0 +active.microtransit.accessTime = 0.0 +active.microtransit.constant = 120 +active.microtransit.notAvailable = 999 + +active.microtransit.mgra.file = input/mobilityHubMGRAs.csv + +##################################################################################### +##################################################################################### +# Transit Tier 1 EMME Link Name zou 5/7/20 +##################################################################################### +transit.newMode = TIER 1 RAIL +transit.newMode.route = 581,582,583 From bfbc5b75c79ae9723f6ec48fed7b8d912f0eb165 Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Thu, 15 Feb 2024 11:09:08 -0800 Subject: [PATCH 41/43] Delete sandag_abm.properties.bak --- src/main/resources/sandag_abm.properties.bak | 1162 ------------------ 1 file changed, 1162 deletions(-) 
delete mode 100644 src/main/resources/sandag_abm.properties.bak diff --git a/src/main/resources/sandag_abm.properties.bak b/src/main/resources/sandag_abm.properties.bak deleted file mode 100644 index 57b8eb938..000000000 --- a/src/main/resources/sandag_abm.properties.bak +++ /dev/null @@ -1,1162 +0,0 @@ -##SANDAG ABM Properties -##Software Version -#version = version_14_3_0 -## geography ID -#geographyID = 1 -############################################################################################################################################################################## -## -## CLUSTER PROPERTIES: MODIFY WHEN CHANGING CLUSTER CONFIGURATION OR MOVING TO NEW CLUSTER. -## -############################################################################################################################################################################## -RunModel.MatrixServerAddress = localhost -RunModel.MatrixServerPort = 1191 -RunModel.HouseholdServerAddress = localhost -RunModel.HouseholdServerPort = 1129 -# -############################################################################################################################################################################## -## -## RUN PROPERTIES: MODEL COMPONENT SWITCHES -## -############################################################################################################################################################################## -##set sample rates -sample_rates = 0.25,0.5,1.0 -# -##highway assignment convergence criteria -convergence = 0.0005 -RunModel.useLocalDrive = false -RunModel.skipInitialization = false -RunModel.deleteAllMatrices = false -RunModel.skip4Ds = false -RunModel.skipInputChecker = true -RunModel.skipCopyWarmupTripTables = false -RunModel.skipCopyBikeLogsum = false -RunModel.skipShadowPricing = false -RunModel.skipBikeLogsums = true -RunModel.skipCopyWalkImpedance = true -RunModel.skipWalkLogsums = false -RunModel.skipBuildNetwork = false -RunModel.startFromIteration = 1 -RunModel.skipHighwayAssignment = false,false,false -RunModel.skipTransitSkimming = false,false,false -RunMode.skipTransitConnector = false -RunModel.skipTransponderExport = false,false,false -RunModel.skipABMPreprocessing = false,false,false -RunModel.skipABMResident = false,false,false -RunModel.skipABMAirport = false,false,false -RunModel.skipABMVisitor = false,false,false -RunModel.skipABMXborderWait = false -RunModel.skipABMXborder = false,false,false -RunModel.skipCTM = false,false,false -RunModel.skipEI = false,false,false -RunModel.skipExternalExternal = true,true,true -RunModel.skipTruck = false,false,false -RunModel.skipTripTableCreation = false,false,false -RunModel.skipFinalHighwayAssignment = false -RunModel.skipFinalHighwayAssignmentStochastic = true -RunModel.skipFinalTransitAssignment = false -RunModel.collapseOnOffByRoute = false -RunModel.skipLUZSkimCreation = true -RunModel.skipVisualizer = true -RunModel.skipDataExport = false -RunModel.skipDataLoadRequest = true -RunModel.skipDeleteIntermediateFiles = true -RunModel.MatrixPrecision = 0.0005 -# minimual space (MB) on C drive -RunModel.minSpaceOnC = 400 - -TNC.totalThreads = 10 - -############################################################################################################################################################################## -## -## LOGGING PROPERTIES: USE FOR TRACING HOUSEHOLDS OR AGENTS THROUGH SIMULATION. 
-## -## Note that the way that logging works right now, the trace zones also have to be valid transit stops or the code will crash. Check the skims to make sure they exist. -## Turn off trace debugging in routine model runs to speed things up (comment out Debug.Trace.HouseholdIdList) -## -############################################################################################################################################################################## -## Resident models -#Trace = false -##Trace.otaz = 1638 -##Trace.dtaz = 2447 -#Trace.otaz = -#Trace.dtaz = -#Seek = false -#Process.Debug.HHs.Only = false -Debug.Trace.HouseholdIdList = -# -## Internal-External models -#internalExternal.seek = false -#internalExternal.trace = 50 -# -## Cross-Border models -#crossBorder.seek = false -## trace by tourId -#crossBorder.trace = 12 -# -## Visitor models -#visitor.seek = false -##trace by tourId -##visitor.trace = 742 -#visitor.trace = 742 -# -## Special event models -#specialEvent.seek = false -#specialEvent.trace = 5855 -# -## Trace TransCAD trip table creation by TAZ (to/from); only applies to SD resident model -#tripTable.trace = 4384 -# -#RunModel.LogResults = true -# -############################################################################################################################################################################## -## PATH PROPERTIES: MODIFY AS NEEDED WHEN COPY RELEASE TO A LOCAL RUN FOLDER -############################################################################################################################################################################## -Project.Directory = %project.folder%/ -generic.path = %project.folder%/input/ -scenario.path = %project.folder%/ -skims.path = %project.folder%/output/skims/ -uec.path = %project.folder%/uec/ -report.path = %project.folder%/report/ -# -## Visitor model is run using Java 7 Fork\Join Framework. Parallelism controls number of simultaneous threads. Can increase if more processors. -## 5 threads provided optimum runtimes on a 6 core, 24 thread machine with 128GB of RAM. -#visitor.run.concurrent = true -#visitor.concurrent.parallelism = 5 -# -############################################################################################################################################################################## -## -## SCENARIO PROPERTIES: MODIFY WHEN RUNNING NEW SCENARIO, IF NECESSARY -## -############################################################################################################################################################################## -## MGRA data file: this token is referred to in many UECs in additon to submodels like truck -mgra.socec.file = input/mgra15_based_input${year}.csv -## scenario year -scenarioYear = ${year} -# -## Auto operating costs: these tokens are referred to in many UECs -aoc.fuel = ${aoc.fuel} -aoc.maintenance = ${aoc.maintenance} -# -## Cross border model is run using Java 7 Fork\Join Framework. Parallelism controls number of simultaneous threads. Can increase if more processors. -#crossBorder.run.concurrent = true -#crossBorder.concurrent.parallelism = 8 -# -## Cross border model settings: Number of tours, share of tours that are SENTRI. 
-#crossBorder.tours = 120700 -#crossBorder.sentriShare = 0.44 -# -## Visitor model settings: occupancy rates for hotels, households and share of each that are business visitors -#visitor.hotel.occupancyRate = 0.7 -#visitor.household.occupancyRate = 0.018 -#visitor.hotel.businessPercent = 0.3 -#visitor.household.businessPercent = 0.04 -# -## Airport model settings: enplanements, connecting passengers, average party size, MGRA that the airport is in -#airport.SAN.enplanements = 13727381 -#airport.SAN.connecting = 808619 -#airport.SAN.annualizationFactor = 365 -#airport.SAN.averageSize = 1.7 -#airport.SAN.airportMgra = 11249 -# -#airport.CBX.enplanements = 984428 -#airport.CBX.connecting = 0 -#airport.CBX.annualizationFactor = 365 -#airport.CBX.averageSize = 2.2 -#airport.CBX.airportMgra = 9350 -# -# Truck model settings: - -truck.DFyear = 2016,2019,2020,2022,2023,2025,2026,2029,2030,2032,2035,2040,2050 -truck.luOverRide = "False" -truck.FFyear = ${year} -# -## Destination zones for the transponder accessibility calculator -transponder.destinations = 4027,2563,2258 -#traffic.sla_limit = 3 -# -## Number of zones where 4996 is the default, but may be changed by Land Use Converter Tool zone split -#zones.count = 4947 -# -############################################################################################# -## EMERGING MOBILITY SECTION: MODIFY WHEN CHANGE AV, TNC, and MICROMOBILITY ASSUMPTIONS -##------------------------------------------------------------------------------------------- -## AV Mobility Scenario Parameters -##------------------------------------------------------------------------------------------- -## AV.Share: the share of vehicles assumed to be AVs in the vehicle fleet; Auto ownership ASCs will be calibrated for different levels of AV penetration -## AV.ProbabilityBoost: the increased probability (multiplicative) for using AVs for tours, based on autos to drivers. The highest this should go is 1.2 -## AV.IVTFactor: the auto in-vehicle time factor to apply to AVs -## AV.ParkingCostFactor: The auto parking cost factor to apply to AVs, assuming some AVs are sent to remote locations or home -## AV.CostPerMileFactor: The auto cost per mile factor to apply to AVs, assuming AVs are more efficient in terms of fuel consumption than human-driven vehicles -## AV.TerminalTimeFactor: The factor to apply to terminal time for AVs, assuming AVs offer curbside passenger pickup/dropoff -## TNC.shared.IVTFactor: The factor to apply to in-vehicle time for shared TNC mode, reflecting out-direction travel for pickup/dropoff of other passengers -# -#Mobility.AV.Share = 0 -#Mobility.AV.ProbabilityBoost.AutosLTDrivers = 1.2 -#Mobility.AV.ProbabilityBoost.AutosGEDrivers = 1.1 -#Mobility.AV.IVTFactor = 0.75 -#Mobility.AV.ParkingCostFactor = 0.5 -Mobility.AV.CostPerMileFactor = 0.7 -#Mobility.AV.TerminalTimeFactor = 0.65 -#Mobility.AV.MinimumAgeDriveAlone = 13 -#Mobility.TNC.shared.IVTFactor = 1.25 -#crossBorder.avShare = 0.0 -# -##------------------------------------------------------------------------------------------- -## Taxi and TNC cost and wait time parameters -##------------------------------------------------------------------------------------------- -## 3 modes: taxi, TNC - single, and TNC - shared -## baseFare: Initial fare -## costPerMile: The cost per mile -## costPerMinute: The cost per minute -## costMinimum: The minimum cost (for TNC modes only) -## -## Wait times are drawn from a distribution by area type (emp+hh)/sq. 
miles -## The mean and standard deviation is given for each area type range -## The ranges are configurable, set by WaitTimeDistribution.EndPopEmpPerSqMi -# -#taxi.baseFare = 1.78 -#taxi.costPerMile = 1.87 -#taxi.costPerMinute = 0.08 -# -#TNC.single.baseFare = 1.78 -#TNC.single.costPerMile = 1.08 -#TNC.single.costPerMinute = 0.19 -#TNC.single.costMinimum = 5.84 -# -## use lower costs - these are synthesized, need real prices -#TNC.shared.baseFare = 1.78 -#TNC.shared.costPerMile = 0.36 -#TNC.shared.costPerMinute = 0.06 -#TNC.shared.costMinimum = 2.43 -# -##Note: the following comma-separated value properties cannot have spaces between them, or else the RuntimeConfiguration.py code won't work -#TNC.single.waitTime.mean = 10.3,8.5,8.4,6.3,4.7 -#TNC.single.waitTime.sd = 4.1,4.1,4.1,4.1,4.1 -# -#TNC.shared.waitTime.mean = 15.0,15.0,11.0,8.0,7.0 -#TNC.shared.waitTime.sd = 4.1,4.1,4.1,4.1,4.1 -# -#Taxi.waitTime.mean = 26.5,17.3,13.3,9.5,5.5 -#Taxi.waitTime.sd = 6.4,6.4,6.4,6.4,6.4 -# -#WaitTimeDistribution.EndPopEmpPerSqMi = 500,2000,5000,15000,9999999999 -# -##------------------------------------------------------------------------------------------- -## Taxi and TNC vehcicle trip conversion factors -##------------------------------------------------------------------------------------------- -## The following properties are used to split out the taxi, TNC-single, and TNC-shared trips into vehicle trips to be added to the rest of the vehicle trips by occupancy prior to assignment. -# -Taxi.da.share = 0.0 -Taxi.s2.share = 0.9 -Taxi.s3.share = 0.1 -Taxi.passengersPerVehicle = 1.1 -# -TNC.single.da.share = 0.0 -TNC.single.s2.share = 0.8 -TNC.single.s3.share = 0.2 -TNC.single.passengersPerVehicle = 1.2 -# -TNC.shared.da.share = 0.0 -TNC.shared.s2.share = 0.3 -TNC.shared.s3.share = 0.7 -TNC.shared.passengersPerVehicle = 2.0 -# -##------------------------------------------------------------------------------------------- -## Maas Routing Model Properties -##------------------------------------------------------------------------------------------- -Maas.RoutingModel.maxDistanceForPickup = 5 -Maas.RoutingModel.maxDiversionTimeForPickup = 5 -Maas.RoutingModel.minutesPerSimulationPeriod = 5 -Maas.RoutingModel.maxPassengers = 6 -Maas.RoutingModel.maxWalkDistance = 0.15 -Maas.RoutingModel.vehicletrip.output.file = output/TNCTrips.csv -Maas.RoutingModel.vehicletrip.output.matrix = output/TNCVehicleTrips - -Maas.RoutingModel.routeIntrazonal = false -#NULL,DRIVEALONE,SHARED2,SHARED3,WALK,BIKE,WALK_SET,PNR_SET,KNR_SET,TNC_SET,TAXI,TNC_SINGLE,TNC_SHARED,SCHBUS -Maas.RoutingModel.Modes = 0,0,0,0,0,0,0,0,0,0,1,1,1,0 -Maas.RoutingModel.SharedEligible = 0,0,0,0,0,0,0,0,0,0,0,0,1,0 -Maas.RoutingModel.maxDistanceBeforeRefuel = 300 -Maas.RoutingModel.timeRequiredForRefuel = 15 - -Maas.AVAllocationModel.vehicletrip.output.file = output/householdAVTrips.csv -Maas.AVAllocationModel.vehicletrip.output.matrix = output/emptyAVTrips - -Maas.AVAllocation.uec.file = AutonomousVehicleAllocationChoice.xls -Maas.AVAllocation.data.page = 0 -Maas.AVAllocation.vehiclechoice.model.page = 1 -Maas.AVAllocation.parkingchoice.model.page = 2 -Maas.AVAllocation.triputility.model.page = 3 -Mobility.AV.RemoteParkingCostPerHour = ${Mobility.AV.RemoteParkingCostPerHour} -# -## END--EMERGING MOBILITY SECTION -############################################################################################# -# Transit PCE VEH Conversion cliu 8/19/20 -##################################################################################### 
-transit.bus.pceveh = 3.0 -############################################################################################################################################################################## -## -## CORE MODEL RUN PROPERTIES: CONTROL STEPS RUN IN CORE MODEL -## -############################################################################################################################################################################## -Model.Random.Seed = 1 -# -#RunModel.Clear.MatrixMgr.At.Start = false -# -## Set to true if read the accessibilities from an input file instead of calculating them prior to running CTRAMP -#acc.read.input.file = false -# -## Setting shadow price files to null will reset prices to 0. If running new land-use scenario, set files to null and set maximum iterations to 20. -## Then copy shadow price output files to input directory, set maximum iterations to 1 for any subsequent runs with the same land-use file. -#UsualWorkLocationChoice.ShadowPrice.Input.File = input/ShadowPricingOutput_work_9.csv -#UsualSchoolLocationChoice.ShadowPrice.Input.File = input/ShadowPricingOutput_school_9.csv -#uwsl.ShadowPricing.Work.MaximumIterations = 1 -#uwsl.ShadowPricing.School.MaximumIterations = 1 -#uwsl.ShadowPricing.OutputFile = output/ShadowPricingOutput.csv -# -#uwsl.run.workLocChoice = true -#uwsl.run.schoolLocChoice = true -#uwsl.write.results = true -# -#uwsl.use.new.soa = false -#nmdc.use.new.soa = false -#slc.use.new.soa = false -# -## properties for distributed time coefficient -#distributedTimeCoefficients = true -# -#timeDistribution.mean.work = 1.0 -#timeDistribution.standardDeviation.work = 0.7 -#timeDistribution.mean.nonWork = 1.0 -#timeDistribution.standardDeviation.nonWork = 0.6 -# -#timeDistribution.randomSeed = 2301832 -# -## value of time thresholds for skimming, assignment, mode choice UECs and trip tables ($/hr). 
-#valueOfTime.threshold.low = 8.81 -#valueOfTime.threshold.med = 18.0 -# -# -## save tour mode choice utilities and probabilities (for debugging purpose) -#TourModeChoice.Save.UtilsAndProbs = true -# -## packet size for distributing households, DO NOT change -#distributed.task.packet.size = 200 -# -##RunModel.RestartWithHhServer = uwsl -#RunModel.RestartWithHhServer = none -##RunModel.RestartWithHhServer = ao -##RunModel.RestartWithHhServer = stf -# -## Model Component run flags; Wu's note: not functional yet -#RunModel.PreAutoOwnership = true -#RunModel.UsualWorkAndSchoolLocationChoice = true -#RunModel.AutoOwnership = true -#RunModel.TransponderChoice = true -#RunModel.FreeParking = true -#RunModel.CoordinatedDailyActivityPattern = true -#RunModel.IndividualMandatoryTourFrequency = true -#RunModel.MandatoryTourModeChoice = true -#RunModel.MandatoryTourDepartureTimeAndDuration = true -#RunModel.SchoolEscortModel = true -#RunModel.JointTourFrequency = true -#RunModel.JointTourLocationChoice = true -#RunModel.JointTourDepartureTimeAndDuration = true -#RunModel.JointTourModeChoice = true -#RunModel.IndividualNonMandatoryTourFrequency = true -#RunModel.IndividualNonMandatoryTourLocationChoice = true -#RunModel.IndividualNonMandatoryTourDepartureTimeAndDuration = true -#RunModel.IndividualNonMandatoryTourModeChoice = true -#RunModel.AtWorkSubTourFrequency = true -#RunModel.AtWorkSubTourLocationChoice = true -#RunModel.AtWorkSubTourDepartureTimeAndDuration = true -#RunModel.AtWorkSubTourModeChoice = true -#RunModel.StopFrequency = true -#RunModel.StopLocation = true -# -############################################################################################################################################################################## -## -## INPUT PROPERTIES -## -############################################################################################################################################################################## -##PopSyn Inputs -PopulationSynthesizer.InputToCTRAMP.HouseholdFile = input/households.csv -PopulationSynthesizer.InputToCTRAMP.PersonFile = input/persons.csv -PopulationSynthesizer.OccupCodes = input/pecas_occ_occsoc_acs.csv -PopulationSynthesizer.IndustryCodes = input/activity_code_indcen_acs.csv -## -## The military industry ranges are used to recode military occupation. This is -## necessary because military workers identify themselves as non-military occupations. -## The models need to be consistent with PECAS, where all military workers are in -## the military occupation category 56. 
-#PopulationSynthesizer.MilitaryIndustryRange = 9670,9870 -# -## auxiliary inputs, these are scenario-specific -#taz.driveaccess.taps.file = input/accessam.csv -#tap.ptype.file = input/tap.ptype -#taz.parkingtype.file = input/zone.park -#taz.terminal.time.file = input/zone.term -#maz.tap.tapLines = output/tapLines.csv -# -## transit stop attribute file -#transit.stop.file = input/trstop.csv -# -############################################################################################################################################################################## -## -## OUTPUT PROPERTIES -## -############################################################################################################################################################################## -#Results.WriteDataToFiles = true -Results.HouseholdDataFile = output/resident/final_households.csv -Results.PersonDataFile = output/resident/final_persons.csv -#Results.IndivTourDataFile = output/indivTourData.csv -#Results.JointTourDataFile = output/jointTourData.csv -Results.TripDataFile = output/resident/final_trips.csv -#Results.JointTripDataFile = output/jointTripData.csv -#Results.WriteDataToDatabase = false -#Results.HouseholdTable = household_data -#Results.PersonTable = person_data -#Results.IndivTourTable = indiv_tour_data -#Results.JointTourTable = joint_tour_data -#Results.IndivTripTable = indiv_trip_data -#Results.JointTripTable = joint_trip_data -#Results.AutoTripMatrix = output/autoTrips -#Results.TranTripMatrix = output/tranTrips -#Results.NMotTripMatrix = output/nmotTrips -#Results.OthrTripMatrix = output/othrTrips -#Results.PNRFile = output/PNRByTAP_Vehicles.csv -#Results.CBDFile = output/CBDByMGRA_Vehicles.csv -Results.MatrixType = OMX -#Results.segmentByTransponderOwnership = true -# -# -#Results.AutoOwnership = output/aoResults.csv -#read.pre.ao.results = false -#read.pre.ao.filename = output/aoResults_pre.csv -# -#Results.UsualWorkAndSchoolLocationChoice = output/wsLocResults.csv -#read.uwsl.results = false -#read.uwsl.filename = output/wsLocResults_1.csv -# -############################################################################################################################################################################# -# -# CORE MODEL UECS -# -############################################################################################################################################################################# -# UECs for calculating accessibilities -#acc.uec.file = %project.folder%/uec/Accessibilities.xls -#acc.data.page = 0 -#acc.sov.offpeak.page = 1 -#acc.sov.peak.page = 2 -#acc.hov.offpeak.page = 3 -#acc.hov.peak.page = 4 -#acc.maas.offpeak.page = 5 -#acc.maas.peak.page = 6 -#acc.nonmotorized.page = 7 -#acc.constants.page = 8 -#acc.sizeTerm.page = 9 -#acc.schoolSizeTerm.page = 10 -#acc.workerSizeTerm.page = 11 -#acc.dcUtility.uec.file = %project.folder%/uec/Accessibilities_DC.xls -#acc.dcUtility.data.page = 0 -#acc.dcUtility.page = 1 -# -## accessibility file location -#acc.output.file = input/accessibilities.csv -# -##UECs for calculating destination choice based land use accessibilities -#lu.acc.dcUtility.uec.file = %project.folder%/uec/Accessibilities_LU_DC.xls -#lu.acc.dcUtility.data.page = 0 -#lu.acc.dcUtility.page = 1 -#lu.accessibility.alts.file = Acc_LU_alts.csv -# -## land use accessibililty file locations -#lu.acc.output.file = output/luAccessibilities.csv -#lu.acc.mc.logsums.output.file = output/luLogsums.csv -# -## set either or both averaging methods to be used to write 
LU accessibilities files -## also requires command line parameter "-luAcc true" and acc.read.input.file = false -#lu.acc.simple.averaging.method = true -#lu.acc.logit.averaging.method = true -# -#accessibility.alts.file = Acc_alts.csv -# -##UEC for Mandatory accessibilities -#acc.mandatory.uec.file = %project.folder%/uec/MandatoryAccess.xls -#acc.mandatory.data.page = 0 -#acc.mandatory.auto.page = 1 -#acc.mandatory.autoLogsum.page = 2 -#acc.mandatory.bestWalkTransit.page = 3 -#acc.mandatory.bestDriveTransit.page = 4 -#acc.mandatory.transitLogsum.page = 5 -# -## UECs for auto ownership model -#ao.uec.file = AutoOwnership.xls -#ao.data.page = 0 -#ao.model.page = 1 -# -## UECs for Mandatory tour destination choice model -#uwsl.dc.uec.file = TourDestinationChoice2020.xls -#uwsl.dc2.uec.file = TourDestinationChoice2.xls -#uwsl.soa.uec.file = DestinationChoiceAlternativeSample.xls -#uwsl.soa.alts.file = DestinationChoiceAlternatives.csv -#uwsl.work.soa.SampleSize = 30 -#uwsl.school.soa.SampleSize = 30 -# -## The UEC file for work purposes includes TAZ Size in the expressions -#work.soa.uec.file = TourDcSoaDistance.xls -#work.soa.uec.data = 0 -#work.soa.uec.model = 1 -# -## The UEC file for school purposes does not include TAZ Size in the expressions -## so that the utilities can be stored as exponentiated distance utility matrices -## for univ, hs, gs, and ps, and then multiplied by the various school segment -## size terms for each of these 4 groups of school segments. -#univ.soa.uec.file = TourDcSoaDistanceNoSchoolSize.xls -#univ.soa.uec.data = 0 -#univ.soa.uec.model = 1 -# -#hs.soa.uec.file = TourDcSoaDistanceNoSchoolSize.xls -#hs.soa.uec.data = 0 -#hs.soa.uec.model = 2 -# -#gs.soa.uec.file = TourDcSoaDistanceNoSchoolSize.xls -#gs.soa.uec.data = 0 -#gs.soa.uec.model = 3 -# -#ps.soa.uec.file = TourDcSoaDistanceNoSchoolSize.xls -#ps.soa.uec.data = 0 -#ps.soa.uec.model = 4 -# -##UECs for transponder ownership model -#tc.choice.avgtts.file = output/transponderModelAccessibilities.csv -#tc.uec.file = TransponderOwnership.xls -#tc.data.page = 0 -#tc.model.page = 1 -#tc.everyone.owns = 0 -# -# -##UECs for parking provision model -#fp.uec.file = ParkingProvision.xls -#fp.data.page = 0 -#fp.model.page = 1 -# -##UEC for telecommute model -#te.uec.file = Telecommute.xls -#te.data.page = 0 -#te.model.page = 1 -# -# -##UECs for CDAP model -#cdap.uec.file = CoordinatedDailyActivityPattern.xls -#cdap.data.page = 0 -#cdap.one.person.page = 1 -#cdap.two.person.page = 2 -#cdap.three.person.page = 3 -#cdap.all.person.page = 4 -#cdap.joint.page = 5 -# -##UECs for individual mandatory tour frequency model -#imtf.uec.file = MandatoryTourFrequency.xls -#imtf.data.page = 0 -#imtf.model.page = 1 -# -##UECs for Non-Mandatory tour destination sampling -#nonSchool.soa.uec.file = TourDcSoaDistance.xls -#escort.soa.uec.data = 0 -#escort.soa.uec.model = 2 -#other.nonman.soa.uec.data = 0 -#other.nonman.soa.uec.model = 3 -#atwork.soa.uec.data = 0 -#atwork.soa.uec.model = 4 -# -soa.taz.dist.alts.file = SoaTazDistAlts.csv -# -#nmdc.dist.alts.file = NonMandatoryTlcAlternatives.csv -#nmdc.soa.alts.file = DestinationChoiceAlternatives.csv -#nmdc.soa.SampleSize = 30 -# -##UECs for Non-Mandatory tour destination choice model -#nmdc.uec.file2 = TourDestinationChoice2.xls -#nmdc.uec.file = TourDestinationChoice2020.xls -#nmdc.data.page = 0 -#nmdc.escort.model.page = 7 -#nmdc.shop.model.page = 8 -#nmdc.maint.model.page = 9 -#nmdc.eat.model.page = 10 -#nmdc.visit.model.page = 11 -#nmdc.discr.model.page = 12 -#nmdc.atwork.model.page = 
13 -# -## following properties use tod sampling instead of logsums -#nmdc.SampleTODPeriod = true -#nmdc.SampleTODPeriod.file = input/Non_Mand_Tours_ArrDep_Distbn.csv -# -##UECs for Non-Mandatory tour destination sampling -#nmdc.soa.uec.file = DestinationChoiceAlternativeSample.xls -#nmdc.soa.data.page = 0 -#nmdc.soa.escort.model.page = 6 -#nmdc.soa.shop.model.page = 7 -#nmdc.soa.maint.model.page = 7 -#nmdc.soa.eat.model.page = 7 -#nmdc.soa.visit.model.page = 7 -#nmdc.soa.discr.model.page = 7 -#nmdc.soa.atwork.model.page = 8 -# -##UECs for School Escorting Model -#school.escort.uec.filename = SchoolEscorting.xls -#school.escort.alts.file = SchoolEscortingAlts.csv -#school.escort.data.sheet = 0 -#school.escort.outbound.model.sheet = 1 -#school.escort.inbound.conditonal.model.sheet = 2 -#school.escort.outbound.conditonal.model.sheet = 3 -#school.escort.RNG.offset = 384571483 -# -##UECs for tour mode choice model -#tourModeChoice.uec.file = TourModeChoice.xls -#tourModeChoice.maint.model.page = 4 -#tourModeChoice.discr.model.page = 5 -#tourModeChoice.atwork.model.page = 6 -# -## utility coefficients by tour purpose (work, univ, school, maintenance, discretionary, work-based). These are at tour level. -#tour.utility.ivt.coeffs = -0.016,-0.016,-0.01,-0.017,-0.015,-0.032 -#tour.utility.income.coeffs = -0.625,-0.262,-0.262,-0.262,-0.262,-0.262 -#tour.utility.income.exponents = 0.6,0.5,0.5,0.5,0.5,0.5 -# -##UECs for tour TOD choice model -#departTime.uec.file = TourDepartureAndDuration.xls -#departTime.data.page = 0 -#departTime.work.page = 1 -#departTime.univ.page = 2 -#departTime.school.page = 3 -#departTime.escort.page = 4 -#departTime.shop.page = 5 -#departTime.maint.page = 6 -#departTime.eat.page = 7 -#departTime.visit.page = 8 -#departTime.discr.page = 9 -#departTime.atwork.page = 10 -#departTime.alts.file = DepartureTimeAndDurationAlternatives.csv -# -##UECs for joint tour frequency choice model -#jtfcp.uec.file = JointTourFrequency.xls -#jtfcp.alternatives.file = JointAlternatives.csv -#jtfcp.data.page = 0 -#jtfcp.freq.comp.page = 1 -#jtfcp.participate.page = 2 -# -##UECs for individual non-mandatory tour frequency model -#inmtf.uec.file = NonMandatoryIndividualTourFrequency.xls -#inmtf.FrequencyExtension.ProbabilityFile = IndividualNonMandatoryTourFrequencyExtensionProbabilities_p1.csv -#IndividualNonMandatoryTourFrequency.AlternativesList.InputFile = IndividualNonMandatoryTourFrequencyAlternatives.csv -#inmtf.data.page = 0 -#inmtf.perstype1.page = 1 -#inmtf.perstype2.page = 2 -#inmtf.perstype3.page = 3 -#inmtf.perstype4.page = 4 -#inmtf.perstype5.page = 5 -#inmtf.perstype6.page = 6 -#inmtf.perstype7.page = 7 -#inmtf.perstype8.page = 8 -# -##UECs for at work subtour frequency model -#awtf.uec.file = AtWorkSubtourFrequency.xls -#awtf.data.page = 0 -#awtf.model.page = 1 -# -##UECs for stop frequency model -#stf.uec.file = StopFrequency.xls -#stf.purposeLookup.proportions = StopPurposeLookupProportions.csv -#stf.data.page = 0 -#stf.work.page = 1 -#stf.univ.page = 2 -#stf.school.page = 3 -#stf.escort.page = 4 -#stf.shop.page = 5 -#stf.maint.page = 6 -#stf.eat.page = 7 -#stf.visit.page = 8 -#stf.discr.page = 9 -#stf.subtour.page = 10 -# -##UECs for stop location choice model -#slc.uec.file = StopLocationChoice.xls -#slc.uec.data.page = 0 -#slc.mandatory.uec.model.page = 1 -#slc.maintenance.uec.model.page = 2 -#slc.discretionary.uec.model.page = 3 -#slc.alts.file = SlcAlternatives.csv -# -#slc.soa.uec.file = SlcSoaSize.xls -#slc.soa.alts.file = DestinationChoiceAlternatives.csv -# 
-#auto.slc.soa.distance.uec.file = SlcSoaDistanceUtility.xls -#auto.slc.soa.distance.data.page = 0 -#auto.slc.soa.distance.model.page = 1 -# -#slc.soa.size.uec.file = SlcSoaSize.xls -#slc.soa.size.uec.data.page = 0 -#slc.soa.size.uec.model.page = 1 -# -#stop.depart.arrive.proportions = StopDepartArriveProportions.csv -# -##UECs for trip mode choice model -#tripModeChoice.uec.file = TripModeChoice.xls -# -## utility coefficients by tour purpose (work, univ, school, maintenance, discretionary, work-based). These are at trip level. -#trip.utility.ivt.coeffs = -0.032,-0.032,-0.02,-0.034,-0.03,-0.064 -#trip.utility.income.coeffs = -1.25,-0.524,-0.524,-0.524,-0.524,-0.524 -#trip.utility.income.exponents = 0.6,0.5,0.5,0.5,0.5,0.5 -# -# -##UECs for parking location choice model -#plc.uec.file = ParkLocationChoice.xls -#plc.uec.data.page = 0 -#plc.uec.model.page = 1 -# -#plc.alts.corresp.file = ParkLocationAlts.csv -#plc.alts.file = ParkLocationSampleAlts.csv -# -#mgra.avg.cost.output.file = output/mgraParkingCost.csv -# -#mgra.avg.cost.dist.coeff.work = -8.6 -#mgra.avg.cost.dist.coeff.other = -4.9 -# -#park.cost.reimb.mean = -0.05 -#park.cost.reimb.std.dev = 0.54 -# -##UECs for best transit path finding -#utility.bestTransitPath.uec.file = BestTransitPathUtility.xls -#utility.bestTransitPath.data.page = 0 -#utility.bestTransitPath.tapToTap.page = 1 -#utility.bestTransitPath.walkAccess.page = 2 -#utility.bestTransitPath.driveAccess.page = 3 -#utility.bestTransitPath.walkEgress.page = 4 -#utility.bestTransitPath.driveEgress.page = 5 -#utility.bestTransitPath.driveAccDisutility.page = 6 -#utility.bestTransitPath.driveEgrDisutility.page = 7 -#utility.bestTransitPath.skim.sets = 3 -#utility.bestTransitPath.alts = 4 -#utility.bestTransitPath.maxPathsPerSkimSetForLogsum = 1,1,1 -#utility.bestTransitPath.nesting.coeff = 0.24 -# -##UECs for auto skimming -#skims.auto.uec.file = AutoSkims.xls -#skims.auto.data.page = 0 -#skims.auto.ea.page = 1 -#skims.auto.am.page = 2 -#skims.auto.md.page = 3 -#skims.auto.pm.page = 4 -#skims.auto.ev.page = 5 -# -##UECs for TAZ distances -taz.distance.uec.file = tazDistance.xls -taz.distance.data.page = 0 -taz.od.distance.ea.page = 1 -taz.od.distance.am.page = 2 -taz.od.distance.md.page = 3 -taz.od.distance.pm.page = 4 -taz.od.distance.ev.page = 5 -# -##UECs for TAZ times -taz.od.time.ea.page = 6 -taz.od.time.am.page = 7 -taz.od.time.md.page = 8 -taz.od.time.pm.page = 9 -taz.od.time.ev.page = 10 -# -# -##UECs for walk-transit-walk skimming -#skim.walk.transit.walk.uec.file = WalkTransitWalkSkims.xls -#skim.walk.transit.walk.data.page = 0 -#skim.walk.transit.walk.skim.page = 1 -#skim.walk.transit.walk.skims = 13 -# -##UECs for walk-transit-drive skimming -#skim.walk.transit.drive.uec.file = WalkTransitDriveSkims.xls -#skim.walk.transit.drive.data.page = 0 -#skim.walk.transit.drive.skim.page = 1 -#skim.walk.transit.drive.skims = 13 -# -##UECs for drive-transit-walk skimming -#skim.drive.transit.walk.uec.file = DriveTransitWalkSkims.xls -#skim.drive.transit.walk.data.page = 0 -#skim.drive.transit.walk.skim.page = 1 -#skim.drive.transit.walk.skims = 13 -# -# -###################################################################################### -## IE Model Settings (run as part of CT-RAMP) -###################################################################################### -# -#RunModel.InternalExternal = true -# -#ie.uec.file = InternalExternalTripChoice.xls -#ie.data.page = 0 -#ie.model.page = 1 -#ie.logsum.distance.coeff = -0.05 -#external.tazs = 
1,2,3,4,5,6,7,8,9,10,11,12 -# -# -#internalExternal.dc.uec.file = InternalExternalDestinationChoice.xls -#internalExternal.dc.uec.data.page = 0 -#internalExternal.dc.uec.model.page = 1 -#internalExternal.dc.uec.alts.file = InternalExternalDestinationChoiceAlternatives.csv -# -#internalExternal.tour.tod.file = input/internalExternal_tourTOD.csv -# -#internalExternal.trip.mc.uec.file = InternalExternalTripModeChoice.xls -#internalExternal.trip.mc.data.page = 0 -#internalExternal.trip.mc.model.page = 1 -# -#internalExternal.trip.output.file = output/internalExternalTrips.csv -# -#internalExternal.results.autoTripMatrix = output/autoInternalExternalTrips -#internalExternal.results.nMotTripMatrix = output/nmotInternalExternalTrips -#internalExternal.results.tranTripMatrix = output/tranInternalExternalTrips -#internalExternal.results.othrTripMatrix = output/othrInternalExternalTrips -# -###################################################################################### -## Cross-Border Model Settings -###################################################################################### -#crossBorder.purpose.nonsentri.file = input/crossBorder_tourPurpose_nonSENTRI.csv -#crossBorder.purpose.sentri.file = input/crossBorder_tourPurpose_SENTRI.csv -# -#crossBorder.tour.tod.file = input/crossBorder_tourEntryAndReturn.csv -# -#crossBorder.dc.soa.uec.file = CrossBorderDestinationChoiceSample.xls -#crossBorder.dc.soa.data.page = 0 -#crossBorder.dc.soa.model.page = 1 -#crossBorder.dc.soa.size.page = 2 -#crossborder.dc.soa.alts.file = CrossBorderDestinationChoiceSoaAlternatives2020.csv -# -#crossBorder.dc.uec.file = CrossBorderDestinationChoice2020.xls -#crossBorder.dc.data.page = 0 -#crossBorder.dc.model.page = 1 -#crossborder.dc.alts.file = CrossBorderDestinationChoiceAlternatives.csv -# -#crossBorder.dc.colonia.file = input/crossBorder_supercolonia.csv -#crossBorder.dc.colonia.distance.parameter = -0.19 -#crossBorder.dc.soa.sampleRate = 30 -# -##crossBorder.tour.mc.uec.file = CrossBorderTourModeChoice.xls -#crossBorder.tour.mc.uec.file = CrossBorderTourModeChoice2020.xls -#crossBorder.tour.mc.data.page = 0 -#crossBorder.tour.mc.mandatory.model.page = 1 -#crossBorder.tour.mc.nonmandatory.model.page = 2 -#crossBorder.poe.waittime.file = input/crossBorder_pointOfEntryWaitTime.csv -# -#crossBorder.trip.mc.uec.file = CrossBorderTripModeChoice.xls -#crossBorder.trip.mc.data.page = 0 -#crossBorder.trip.mc.model.page = 1 -# -#crossBorder.stop.frequency.file = input/crossBorder_stopFrequency.csv -#crossBorder.stop.purpose.file = input/crossBorder_stopPurpose.csv -# -#crossBorder.slc.soa.uec.file = CrossBorderStopLocationChoiceSample.xls -#crossBorder.slc.soa.data.page = 0 -#crossBorder.slc.soa.model.page = 1 -#crossBorder.slc.soa.alts.file = SoaTazDistAlts.csv -# -#crossBorder.slc.uec.file = CrossBorderStopLocationChoice.xls -#crossBorder.slc.data.page = 0 -#crossBorder.slc.model.page = 1 -# -#crossBorder.stop.outbound.duration.file = input/crossBorder_outboundStopDuration.csv -#crossBorder.stop.inbound.duration.file = input/crossBorder_inboundStopDuration.csv -# -#crossBorder.tour.output.file = output/crossBorderTours.csv -crossBorder.trip.output.file = output/crossborder/final_trips.csv -# -#crossBorder.results.autoTripMatrix = output/autoCrossBorderTrips -#crossBorder.results.nMotTripMatrix = output/nmotCrossBorderTrips -#crossBorder.results.tranTripMatrix = output/tranCrossBorderTrips -#crossBorder.results.othrTripMatrix = output/othrCrossBorderTrips -# 
-###################################################################################### -## Visitor Model Settings -###################################################################################### -#visitor.business.tour.file = input/visitor_businessFrequency.csv -#visitor.personal.tour.file = input/visitor_personalFrequency.csv -# -#visitor.partySize.file = input/visitor_partySize.csv -#visitor.autoAvailable.file = input/visitor_autoAvailable.csv -#visitor.income.file = input/visitor_income.csv -# -#visitor.dc.soa.uec.file = VisitorDestinationChoiceSample.xls -#visitor.dc.soa.data.page = 0 -#visitor.dc.soa.work.page = 1 -#visitor.dc.soa.recreate.page = 2 -#visitor.dc.soa.dining.page = 3 -#visitor.dc.soa.size.page = 4 -#visitor.dc.soa.alts.file = SoaTazDistAlts.csv -# -#visitor.dc.uec.file = VisitorDestinationChoice.xls -#visitor.dc.data.page = 0 -#visitor.dc.work.page = 1 -#visitor.dc.recreate.page = 2 -#visitor.dc.dining.page = 3 -# -#visitor.tour.tod.file = input/visitor_tourTOD.csv -# -#visitor.mc.uec.file = VisitorTourModeChoice.xls -#visitor.mc.data.page = 0 -#visitor.mc.model.page = 1 -# -#visitor.stop.frequency.file = input/visitor_stopFrequency.csv -#visitor.stop.purpose.file = input/visitor_stopPurpose.csv -#visitor.stop.outbound.duration.file = input/visitor_outboundStopDuration.csv -#visitor.stop.inbound.duration.file = input/visitor_inboundStopDuration.csv -# -#visitor.slc.soa.uec.file = VisitorStopLocationChoiceSample.xls -#visitor.slc.soa.data.page = 0 -#visitor.slc.soa.model.page = 1 -# -#visitor.slc.uec.file = VisitorStopLocationChoice.xls -#visitor.slc.data.page = 0 -#visitor.slc.model.page = 1 -# -#visitor.trip.mc.uec.file = VisitorTripModeChoice.xls -#visitor.trip.mc.data.page = 0 -#visitor.trip.mc.model.page = 1 -# -#visitor.micromobility.uec.file = VisitorMicromobilityChoice.xls -#visitor.micromobility.data.page = 0 -#visitor.micromobility.model.page = 1 -# -# -# -# -#visitor.tour.output.file = output/visitorTours.csv -visitor.trip.output.file = output/visitor/final_trips.csv -# -#visitor.results.autoTripMatrix = output/autoVisitorTrips -#visitor.results.nMotTripMatrix = output/nmotVisitorTrips -#visitor.results.tranTripMatrix = output/tranVisitorTrips -#visitor.results.othrTripMatrix = output/othrVisitorTrips -# -# -## These settings are for building an estimation file, not used for main visitor model code -#visitor.uec.file = VisitorSize.xls -#visitor.uec.data.page = 0 -#visitor.uec.sizeTerms.page = 1 -# -###################################################################################### -## SAN Airport Model Settings -###################################################################################### -#airport.SAN.purpose.file = input/airport_purpose.SAN.csv -#airport.SAN.size.file = input/airport_party.SAN.csv -#airport.SAN.duration.file = input/airport_nights.SAN.csv -#airport.SAN.income.file = input/airport_income.SAN.csv -#airport.SAN.departureTime.file = input/airport_departure.SAN.csv -#airport.SAN.arrivalTime.file = input/airport_arrival.SAN.csv -airport.SAN.output.file = output/airport.SAN/final_trips.csv -# -#airport.SAN.dc.uec.file = AirportDestinationChoice.SAN.xls -#airport.SAN.dc.data.page = 0 -#airport.SAN.dc.size.page = 5 -#airport.SAN.dc.segment1.page = 1 -#airport.SAN.dc.segment2.page = 2 -#airport.SAN.dc.segment3.page = 3 -#airport.SAN.dc.segment4.page = 4 -# -#airport.SAN.mc.uec.file = AirportModeChoice.SAN2016.xls -#airport.SAN.mc.data.page = 0 -#airport.SAN.mc.da.page = 1 -#airport.SAN.mc.s2.page = 2 -#airport.SAN.mc.s3.page = 3 
-#airport.SAN.mc.transit.page = 4 -#airport.SAN.mc.accessMode.page = 5 -# -#airport.SAN.externalStationFile = uec/InternalExternalDestinationChoiceAlternatives.csv -# -#airport.SAN.results.autoTripMatrix = output/autoAirportTrips.SAN -#airport.SAN.results.nMotTripMatrix = output/nmotAirportTrips.SAN -#airport.SAN.results.tranTripMatrix = output/tranAirportTrips.SAN -#airport.SAN.results.othrTripMatrix = output/othrAirportTrips.SAN -# -###################################################################################### -## CBX Airport Model Settings -###################################################################################### -#airport.CBX.purpose.file = input/airport_purpose.CBX.csv -#airport.CBX.size.file = input/airport_party.CBX.csv -#airport.CBX.duration.file = input/airport_nights.CBX.csv -#airport.CBX.income.file = input/airport_income.CBX.csv -#airport.CBX.departureTime.file = input/airport_departure.CBX.csv -#airport.CBX.arrivalTime.file = input/airport_arrival.CBX.csv -airport.CBX.output.file = output/airport.CBX/final_trips.csv -RunModel.skipTransitConnector = true -RunModel.skipExternal = false,false,false -SavedFrom = Emme Modeller properties writer Process ID 51972 -SavedLast = Sep-07-2023 07:59:49 -active.coef.dwrongwy = ${active.coef.dwrongwy} -active.coef.dartne2 = ${active.coef.dartne2} -TNC.single.baseFare = ${TNC.single.baseFare} -TNC.shared.costMinimum = ${TNC.shared.costMinimum} -TNC.shared.costPerMinute = ${TNC.shared.costPerMinute} -active.micromobility.fixedCost = ${active.micromobility.fixedCost} -active.micromobility.variableCost = ${active.micromobility.variableCost} -airport.CBX.airportMgra = ${airport.CBX.airportMgra} -TNC.single.costPerMile = ${TNC.single.costPerMile} -active.ebike.ownership = ${active.ebike.ownership} -taxi.costPerMinute = ${taxi.costPerMinute} -airport.SAN.connecting = ${airport.SAN.connecting} -atdm.factor = ${atdm.factor} -active.maxdist.bike.taz = ${active.maxdist.bike.taz} -TNC.single.costMinimum = ${TNC.single.costMinimum} -airport.CBX.connecting = ${airport.CBX.connecting} -active.coef.distcla0 = ${active.coef.distcla0} -active.coef.distcla3 = ${active.coef.distcla3} -smartSignal.factor.MA = ${smartSignal.factor.MA} -airport.CBX.enplanements = ${airport.CBX.enplanements} -active.maxdist.bike.mgra = ${active.maxdist.bike.mgra} -TNC.single.costPerMinute = ${TNC.single.costPerMinute} -Mobility.AV.Share = ${Mobility.AV.Share} -active.bike.minutes.per.mile = ${active.bike.minutes.per.mile} -TNC.shared.costPerMile = ${TNC.shared.costPerMile} -smartSignal.factor.PA = ${smartSignal.factor.PA} -airport.SAN.airportMgra = ${airport.SAN.airportMgra} -active.coef.dbikblvd = ${active.coef.dbikblvd} -crossBorder.tours = ${crossBorder.tours} -tc.everyone.owns = ${tc.everyone.owns} -crossBorder.sentriShare = ${crossBorder.sentriShare} -active.coef.dcyctrac = ${active.coef.dcyctrac} -TNC.shared.baseFare = ${TNC.shared.baseFare} -airport.SAN.enplanements = ${airport.SAN.enplanements} -taxi.baseFare = ${taxi.baseFare} -active.coef.distcla1 = ${active.coef.distcla1} -active.microtransit.fixedCost = ${active.microtransit.fixedCost} -taxi.costPerMile = ${taxi.costPerMile} -active.coef.distcla2 = ${active.coef.distcla2} - -# -##################################################################################### -# Smart Signal Properties wsu 8/22/18 -##################################################################################### -smartSignal.factor.LC = ${smartSignal.factor.LC} -crossborder.dc.soa.alts.file = ${crossborder.dc.soa.alts.file} 
-visualizer.reference.path = ${visualizer.reference.path} -nmdc.uec.file = ${nmdc.uec.file} -airport.SAN.mc.uec.file = ${airport.SAN.mc.uec.file} -crossBorder.dc.uec.file = ${crossBorder.dc.uec.file} -crossBorder.tour.mc.uec.file = ${crossBorder.tour.mc.uec.file} -uwsl.dc.uec.file = ${uwsl.dc.uec.file} - -# -path.choice.uec.spreadsheet = %project.folder%/uec/BikeTripPathChoice.xls -path.choice.uec.model.sheet = 1 -path.choice.uec.data.sheet = 0 -path.choice.max.path.count = 200 -btpc.alts.file = bike_path_alts.csv -active.logsum.matrix.file.bike.taz = bikeTazLogsum.csv -active.logsum.matrix.file.bike.mgra = bikeMgraLogsum.csv -active.logsum.matrix.file.walk.mgra = walkMgraEquivMinutes.csv -#active.logsum.matrix.file.walk.mgratap = walkMgraTapEquivMinutes.csv - -active.bike.write.derived.network = true -active.bike.derived.network.edges = derivedBikeEdges.csv -active.bike.derived.network.nodes = derivedBikeNodes.csv -active.bike.derived.network.traversals = derivedBikeTraversals.csv - -active.assignment.file.bike = bikeAssignmentResults.csv -active.micromobility.file.walk.mgra = microMgraEquivMinutes.csv -#active.micromobility.file.walk.mgratap = microMgraTapEquivMinutes.csv - -AtTransitConsistency.xThreshold = 1.0 -AtTransitConsistency.yThreshold = 1.0 - -##################################################################################### -# Commercial Vehicle Model Settings -##################################################################################### -#scale factor to use in cvm trip generation. Also, used during demand import to factor-in demand accordingly -cvm.scale_factor = 1 -#scale factors by vehicle (light, medium, and heavy) and time of day (ea,am,md,pm,ev) - used to boost cvm demand -#light vehicles -cvm.scale_light = 1,2,3.5,2,1 -#medium vehicles -cvm.scale_medium = 1,1,1,1,1 -#heavy vehicles -cvm.scale_heavy = 1,1,1,1,1 -#cvm vehicle shares representing portions of the cvm vehicle trips that go to light-heavy trucks. -#share value should be between 0 and 1. 0 representing none will go to light-heavy truck and 1 means all will go. 
-cvm.share.light = 0.04 -cvm.share.medium = 0.64 -cvm.share.heavy = 0 - -# -################################################################# -# Active Transportation Model Settings -# updated 4/2/2014 wsu -################################################################# -active.node.file = %project.folder%/input/SANDAG_Bike_NODE.dbf -active.node.id = NodeLev_ID -active.node.fieldnames = mgra,taz,x,y,tap,signalized -active.node.columns = MGRA,TAZ,XCOORD,YCOORD,TAP,Signal -active.edge.file = %project.folder%/input/SANDAG_Bike_NET.dbf -active.edge.anode = A -active.edge.bnode = B -active.edge.directional = false -active.edge.fieldnames = functionalClass,distance,gain,bikeClass,lanes,cycleTrack,bikeBlvd,roadsegid -active.edge.columns.ab = Func_Class,Distance,AB_Gain,ABBikeClas,AB_Lanes,Bike2Sep,Bike3Blvd,ROADSEGID -active.edge.columns.ba = Func_Class,Distance,BA_Gain,BABikeClas,BA_Lanes,Bike2Sep,Bike3Blvd,ROADSEGID -active.edge.centroid.field = functionalClass -active.edge.centroid.value = 10 -active.edge.autospermitted.field = functionalClass -active.edge.autospermitted.values = 1,2,3,4,5,6,7 -# distance bins for control of path sampling -active.sample.distance.breaks = 99 -# minimum path sizes of alternative lists for each distance bin -active.sample.pathsizes = 2 -# minimum count of samples for each distance bin -active.sample.count.min = 10 -# maximum count of samples for each distance bin -active.sample.count.max = 100 -# scale of random cost for each sampling iteration where random cost = cost + scale * unif(0,1) * distance -active.sample.random.scale.coef = 0.5 -active.sample.random.scale.link = 0.7 -active.sample.random.seeded = true -active.sample.maxcost = 998 -active.maxdist.walk.mgra = 3.0 -active.maxdist.walk.tap = 1.0 -active.maxdist.micromobility.mgra = 3.0 -active.maxdist.micromobility.tap = 1.0 -active.maxdist.microtransit.mgra = 3.0 -active.maxdist.microtransit.tap = 3.0 -active.output.bike = %project.folder%/output/ -active.output.walk = %project.folder%/output/ -active.coef.nonscenic = 0.3 -active.coef.gain = 0.015 -active.coef.turn = 0.083 -active.coef.signals = 0.04 -active.coef.unlfrma = 0.36 -active.coef.unlfrmi = 0.15 -active.coef.untoma = 0.48 -active.coef.untomi = 0.1 -active.coef.gain.walk = 0.034 - -active.walk.minutes.per.mile = 20 - -active.micromobility.speed = 15 -active.micromobility.rentalTime = 1 -active.micromobility.constant = 60 -active.micromobility.vot = 15 - -micromobility.uec.file = MicromobilityChoice.xls -micromobility.data.page = 0 -micromobility.model.page = 1 - -active.microtransit.speed = 17 -active.microtransit.variableCost = 0.0 -active.microtransit.waitTime = 4.0 -active.microtransit.accessTime = 0.0 -active.microtransit.constant = 120 -active.microtransit.notAvailable = 999 - -active.microtransit.mgra.file = input/mobilityHubMGRAs.csv - -##################################################################################### -##################################################################################### -# Transit Tier 1 EMME Link Name zou 5/7/20 -##################################################################################### -transit.newMode = TIER 1 RAIL -transit.newMode.route = 581,582,583 From 12bf26a09c02cee777e0324ce2a58fcf56dece84 Mon Sep 17 00:00:00 2001 From: Cundo Arellano <51237056+cundo92@users.noreply.github.com> Date: Thu, 28 Mar 2024 16:39:23 -0700 Subject: [PATCH 42/43] Update hwyShapeExport.py Moved the order in which the IFC column was added because it was writing out a subset of the hwyTcad.csv file and hence 
necessary attributes were missing in later processes.
---
 src/main/python/hwyShapeExport.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/src/main/python/hwyShapeExport.py b/src/main/python/hwyShapeExport.py
index 02fbfb4ea..910df3dcd 100644
--- a/src/main/python/hwyShapeExport.py
+++ b/src/main/python/hwyShapeExport.py
@@ -16,6 +16,13 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame:
     Returns:
         A GeoPandas GeoDataFrame of the loaded highway network
     """
+
+    # temporary so that the sensitivity summary on data lake works
+    # the sensitivity summary on data lake uses IFC (from TCOV) rather than FC (from TNED)
+    hwy_tcad = pd.read_csv(os.path.join(scenario_path, "report", "hwyTcad.csv"))
+    hwy_tcad['IFC'] = hwy_tcad['FC']
+    hwy_tcad.to_csv(os.path.join(scenario_path, "report", "hwyTcad.csv"), index=False)
+
     # read in input highway network
     hwy_tcad = pd.read_csv(os.path.join(scenario_path, "report", "hwyTcad.csv"),
                            usecols=["ID",  # highway coverage id
@@ -59,11 +66,6 @@ def export_highway_shape(scenario_path: str) -> geopandas.GeoDataFrame:
                                     "ABPRELOAD_EV",  # preloaded bus flow - to-from - Evening
                                     "BAPRELOAD_EV",  # preloaded bus flow - from-to - Evening
                                     "geometry"])  # WKT geometry
-
-    # temporary so that the sensitivity summary on data lake works
-    # the sensitivity summary on data lake uses IFC (from TCOV) rather than FC (from TNED)
-    hwy_tcad['IFC'] = hwy_tcad['FC']
-    hwy_tcad.to_csv(os.path.join(scenario_path, "report", "hwyTcad.csv"), index=False)

     # read in loaded highway network for each time period
     for tod in ["EA", "AM", "MD", "PM", "EV"]:

From 3a339d0d054a4775b24870b2000fd98040642ee1 Mon Sep 17 00:00:00 2001
From: Cundo Arellano <51237056+cundo92@users.noreply.github.com>
Date: Tue, 2 Apr 2024 18:17:04 -0700
Subject: [PATCH 43/43] Update import_network.py

Import network updates to account for network coding that may result in zero lanes at specific times of day

Co-Authored-By: Kevin Bragg <15834594+inrokevin@users.noreply.github.com>
---
 src/main/emme/toolbox/import/import_network.py | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/src/main/emme/toolbox/import/import_network.py b/src/main/emme/toolbox/import/import_network.py
index 96c85e900..9819cd794 100644
--- a/src/main/emme/toolbox/import/import_network.py
+++ b/src/main/emme/toolbox/import/import_network.py
@@ -604,13 +604,18 @@ def create_road_base(self, network, attr_map):
         for field, (name, tcoved_type, emme_type, desc) in attr_map["LINK"].iteritems():
             if tcoved_type in ("TWO_WAY", "HWY_TWO_WAY", "ONE_WAY", "HWY_ONE_WAY"):
                 link_attr_map[field] = (name, tcoved_type.replace("HWY_", ""), emme_type, desc)
-
+
+        auto_mode = network.mode("d")
+
         def define_modes(arc):
-            if arc["FC"] in [11, 12] or arc["ABLNA"] == 0: #or ((arc["HOV"] < 1 or arc["HOV"] > 4) and arc["FC"] != 10):
-                vehicle_index = int(arc["MINMODE"] / 100)*100
-                aux_index = int(arc["MINMODE"] % 100)
-                return self._transit_mode_lookup[vehicle_index] | self._transit_mode_lookup[aux_index]
-            return [network.mode('d')]
+            vehicle_index = int(arc["MINMODE"] / 100)*100
+            aux_index = int(arc["MINMODE"] % 100)
+            veh_modes = self._transit_mode_lookup.get(vehicle_index, set([]))
+            aux_modes = self._transit_mode_lookup.get(aux_index, set([]))
+            modes = veh_modes | aux_modes
+            if arc["FC"] not in [11, 12, 99] and arc["HOV"] != 0:
+                modes |= set([auto_mode])
+            return modes
         self._create_base_net(
             hwy_data, network, mode_callback=define_modes, centroid_callback=is_centroid,
             link_attr_map=link_attr_map)
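
To make the new rule in PATCH 43/43 easier to follow outside of Emme: the revised define_modes always derives transit modes from the MINMODE code and then adds the auto mode unless the facility class is 11, 12, or 99 or the HOV code is 0, so a link coded with zero lanes (the old ABLNA == 0 test) is no longer stripped of its auto mode at import time. Below is a minimal standalone sketch of that rule using plain Python sets in place of Emme mode objects; the lookup table and mode names are illustrative placeholders, not SANDAG's actual MINMODE coding.

# Illustrative only: stand-in for self._transit_mode_lookup with made-up codes/modes
TRANSIT_MODE_LOOKUP = {
    0: set(),              # no transit vehicle mode coded on the link
    100: {"bus"},          # hypothetical vehicle-mode code
    1: {"walk_access"},    # hypothetical auxiliary-mode code
}

def define_modes(arc, auto_mode="auto"):
    """Return the set of modes for a link record, mirroring the patched logic."""
    vehicle_index = int(arc["MINMODE"] / 100) * 100    # hundreds part -> vehicle modes
    aux_index = int(arc["MINMODE"] % 100)              # remainder -> auxiliary modes
    modes = (TRANSIT_MODE_LOOKUP.get(vehicle_index, set())
             | TRANSIT_MODE_LOOKUP.get(aux_index, set()))
    # auto is added unless the facility class is 11, 12, or 99, or the HOV code is 0;
    # a zero lane count in a single direction/period no longer removes the auto mode here
    if arc["FC"] not in [11, 12, 99] and arc["HOV"] != 0:
        modes |= {auto_mode}
    return modes

print(define_modes({"MINMODE": 101, "FC": 1, "HOV": 2}))   # bus, walk_access, auto
print(define_modes({"MINMODE": 101, "FC": 11, "HOV": 2}))  # bus, walk_access (no auto)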
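
The motivation for the hwyShapeExport.py reordering in PATCH 42/43 above can also be shown in isolation: reading hwyTcad.csv with a usecols filter and then writing that frame back to the same path drops every column outside the filter, which is why the IFC column is now added and the file rewritten before the filtered read. The following is a small self-contained sketch of the pitfall; it uses a throwaway demo file and a made-up SPEED column rather than the real hwyTcad.csv.

import pandas as pd

# hypothetical stand-in for hwyTcad.csv with one extra attribute (SPEED) needed downstream
pd.DataFrame({"ID": [1, 2], "FC": [1, 2], "SPEED": [65, 35]}).to_csv("demo_hwyTcad.csv", index=False)

# order used before the fix: filtered read first, then add IFC and write back
subset = pd.read_csv("demo_hwyTcad.csv", usecols=["ID", "FC"])
subset["IFC"] = subset["FC"]
subset.to_csv("demo_hwyTcad.csv", index=False)   # SPEED is now gone from the file on disk

# order used after the fix: full read, add IFC, write back, then do the filtered read
# (re-create the demo file first so the comparison starts from the same point)
pd.DataFrame({"ID": [1, 2], "FC": [1, 2], "SPEED": [65, 35]}).to_csv("demo_hwyTcad.csv", index=False)
full = pd.read_csv("demo_hwyTcad.csv")
full["IFC"] = full["FC"]
full.to_csv("demo_hwyTcad.csv", index=False)     # file keeps SPEED and gains IFC
subset = pd.read_csv("demo_hwyTcad.csv", usecols=["ID", "FC", "IFC"])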