Skip to content

API Documentation

Controller

RunController - model operation controller.

Main interface to start a TM2PY model run. Provide one or more configuration files in .toml format (by convention a scenario.toml and a model.toml)

Typical usage example: from tm2py.controller import RunController controller = RunController(["scenario.toml", "model.toml"]) controller.run()

Or from the command-line: python <path>/tm2py/tm2py/controller.py -s scenario.toml -m model.toml

RunController

Main operational interface for model runs.

Provide one or more config files in TOML (*.toml) format, and a run directory. If the run directory is not provided the root directory of the first config_file is used.

Properties
Internal properties
Source code in tm2py\controller.py
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
class RunController:
    """Main operational interface for model runs.

    Provide one or more config files in TOML (*.toml) format, and a run directory.
    If the run directory is not provided the root directory of the first config_file is used.

    Properties:
        config: root Configuration object
        logger: logger object
        top_sheet: placeholder for top sheet functionality (not implemented yet)
        trace: placeholder for trace functionality (not implemented yet)
        run_dir: root run directory for the model run
        iteration: current running (or last started) iteration
        component: current running (or last started) Component object
        emme_manager: EmmeManager object for centralized Emme-related (highway and
            transit assignments and skims) utilities.
        completed_components: list of components which have completed, tuple of
            (iteration, name, Component object)

    Internal properties:
        _emme_manager: EmmeManager object, cached on first access
        _iteration: current iteration
        _component: current running / last run Component
        _component_name: name of the current / last run component
        _queued_components: deque of (iteration, name, Component) tuples
    """

    def __init__(
        self,
        config_file: Union[Collection[Union[str, Path]], str, Path] = None,
        run_dir: Union[Path, str] = None,
        run_components: Collection[str] = component_cls_map.keys(),
    ):
        """Constructor for RunController class.

        Args:
            config_file: Single or list of config file locations as strings or Path objects.
                Defaults to None.
            run_dir: Model run directory as a Path object or string. If not provided, defaults
                to the directory of the first config_file.
            run_components: List of component names to run. Defaults to all components.
        """
        if run_dir is None:
            # Default to the directory containing the first config file.
            run_dir = Path(os.path.abspath(os.path.dirname(config_file[0])))

        self._run_dir = Path(run_dir)

        self.config = Configuration.load_toml(config_file)
        self.has_emme: bool = emme_context()
        # NOTE: Logger opens log file on __enter__ (in run), not ready for logging yet
        # Logger uses self.config.logging
        self.logger = Logger(self)
        self.top_sheet = None
        self.trace = None
        self.completed_components = []

        self._validated_components = set()
        self._emme_manager = None
        self._iteration = None
        self._component = None
        self._component_name = None
        self._queued_components = deque()

        # mapping from defined names referenced in config to Component objects
        self._component_map = {
            k: v(self) for k, v in component_cls_map.items() if k in run_components
        }

        self._queue_components(run_components=run_components)

    def __repr__(self):
        """Legible representation."""
        _str = f"""RunController
            Run Directory: {self.run_dir}
            Iteration: {self.iteration} of {self.run_iterations}
            Component: {self.component_name}
            Completed: {self.completed_components}
            Queued: {self._queued_components}"""
        return _str

    @property
    def run_dir(self) -> Path:
        """The root run directory of the model run."""
        return self._run_dir

    @property
    def run_iterations(self) -> List[int]:
        """Iterations for this model run (range; start is clamped to at least 1)."""
        return range(
            max(1, self.config.run.start_iteration), self.config.run.end_iteration + 1
        )

    @property
    def time_period_names(self) -> List[str]:
        """Return input time_period name or names and return list of time_period names.

        Implemented here for easy access for all components.

        Returns: list of uppercased string names of time periods
        """
        return [time.name.upper() for time in self.config.time_periods]

    @property
    def time_period_durations(self) -> dict:
        """Return mapping of time periods to durations in hours."""
        return {p.name: p.length_hours for p in self.config.time_periods}

    @property
    def congested_transit_assn_max_iteration(self) -> dict:
        """Return mapping of time periods to max iteration in congested transit assignment."""
        return {
            p.name: p.congested_transit_assn_max_iteration
            for p in self.config.time_periods
        }

    @property
    def num_processors(self) -> int:
        """Number of processors to use, as resolved by the Emme manager."""
        return self.emme_manager.num_processors

    @property
    def iteration(self) -> int:
        """Current iteration of model run."""
        return self._iteration

    @property
    def component_name(self) -> str:
        """Name of current component of model run."""
        return self._component_name

    @property
    def iter_component(self) -> Tuple[int, str]:
        """Tuple of the current iteration and component name."""
        return self._iteration, self._component_name

    def component(self) -> Component:
        """Current component of model.

        NOTE(review): unlike the other accessors this is a plain method, not a
        @property, so callers must use controller.component() — confirm intended.
        """
        return self._component

    @property
    def emme_manager(self) -> EmmeManager:
        """Cached Emme Manager object; a MagicMock stand-in when Emme is absent."""
        if self._emme_manager is None:
            if self.has_emme:
                self._emme_manager = EmmeManager(self, self.config.emme)
            else:
                self.logger.log("Emme not found, skipping Emme-related components")
                # TODO: All of the Emme-related components need to be handled "in place" rather
                # than skippping using a Mock
                from unittest.mock import MagicMock

                self._emme_manager = MagicMock()
        return self._emme_manager

    def get_abs_path(self, rel_path: Union[Path, str]) -> Path:
        """Get the absolute path from the root run directory given a relative path."""
        if not isinstance(rel_path, Path):
            rel_path = Path(rel_path)
        return Path(os.path.join(self.run_dir, rel_path))

    def run(self):
        """Main interface to run model.

        Iterates through the self._queued_components and runs them.
        """
        self._iteration = None
        while self._queued_components:
            self.run_next()

    def run_next(self):
        """Run next component in the queue.

        Raises:
            ValueError: if the queue is empty.
            AssertionError: if a required warmstart demand/skim file is missing.
        """
        if not self._queued_components:
            raise ValueError("No components in queue")
        iteration, name, component = self._queued_components.popleft()
        if self._iteration != iteration:
            self.logger.log(f"Start iteration {iteration}")
        self._iteration = iteration

        # Check that warmstart input files exist (iteration 0 only).
        if iteration == 0:
            if self.config.warmstart.warmstart:
                if self.config.warmstart.use_warmstart_demand:
                    # Each source's highway demand file is a template with
                    # {period} and {iter} placeholders; verify per time period.
                    for source in [
                        "household",
                        "truck",
                        "air_passenger",
                        "internal_external",
                    ]:
                        highway_demand_file = str(
                            self.get_abs_path(self.config[source].highway_demand_file)
                        )
                        for time in self.config["time_periods"]:
                            path = highway_demand_file.format(
                                period=time.name, iter=iteration
                            )
                            assert os.path.isfile(
                                path
                            ), f"{path} required as warmstart demand does not exist"
                elif self.config.warmstart.use_warmstart_skim:
                    # Highway skims: one file per time period.
                    highway_skim_file = str(
                        self.get_abs_path(
                            self.config["highway"].output_skim_path
                            / self.config["highway"].output_skim_filename_tmpl
                        )
                    )
                    for time in self.config["time_periods"]:
                        path = highway_skim_file.format(time_period=time.name)
                        assert os.path.isfile(
                            path
                        ), f"{path} required as warmstart skim does not exist"
                    # Transit skims: one file per (time period, transit class).
                    transit_skim_file = str(
                        self.get_abs_path(
                            self.config["transit"].output_skim_path
                            / self.config["transit"].output_skim_filename_tmpl
                        )
                    )
                    for time in self.config["time_periods"]:
                        for tclass in self.config["transit"]["classes"]:
                            path = transit_skim_file.format(
                                time_period=time.name, tclass=tclass.name
                            )
                            assert os.path.isfile(
                                path
                            ), f"{path} required as warmstart skim does not exist"

        self._component = component
        component.run()
        self.completed_components.append((iteration, name, component))

    def _queue_components(self, run_components: Collection[str] = None):
        """Add components per iteration to queue according to input Config.

        Args:
            run_components: if provided, only run these components
        """
        if self._queued_components:
            # Already queued: do nothing. (Previously guarded by an assert,
            # which is stripped under `python -O` and would silently re-queue.)
            return

        _initial_components = self.config.run.initial_components
        _global_iter_components = self.config.run.global_iteration_components
        _final_components = self.config.run.final_components

        if run_components is not None:
            _initial_components = [
                c for c in _initial_components if c in run_components
            ]
            _global_iter_components = [
                c for c in _global_iter_components if c in run_components
            ]
            _final_components = [c for c in _final_components if c in run_components]

        if self.config.run.start_iteration == 0:
            if self.config.warmstart.warmstart:
                if self.config.warmstart.use_warmstart_skim:
                    # Skims are supplied as warmstart inputs, so skip the
                    # components that would otherwise produce them.
                    if "highway" in _initial_components:
                        _initial_components.remove("highway")
                    if "transit_assign" in _initial_components:
                        _initial_components.remove("transit_assign")
                    if "transit_skim" in _initial_components:
                        _initial_components.remove("transit_skim")
            for _c_name in _initial_components:
                self._add_component_to_queue(0, _c_name)

        # Queue components which are run for each iteration

        _iteration_x_components = itertools.product(
            self.run_iterations, _global_iter_components
        )

        for _iteration, _c_name in _iteration_x_components:
            self._add_component_to_queue(_iteration, _c_name)

        # Queue components which are run after final iteration
        _finalizer_iteration = self.config.run.end_iteration + 1

        # BUG FIX: the loop variable was `c_name` while the stale `_c_name`
        # from the previous loop was queued, so the final components queued
        # the wrong component name (or raised NameError).
        for _c_name in _final_components:
            self._add_component_to_queue(_finalizer_iteration, _c_name)

        # If start_component specified, remove things before its first occurrence
        if self.config.run.start_component:
            # BUG FIX: queue entries are (iteration, name, component) tuples,
            # so `c.name` raised AttributeError; take the name element instead.
            _queued_c_names = [_name for _, _name, _ in self._queued_components]
            if self.config.run.start_component not in _queued_c_names:
                raise ValueError(
                    f"Start component {self.config.run.start_component} not found in queued \
                    components {_queued_c_names}"
                )
            _start_c_index = _queued_c_names.index(self.config.run.start_component)
            # BUG FIX: deques do not support slicing; use islice to drop the
            # leading entries while keeping a deque (popleft is used later).
            self._queued_components = deque(
                itertools.islice(self._queued_components, _start_c_index, None)
            )

        print("RUN COMPONENTS:")
        for _queued_component in self._queued_components:
            print(f"Global iteration {_queued_component[0]}, {_queued_component[1]}")

    def _add_component_to_queue(self, iteration: int, component_name: str):
        """Add component to queue (self._queued_components), first validating its inputs.

        Args:
            iteration (int): iteration to add component to.
            component_name (str): name of the component to add to the queue.
        """
        _component = self._component_map[component_name]
        if component_name not in self._validated_components:
            _component.validate_inputs()
            self._validated_components.add(component_name)
        self._queued_components.append((iteration, component_name, _component))

component_name property

Name of current component of model run.

congested_transit_assn_max_iteration property

Return mapping of time periods to max iteration in congested transit assignment.

emme_manager property

Cached Emme Manager object.

iter_component property

Tuple of the current iteration and component name.

iteration property

Current iteration of model run.

run_dir property

The root run directory of the model run.

run_iterations property

List of iterations for this model run.

time_period_durations property

Return mapping of time periods to durations in hours.

time_period_names property

Return input time_period name or names and return list of time_period names.

Implemented here for easy access for all components.

Returns: list of uppercased string names of time periods

__init__(config_file=None, run_dir=None, run_components=component_cls_map.keys())

Constructor for RunController class.

Parameters:

Name Type Description Default
config_file Union[Collection[Union[str, Path]], str, Path]

Single or list of config file locations as strings or Path objects. Defaults to None.

None
run_dir Union[Path, str]

Model run directory as a Path object or string. If not provided, defaults to the directory of the first config_file.

None
run_components Collection[str]

List of component names to run. Defaults to all components.

keys()
Source code in tm2py\controller.py
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
def __init__(
    self,
    config_file: Union[Collection[Union[str, Path]], str, Path] = None,
    run_dir: Union[Path, str] = None,
    run_components: Collection[str] = component_cls_map.keys(),
):
    """Constructor for RunController class.

    Args:
        config_file: Single or list of config file locations as strings or Path objects.
            Defaults to None.
        run_dir: Model run directory as a Path object or string. If not provided, defaults
            to the directory of the first config_file.
        run_components: List of component names to run. Defaults to all components.
    """
    # Default run_dir: the directory containing the first config file.
    # NOTE(review): config_file[0] assumes an indexable collection; a bare
    # str/Path config_file would index its first character — confirm callers
    # always pass a list when run_dir is None.
    if run_dir is None:
        run_dir = Path(os.path.abspath(os.path.dirname(config_file[0])))

    self._run_dir = Path(run_dir)

    self.config = Configuration.load_toml(config_file)
    self.has_emme: bool = emme_context()
    # NOTE: Logger opens log file on __enter__ (in run), not ready for logging yet
    # Logger uses self.config.logging
    self.logger = Logger(self)
    self.top_sheet = None  # placeholder for top sheet functionality (not implemented yet)
    self.trace = None  # placeholder for trace functionality (not implemented yet)
    self.completed_components = []

    # Internal state trackers; emme_manager is created lazily on first access.
    self._validated_components = set()
    self._emme_manager = None
    self._iteration = None
    self._component = None
    self._component_name = None
    self._queued_components = deque()

    # mapping from defined names referenced in config to Component objects
    self._component_map = {
        k: v(self) for k, v in component_cls_map.items() if k in run_components
    }

    self._queue_components(run_components=run_components)

__repr__()

Legible representation.

Source code in tm2py\controller.py
138
139
140
141
142
143
144
145
146
def __repr__(self):
    """Legible representation."""
    _str = f"""RunController
        Run Directory: {self.run_dir}
        Iteration: {self.iteration} of {self.run_iterations}
        Component: {self.component_name}
        Completed: {self.completed_components}
        Queued: {self._queued_components}"""
    return _str

component()

Current component of model.

Source code in tm2py\controller.py
202
203
204
def component(self) -> Component:
    """Return the currently running (or most recently started) Component."""
    current = self._component
    return current

get_abs_path(rel_path)

Get the absolute path from the root run directory given a relative path.

Source code in tm2py\controller.py
221
222
223
224
225
def get_abs_path(self, rel_path: Union[Path, str]) -> Path:
    """Resolve `rel_path` against the root run directory and return a Path."""
    rel = rel_path if isinstance(rel_path, Path) else Path(rel_path)
    return Path(os.path.join(self.run_dir, rel))

run()

Main interface to run model.

Iterates through the self._queued_components and runs them.

Source code in tm2py\controller.py
227
228
229
230
231
232
233
234
def run(self):
    """Execute every queued component, in order, until the queue is empty.

    Resets the iteration tracker first; run_next() advances it as each
    queued component is started.
    """
    self._iteration = None
    while self._queued_components:
        self.run_next()

run_next()

Run next component in the queue.

Source code in tm2py\controller.py
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
def run_next(self):
    """Run the next component in the queue.

    Pops one (iteration, name, component) entry, logs iteration transitions,
    verifies warmstart input files exist on iteration 0, then runs the
    component and records it in completed_components.

    Raises:
        ValueError: if the queue is empty.
        AssertionError: if a required warmstart demand/skim file is missing.
    """
    if not self._queued_components:
        raise ValueError("No components in queue")
    iteration, name, component = self._queued_components.popleft()
    if self._iteration != iteration:
        self.logger.log(f"Start iteration {iteration}")
    self._iteration = iteration

    # Check that warmstart files exist (iteration 0 only).
    if iteration == 0:
        if self.config.warmstart.warmstart:
            if self.config.warmstart.use_warmstart_demand:
                # Each source's highway demand file is a template with
                # {period} and {iter} placeholders; verify per time period.
                for source in [
                    "household",
                    "truck",
                    "air_passenger",
                    "internal_external",
                ]:
                    highway_demand_file = self.get_abs_path(
                        self.config[source].highway_demand_file
                    ).__str__()
                    for time in self.config["time_periods"]:
                        path = highway_demand_file.format(
                            period=time.name, iter=iteration
                        )
                        assert os.path.isfile(
                            path
                        ), f"{path} required as warmstart demand does not exist"
            elif self.config.warmstart.use_warmstart_skim:
                # Highway skims: one file per time period ({time_period} placeholder).
                highway_skim_file = self.get_abs_path(
                    self.config["highway"].output_skim_path
                    / self.config["highway"].output_skim_filename_tmpl
                ).__str__()
                for time in self.config["time_periods"]:
                    path = highway_skim_file.format(time_period=time.name)
                    assert os.path.isfile(
                        path
                    ), f"{path} required as warmstart skim does not exist"
                # Transit skims: one file per (time period, transit class) pair.
                transit_skim_file = self.get_abs_path(
                    self.config["transit"].output_skim_path
                    / self.config["transit"].output_skim_filename_tmpl
                ).__str__()
                for time in self.config["time_periods"]:
                    for tclass in self.config["transit"]["classes"]:
                        path = transit_skim_file.format(
                            time_period=time.name, tclass=tclass.name
                        )
                        assert os.path.isfile(
                            path
                        ), f"{path} required as warmstart skim does not exist"

    self._component = component
    component.run()
    self.completed_components.append((iteration, name, component))

Configuration

Config implementation and schema.

ActiveModeShortestPathSkimConfig

Bases: ConfigItem

Active mode skim entry.

Source code in tm2py\config.py
658
659
660
661
662
663
664
665
666
@dataclass(frozen=True)
class ActiveModeShortestPathSkimConfig(ConfigItem):
    """Active mode skim entry.

    Properties:
        mode: name of the active mode for this skim
        roots: root (origin) specification — TODO confirm semantics in component
        leaves: leaf (destination) specification — TODO confirm semantics in component
        output: output location for the skim
        max_dist_miles: optional maximum distance in miles; None means no cutoff
    """

    mode: str
    roots: str
    leaves: str
    output: str
    # BUG FIX: annotation was `float = None`; the default of None requires Optional.
    max_dist_miles: Optional[float] = None

ActiveModesConfig

Bases: ConfigItem

Active Mode skim parameters.

Source code in tm2py\config.py
669
670
671
672
673
674
@dataclass(frozen=True)
class ActiveModesConfig(ConfigItem):
    """Active Mode skim parameters.

    Properties:
        emme_scenario_id: Emme scenario ID used for the active-mode skims
        shortest_path_skims: one entry per shortest-path skim to compute
    """

    emme_scenario_id: int
    shortest_path_skims: Tuple[ActiveModeShortestPathSkimConfig, ...]

AirPassengerConfig

Bases: ConfigItem

Air passenger model parameters.

Properties

highway_demand_file: output OMX file. input_demand_folder: location to find the input demand csvs. input_demand_filename_tmpl: filename template for input demand; should have {year}, {direction} and {airport} variables and end in '.csv'. reference_start_year: base start year for input demand tables used to calculate the linear interpolation, as well as in the file name template {year}_{direction}{airport}.csv. reference_end_year: end year for input demand tables used to calculate the linear interpolation, as well as in the file name template {year}_{direction}{airport}.csv. airport_names: list of one or more airport names / codes as used in the input file names. demand_aggregation: specification of aggregation of by-access mode demand to highway class demand.

Source code in tm2py\config.py
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
@dataclass(frozen=True)
class AirPassengerConfig(ConfigItem):
    """Air passenger model parameters.

    Properties:
        output_trip_table_directory: directory for the output trip tables
        outfile_trip_table_tmp: filename template for the output trip tables
        input_demand_folder: location to find the input demand csvs
        input_demand_filename_tmpl: filename template for input demand. Should have
            {year}, {direction} and {airport} variables and end in '.csv'
        highway_demand_file: output OMX file
        reference_start_year: base start year for input demand tables
            used to calculate the linear interpolation, as well as
            in the file name template {year}_{direction}{airport}.csv
        reference_end_year: end year for input demand tables
            used to calculate the linear interpolation, as well as
            in the file name template {year}_{direction}{airport}.csv
        airport_names: list of one or more airport names / codes as used in
            the input file names
        demand_aggregation: specification of aggregation of by-access mode
            demand to highway class demand
    """

    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str
    input_demand_folder: pathlib.Path
    input_demand_filename_tmpl: str
    highway_demand_file: str
    reference_start_year: str
    reference_end_year: str
    airport_names: List[str]
    demand_aggregation: List[AirPassengerDemandAggregationConfig]

    @validator("input_demand_filename_tmpl")
    def valid_input_demand_filename_tmpl(cls, value):
        """Validate input demand filename template has the required {} placeholders."""
        # BUG FIX: messages referenced 'output_skim_matrixname_tmpl' (wrong field)
        # and were plain strings, so "{value}" was emitted literally.
        assert (
            "{year}" in value
        ), f"-> 'input_demand_filename_tmpl' must have {{year}}, found {value}."
        assert (
            "{direction}" in value
        ), f"-> 'input_demand_filename_tmpl' must have {{direction}}, found {value}."
        assert (
            "{airport}" in value
        ), f"-> 'input_demand_filename_tmpl' must have {{airport}}, found {value}."
        return value

valid_input_demand_filename_tmpl(value)

Validate skim matrix template has correct {}.

Source code in tm2py\config.py
391
392
393
394
395
396
397
398
399
400
401
402
403
404
@validator("input_demand_filename_tmpl")
def valid_input_demand_filename_tmpl(cls, value):
    """Validate input demand filename template has the required {} placeholders."""
    # BUG FIX: messages referenced 'output_skim_matrixname_tmpl' (wrong field)
    # and were plain strings, so "{value}" was emitted literally.
    assert (
        "{year}" in value
    ), f"-> 'input_demand_filename_tmpl' must have {{year}}, found {value}."
    assert (
        "{direction}" in value
    ), f"-> 'input_demand_filename_tmpl' must have {{direction}}, found {value}."
    assert (
        "{airport}" in value
    ), f"-> 'input_demand_filename_tmpl' must have {{airport}}, found {value}."
    return value

AirPassengerDemandAggregationConfig

Bases: ConfigItem

Air passenger demand aggregation input parameters.

Properties
Source code in tm2py\config.py
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
@dataclass(frozen=True)
class AirPassengerDemandAggregationConfig(ConfigItem):
    """Air passenger demand aggregation input parameters.

    Properties:
        name: (src_group_name) name used for the class group in the input
            columns for the trip tables
        mode: (result_class_name) name used in the output OMX matrix names;
            should match the expected naming convention in the
            HighwayClassDemandConfig name(s)
        access_modes: names used for the access modes in the input columns
            for the trip tables
    """

    name: str
    mode: str
    access_modes: Tuple[str, ...]

AssignmentStoppingCriteriaConfig

Bases: ConfigItem

Assignment stop configuration parameters.

Source code in tm2py\config.py
1254
1255
1256
1257
1258
1259
@dataclass(frozen=True)
class AssignmentStoppingCriteriaConfig(ConfigItem):
    "Assignment stop configuration parameters."
    max_iterations: int  # hard cap on assignment iterations
    # NOTE(review): presumably a relative-gap convergence threshold — confirm
    relative_difference: float
    percent_segments_over_capacity: float

CcrWeightsConfig

Bases: ConfigItem

Weights for CCR Configuration.

Source code in tm2py\config.py
1262
1263
1264
1265
1266
1267
1268
1269
1270
@dataclass(frozen=True)
class CcrWeightsConfig(ConfigItem):
    "Weights for CCR Configuration."
    # Seated-passenger parameters (presumably min/max bounds and a power-curve
    # exponent — confirm usage in the transit assignment component).
    min_seat: float = Field(default=1.0)
    max_seat: float = Field(default=1.4)
    power_seat: float = Field(default=2.2)
    # Standing-passenger parameters.
    min_stand: float = Field(default=1.4)
    max_stand: float = Field(default=1.6)
    power_stand: float = Field(default=3.4)

ChoiceClassConfig

Bases: ConfigItem

Choice class parameters.

Properties

The end value in the utility equation for class c and property p is:

utility[p].coeff * classes[c].property_factor[p] * sum(skim(classes[c].skim_mode,skim_p) for skim_p in property_to_skim[p])

Source code in tm2py\config.py
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
@dataclass(frozen=True)
class ChoiceClassConfig(ConfigItem):
    """Choice class parameters.

    Properties:
        name: name of the choice class
        skim_mode: skim mode name used to look up skims for this class;
            defaults to "da"
        veh_group_name: vehicle group name for this class; defaults to ""
        property_factors: optional per-property scaling factors for this class,
            e.g. a shared-ride cost could be factored down assuming that the
            cost is shared among individuals

    The end value in the utility equation for class c and property p is:

       utility[p].coeff *
       classes[c].property_factor[p] *
       sum(skim(classes[c].skim_mode,skim_p) for skim_p in property_to_skim[p])

    NOTE(review): the property-to-skim mappings (toll and no-toll) default to
    values in the code and are not fields of this config item.
    """

    name: str
    skim_mode: Optional[str] = Field(default="da")
    veh_group_name: Optional[str] = Field(default="")
    property_factors: Optional[List[CoefficientConfig]] = Field(default=None)

ClassDemandConfig

Bases: ConfigItem

Input source for demand for highway or transit assignment class.

Used to specify where to find related demand file for this highway or transit class.

Properties
Source code in tm2py\config.py
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
@dataclass(frozen=True)
class ClassDemandConfig(ConfigItem):
    """Input source for demand for highway or transit assignment class.

    Used to specify where to find related demand file for this
    highway or transit class.

    Properties:
        source: reference name of the component section for the
                source "highway_demand_file" (for a highway class)
                or "transit_demand_file" (for a transit class), one of:
                "household", "air_passenger", "internal_external", "truck"
        name: name of matrix in the OMX file, can include "{period}"
                placeholder
        factor: optional, multiplicative factor to generate PCEs from
                trucks or convert person-trips to vehicle-trips for HOVs
    """

    name: str = Field()
    # BUG FIX: was `source: str = Literal[...]`, which made the typing.Literal
    # object itself the field's default *value*; the Literal belongs in the
    # annotation, and source is a required field.
    source: Literal[
        "household", "air_passenger", "internal_external", "truck"
    ] = Field()
    factor: float = Field(default=1.0, gt=0)

CoefficientConfig

Bases: ConfigItem

Coefficient and properties to be used in utility or regression.

Source code in tm2py\config.py
441
442
443
444
445
446
@dataclass(frozen=True)
class CoefficientConfig(ConfigItem):
    """A (property, coefficient) pair used in a utility or regression expression.

    Properties:
        property: name of the property the coefficient applies to
        coeff: coefficient value; None when no coefficient is specified
    """

    # NOTE: the field name shadows the `property` builtin, but renaming it
    # would break the config schema.
    property: str
    coeff: Optional[float] = Field(default=None)

ConfigItem

Bases: ABC

Base class to add partial dict-like interface to tm2py model configuration.

Allow use of .items() [“X”] and .get(“X”) .to_dict() from configuration.

Not to be constructed directly. To be used as a mixin for dataclasses representing config schema. Do not use "get", "to_dict", or "items" for key names.

Source code in tm2py\config.py
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
class ConfigItem(ABC):
    """Mixin giving tm2py config dataclasses a partial dict-like interface.

    Supports ``cfg["X"]``, ``cfg.get("X")``, and ``cfg.items()`` on
    configuration objects.

    Not intended to be constructed directly: mix into the dataclasses that
    describe the config schema. Avoid "get", "to_dict", or "items" as key
    names, since they would shadow these methods.
    """

    def __getitem__(self, key):
        """Get item for config. D[key] -> D[key] if key in D, else raise KeyError."""
        # Delegate to attribute access on the dataclass instance.
        return getattr(self, key)

    def get(self, key, default=None):
        """Return the value for key if key is in the dictionary, else default."""
        # Only instance attributes participate, mirroring dict.get().
        return vars(self).get(key, default)

    def items(self):
        """The sub-config objects in config."""
        # (name, value) view over the instance's attributes.
        return vars(self).items()

__getitem__(key)

Get item for config. D[key] -> D[key] if key in D, else raise KeyError.

Source code in tm2py\config.py
26
27
28
def __getitem__(self, key):
    """Get item for config. D[key] -> D[key] if key in D, else raise KeyError."""
    # Delegates to attribute access; note a missing key actually raises
    # AttributeError (not KeyError) — TODO confirm callers expect this.
    return getattr(self, key)

get(key, default=None)

Return the value for key if key is in the dictionary, else default.

Source code in tm2py\config.py
34
35
36
def get(self, key, default=None):
    """Return the value for key if key is in the dictionary, else default."""
    # Reads the instance __dict__ directly, so class-level attributes are not
    # visible here (unlike __getitem__, which uses getattr).
    return self.__dict__.get(key, default)

items()

The sub-config objects in config.

Source code in tm2py\config.py
30
31
32
def items(self):
    """The sub-config objects in config."""
    # Expose the instance's attributes as (name, value) pairs, dict-style.
    return self.__dict__.items()

Configuration

Bases: ConfigItem

Source code in tm2py\config.py
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
@dataclass(frozen=True)
class Configuration(ConfigItem):
    """Configuration: root of the model configuration."""

    scenario: ScenarioConfig
    run: RunConfig
    warmstart: WarmStartConfig
    time_periods: Tuple[TimePeriodConfig, ...]
    household: HouseholdConfig
    air_passenger: AirPassengerConfig
    internal_external: InternalExternalConfig
    truck: TruckConfig
    active_modes: ActiveModesConfig
    highway: HighwayConfig
    transit: TransitConfig
    emme: EmmeConfig
    logging: Optional[LoggingConfig] = Field(default_factory=LoggingConfig)

    @classmethod
    def load_toml(
        cls,
        toml_path: Union[List[Union[str, pathlib.Path]], str, pathlib.Path],
    ) -> "Configuration":
        """Load configuration from .toml file(s).

        Normally the config is split into a scenario_config.toml file and a
        model_config.toml file.

        Args:
            toml_path: a valid system path string or Path object to a TOML format config file or
                list of paths of path objects to a set of TOML files.

        Returns:
            A Configuration object
        """
        if not isinstance(toml_path, List):
            toml_path = [toml_path]
        toml_path = list(map(pathlib.Path, toml_path))

        # Later files are merged over the first: their keys win on conflict.
        data = _load_toml(toml_path[0])
        for path_item in toml_path[1:]:
            _merge_dicts(data, _load_toml(path_item))
        return cls(**data)

    @validator("highway")
    def maz_skim_period_exists(cls, value, values):
        """Validate highway.maz_to_maz.skim_period refers to a valid period."""
        if "time_periods" in values:
            time_period_names = set(time.name for time in values["time_periods"])
            assert (
                value.maz_to_maz.skim_period in time_period_names
            ), "maz_to_maz -> skim_period -> name not found in time_periods list"
        return value

    @validator("highway", always=True)
    def relative_gap_length(cls, value, values):
        """Validate highway.relative_gaps is a list of length greater or equal to global iterations."""
        # +1 because highway also runs in the warmstart (global iteration 0).
        if "run" in values:
            assert len(value.relative_gaps) >= (
                values["run"]["end_iteration"] + 1
            ), f"'highway.relative_gaps must be the same or greater length as end_iteration+1,\
                that includes global iteration 0 to {values['run']['end_iteration']}'"
        return value

    @validator("transit", always=True)
    def transit_stop_criteria_length(cls, value, values):
        """Validate transit.congested.stop_criteria is a list of length greater or equal to global iterations."""
        # BUG FIX: was a bitwise `&` between booleans; `and` expresses the
        # intended logical conjunction and short-circuits.
        if ("run" in values) and value.congested_transit_assignment:
            assert len(value.congested.stop_criteria) >= (
                values["run"]["end_iteration"]
            ), f"'transit.stop_criteria must be the same or greater length as end_iteration,\
                that includes global iteration 1 to {values['run']['end_iteration']}'"
        return value

load_toml(toml_path) classmethod

Load configuration from .toml file(s).

Normally the config is split into a scenario_config.toml file and a model_config.toml file.

Parameters:

Name Type Description Default
toml_path Union[List[Union[str, Path]], str, Path]

a valid system path string or Path object to a TOML format config file or list of paths of path objects to a set of TOML files.

required

Returns:

Type Description
Configuration

A Configuration object

Source code in tm2py\config.py
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
@classmethod
def load_toml(
    cls,
    toml_path: Union[List[Union[str, pathlib.Path]], str, pathlib.Path],
) -> "Configuration":
    """Load configuration from .toml file(s).

    Normally the config is split into a scenario_config.toml file and a
    model_config.toml file.

    Args:
        toml_path: a valid system path string or Path object to a TOML format config file or
            list of paths of path objects to a set of TOML files.

    Returns:
        A Configuration object
    """
    if not isinstance(toml_path, List):
        toml_path = [toml_path]
    toml_path = list(map(pathlib.Path, toml_path))

    # Later files are merged over the first; their keys win on conflict.
    data = _load_toml(toml_path[0])
    for path_item in toml_path[1:]:
        _merge_dicts(data, _load_toml(path_item))
    return cls(**data)

maz_skim_period_exists(value, values)

Validate highway.maz_to_maz.skim_period refers to a valid period.

Source code in tm2py\config.py
1469
1470
1471
1472
1473
1474
1475
1476
1477
@validator("highway")
def maz_skim_period_exists(cls, value, values):
    """Validate highway.maz_to_maz.skim_period refers to a valid period."""
    # `values` only contains fields declared before "highway", so guard on
    # "time_periods" being present (it is absent if its own validation failed).
    if "time_periods" in values:
        time_period_names = set(time.name for time in values["time_periods"])
        assert (
            value.maz_to_maz.skim_period in time_period_names
        ), "maz_to_maz -> skim_period -> name not found in time_periods list"
    return value

relative_gap_length(value, values)

Validate highway.relative_gaps is a list of length greater or equal to global iterations.

Source code in tm2py\config.py
1479
1480
1481
1482
1483
1484
1485
1486
1487
@validator("highway", always=True)
def relative_gap_length(cls, value, values):
    """Validate highway.relative_gaps is a list of length greater or equal to global iterations."""
    # end_iteration + 1 because highway also runs in global iteration 0
    # (warmstart), per the error message below.
    if "run" in values:
        assert len(value.relative_gaps) >= (
            values["run"]["end_iteration"] + 1
        ), f"'highway.relative_gaps must be the same or greater length as end_iteration+1,\
            that includes global iteration 0 to {values['run']['end_iteration']}'"
    return value

transit_stop_criteria_length(value, values)

Validate transit.congested.stop_criteria is a list of length greater or equal to global iterations.

Source code in tm2py\config.py
1489
1490
1491
1492
1493
1494
1495
1496
1497
@validator("transit", always=True)
def transit_stop_criteria_length(cls, value, values):
    """Validate transit.congested.stop_criteria is a list of length greater or equal to global iterations."""
    # BUG FIX: was `("run" in values) & (...)` — a bitwise AND between
    # booleans. Logical `and` expresses the intent and short-circuits.
    if ("run" in values) and value.congested_transit_assignment:
        assert len(value.congested.stop_criteria) >= (
            values["run"]["end_iteration"]
        ), f"'transit.stop_criteria must be the same or greater length as end_iteration,\
            that includes global iteration 1 to {values['run']['end_iteration']}'"
    return value

CongestedAssnConfig

Bases: ConfigItem

Congested transit assignment Configuration.

Source code in tm2py\config.py
1328
1329
1330
1331
1332
1333
1334
1335
1336
@dataclass(frozen=True)
class CongestedAssnConfig(ConfigItem):
    """Congested transit assignment Configuration.

    Properties:
        trim_demand_before_congested_transit_assignment: whether to trim
            demand before running the congested assignment
        output_trimmed_demand_report_path: optional relative path for the
            trimmed-demand report; None disables the report output
        stop_criteria: stopping criteria, one per global iteration,
            see CongestedTransitStopCriteria
        use_peaking_factor: whether to apply the AM/PM peaking factors
        am_peaking_factor: peaking factor applied in the AM period
        pm_peaking_factor: peaking factor applied in the PM period
    """

    trim_demand_before_congested_transit_assignment: bool = False
    # Annotation corrected to Optional[str]: the default is None.
    output_trimmed_demand_report_path: Optional[str] = Field(default=None)
    stop_criteria: Tuple[CongestedTransitStopCriteria, ...] = Field()
    use_peaking_factor: bool = False
    am_peaking_factor: float = Field(default=1.219)
    pm_peaking_factor: float = Field(default=1.262)

CongestedTransitMaxIteration

Bases: ConfigItem

Congested transit assignment time period specific max iteration parameters.

Properties
Source code in tm2py\config.py
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
@dataclass(frozen=True)
class CongestedTransitMaxIteration(ConfigItem):
    """Congested transit assignment time period specific max iteration parameters.

    Properties:
        time_period: time period string
        max_iteration: max iteration specific to time period. In the design of tm2py,
            congested assignment is run only for AM and PM. For EA, MD, and EV, we run
            extended assignment. See code here: tm2py/components/network/transit/transit_assign.py#L465-L466
            Therefore, `max_iteration` here does not impact EA, MD, and EV, this setting
            is only meaningful for AM and PM.
    """

    # Time period code, at most 4 characters (e.g. "am", "pm").
    time_period: str = Field(max_length=4)
    # Must be at least 1; defaults to a single iteration.
    max_iteration: int = Field(ge=1, default=1)

CongestedTransitStopCriteria

Bases: ConfigItem

Congested transit assignment stopping criteria parameters.

Properties
Source code in tm2py\config.py
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
@dataclass(frozen=True)
class CongestedTransitStopCriteria(ConfigItem):
    """Congested transit assignment stopping criteria parameters.

    Properties:
        global_iteration: global iteration number
        normalized_gap: normalized_gap
        relative_gap: relative gap
        max_iterations: max iterations config, one for each time period
    """

    global_iteration: int = Field(ge=0)
    normalized_gap: float = Field(gt=0)
    relative_gap: float = Field(gt=0)
    max_iterations: Tuple[CongestedTransitMaxIteration, ...] = Field()

CongestedWeightsConfig

Bases: ConfigItem

Weights for Congested Transit Assignment Configuration.

Source code in tm2py\config.py
1273
1274
1275
1276
1277
1278
1279
1280
1281
@dataclass(frozen=True)
class CongestedWeightsConfig(ConfigItem):
    """Weights for Congested Transit Assignment Configuration."""

    # NOTE(review): the names suggest min/max perceived-time multipliers with
    # power-curve exponents for seated vs standing passengers — confirm
    # semantics against the congested transit assignment component.
    min_seat: float = Field(default=1.0)
    max_seat: float = Field(default=1.4)
    power_seat: float = Field(default=2.2)
    min_stand: float = Field(default=1.4)
    max_stand: float = Field(default=1.6)
    power_stand: float = Field(default=3.4)

DemandCountyGroupConfig

Bases: ConfigItem

Grouping of counties for assignment and demand files.

Properties
Source code in tm2py\config.py
867
868
869
870
871
872
873
874
875
876
877
@dataclass(frozen=True)
class DemandCountyGroupConfig(ConfigItem):
    """Grouping of counties for assignment and demand files.

    Properties:
        number: id number for this group, must be unique
        counties: list of one or more county names
    """

    # Uniqueness across groups is stated by the docstring; presumably enforced
    # by a validator elsewhere — confirm in the enclosing config.
    number: int = Field()
    counties: Tuple[COUNTY_NAMES, ...] = Field()

EawtWeightsConfig

Bases: ConfigItem

Weights for calculating extra added wait time Configuration.

Source code in tm2py\config.py
1284
1285
1286
1287
1288
1289
1290
1291
@dataclass(frozen=True)
class EawtWeightsConfig(ConfigItem):
    """Weights for calculating extra added wait time Configuration."""

    # Regression-style coefficients for the extra-added-wait-time (EAWT)
    # calculation; defaults are the calibrated values.
    constant: float = Field(default=0.259625)
    weight_inverse_headway: float = Field(default=1.612019)
    vcr: float = Field(default=0.005274)
    exit_proportion: float = Field(default=0.591765)
    default_eawt_factor: float = Field(default=1)

EmmeConfig

Bases: ConfigItem

Emme-specific parameters.

Properties
Source code in tm2py\config.py
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
@dataclass(frozen=True)
class EmmeConfig(ConfigItem):
    """Emme-specific parameters.

    Properties:
        all_day_scenario_id: scenario ID to use for all day
            (initial imported) scenario with all time period data
        project_path: relative path from run_dir to Emme desktop project (.emp)
        highway_database_path: relative path to highway Emmebank
        active_north_database_path:  relative paths to active mode Emmebank for north bay
        active_south_database_path:  relative paths to active mode Emmebank for south bay
        transit_database_path: relative path to transit Emmebank
        num_processors: the number of processors to use in Emme procedures,
            either as an integer, or value MAX, MAX-N. Typically recommend
            using MAX-1 (on desktop systems) or MAX-2 (on servers with many
            logical processors) to leave capacity for background / other tasks.
    """

    all_day_scenario_id: int
    project_path: pathlib.Path
    highway_database_path: pathlib.Path
    active_north_database_path: pathlib.Path
    active_south_database_path: pathlib.Path
    transit_database_path: pathlib.Path
    # Restricted to "MAX", "MAX-<n>", or a bare integer string by the pattern.
    num_processors: str = Field(pattern=r"^MAX$|^MAX-\d+$|^\d+$")

HighwayCapClassConfig

Bases: ConfigItem

Highway link capacity and speed (‘capclass’) index entry.

Properties
Source code in tm2py\config.py
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
@dataclass(frozen=True)
class HighwayCapClassConfig(ConfigItem):
    """Highway link capacity and speed ('capclass') index entry.

    Properties:
        capclass: cross index for link @capclass lookup
        capacity: value for link capacity, PCE / hour
        free_flow_speed: value for link free flow speed, miles / hour
        critical_speed: value for critical speed (Ja) used in Akcelik
            type functions
    """

    # All values are constrained non-negative by the Field(ge=0) bounds.
    capclass: int = Field(ge=0)
    capacity: float = Field(ge=0)
    free_flow_speed: float = Field(ge=0)
    critical_speed: float = Field(ge=0)

HighwayClassConfig

Bases: ConfigItem

Highway assignment class definition.

Note that excluded_links, skims and toll attribute names include vehicle groups (“{vehicle}”) which reference the list of highway.toll.dst_vehicle_group_names (see HighwayTollsConfig). The default example model config uses: “da”, “sr2”, “sr3”, “vsm”, “sml”, “med”, “lrg”

Example single class config

name = “da” description= “drive alone” mode_code= “d” [[highway.classes.demand]] source = “household” name = “SOV_GP_{period}” [[highway.classes.demand]] source = “air_passenger” name = “da” [[highway.classes.demand]] source = “internal_external” name = “da” excluded_links = [“is_toll_da”, “is_sr2”], value_of_time = 18.93, # $ / hr operating_cost_per_mile = 17.23, # cents / mile toll = [“@bridgetoll_da”] skims = [“time”, “dist”, “freeflowtime”, “bridgetoll_da”],

Properties
Source code in tm2py\config.py
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
@dataclass(frozen=True)
class HighwayClassConfig(ConfigItem):
    """Highway assignment class definition.

    Note that excluded_links, skims and toll attribute names include
    vehicle groups ("{vehicle}") which reference the list of
    highway.toll.dst_vehicle_group_names (see HighwayTollsConfig).
    The default example model config uses:
    "da", "sr2", "sr3", "vsm", "sml", "med", "lrg"

    Example single class config:
        name = "da"
        description= "drive alone"
        mode_code= "d"
        [[highway.classes.demand]]
            source = "household"
            name = "SOV_GP_{period}"
        [[highway.classes.demand]]
            source = "air_passenger"
            name = "da"
        [[highway.classes.demand]]
            source = "internal_external"
            name = "da"
        excluded_links = ["is_toll_da", "is_sr2"],
        value_of_time = 18.93,  # $ / hr
        operating_cost_per_mile = 17.23,  # cents / mile
        toll = ["@bridgetoll_da"]
        skims = ["time", "dist", "freeflowtime", "bridgetoll_da"],

    Properties:
        name: short (up to 10 character) unique reference name for the class.
            used in attribute and matrix names
        veh_group_name: short (up to 10 character) vehicle group name for
            this class — presumably used for grouped reporting; confirm usage
        description: longer text used in attribute and matrix descriptions
        mode_code: single character mode, used to generate link.modes to
            identify subnetwork, generated from "excluded_links" keywords.
            Should be unique in list of classes, unless multiple classes
            have identical excluded_links specification. Cannot be the
            same as used for highway.maz_to_maz.mode_code.
        value_of_time: value of time for this class in $ / hr
        operating_cost_per_mile: vehicle operating cost in cents / mile
        demand: list of OMX file and matrix keyname references,
            see ClassDemandConfig
        excluded_links: list of keywords to identify links to exclude from
            this class' available subnetwork (generate link.modes)
            Options are:
                - "is_sr": is reserved for shared ride (@useclass in 2,3)
                - "is_sr2": is reserved for shared ride 2+ (@useclass == 2)
                - "is_sr3": is reserved for shared ride 3+ (@useclass == 3)
                - "is_auto_only": is reserved for autos (non-truck) (@useclass != 1)
                - "is_toll_{vehicle}": has a value (non-bridge) toll for the {vehicle} toll group
        toll: list of additional toll cost link attribute (values stored in cents),
            summed, one of "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}"
        toll_factor: optional, factor to apply to toll values in cost calculation
        pce: optional, passenger car equivalent to convert assigned demand in
            PCE units to vehicles for total assigned vehicle calculations
        skims: list of skim matrices to generate
            Options are:
                "time": pure travel time in minutes
                "dist": distance in miles
                "hovdist": distance on HOV facilities (is_sr2 or is_sr3)
                "tolldist": distance on toll facilities
                    (@tollbooth > highway.tolls.valuetoll_start_tollbooth_code)
                "freeflowtime": free flow travel time in minutes
                "bridgetoll_{vehicle}": bridge tolls, {vehicle} refers to toll group
                "valuetoll_{vehicle}": other, non-bridge tolls, {vehicle} refers to toll group
    """

    name: str = Field(min_length=1, max_length=10)
    veh_group_name: str = Field(min_length=1, max_length=10)
    description: Optional[str] = Field(default="")
    mode_code: str = Field(min_length=1, max_length=1)
    value_of_time: float = Field(gt=0)
    operating_cost_per_mile: float = Field(ge=0)
    pce: Optional[float] = Field(default=1.0, gt=0)
    # Note that excluded_links, skims, and tolls validated under HighwayConfig to include
    # highway.toll.dst_vehicle_group_names names
    excluded_links: Tuple[str, ...] = Field()
    skims: Tuple[str, ...] = Field()
    toll: Tuple[str, ...] = Field()
    toll_factor: Optional[float] = Field(default=None, gt=0)
    demand: Tuple[ClassDemandConfig, ...] = Field()

HighwayConfig

Bases: ConfigItem

Highway assignment and skims parameters.

Properties
Source code in tm2py\config.py
 926
 927
 928
 929
 930
 931
 932
 933
 934
 935
 936
 937
 938
 939
 940
 941
 942
 943
 944
 945
 946
 947
 948
 949
 950
 951
 952
 953
 954
 955
 956
 957
 958
 959
 960
 961
 962
 963
 964
 965
 966
 967
 968
 969
 970
 971
 972
 973
 974
 975
 976
 977
 978
 979
 980
 981
 982
 983
 984
 985
 986
 987
 988
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
@dataclass(frozen=True)
class HighwayConfig(ConfigItem):
    """Highway assignment and skims parameters.

    Properties:
        generic_highway_mode_code: single character unique mode ID for entire
            highway network (no excluded_links)
        relative_gaps: relative gaps for assignment convergence, specific to global iteration, see HighwayRelativeGapConfig
        max_iterations: maximum iterations stopping criteria
        area_type_buffer_dist_miles: used to in calculation to categorize link @areatype
            The area type is determined based on the average density of nearby
            (within this buffer distance) MAZs, using (pop+jobs*2.5)/acres
        drive_access_output_skim_path: relative path for drive access to transit skims
        output_skim_path: relative path template from run dir for OMX output skims
        output_skim_filename_tmpl: template for OMX filename for a time period. Must include
            {time_period} in the string and end in '.omx'.
        output_skim_matrixname_tmpl: template for matrix names within OMX output skims.
            Should include {time_period}, {mode}, and {property}
        tolls: input toll specification, see HighwayTollsConfig
        maz_to_maz: maz-to-maz shortest path assignment and skim specification,
            see HighwayMazToMazConfig
        classes: highway assignment multi-class setup and skim specification,
            see HighwayClassConfig
        capclass_lookup: index cross-reference table from the link @capclass value
            to the free-flow speed, capacity, and critical speed values
        interchange_nodes_file: relative path to the interchange nodes file, this is
            used for calculating highway reliability
        apply_msa_demand: average highway demand with previous iterations'. Default to True.
        reliability: bool to skim highway reliability. Default to true. If true, assignment
            will be run twice in global iterations 0 (warmstart) and 1, to calculate reliability,
            assignment will be run only once in global iterations 2 and 3,
            reliability skim will stay the same as global iteration 1.
            If false, reliability will not be calculated nor skimmed in all global
            iterations, and the resulting reliability skims will be 0.
    """

    generic_highway_mode_code: str = Field(min_length=1, max_length=1)
    relative_gaps: Tuple[HighwayRelativeGapConfig, ...] = Field()
    max_iterations: int = Field(ge=0)
    area_type_buffer_dist_miles: float = Field(gt=0)
    drive_access_output_skim_path: Optional[str] = Field(default=None)
    output_skim_path: pathlib.Path = Field()
    output_skim_filename_tmpl: str = Field()
    output_skim_matrixname_tmpl: str = Field()
    tolls: HighwayTollsConfig = Field()
    maz_to_maz: HighwayMazToMazConfig = Field()
    classes: Tuple[HighwayClassConfig, ...] = Field()
    capclass_lookup: Tuple[HighwayCapClassConfig, ...] = Field()
    interchange_nodes_file: str = Field()
    apply_msa_demand: bool = True
    reliability: bool = Field(default=True)

    @validator("output_skim_filename_tmpl")
    def valid_skim_template(value):
        """Validate skim template has correct {} and extension."""
        assert (
            "{time_period" in value
        ), f"-> output_skim_filename_tmpl must have {{time_period}}', found {value}."
        assert (
            value[-4:].lower() == ".omx"
        ), f"-> 'output_skim_filename_tmpl must end in '.omx', found {value[-4:].lower() }"
        return value

    @validator("output_skim_matrixname_tmpl")
    def valid_skim_matrix_name_template(value):
        """Validate skim matrix template has correct {}."""
        # BUG FIX: these assertion messages were plain strings, so "{value}"
        # was emitted literally instead of showing the offending template.
        # Made them f-strings ({{...}} renders the literal placeholder name).
        assert (
            "{time_period" in value
        ), f"-> 'output_skim_matrixname_tmpl must have {{time_period}}, found {value}."
        assert (
            "{property" in value
        ), f"-> 'output_skim_matrixname_tmpl must have {{property}}, found {value}."
        assert (
            "{mode" in value
        ), f"-> 'output_skim_matrixname_tmpl must have {{mode}}, found {value}."
        return value

    @validator("capclass_lookup")
    def unique_capclass_numbers(cls, value):
        """Validate list of capclass_lookup has unique .capclass values."""
        capclass_ids = [i.capclass for i in value]
        error_msg = "-> capclass value must be unique in list"
        assert len(capclass_ids) == len(set(capclass_ids)), error_msg
        return value

    @validator("classes", pre=True)
    def unique_class_names(cls, value):
        """Validate list of classes has unique .name values."""
        class_names = [highway_class["name"] for highway_class in value]
        error_msg = "-> name value must be unique in list"
        assert len(class_names) == len(set(class_names)), error_msg
        return value

    @validator("classes")
    def validate_class_mode_excluded_links(cls, value, values):
        """Validate list of classes has unique .mode_code or .excluded_links match."""
        # validate if any mode IDs are used twice, that they have the same excluded links sets
        mode_excluded_links = {values["generic_highway_mode_code"]: set()}
        for i, highway_class in enumerate(value):
            # maz_to_maz.mode_code must be unique
            if "maz_to_maz" in values:
                assert (
                    highway_class["mode_code"] != values["maz_to_maz"]["mode_code"]
                ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code"
            # make sure that if any mode IDs are used twice, they have the same excluded links sets
            if highway_class.mode_code in mode_excluded_links:
                ex_links1 = highway_class["excluded_links"]
                ex_links2 = mode_excluded_links[highway_class["mode_code"]]
                error_msg = (
                    f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') "
                    f"with different excluded links: {ex_links1} and {ex_links2}"
                )
                assert ex_links1 == ex_links2, error_msg
            mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links
        return value

    @validator("classes")
    def validate_class_keyword_lists(cls, value, values):
        """Validate classes .skims, .toll, and .excluded_links values."""
        if "tolls" not in values:
            return value
        avail_skims = [
            "time",
            "dist",
            "hovdist",
            "tolldist",
            "freeflowtime",
            "rlbty",
            "autotime",
        ]
        available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"]
        avail_toll_attrs = []
        for name in values["tolls"].dst_vehicle_group_names:
            toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"]
            avail_skims.extend(toll_types)
            avail_toll_attrs.extend(["@" + name for name in toll_types])
            available_link_sets.append(f"is_toll_{name}")

        # validate class skim name list and toll attribute against toll setup
        def check_keywords(class_num, key, val, available):
            extra_keys = set(val) - set(available)
            error_msg = (
                f" -> {class_num} -> {key}: unrecognized {key} name(s): "
                f"{','.join(extra_keys)}.  Available names are: {', '.join(available)}"
            )
            assert not extra_keys, error_msg

        for i, highway_class in enumerate(value):
            check_keywords(i, "skim", highway_class["skims"], avail_skims)
            check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs)
            check_keywords(
                i,
                "excluded_links",
                highway_class["excluded_links"],
                available_link_sets,
            )
        return value

unique_capclass_numbers(value)

Validate list of capclass_lookup has unique .capclass values.

Source code in tm2py\config.py
1003
1004
1005
1006
1007
1008
1009
@validator("capclass_lookup")
def unique_capclass_numbers(cls, value):
    """Validate list of capclass_lookup has unique .capclass values."""
    # Compare list length to set length to detect duplicate capclass ids.
    capclass_ids = [i.capclass for i in value]
    error_msg = "-> capclass value must be unique in list"
    assert len(capclass_ids) == len(set(capclass_ids)), error_msg
    return value

unique_class_names(value)

Validate list of classes has unique .name values.

Source code in tm2py\config.py
1011
1012
1013
1014
1015
1016
1017
@validator("classes", pre=True)
def unique_class_names(cls, value):
    """Validate list of classes has unique .name values."""
    # pre=True: runs before item parsing, so entries are accessed as raw
    # dicts here rather than HighwayClassConfig instances.
    class_names = [highway_class["name"] for highway_class in value]
    error_msg = "-> name value must be unique in list"
    assert len(class_names) == len(set(class_names)), error_msg
    return value

valid_skim_matrix_name_template(value)

Validate skim matrix template has correct {}.

Source code in tm2py\config.py
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
@validator("output_skim_matrixname_tmpl")
def valid_skim_matrix_name_template(value):
    """Validate skim matrix template has correct {}."""
    # BUG FIX: the assertion messages were plain strings, so "{value}" was
    # emitted literally instead of the offending template. Made them
    # f-strings ({{...}} renders the literal placeholder name).
    assert (
        "{time_period" in value
    ), f"-> 'output_skim_matrixname_tmpl must have {{time_period}}, found {value}."
    assert (
        "{property" in value
    ), f"-> 'output_skim_matrixname_tmpl must have {{property}}, found {value}."
    assert (
        "{mode" in value
    ), f"-> 'output_skim_matrixname_tmpl must have {{mode}}, found {value}."
    return value

valid_skim_template(value)

Validate skim template has correct {} and extension.

Source code in tm2py\config.py
978
979
980
981
982
983
984
985
986
987
@validator("output_skim_filename_tmpl")
def valid_skim_template(value):
    """Validate skim template has correct {} and extension."""
    # Substring check (no closing brace) deliberately also accepts format
    # specs such as "{time_period:...}".
    assert (
        "{time_period" in value
    ), f"-> output_skim_filename_tmpl must have {{time_period}}', found {value}."
    assert (
        value[-4:].lower() == ".omx"
    ), f"-> 'output_skim_filename_tmpl must end in '.omx', found {value[-4:].lower() }"
    return value

validate_class_keyword_lists(value, values)

Validate classes .skims, .toll, and .excluded_links values.

Source code in tm2py\config.py
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
@validator("classes")
def validate_class_keyword_lists(cls, value, values):
    """Validate classes .skims, .toll, and .excluded_links values."""
    # "tolls" is absent from values if its own validation failed; skip then.
    if "tolls" not in values:
        return value
    avail_skims = [
        "time",
        "dist",
        "hovdist",
        "tolldist",
        "freeflowtime",
        "rlbty",
        "autotime",
    ]
    available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"]
    avail_toll_attrs = []
    # Expand the per-vehicle-group keyword vocabularies from the toll setup.
    for name in values["tolls"].dst_vehicle_group_names:
        toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"]
        avail_skims.extend(toll_types)
        avail_toll_attrs.extend(["@" + name for name in toll_types])
        available_link_sets.append(f"is_toll_{name}")

    # validate class skim name list and toll attribute against toll setup
    def check_keywords(class_num, key, val, available):
        # Any keyword not in the allowed vocabulary triggers the assert.
        extra_keys = set(val) - set(available)
        error_msg = (
            f" -> {class_num} -> {key}: unrecognized {key} name(s): "
            f"{','.join(extra_keys)}.  Available names are: {', '.join(available)}"
        )
        assert not extra_keys, error_msg

    for i, highway_class in enumerate(value):
        check_keywords(i, "skim", highway_class["skims"], avail_skims)
        check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs)
        check_keywords(
            i,
            "excluded_links",
            highway_class["excluded_links"],
            available_link_sets,
        )
    return value

validate_class_mode_excluded_links(value, values)

Validate list of classes has unique .mode_code or .excluded_links match.

Source code in tm2py\config.py
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
@validator("classes")
def validate_class_mode_excluded_links(cls, value, values):
    """Validate list of classes has unique .mode_code or .excluded_links match."""
    # validate if any mode IDs are used twice, that they have the same excluded links sets
    mode_excluded_links = {values["generic_highway_mode_code"]: set([])}
    for i, highway_class in enumerate(value):
        # maz_to_maz.mode_code must be unique
        if "maz_to_maz" in values:
            assert (
                highway_class["mode_code"] != values["maz_to_maz"]["mode_code"]
            ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code"
        # make sure that if any mode IDs are used twice, they have the same excluded links sets
        if highway_class.mode_code in mode_excluded_links:
            ex_links1 = highway_class["excluded_links"]
            ex_links2 = mode_excluded_links[highway_class["mode_code"]]
            error_msg = (
                f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') "
                f"with different excluded links: {ex_links1} and {ex_links2}"
            )
            assert ex_links1 == ex_links2, error_msg
        mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links
    return value

HighwayMazToMazConfig

Bases: ConfigItem

Highway MAZ to MAZ shortest path assignment and skim parameters.

Properties
Source code in tm2py\config.py
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
@dataclass(frozen=True)
class HighwayMazToMazConfig(ConfigItem):
    """Highway MAZ to MAZ shortest path assignment and skim parameters.

    Properties:
        mode_code: single character mode, used to generate link.modes to
            identify subnetwork, generated from "excluded_links" keywords,
            plus including MAZ connectors.
        value_of_time: value of time for this class in $ / hr
        operating_cost_per_mile: vehicle operating cost in cents / mile
        max_distance: maximum distance to search for MAZ-to-MAZ paths
            (presumably in miles - confirm against the component code)
        max_skim_cost: max shortest path distance to search for MAZ-to-MAZ
            skims, in generalized costs units (includes operating cost
            converted to minutes)
        excluded_links: list of keywords to identify links to exclude from
            MAZ-to-MAZ paths, see HighwayClassConfig.excluded_links
        demand_file: relative path to find the input demand files
            can have use a placeholder for {period} and {number}, where the
            {period} is the time_period.name (see TimePeriodConfig)
            and {number} is the demand_count_groups[].number
            (see DemandCountyGroupConfig)
            e.g.: auto_{period}_MAZ_AUTO_{number}_{period}.omx
        demand_county_groups: List of demand county names and numbers
        skim_period: period name to use for the shortest path skims, must
            match one of the names listed in the time_periods
        output_skim_file: relative path to resulting MAZ-to-MAZ skims
    """

    mode_code: str = Field(min_length=1, max_length=1)
    value_of_time: float = Field(gt=0)
    operating_cost_per_mile: float = Field(ge=0)
    max_distance: float = Field(gt=0)
    max_skim_cost: float = Field(gt=0)
    excluded_links: Tuple[str, ...] = Field()
    demand_file: pathlib.Path = Field()
    demand_county_groups: Tuple[DemandCountyGroupConfig, ...] = Field()
    skim_period: str = Field()
    output_skim_file: pathlib.Path = Field()

    @validator("demand_county_groups")
    def unique_group_numbers(cls, value):
        """Validate list of demand_county_groups has unique .number values."""
        group_ids = [group.number for group in value]
        assert len(group_ids) == len(set(group_ids)), "-> number value must be unique"
        return value

unique_group_numbers(value)

Validate list of demand_county_groups has unique .number values.

Source code in tm2py\config.py
918
919
920
921
922
923
@validator("demand_county_groups")
def unique_group_numbers(cls, value):
    """Validate list of demand_county_groups has unique .number values."""
    group_ids = [group.number for group in value]
    assert len(group_ids) == len(set(group_ids)), "-> number value must be unique"
    return value

HighwayRelativeGapConfig

Bases: ConfigItem

Highway assignment relative gap parameters.

Properties
Source code in tm2py\config.py
718
719
720
721
722
723
724
725
726
727
728
@dataclass(frozen=True)
class HighwayRelativeGapConfig(ConfigItem):
    """Highway assignment relative gap parameters.

    Properties:
        global_iteration: global iteration number (>= 0) at which this
            relative gap target applies
        relative_gap: relative gap convergence target (> 0)
    """

    global_iteration: int = Field(ge=0)
    relative_gap: float = Field(gt=0)

HighwayTollsConfig

Bases: ConfigItem

Highway assignment and skim input tolls and related parameters.

Properties
Source code in tm2py\config.py
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
@dataclass(frozen=True)
class HighwayTollsConfig(ConfigItem):
    """Highway assignment and skim input tolls and related parameters.

    Properties:
        file_path: source relative file path for the highway tolls index CSV
        valuetoll_start_tollbooth_code: tollbooth separates links with "bridge" tolls
            (index < this value) vs. "value" tolls. These toll attributes
            can then be referenced separately in the highway.classes[].tolls
            list
        src_vehicle_group_names: name used for the vehicle toll CSV column IDs,
            of the form "toll{period}_{vehicle}"
        dst_vehicle_group_names: list of names used in destination network
            for the corresponding vehicle group. Length of list must be the same
            as src_vehicle_group_names, and each name must be 4 characters or
            less (enforced by the validator below). Used for toll related
            attributes and resulting skim matrices. Cross-referenced in list
            of highway.classes[], valid keywords for:
                excluded_links: "is_toll_{vehicle}"
                tolls: "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}"
                skims: "bridgetoll_{vehicle}", "valuetoll_{vehicle}"
    """

    file_path: pathlib.Path = Field()
    valuetoll_start_tollbooth_code: int = Field(gt=1)
    src_vehicle_group_names: Tuple[str, ...] = Field()
    dst_vehicle_group_names: Tuple[str, ...] = Field()

    @validator("dst_vehicle_group_names", always=True)
    def dst_vehicle_group_names_length(cls, value, values):
        """Validate dst_vehicle_group_names has same length as src_vehicle_group_names."""
        if "src_vehicle_group_names" in values:
            assert len(value) == len(
                values["src_vehicle_group_names"]
            ), "dst_vehicle_group_names must be same length as src_vehicle_group_names"
            assert all(
                [len(v) <= 4 for v in value]
            ), "dst_vehicle_group_names must be 4 characters or less"
        return value

dst_vehicle_group_names_length(value, values)

Validate dst_vehicle_group_names has same length as src_vehicle_group_names.

Source code in tm2py\config.py
841
842
843
844
845
846
847
848
849
850
851
@validator("dst_vehicle_group_names", always=True)
def dst_vehicle_group_names_length(cls, value, values):
    """Validate dst_vehicle_group_names has same length as src_vehicle_group_names."""
    if "src_vehicle_group_names" in values:
        assert len(value) == len(
            values["src_vehicle_group_names"]
        ), "dst_vehicle_group_names must be same length as src_vehicle_group_names"
        assert all(
            [len(v) <= 4 for v in value]
        ), "dst_vehicle_group_names must be 4 characters or less"
    return value

HouseholdConfig

Bases: ConfigItem

Household (residents) model parameters.

Source code in tm2py\config.py
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
@dataclass(frozen=True)
class HouseholdConfig(ConfigItem):
    """Household (residents) model parameters.

    File locations and factors used to convert CT-RAMP resident-model
    outputs into highway, transit, and active-mode demand tables.
    """

    # demand table output paths by sub-model
    highway_demand_file: pathlib.Path
    transit_demand_file: pathlib.Path
    active_demand_file: pathlib.Path
    highway_maz_ctramp_output_file: pathlib.Path
    # zero-passenger-vehicle factors
    # NOTE(review): semantics inferred from names - confirm against component code
    OwnedAV_ZPV_factor: float
    TNC_ZPV_factor: float
    # CT-RAMP trip file names and run directory
    ctramp_indiv_trip_file: str
    ctramp_joint_trip_file: str
    ctramp_run_dir: pathlib.Path
    # mode-split fractions keyed by mode name
    rideshare_mode_split: Dict[str, float]
    taxi_split: Dict[str, float]
    single_tnc_split: Dict[str, float]
    shared_tnc_split: Dict[str, float]
    # mapping of CT-RAMP mode code to mode name
    ctramp_mode_names: Dict[float, str]
    income_segment: Dict[str, Union[float, str, list]]
    ctramp_hh_file: str

HouseholdModeAgg

Bases: ConfigItem

household trip mode aggregation input parameters.

Properties
Source code in tm2py\config.py
304
305
306
307
308
309
310
311
312
313
314
315
@dataclass(frozen=True)
class HouseholdModeAgg(ConfigItem):
    """Household trip mode aggregation input parameters.

    Properties:
        name: aggregate name used for the class group in the input columns
            for the trip tables,
        modes: list of mode choice mode names used for the trip tables
    """

    name: str
    modes: Tuple[str, ...]

ImpedanceConfig

Bases: ConfigItem

Blended skims used for accessibility/friction calculations.

Properties: name: name to store it as, referred to in TripDistribution config skim_mode: name of the mode to use for the blended skim time_blend: blend of time periods to use; mapped to the factors (which should sum to 1)

Source code in tm2py\config.py
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
@dataclass(frozen=True)
class ImpedanceConfig(ConfigItem):
    """Blended skims used for accessibility/friction calculations.

    Properties:
        name: name to store it as, referred to in TripDistribution config
        skim_mode: name of the mode to use for the blended skim
        time_blend: blend of time periods to use; mapped to the factors (which should sum to 1)
    """

    name: str
    skim_mode: str
    time_blend: Dict[str, float]

    @validator("time_blend", allow_reuse=True)
    def sums_to_one(value):
        """Validate time_blend factors sum to 1 (within tolerance)."""
        # Fix: compare the absolute deviation. The original
        # `sum(...) - 1 < 0.0001` accepted any total below ~1
        # (e.g. factors summing to 0.5 passed).
        assert abs(sum(value.values()) - 1) < 0.0001, "blend factors must sum to 1"
        return value

sums_to_one(value)

Validate time_blend factors sum to 1.

Source code in tm2py\config.py
595
596
597
598
599
@validator("time_blend", allow_reuse=True)
def sums_to_one(value):
    """Validate highway.maz_to_maz.skim_period refers to a valid period."""
    assert sum(value.values()) - 1 < 0.0001, "blend factors must sum to 1"
    return value

InternalExternalConfig

Bases: ConfigItem

Internal <-> External model parameters.

Source code in tm2py\config.py
514
515
516
517
518
519
520
521
522
523
524
@dataclass(frozen=True)
class InternalExternalConfig(ConfigItem):
    """Internal <-> External model parameters.

    Demand growth, time-of-day, and toll choice settings for trips with
    one end outside the region.
    """

    # output directory and file name template for the trip tables
    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str
    highway_demand_file: str
    # modes included in the internal-external demand
    modes: List[str]
    demand: DemandGrowth
    time_of_day: TimeOfDayConfig
    toll_choice: TollChoiceConfig

LoggingConfig

Bases: ConfigItem

Logging parameters. TODO.

Properties
Source code in tm2py\config.py
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
@dataclass(frozen=True)
class LoggingConfig(ConfigItem):
    """Logging parameters. TODO.

    Properties:
        display_level: filter level for messages to show in console, default
            is STATUS
        run_file_path: relative path to high-level log file for the model run,
            default is tm2py_run_[%Y%m%d_%H%M].log
        run_file_level: filter level for messages recorded in the run log,
            default is INFO
        log_file_path: relative path to general log file with more detail
            than the run_file, default is tm2py_debug_[%Y%m%d_%H%M].log
        log_file_level: optional, filter level for messages recorded in the
            standard log, default is DEBUG
        log_on_error_file_path: relative path to use for fallback log message cache
            on error, default is tm2py_error_[%Y%m%d_%H%M].log
        notify_slack: if true notify_slack messages will be sent, default is False
        use_emme_logbook: if True log messages recorded in the standard log file will
            also be recorded in the Emme logbook, default is True
        iter_component_level: tuple of tuples of iteration, component name, log level.
            Used to override log levels (log_file_level) for debugging and recording
            more detail in the log_file_path.
            Example: [ [2, "highway", "TRACE"] ] to record all messages
            during the highway component run at iteration 2.
    """

    display_level: Optional[LogLevel] = Field(default="STATUS")
    # NOTE(review): the default timestamps below are evaluated once at import
    # time, not per run - confirm this is the intended behavior
    run_file_path: Optional[str] = Field(
        default="tm2py_run_{}.log".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M")
        )
    )
    run_file_level: Optional[LogLevel] = Field(default="INFO")
    log_file_path: Optional[str] = Field(
        default="tm2py_debug_{}.log".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M")
        )
    )
    log_file_level: Optional[LogLevel] = Field(default="DEBUG")
    log_on_error_file_path: Optional[str] = Field(
        default="tm2py_error_{}.log".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M")
        )
    )

    notify_slack: Optional[bool] = Field(default=False)
    use_emme_logbook: Optional[bool] = Field(default=True)
    iter_component_level: Optional[
        Tuple[Tuple[int, ComponentNames, LogLevel], ...]
    ] = Field(default=None)

ManualJourneyLevelsConfig

Bases: ConfigItem

Manual Journey Level Specification

Source code in tm2py\config.py
1185
1186
1187
1188
1189
1190
@dataclass(frozen=True)
class ManualJourneyLevelsConfig(ConfigItem):
    """Manual journey level specification.

    Properties:
        level_id: serial number of the faresystem group
        group_fare_systems: faresystem IDs belonging to this group
    """

    level_id: int
    group_fare_systems: Tuple[int, ...]

MatrixFactorConfig

Bases: ConfigItem

Mapping of zone or list of zones to factor value.

Source code in tm2py\config.py
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
@dataclass(frozen=True)
class MatrixFactorConfig(ConfigItem):
    """Mapping of zone or list of zones to factor value.

    Properties:
        zone_index: zone ID or list of zone IDs the factor applies to
        factor: single factor; mutually exclusive with i_factor / j_factor
        i_factor: origin-end (row) factor
        j_factor: destination-end (column) factor
        as_growth_rate: if True treat the factor(s) as growth rates
    """

    zone_index: Optional[Union[int, List[int]]]
    factor: Optional[float] = Field(default=None)
    i_factor: Optional[float] = Field(default=None)
    j_factor: Optional[float] = Field(default=None)
    as_growth_rate: Optional[bool] = Field(default=False)

    @validator("zone_index", allow_reuse=True)
    def valid_zone_index(value):
        """Validate zone index and turn to list if isn't one."""
        if isinstance(value, str):
            value = int(value)
        if isinstance(value, int):
            value = [value]
        assert all([x >= 0 for x in value]), "Zone_index must be greater or equal to 0"
        return value

    @validator("i_factor", "j_factor", allow_reuse=True)
    def valid_factor(value, values):
        """Validate `factor` is not combined with `i_factor`/`j_factor`.

        Fix: the original validator ran on `factor`, which is parsed before
        i_factor/j_factor, so neither key was ever in `values` and the check
        could never fire. Validating the later fields and comparing `factor`
        against None (fields appear in `values` with their defaults) makes
        the exclusivity check effective.
        """
        if value is not None:
            assert values.get("factor") is None, (
                "Found both `factor` and `i_factor`/`j_factor` in "
                "MatrixFactorConfig. Should be one or the other."
            )
        return value

valid_zone_index(value)

Validate zone index and turn to list if isn’t one.

Source code in tm2py\config.py
417
418
419
420
421
422
423
424
425
@validator("zone_index", allow_reuse=True)
def valid_zone_index(value):
    """Validate zone index and turn to list if isn't one."""
    if isinstance(value, str):
        value = int(value)
    if isinstance(value, int):
        value = [value]
    assert all([x >= 0 for x in value]), "Zone_index must be greater or equal to 0"
    return value

RunConfig

Bases: ConfigItem

Model run parameters.

Note that the components will be executed in the order listed.

Properties
Source code in tm2py\config.py
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
@dataclass(frozen=True)
class RunConfig(ConfigItem):
    """Model run parameters.

    Note that the components will be executed in the order listed.

    Properties:
        start_iteration: start iteration number, 0 to include initial_components
        end_iteration: final iteration number
        start_component: name of component to start with, will skip components
            list prior to this component
        initial_components: list of components to run as initial (0) iteration, in order
        global_iteration_components: list of component to run at every subsequent
            iteration (max(1, start_iteration) to end_iteration), in order.
        final_components: list of components to run after final iteration, in order
    """

    initial_components: Tuple[ComponentNames, ...]
    global_iteration_components: Tuple[ComponentNames, ...]
    final_components: Tuple[ComponentNames, ...]
    start_iteration: int = Field(ge=0)
    end_iteration: int = Field(gt=0)
    start_component: Optional[Union[ComponentNames, EmptyString]] = Field(default="")

    @validator("end_iteration", allow_reuse=True)
    def end_iteration_gt_start(cls, value, values):
        """Validate end_iteration is at least start_iteration."""
        # Fix: explicit None check so start_iteration == 0 is not skipped by
        # falsiness, and the message matches the >= comparison enforced.
        if values.get("start_iteration") is not None:
            assert (
                value >= values["start_iteration"]
            ), f"'end_iteration' ({value}) must be greater than or equal to 'start_iteration'\
                ({values['start_iteration']})"
        return value

    @validator("start_component", allow_reuse=True)
    def start_component_used(cls, value, values):
        """Validate start_component is listed in *_components."""
        # Fix: the field under validation is never in `values`, so the
        # original `values.get("start_component")` guard always returned
        # early and the checks below never ran. Guard only on empty value.
        if not value:
            return value

        if "start_iteration" in values:
            if values.get("start_iteration") == 0:
                assert value in values.get(
                    "initial_components", [value]
                ), f"'start_component' ({value}) must be one of the components listed in\
                    initial_components if 'start_iteration = 0'"
            else:
                # Fix: fallback default was `[values]` (the whole dict),
                # a typo for `[value]`.
                assert value in values.get(
                    "global_iteration_components", [value]
                ), f"'start_component' ({value}) must be one of the components listed in\
                    global_iteration_components if 'start_iteration > 0'"
        return value

end_iteration_gt_start(value, values)

Validate end_iteration greater than start_iteration.

Source code in tm2py\config.py
146
147
148
149
150
151
152
153
154
@validator("end_iteration", allow_reuse=True)
def end_iteration_gt_start(cls, value, values):
    """Validate end_iteration greater than start_iteration."""
    if values.get("start_iteration"):
        assert (
            value >= values["start_iteration"]
        ), f"'end_iteration' ({value}) must be greater than 'start_iteration'\
            ({values['start_iteration']})"
    return value

start_component_used(value, values)

Validate start_component is listed in *_components.

Source code in tm2py\config.py
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
@validator("start_component", allow_reuse=True)
def start_component_used(cls, value, values):
    """Validate start_component is listed in *_components."""
    if not values.get("start_component") or not value:
        return value

    if "start_iteration" in values:
        if values.get("start_iteration") == 0:
            assert value in values.get(
                "initial_components", [value]
            ), f"'start_component' ({value}) must be one of the components listed in\
                initial_components if 'start_iteration = 0'"
        else:
            assert value in values.get(
                "global_iteration_components", [values]
            ), f"'start_component' ({value}) must be one of the components listed in\
                global_iteration_components if 'start_iteration > 0'"
    return value

ScenarioConfig

Bases: ConfigItem

Scenario related parameters.

Properties
Source code in tm2py\config.py
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
@dataclass(frozen=True)
class ScenarioConfig(ConfigItem):
    """Scenario related parameters.

    Properties:
        verify: optional, default False if specified as True components will run
            additional post-process verify step to validate results / outputs
            (not implemented yet)
        maz_landuse_file: relative path to maz_landuse_file used by multiple
            components
        zone_seq_file: relative path to the zone sequence file
        landuse_file: TAZ file
        landuse_index_column: name of the index column in landuse_file
        name: scenario name string
        year: model year, must be at least 2005
    """

    maz_landuse_file: pathlib.Path
    zone_seq_file: pathlib.Path
    landuse_file: pathlib.Path
    landuse_index_column: str
    name: str
    year: int = Field(ge=2005)
    verify: Optional[bool] = Field(default=False)

TimeOfDayClassConfig

Bases: ConfigItem

Configuration for a class of time of day model.

Source code in tm2py\config.py
289
290
291
292
293
294
@dataclass(frozen=True)
class TimeOfDayClassConfig(ConfigItem):
    """Configuration for a class of time of day model.

    Properties:
        name: name of the demand class
        time_period_split: per-time-period split factors for this class
    """

    name: str
    time_period_split: List[TimeSplitConfig]

TimeOfDayConfig

Bases: ConfigItem

Configuration for time of day model.

Source code in tm2py\config.py
297
298
299
300
301
@dataclass(frozen=True)
class TimeOfDayConfig(ConfigItem):
    """Configuration for time of day model.

    Properties:
        classes: list of demand classes with their time period splits
    """

    classes: List[TimeOfDayClassConfig]

TimePeriodConfig

Bases: ConfigItem

Time period entry.

Properties
Source code in tm2py\config.py
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
@dataclass(frozen=True)
class TimePeriodConfig(ConfigItem):
    """Time period entry.

    Properties:
        name: name of the time period, up to four characters
        start_period: start of the time period (> 0; units not shown here -
            TODO confirm whether this is an hour or a period index)
        length_hours: length of the time period in hours
        highway_capacity_factor: factor to use to multiple the per-hour
            capacites in the highway network
        emme_scenario_id: scenario ID to use for Emme per-period
            assignment (highway and transit) scenarios
        description: optional description of the time period, default ""
    """

    name: str = Field(max_length=4)
    start_period: float = Field(gt=0)
    length_hours: float = Field(gt=0)
    highway_capacity_factor: float = Field(gt=0)
    emme_scenario_id: int = Field(ge=1)
    description: Optional[str] = Field(default="")

TimeSplitConfig

Bases: ConfigItem

Split matrix i and j.

i.e. for time of day splits.

Source code in tm2py\config.py
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
@dataclass(frozen=True)
class TimeSplitConfig(ConfigItem):
    """Split matrix i and j.

    i.e. for time of day splits.

    Properties:
        time_period: name of the time period the split applies to
        production: production-end share in [0, 1]; requires attraction
        attraction: attraction-end share in [0, 1]; requires production
        od: origin-destination share in [0, 1]; mutually exclusive with
            production/attraction
    """

    time_period: str
    production: Optional[NonNegativeFloat] = None
    attraction: Optional[NonNegativeFloat] = None
    od: Optional[NonNegativeFloat] = None

    @validator("production", "attraction", "od")
    def less_than_equal_one(cls, v):
        """Validate share is <= 1; pass None through unchanged."""
        # Fix: the original `if v:` returned inside the branch, which
        # silently converted a legitimate 0.0 value to None.
        if v is not None:
            assert v <= 1.0, "Value should be less than or equal to 1"
        return v

    def __post_init__(self):
        """Cross-field checks: either od, or production AND attraction."""
        if self.od and any([self.production, self.attraction]):
            # fixed typo in error message ("specifify")
            raise ValueError(
                f"TimeSplitConfig: Must either specify an od or any of\
            production/attraction - not both.\n{self}"
            )

        if not all([self.production, self.attraction]) and any(
            [self.production, self.attraction]
        ):
            raise ValueError(
                f"TimeSplitConfig: Must have both production AND attraction\
            if one of them is specified."
            )

TollChoiceConfig

Bases: ConfigItem

Toll choice parameters.

Properties
Source code in tm2py\config.py
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
@dataclass(frozen=True)
class TollChoiceConfig(ConfigItem):
    """Toll choice parameters.

    Properties:
        classes: list of choice class configurations
        value_of_time: value of time used to convert cost to utility
        operating_cost_per_mile: vehicle operating cost per mile
        property_to_skim_toll: Maps a property in the utility equation with a list of skim
            properties. If more than one skim property is listed, they will be summed together
            (e.g. cost if the sum of bridge toll and value toll). This defaults to a value in the
            code.
        property_to_skim_notoll: Maps a property in the utility equation with a list of skim
            properties for no toll choice.If more than one skim property is listed, they will
            be summed together  (e.g. cost if the sum of bridge toll and value toll). This
            defaults to a value in the code.
        utility: optional list of utility coefficients, default empty
    """

    classes: List[ChoiceClassConfig]
    value_of_time: float
    operating_cost_per_mile: float
    property_to_skim_toll: Optional[Dict[str, List[str]]] = Field(default_factory=dict)
    property_to_skim_notoll: Optional[Dict[str, List[str]]] = Field(
        default_factory=dict
    )
    # Fix: default factory was `dict`, which does not match the List
    # annotation; an empty list is the correct empty default here.
    utility: Optional[List[CoefficientConfig]] = Field(default_factory=list)

TransitClassConfig

Bases: ConfigItem

Transit demand class definition.

Source code in tm2py\config.py
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
@dataclass(frozen=True)
class TransitClassConfig(ConfigItem):
    """Transit demand class definition.

    Properties:
        skim_set_id: ID of the skim set this class belongs to
        name: class name
        description: class description
        mode_types: mode type names included in this class
        demand: list of demand sources for this class
        required_mode_combo: optional combination of modes that must be
            used together, default None
    """

    skim_set_id: str
    name: str
    description: str
    mode_types: Tuple[str, ...]
    demand: Tuple[ClassDemandConfig, ...]
    required_mode_combo: Optional[Tuple[str, ...]] = Field(default=None)

TransitConfig

Bases: ConfigItem

Transit assignment parameters.

Source code in tm2py\config.py
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
@dataclass(frozen=True)
class TransitConfig(ConfigItem):
    """Transit assignment parameters."""

    modes: Tuple[TransitModeConfig, ...]
    classes: Tuple[TransitClassConfig, ...]
    journey_levels: TransitJourneyLevelsConfig
    apply_msa_demand: bool
    value_of_time: float
    walk_speed: float
    transit_speed: float
    effective_headway_source: str
    initial_wait_perception_factor: float
    transfer_wait_perception_factor: float
    # Fix: walk_perception_factor was declared twice; the duplicate
    # (which silently overrode the first) has been removed.
    walk_perception_factor: float
    walk_perception_factor_cbd: float
    drive_perception_factor: float
    max_transfers: int
    use_fares: bool
    fare_2015_to_2000_deflator: float
    fares_path: pathlib.Path
    fare_matrix_path: pathlib.Path
    fare_max_transfer_distance_miles: float
    override_connector_times: bool
    use_ccr: bool
    ccr_stop_criteria: Optional[AssignmentStoppingCriteriaConfig]
    ccr_weights: CcrWeightsConfig
    eawt_weights: EawtWeightsConfig
    congested_transit_assignment: bool
    congested: CongestedAssnConfig
    congested_weights: CongestedWeightsConfig
    output_skim_path: pathlib.Path
    output_skim_filename_tmpl: str = Field()
    output_skim_matrixname_tmpl: str = Field()
    output_stop_usage_path: Optional[str] = Field(default=None)
    output_transit_boardings_path: Optional[str] = Field(default=None)
    output_transit_segment_path: Optional[str] = Field(default=None)
    output_station_to_station_flow_path: Optional[str] = Field(default=None)
    output_transfer_at_station_path: Optional[str] = Field(default=None)
    timed_transfer_nodes: Tuple[int, ...] = Field()
    output_transfer_at_station_node_ids: Dict[str, int] = Field()
    # Fix: annotation was `float = None`; Optional matches the None default.
    max_ccr_iterations: Optional[float] = None
    split_connectors_to_prevent_walk: bool = False
    input_connector_access_times_path: Optional[str] = Field(default=None)
    input_connector_egress_times_path: Optional[str] = Field(default=None)
    vehicles: Optional[TransitVehicleConfig] = Field(
        default_factory=TransitVehicleConfig
    )

    @validator("use_ccr")
    def deprecate_capacitated_assignment(cls, value, values):
        """Validate use_ccr is false."""
        assert (
            not value
        ), "capacitated transit assignment is deprecated, please set use_ccr to false"
        return value

deprecate_capacitated_assignment(value, values)

Validate use_ccr is false.

Source code in tm2py\config.py
1389
1390
1391
1392
1393
1394
1395
@validator("use_ccr")
def deprecate_capacitated_assignment(cls, value, values):
    """Validate use_ccr is false."""
    assert (
        not value
    ), "capacitated transit assignment is deprecated, please set use_ccr to false"
    return value

TransitJourneyLevelsConfig

Bases: ConfigItem

Transit manual journey levels structure.

Source code in tm2py\config.py
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
@dataclass(frozen=True)
class TransitJourneyLevelsConfig(ConfigItem):
    """Transit manual journey levels structure."""

    use_algorithm: bool = False
    """
    The original translation from Cube to Emme used an algorithm to, as faithfully as possible, reflect transfer fares via journey levels. 
    The algorithm examines fare costs and proximity of transit services to create a set of journey levels that reflects transfer costs. 
    While this algorithm works well, the Bay Area's complex fare system results in numerous journey levels specific to operators with low ridership. 
    The resulting assignment compute therefore expends a lot of resources on these operators. 
    Set this parameter to `True` to use the algorithm. Exactly one of `use_algorithm` or `specify_manually` must be `True`. 
    """
    specify_manually: bool = True
    """
    An alternative to using an algorithm to specify the journey levels is to use specify them manually. 
    If this option is set to `True`, the `manual` parameter can be used to assign fare systems to faresystem groups (or journey levels). 
    Consider, for example, the following three journey levels: 0 - has yet to board transit; 1 - has boarded SF Muni; 2 - has boarded all other transit systems. 
    To specify this configuration, a single `manual` entry identifying the SF Muni fare systems is needed. 
    The other faresystem group is automatically generated in the code with the rest of the faresystems which are not specified in any of the groups.
    See the `manual` entry for an example.
    """
    manual: Optional[Tuple[ManualJourneyLevelsConfig, ...]] = (
        ManualJourneyLevelsConfig(level_id=1, group_fare_systems=(25,)),
    )
    """
    If 'specify_manually' is set to `True`, there should be at least one faresystem group specified here.
    The format includes two entries: `level_id`, which is the serial number of the group specified, 
    and `group_fare_system`, which is a list of all faresystems belonging to that group.
    For example, to specify MUNI as one faresystem group, the right configuration would be:
    [[transit.journey_levels.manual]]
    level_id = 1
    group_fare_systems = [25]
    If there are multiple groups required to be specified, for example, MUNI in one and Caltrain in the other group,
    it can be achieved by adding another entry of `manual`, like:
    [[transit.journey_levels.manual]]
    level_id = 1
    group_fare_systems = [25]
    [[transit.journey_levels.manual]]
    level_id = 2
    group_fare_systems = [12,14]

    """

    @validator("specify_manually")
    def check_exclusivity(cls, v, values):
        """Validates that exactly one of specify_manually and use_algorithm is True."""
        # `values` only holds fields declared (and validated) before this one,
        # so use_algorithm is available here.
        use_algorithm = values.get("use_algorithm")
        assert (
            use_algorithm != v
        ), 'Exactly one of "use_algorithm" or "specify_manually" must be True.'
        return v

    @validator("manual", always=True)
    def check_manual(cls, v, values):
        """Require a non-empty `manual` tuple when specify_manually is True."""
        # always=True so this also runs when `manual` is left at its default.
        if values.get("specify_manually"):
            assert (
                v is not None and len(v) > 0
            ), "If 'specify_manually' is True, 'manual' cannot be None or empty."
        return v

manual = (ManualJourneyLevelsConfig(level_id=1, group_fare_systems=(25,)),) class-attribute instance-attribute

If ‘specify_manually’ is set to True, there should be at least one faresystem group specified here. The format includes two entries: level_id, which is the serial number of the group specified, and group_fare_system, which is a list of all faresystems belonging to that group. For example, to specify MUNI as one faresystem group, the right configuration would be: [[transit.journey_levels.manual]] level_id = 1 group_fare_systems = [25] If there are multiple groups required to be specified, for example, MUNI in one and Caltrain in the other group, it can be achieved by adding another entry of manual, like: [[transit.journey_levels.manual]] level_id = 1 group_fare_systems = [25][[transit.journey_levels.manual]] level_id = 2 group_fare_systems = [12,14]

specify_manually = True class-attribute instance-attribute

An alternative to using an algorithm to specify the journey levels is to use specify them manually. If this option is set to True, the manual parameter can be used to assign fare systems to faresystem groups (or journey levels). Consider, for example, the following three journey levels: 0 - has yet to board transit; 1 - has boarded SF Muni; 2 - has boarded all other transit systems. To specify this configuration, a single manual entry identifying the SF Muni fare systems is needed. The other faresystem group is automatically generated in the code with the rest of the faresystems which are not specified in any of the groups. See the manual entry for an example.

use_algorithm = False class-attribute instance-attribute

The original translation from Cube to Emme used an algorithm to, as faithfully as possible, reflect transfer fares via journey levels. The algorithm examines fare costs and proximity of transit services to create a set of journey levels that reflects transfer costs. While this algorithm works well, the Bay Area’s complex fare system results in numerous journey levels specific to operators with low ridership. The resulting assignment compute therefore expends a lot of resources on these operators. Set this parameter to True to use the algorithm. Exactly one of use_algorithm or specify_manually must be True.

check_exclusivity(v, values)

Validates that exactly one of specify_manually and use_algorithm is True

Source code in tm2py\config.py
1236
1237
1238
1239
1240
1241
1242
1243
@validator("specify_manually")
def check_exclusivity(cls, v, values):
    """Valdiates that exactly one of specify_manually and use_algorithm is True"""
    use_algorithm = values.get("use_algorithm")
    assert (
        use_algorithm != v
    ), 'Exactly one of "use_algorithm" or "specify_manually" must be True.'
    return v

TransitModeConfig

Bases: ConfigItem

Transit mode definition (see also mode in the Emme API).

Source code in tm2py\config.py
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
@dataclass(frozen=True)
class TransitModeConfig(ConfigItem):
    """Transit mode definition (see also mode in the Emme API)."""

    type: Literal[
        "WALK",
        "ACCESS",
        "EGRESS",
        "LOCAL",
        "PREMIUM",
        "DRIVE",
        "PNR_dummy",
        "KNR_dummy",
    ]
    assign_type: Literal["TRANSIT", "AUX_TRANSIT"]
    # Emme mode identifiers are a single character (also enforced by a validator).
    mode_id: str = Field(min_length=1, max_length=1)
    name: str = Field(max_length=10)
    description: Optional[str] = ""
    # The optional fields below default to None/"" but become required for a
    # particular assign_type; the validators that follow enforce that.
    in_vehicle_perception_factor: Optional[float] = Field(default=None, ge=0)
    speed_or_time_factor: Optional[str] = Field(default="")
    initial_boarding_penalty: Optional[float] = Field(default=None, ge=0)
    transfer_boarding_penalty: Optional[float] = Field(default=None, ge=0)
    headway_fraction: Optional[float] = Field(default=None, ge=0)
    transfer_wait_perception_factor: Optional[float] = Field(default=None, ge=0)
    eawt_factor: Optional[float] = Field(default=1)

    # NOTE: each conditional validator guards with `"assign_type" in values`
    # because pydantic omits a field from `values` when its own validation
    # failed. All validators now take an explicit `cls` first argument for
    # consistency (some previously relied on pydantic's argument-name
    # inspection to infer it).

    @validator("in_vehicle_perception_factor", always=True)
    def in_vehicle_perception_factor_valid(cls, value, values):
        """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("speed_or_time_factor", always=True)
    def speed_or_time_factor_valid(cls, value, values):
        """Validate speed_or_time_factor exists if assign_type is AUX_TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT":
            assert value is not None, "must be specified when assign_type==AUX_TRANSIT"
        return value

    @validator("initial_boarding_penalty", always=True)
    def initial_boarding_penalty_valid(cls, value, values):
        """Validate initial_boarding_penalty exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("transfer_boarding_penalty", always=True)
    def transfer_boarding_penalty_valid(cls, value, values):
        """Validate transfer_boarding_penalty exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("headway_fraction", always=True)
    def headway_fraction_valid(cls, value, values):
        """Validate headway_fraction exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("transfer_wait_perception_factor", always=True)
    def transfer_wait_perception_factor_valid(cls, value, values):
        """Validate transfer_wait_perception_factor exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("mode_id")
    def mode_id_valid(cls, value):
        """Validate mode_id is exactly one character.

        The stray @classmethod previously stacked on top of @validator was
        removed: in pydantic v1 @validator already wraps the function as a
        classmethod, and double-wrapping can keep the validator from being
        registered.
        """
        assert len(value) == 1, "mode_id must be one character"
        return value

headway_fraction_valid(value, values)

Validate headway_fraction exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1139
1140
1141
1142
1143
1144
@validator("headway_fraction", always=True)
def headway_fraction_valid(value, values):
    """Validate headway_fraction exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

in_vehicle_perception_factor_valid(value, values)

Validate in_vehicle_perception_factor exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1111
1112
1113
1114
1115
1116
@validator("in_vehicle_perception_factor", always=True)
def in_vehicle_perception_factor_valid(cls, value, values):
    """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

initial_boarding_penalty_valid(value, values)

Validate initial_boarding_penalty exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1125
1126
1127
1128
1129
1130
@validator("initial_boarding_penalty", always=True)
def initial_boarding_penalty_valid(value, values):
    """Validate initial_boarding_penalty exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

mode_id_valid(value) classmethod

Validate mode_id.

Source code in tm2py\config.py
1153
1154
1155
1156
1157
1158
@classmethod
@validator("mode_id")
def mode_id_valid(cls, value):
    """Validate mode_id."""
    assert len(value) == 1, "mode_id must be one character"
    return value

speed_or_time_factor_valid(value, values)

Validate speed_or_time_factor exists if assign_type is AUX_TRANSIT.

Source code in tm2py\config.py
1118
1119
1120
1121
1122
1123
@validator("speed_or_time_factor", always=True)
def speed_or_time_factor_valid(cls, value, values):
    """Validate speed_or_time_factor exists if assign_type is AUX_TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT":
        assert value is not None, "must be specified when assign_type==AUX_TRANSIT"
    return value

transfer_boarding_penalty_valid(value, values)

Validate transfer_boarding_penalty exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1132
1133
1134
1135
1136
1137
@validator("transfer_boarding_penalty", always=True)
def transfer_boarding_penalty_valid(value, values):
    """Validate transfer_boarding_penalty exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

transfer_wait_perception_factor_valid(value, values)

Validate transfer_wait_perception_factor exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1146
1147
1148
1149
1150
1151
@validator("transfer_wait_perception_factor", always=True)
def transfer_wait_perception_factor_valid(value, values):
    """Validate transfer_wait_perception_factor exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

TransitVehicleConfig

Bases: ConfigItem

Transit vehicle definition (see also transit vehicle in the Emme API).

Source code in tm2py\config.py
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
@dataclass(frozen=True)
class TransitVehicleConfig(ConfigItem):
    """Transit vehicle definition (see also transit vehicle in the Emme API)."""

    # All fields are optional so an empty/default vehicle can be constructed
    # (see the `default_factory=TransitVehicleConfig` usage elsewhere in config).
    vehicle_id: Optional[int] = Field(default=None, ge=0)
    mode: Optional[str] = Field(default="")
    name: Optional[str] = Field(default="")
    # Passenger-car equivalents of one transit vehicle — TODO confirm units.
    auto_equivalent: Optional[float] = Field(default=0, ge=0)
    seated_capacity: Optional[int] = Field(default=None, ge=0)
    total_capacity: Optional[int] = Field(default=None, ge=0)

TripDistributionClassConfig

Bases: ConfigItem

Trip Distribution parameters.

Properties
Source code in tm2py\config.py
558
559
560
561
562
563
564
565
566
567
568
569
570
@dataclass(frozen=True)
class TripDistributionClassConfig(ConfigItem):
    """Trip Distribution parameters.

    Properties:
        name: name of class to apply to
        impedance: refers to an impedance (skim) matrix to use - often a blended skim.
        use_k_factors: boolean on if to use k-factors
    """

    name: str
    impedance: str
    use_k_factors: bool

TripDistributionConfig

Bases: ConfigItem

Trip Distribution parameters.

Source code in tm2py\config.py
602
603
604
605
606
607
608
609
610
@dataclass(frozen=True)
class TripDistributionConfig(ConfigItem):
    """Trip Distribution parameters."""

    # One entry per demand class to distribute.
    classes: List[TripDistributionClassConfig]
    # Iteration limit and convergence tolerance for the balancing step.
    max_balance_iterations: int
    max_balance_relative_error: float
    friction_factors_file: pathlib.Path
    # Only needed when a class sets use_k_factors.
    k_factors_file: Optional[pathlib.Path] = None

TripGenerationClassConfig

Bases: ConfigItem

Trip Generation parameters.

Source code in tm2py\config.py
540
541
542
543
544
545
546
547
548
@dataclass(frozen=True)
class TripGenerationClassConfig(ConfigItem):
    """Trip Generation parameters."""

    name: str
    purpose: Optional[str] = Field(default=None)
    # Formulas for zone-level productions and attractions (see
    # TripGenerationFormulaConfig).
    production_formula: Optional[TripGenerationFormulaConfig] = Field(default=None)
    attraction_formula: Optional[TripGenerationFormulaConfig] = Field(default=None)
    # Which end totals are held fixed when balancing — defaults to productions.
    balance_to: Optional[str] = Field(default="production")

TripGenerationConfig

Bases: ConfigItem

Trip Generation parameters.

Source code in tm2py\config.py
551
552
553
554
555
@dataclass(frozen=True)
class TripGenerationConfig(ConfigItem):
    """Trip Generation parameters."""

    # One entry per demand class to generate.
    classes: List[TripGenerationClassConfig]

TripGenerationFormulaConfig

Bases: ConfigItem

TripProductionConfig.

Trip productions or attractions for a zone are the constant plus the sum of the rates * values in land use file for that zone.

Source code in tm2py\config.py
527
528
529
530
531
532
533
534
535
536
537
@dataclass(frozen=True)
class TripGenerationFormulaConfig(ConfigItem):
    """TripGenerationFormulaConfig.

    Trip productions or attractions for a zone are the constant plus the sum of the rates * values
    in land use file for that zone.
    """

    # Per-land-use-variable rates applied to the zone's land use values.
    land_use_rates: List[CoefficientConfig]
    constant: Optional[float] = Field(default=0.0)
    multiplier: Optional[float] = Field(default=1.0)

TruckClassConfig

Bases: ConfigItem

Truck class parameters.

Source code in tm2py\config.py
573
574
575
576
577
578
@dataclass(frozen=True)
class TruckClassConfig(ConfigItem):
    """Truck class parameters."""

    name: str
    description: Optional[str] = ""

TruckConfig

Bases: ConfigItem

Truck model parameters.

Source code in tm2py\config.py
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
@dataclass(frozen=True)
class TruckConfig(ConfigItem):
    """Truck model parameters."""

    classes: List[TruckClassConfig]
    impedances: List[ImpedanceConfig]
    trip_gen: TripGenerationConfig
    trip_dist: TripDistributionConfig
    time_of_day: TimeOfDayConfig
    toll_choice: TollChoiceConfig
    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str
    highway_demand_file: str

    # TODO: re-enable this cross-section class-name consistency check.
    # It is kept as comments rather than a bare string literal: the string form
    # was being picked up by documentation tooling as an attribute docstring
    # for `highway_demand_file`. Issues to fix before re-enabling:
    #   - values["time_split"] does not exist; the field is named `time_of_day`
    #   - the sub-configs keep their class list under `.classes` — TODO confirm
    #     for TimeOfDayConfig/TollChoiceConfig, which are declared elsewhere
    #   - the assertion messages used {placeholders} but lacked the f prefix
    #
    # @validator("classes")
    # def class_consistency(cls, v, values):
    #     _class_names = [c.name for c in v]
    #     _gen_classes = [c.name for c in values["trip_gen"].classes]
    #     _dist_classes = [c.name for c in values["trip_dist"].classes]
    #     _time_classes = [c.name for c in values["time_of_day"].classes]
    #     _toll_classes = [c.name for c in values["toll_choice"].classes]
    #
    #     assert _class_names == _gen_classes, (
    #         f"truck.classes ({_class_names}) doesn't equal "
    #         f"class names in truck.trip_gen ({_gen_classes})."
    #     )
    #     assert _class_names == _dist_classes, (
    #         f"truck.classes ({_class_names}) doesn't equal "
    #         f"class names in truck.trip_dist ({_dist_classes})."
    #     )
    #     assert _class_names == _time_classes, (
    #         f"truck.classes ({_class_names}) doesn't equal "
    #         f"class names in truck.time_of_day ({_time_classes})."
    #     )
    #     assert _class_names == _toll_classes, (
    #         f"truck.classes ({_class_names}) doesn't equal "
    #         f"class names in truck.toll_choice ({_toll_classes})."
    #     )
    #     return v

highway_demand_file instance-attribute

@validator(“classes”) def class_consistency(cls, v, values): # TODO Can’t get to work righ tnow _class_names = [c.name for c in v] _gen_classes = [c.name for c in values[“trip_gen”]] _dist_classes = [c.name for c in values[“trip_dist”]] _time_classes = [c.name for c in values[“time_split”]] _toll_classes = [c.name for c in values[“toll_choice”]]

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
assert (
    _class_names == _gen_classes
), "truck.classes ({_class_names}) doesn't equal            class names in truck.trip_gen ({_gen_classes})."
assert (
    _class_names == _dist_classes
), "truck.classes ({_class_names}) doesn't  equal            class names in truck.trip_dist ({_dist_classes})."
assert (
    _class_names == _time_classes
), "truck.classes ({_class_names}) doesn't  equal            class names in truck.time_split ({_time_classes})."
assert (
    _class_names == _toll_classes
), "truck.classes ({_class_names}) doesn't equal            class names in truck.toll_choice ({_toll_classes})."

return v

WarmStartConfig

Bases: ConfigItem

Warm start parameters.

Note that the components will be executed in the order listed.

Properties
Source code in tm2py\config.py
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
@dataclass(frozen=True)
class WarmStartConfig(ConfigItem):
    """Warm start parameters.

    Note that the components will be executed in the order listed.

    Properties:
        warmstart: Boolean indicating whether warmstart demand matrices are used.
            If set to True, the global iteration 0 will either assign warmstart demand for highway and transit, or skip the assignment and just use warmstart skims.
            If set to False, the global iteration 0 will assign zero demand for highway and transit.
        use_warmstart_skim: Boolean indicating whether to use warmstart skims. If set to True, then skips warmstart assignment in iteration 0.
        use_warmstart_demand: Boolean indicating whether to use warmstart demand. If set to True, then runs warmstart assignment in iteration 0.
        warmstart_check: if on, check that demand matrix files exist.
        household_highway_demand_file: file name template of warmstart household highway demand matrices.
        household_transit_demand_file: file name template of warmstart household transit demand matrices.
        air_passenger_highway_demand_file: file name template of warmstart airport highway demand matrices.
        internal_external_highway_demand_file: file name template of warmstart internal-external highway demand matrices.
        truck_highway_demand_file: file name template of warmstart truck highway demand matrices.
    """

    warmstart: bool = Field(default=True)
    use_warmstart_skim: bool = Field(default=True)
    use_warmstart_demand: bool = Field(default=False)
    warmstart_check: Optional[bool] = Field(default=False)
    household_highway_demand_file: Optional[str] = Field(default="")
    household_transit_demand_file: Optional[str] = Field(default="")
    air_passenger_highway_demand_file: Optional[str] = Field(default="")
    internal_external_highway_demand_file: Optional[str] = Field(default="")
    truck_highway_demand_file: Optional[str] = Field(default="")

    @validator("use_warmstart_demand", allow_reuse=True)
    def check_warmstart_method(cls, value, values):
        """When warmstart is on, exactly one of skim or demand should be used.

        Attached to use_warmstart_demand — the last of the three related
        fields — because a validator only sees previously validated fields in
        `values`. The original hook on "warmstart" itself always saw an empty
        `values` dict (so the check never ran) and used attribute access on
        the dict, which would have raised AttributeError if reached.
        """
        if values.get("warmstart"):
            use_skim = values.get("use_warmstart_skim")
            assert use_skim != value, (
                f"warmstart is on, only one of use_warmstart_skim ({use_skim}) "
                f"and use_warmstart_demand ({value}) can be true"
            )
        return value

check_warmstart_method(value, values)

When warmstart is on, exactly one of warmstart skims or warmstart demand should be used.

Source code in tm2py\config.py
112
113
114
115
116
117
118
119
@validator("warmstart", allow_reuse=True)
def check_warmstart_method(cls, value, values):
    """When warmstart, either skim or demand should be true."""
    if values.get("warmstart"):
        assert (
            values.use_warmstart_skim != values.use_warmstart_demand
        ), f"'warmstart is on, only one of' {values.use_warmstart_skim} and {values.use_warmstart_demand} can be true"
    return value

Configurations for each component are listed with those components

Components

Base Component

Root component ABC.

Component

Bases: ABC

Template for Component class with several built-in methods.

A component is a piece of the model that can be run independently (of other components) given the required input data and configuration. It communicates information to other components via disk I/O (including the emmebank).

Note: if the component needs data that is not written to disk, it would be considered a subcomponent.

Abstract Methods – Each component class must have the following methods: __init___: constructor, which associates the RunController with the instantiated object run: run the component without any arguments validate_inputs: validate the inputs to the component report_progress: report progress to the user verify: verify the component’s output write_top_sheet: write outputs to topsheet test_component: test the component

Template Class methods - component classes inherit
Template Class Properties - component classes inherit

:: class MyComponent(Component):

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
def __init__(self, controller):
    super().__init__(controller)
    self._parameter = None

def run(self):
    self._step1()
    self._step2()

def _step1(self):
    pass

def _step2(self):
    pass
Source code in tm2py\components\component.py
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
class Component(ABC):
    """Template for Component class with several built-in methods.

    A component is a piece of the model that can be run independently (of other components) given
    the required input data and configuration.  It communicates information to other components via
    disk I/O (including the emmebank).

    Note: if the component needs data that is not written to disk, it would be considered a
    subcomponent.

    Abstract Methods – Each component class must have the following methods:
        __init__: constructor, which associates the RunController with the instantiated object
        run: run the component without any arguments
        validate_inputs: validate the inputs to the component
        report_progress: report progress to the user
        verify: verify the component's output
        write_top_sheet: write outputs to topsheet
        test_component: test the component

    Template Class methods - component classes inherit:
        get_abs_path: convenience method to get absolute path of the run directory


    Template Class Properties - component classes inherit:
        controller: RunController object
        config: Config object
        time_period_names: convenience property
        top_sheet: topsheet object
        logger: logger object
        trace: trace object

    Example:
    ::
        class MyComponent(Component):

        def __init__(self, controller):
            super().__init__(controller)
            self._parameter = None

        def run(self):
            self._step1()
            self._step2()

        def _step1(self):
            pass

        def _step2(self):
            pass
    """

    def __init__(self, controller: RunController):
        """Model component template/abstract base class.

        Args:
            controller (RunController): Reference to the run controller object.
        """
        self._controller = controller
        self._trace = None

    @property
    def controller(self):
        """Parent controller."""
        return self._controller

    @property
    def emme_manager(self):
        """Reference to the controller's Emme manager."""
        return self.controller.emme_manager

    def get_abs_path(self, path: Union[Path, str]) -> str:
        """Convenience method to get absolute path from run directory."""
        if os.path.isabs(path):
            # Cast so a str is always returned (the original returned the
            # argument unconverted here, even when given a pathlib.Path).
            return str(path)
        return str(self.controller.get_abs_path(path))

    @property
    def time_period_names(self) -> List[str]:
        """Return input time_period name or names and return list of time_period names.

        Implemented here for easy access for all components.

        Returns: list of uppercased string names of time periods
        """
        return self.controller.time_period_names

    @property
    def time_period_durations(self) -> dict:
        """Return mapping of time periods to durations in hours."""
        return self.controller.time_period_durations

    @property
    def congested_transit_assn_max_iteration(self) -> dict:
        """Return mapping of time periods to max iteration in congested transit assignment."""
        return self.controller.congested_transit_assn_max_iteration

    @property
    def top_sheet(self):
        """Reference to top sheet."""
        return self.controller.top_sheet

    @property
    def logger(self):
        """Reference to logger."""
        return self.controller.logger

    @property
    def trace(self):
        """Reference to trace."""
        return self._trace

    @abstractmethod
    def validate_inputs(self):
        """Validate inputs are correct at model initiation, raise on error."""

    @abstractmethod
    def run(self):
        """Run model component."""

    # @abstractmethod
    def report_progress(self):
        """Write progress to log file."""

    # @abstractmethod
    def verify(self):
        """Verify component outputs / results."""

    # @abstractmethod
    def write_top_sheet(self):
        """Write key outputs to the model top sheet."""

congested_transit_assn_max_iteration property

Return mapping of time periods to max iteration in congested transit assignment.

controller property

Parent controller.

logger property

Reference to logger.

time_period_durations property

Return mapping of time periods to durations in hours.

time_period_names property

Return input time_period name or names and return list of time_period names.

Implemented here for easy access for all components.

Returns: list of uppercased string names of time periods

top_sheet property

Reference to top sheet.

trace property

Reference to trace.

__init__(controller)

Model component template/abstract base class.

Parameters:

Name Type Description Default
controller RunController

Reference to the run controller object.

required
Source code in tm2py\components\component.py
79
80
81
82
83
84
85
86
def __init__(self, controller: RunController):
    """Model component template/abstract base class.

    Args:
        controller (RunController): Reference to the run controller object.
    """
    self._controller = controller
    self._trace = None

get_abs_path(path)

Convenience method to get absolute path from run directory.

Source code in tm2py\components\component.py
 97
 98
 99
100
101
102
def get_abs_path(self, path: Union[Path, str]) -> str:
    """Convenince method to get absolute path from run directory."""
    if not os.path.isabs(path):
        return self.controller.get_abs_path(path).__str__()
    else:
        return path

report_progress()

Write progress to log file.

Source code in tm2py\components\component.py
148
149
def report_progress(self):
    """Write progress to log file."""

run() abstractmethod

Run model component.

Source code in tm2py\components\component.py
143
144
145
@abstractmethod
def run(self):
    """Run model component."""

validate_inputs() abstractmethod

Validate inputs are correct at model initiation, raise on error.

Source code in tm2py\components\component.py
139
140
141
@abstractmethod
def validate_inputs(self):
    """Validate inputs are correct at model initiation, raise on error."""

verify()

Verify component outputs / results.

Source code in tm2py\components\component.py
152
153
def verify(self):
    """Verify component outputs / results."""

write_top_sheet()

Write key outputs to the model top sheet.

Source code in tm2py\components\component.py
156
157
def write_top_sheet(self):
    """Write key outputs to the model top sheet."""

FileFormatError

Bases: Exception

Exception raised when a file is not in the expected format.

Source code in tm2py\components\component.py
16
17
18
19
20
21
22
23
24
25
26
class FileFormatError(Exception):
    """Exception raised when a file is not in the expected format."""

    def __init__(self, f, *args):
        """Exception for invalid file formats.

        Args:
            f: the file (name or path) that is not in the expected format.
            *args: optional extra context, forwarded to Exception.
        """
        # Unpack args: the original passed the tuple as a single positional
        # argument, so Exception.args became ((..,),) instead of (..,).
        super().__init__(*args)
        self.f = f

    def __str__(self):
        """String representation for FileFormatError."""
        return f"The {self.f} is not a valid format."

__init__(f, *args)

Exception for invalid file formats.

Source code in tm2py\components\component.py
19
20
21
22
def __init__(self, f, *args):
    """Exception for invalid file formats.

    Args:
        f: the file (path or name) that failed format validation.
        *args: additional arguments forwarded to ``Exception``.
    """
    # Unpack args so Exception stores them individually; the original
    # passed the tuple itself, nesting it inside ``self.args``.
    super().__init__(*args)
    self.f = f

__str__()

String representation for FileFormatError.

Source code in tm2py\components\component.py
24
25
26
def __str__(self):
    """Return the human-readable error message naming the invalid file."""
    return f"The {self.f} is not a valid format."

Subcomponent

Bases: Component

Template for sub-component class.

A sub-component is a more loosely defined component that allows for input into the run() method. It is used to break up larger processes into smaller chunks which can be: (1) re-used across components (i.e., toll choice) (2) updated/subbed in to a parent component(s) run method based on the expected API (3) easier to test, understand and debug. (4) more consistent with the algorithms we understand from transportation planning 101

Source code in tm2py\components\component.py
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
class Subcomponent(Component):
    """Template for sub-component class.

    A sub-component is a more loosely defined component that allows for input into the run()
    method.  It is used to break up larger processes into smaller chunks which can be:
    (1) re-used across components (i.e., toll choice)
    (2) updated/subbed in to a parent component(s) run method based on the expected API
    (3) easier to test, understand and debug.
    (4) more consistent with the algorithms we understand from transportation planning 101
    """

    def __init__(self, controller: RunController, component: Component):
        """Constructor for model sub-component abstract base class.

        Only calls the super class constructor.

        Args:
            controller (RunController): Reference to the run controller object.
            component (Component): Reference to the parent component object.
        """
        super().__init__(controller)
        # Parent component this sub-component plugs into.
        self.component = component

    @abstractmethod
    def run(self, *args, **kwargs):
        """Run sub-component, allowing for multiple inputs.

        Allowing for inputs to the run() method is what differentiates a sub-component from
        a component.
        """

__init__(controller, component)

Constructor for model sub-component abstract base class.

Only calls the super class constructor.

Parameters:

Name Type Description Default
controller RunController

Reference to the run controller object.

required
component Component

Reference to the parent component object.

required
Source code in tm2py\components\component.py
171
172
173
174
175
176
177
178
179
180
181
def __init__(self, controller: RunController, component: Component):
    """Constructor for model sub-component abstract base class.

    Only calls the super class constructor and stores the parent component.

    Args:
        controller (RunController): Reference to the run controller object.
        component (Component): Reference to the parent component object.
    """
    super().__init__(controller)
    # Parent component this sub-component plugs into.
    self.component = component

run(*args, **kwargs) abstractmethod

Run sub-component, allowing for multiple inputs.

Allowing for inputs to the run() method is what differentiates a sub-component from a component.

Source code in tm2py\components\component.py
183
184
185
186
187
188
189
@abstractmethod
def run(self, *args, **kwargs):
    """Run sub-component, allowing for multiple inputs.

    Allowing for inputs to the run() method is what differentiates a sub-component from
    a component.
    """

Demand Components

Demand loading from OMX to Emme database.

EmmeDemand

Abstract base class to import and average demand.

Source code in tm2py\components\demand\prepare_demand.py
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
class EmmeDemand:
    """Abstract base class to import and average demand."""

    # Annotations are quoted (PEP 563 style) so defining this class does not
    # require RunController / NumpyArray to be resolvable at import time.
    def __init__(self, controller: "RunController"):
        """Constructor for the EmmeDemand base class.

        Args:
            controller (RunController): Run controller for the current run.
        """
        self.controller = controller
        self._emmebank = None  # set by subclasses before _save_demand is used
        self._scenario = None  # active Emme scenario, set by subclasses
        self._source_ref_key = None

    @property
    def logger(self):
        """Reference to logger."""
        return self.controller.logger

    def _read(
        self, path: str, name: str, num_zones, factor: float = None
    ) -> "NumpyArray":
        """Read matrix array from OMX file at path with name, and multiply by factor (if specified).

        Args:
            path: full path to OMX file
            name: name of the OMX matrix / key
            num_zones: number of zones to conform the result to (zero-padded)
            factor: optional factor to apply to matrix
        """
        with OMXManager(path, "r") as omx_file:
            demand = omx_file.read(name)
            # NOTE(review): explicit close inside the with-block is redundant
            # if OMXManager closes on __exit__ -- confirm before removing.
            omx_file.close()
        if factor is not None:
            demand = factor * demand
        demand = self._redim_demand(demand, num_zones)
        # self.logger.log(f"{name} sum: {demand.sum()}", level=3)
        return demand

    @staticmethod
    def _redim_demand(demand, num_zones):
        """Zero-pad demand out to a square num_zones x num_zones array.

        Args:
            demand: 2-D demand array.
            num_zones: target size for both dimensions.

        Raises:
            ValueError: if demand is larger than num_zones in either dimension.
        """
        _shape = demand.shape
        # Per-axis check: the original compared shape tuples lexicographically,
        # and constructed the ValueError without raising it, so an over-sized
        # matrix was silently returned (or np.pad failed with a confusing
        # negative-width error for mixed over/under-sized shapes).
        if _shape[0] > num_zones or _shape[1] > num_zones:
            raise ValueError(
                f"Provided demand matrix is larger ({_shape}) than the \
                specified number of zones: {num_zones}"
            )
        if _shape != (num_zones, num_zones):
            demand = np.pad(
                demand, ((0, num_zones - _shape[0]), (0, num_zones - _shape[1]))
            )
        return demand

    def _save_demand(
        self,
        name: str,
        demand: "NumpyArray",
        description: str = None,
        apply_msa: bool = False,
    ):
        """Save demand array to Emme matrix with name, optional description.

        Matrix will be created if it does not exist and the model is on iteration 0.

        Args:
            name: name of the matrix in the Emmebank
            demand: NumpyArray, demand array to save
            description: str, optional description to use in the Emmebank
            apply_msa: bool, default False: use MSA on matrix with current array
                values if model is on iteration >= 1
        """
        matrix = self._emmebank.emmebank.matrix(f'mf"{name}"')
        msa_iteration = self.controller.iteration
        if not apply_msa or msa_iteration <= 1:
            # First (or non-MSA) pass: create the matrix if needed and
            # overwrite with the new demand.
            if not matrix:
                ident = self._emmebank.emmebank.available_matrix_identifier("FULL")
                matrix = self._emmebank.emmebank.create_matrix(ident)
                matrix.name = name
                if description is not None:
                    matrix.description = description
        else:
            if not matrix:
                raise Exception(f"error averaging demand: matrix {name} does not exist")
            # Method of successive averages: step size 1/iteration.
            prev_demand = matrix.get_numpy_data(self._scenario.id)
            demand = prev_demand + (1.0 / msa_iteration) * (demand - prev_demand)
        self.logger.log(f"{name} sum: {demand.sum()}", level="DEBUG")
        matrix.set_numpy_data(demand, self._scenario.id)

logger property

Reference to logger.

__init__(controller)

Constructor for PrepareDemand class.

Parameters:

Name Type Description Default
controller RunController

Run controller for the current run.

required
Source code in tm2py\components\demand\prepare_demand.py
30
31
32
33
34
35
36
37
38
39
def __init__(self, controller: RunController):
    """Constructor for the EmmeDemand base class.

    Args:
        controller (RunController): Run controller for the current run.
    """
    self.controller = controller
    self._emmebank = None  # set by subclasses before demand is saved
    self._scenario = None  # active Emme scenario, set by subclasses
    self._source_ref_key = None

PrepareHighwayDemand

Bases: EmmeDemand

Import and average highway demand.

Demand is imported from OMX files based on reference file paths and OMX matrix names in highway assignment config (highway.classes). The demand is averaged using MSA with the current demand matrices (in the Emmebank) if the controller.iteration > 1.

Parameters:

Name Type Description Default
controller RunController

parent RunController object

required
Source code in tm2py\components\demand\prepare_demand.py
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
class PrepareHighwayDemand(EmmeDemand):
    """Import and average highway demand.

    Demand is imported from OMX files based on reference file paths and OMX
    matrix names in highway assignment config (highway.classes).
    The demand is averaged using MSA with the current demand matrices
    (in the Emmebank) if the controller.iteration > 1.

    Args:
        controller: parent RunController object
    """

    def __init__(self, controller: RunController):
        """Constructor for PrepareHighwayDemand.

        Args:
            controller (RunController): Reference to run controller object.
        """
        super().__init__(controller)
        self.controller = controller
        self.config = self.controller.config.highway
        self._highway_emmebank = None  # lazily resolved, see highway_emmebank

    def validate_inputs(self):
        """Validate inputs are correct at model initiation, raise on error."""
        # TODO
        pass

    @property
    def highway_emmebank(self):
        """Highway Emmebank, lazily fetched from the Emme manager on first access."""
        # Fixed: identity comparison (is None) instead of == None.
        if self._highway_emmebank is None:
            self._highway_emmebank = self.controller.emme_manager.highway_emmebank
            self._emmebank = self._highway_emmebank
        return self._highway_emmebank

    # @LogStartEnd("prepare highway demand")
    def run(self):
        """Open combined demand OMX files from demand models and prepare for assignment."""
        # NOTE(review): property access for its side effect -- presumably
        # ensures the zero matrix exists in the Emmebank; confirm.
        self.highway_emmebank.zero_matrix
        for time in self.controller.time_period_names:
            for klass in self.config.classes:
                self._prepare_demand(klass.name, klass.description, klass.demand, time)

    def _prepare_demand(
        self,
        name: str,
        description: str,
        demand_config: List[Dict[str, Union[str, float]]],
        time_period: str,
    ):
        """Load demand from OMX files and save to Emme matrix for highway assignment.

        Average with previous demand (MSA) if the current iteration > 1

        Args:
            name (str): the name of the highway assignment class
            description (str): the description for the highway assignment class
            demand_config (dict): the list of file cross-reference(s) for the demand to be loaded
                {"source": <name of demand model component>,
                 "name": <OMX key name>,
                 "factor": <factor to apply to demand in this file>}
            time_period (str): the time period ID (name)
        """
        self._scenario = self.highway_emmebank.scenario(time_period)
        num_zones = len(self._scenario.zone_numbers)
        # Sum demand across all cross-referenced source files.
        demand = self._read_demand(demand_config[0], time_period, num_zones)
        for file_config in demand_config[1:]:
            demand = demand + self._read_demand(file_config, time_period, num_zones)
        demand_name = f"{time_period}_{name}"
        description = f"{time_period} {description} demand"
        self._save_demand(
            demand_name, demand, description, apply_msa=self.config.apply_msa_demand
        )

    def _read_demand(self, file_config, time_period, num_zones):
        # Load demand from cross-referenced source file,
        # the named demand model component under the key highway_demand_file
        source = file_config["source"]
        name = file_config["name"].format(period=time_period.upper())
        path = self.controller.get_abs_path(
            self.controller.config[source].highway_demand_file
        ).__str__()
        return self._read(
            path.format(period=time_period, iter=self.controller.iteration),
            name,
            num_zones,
        )

    @LogStartEnd("Prepare household demand matrices.")
    def prepare_household_demand(self):
        """Prepares highway and transit household demand matrices from trip lists produced by CT-RAMP."""
        iteration = self.controller.iteration

        # Create folders if they don't exist
        pathlib.Path(
            self.controller.get_abs_path(
                self.controller.config.household.highway_demand_file
            )
        ).parents[0].mkdir(parents=True, exist_ok=True)
        pathlib.Path(
            self.controller.get_abs_path(
                self.controller.config.household.transit_demand_file
            )
        ).parents[0].mkdir(parents=True, exist_ok=True)
        #    pathlib.Path(self.controller.get_abs_path(self.controller.config.household.active_demand_file)).parents[0].mkdir(parents=True, exist_ok=True)

        indiv_trip_file = (
            self.controller.config.household.ctramp_indiv_trip_file.format(
                iteration=iteration
            )
        )
        joint_trip_file = (
            self.controller.config.household.ctramp_joint_trip_file.format(
                iteration=iteration
            )
        )
        it_full, jt_full = pd.read_csv(indiv_trip_file), pd.read_csv(joint_trip_file)

        # Add time period, expanded count
        time_period_start = dict(
            zip(
                [c.name.upper() for c in self.controller.config.time_periods],
                [c.start_period for c in self.controller.config.time_periods],
            )
        )
        # the last time period needs to be filled in because the first period may or may not start at midnight
        time_periods_sorted = sorted(
            time_period_start, key=lambda x: time_period_start[x]
        )  # in upper case
        periods_except_last = time_periods_sorted[:-1]
        breakpoints = [time_period_start[tp] for tp in time_periods_sorted]
        # Bucket trips into periods; trips past the last breakpoint fall into
        # the final (wrap-around) period via the fillna below.
        it_full["time_period"] = (
            pd.cut(
                it_full.stop_period,
                breakpoints,
                right=False,
                labels=periods_except_last,
            )
            .cat.add_categories(time_periods_sorted[-1])
            .fillna(time_periods_sorted[-1])
            .astype(str)
        )
        jt_full["time_period"] = (
            pd.cut(
                jt_full.stop_period,
                breakpoints,
                right=False,
                labels=periods_except_last,
            )
            .cat.add_categories(time_periods_sorted[-1])
            .fillna(time_periods_sorted[-1])
            .astype(str)
        )
        # Expanded (equivalent) trip count: inverse of sample rate, with
        # occupancy factors for shared-ride modes (0.5 for modes 3-5,
        # 0.35 for modes 6-8) -- hard-coded CT-RAMP mode codes.
        it_full["eq_cnt"] = 1 / it_full.sampleRate
        it_full["eq_cnt"] = np.where(
            it_full["trip_mode"].isin([3, 4, 5]),
            0.5 * it_full["eq_cnt"],
            np.where(
                it_full["trip_mode"].isin([6, 7, 8]),
                0.35 * it_full["eq_cnt"],
                it_full["eq_cnt"],
            ),
        )
        jt_full["eq_cnt"] = jt_full.num_participants / jt_full.sampleRate
        zp_cav = self.controller.config.household.OwnedAV_ZPV_factor
        zp_tnc = self.controller.config.household.TNC_ZPV_factor

        # Map MAZ origins/destinations to TAZs via the landuse crosswalk.
        maz_taz_df = pd.read_csv(
            self.controller.get_abs_path(self.controller.config.scenario.landuse_file),
            usecols=["MAZ", "TAZ"],
        )
        it_full = it_full.merge(
            maz_taz_df, left_on="orig_mgra", right_on="MAZ", how="left"
        ).rename(columns={"TAZ": "orig_taz"})
        it_full = it_full.merge(
            maz_taz_df, left_on="dest_mgra", right_on="MAZ", how="left"
        ).rename(columns={"TAZ": "dest_taz"})
        jt_full = jt_full.merge(
            maz_taz_df, left_on="orig_mgra", right_on="MAZ", how="left"
        ).rename(columns={"TAZ": "orig_taz"})
        jt_full = jt_full.merge(
            maz_taz_df, left_on="dest_mgra", right_on="MAZ", how="left"
        ).rename(columns={"TAZ": "dest_taz"})
        # Collapse mode 14 into mode 13 (treated identically downstream).
        it_full["trip_mode"] = np.where(
            it_full["trip_mode"] == 14, 13, it_full["trip_mode"]
        )
        jt_full["trip_mode"] = np.where(
            jt_full["trip_mode"] == 14, 13, jt_full["trip_mode"]
        )

        num_zones = self.num_internal_zones
        OD_full_index = pd.MultiIndex.from_product(
            [range(1, num_zones + 1), range(1, num_zones + 1)]
        )

        def combine_trip_lists(it, jt, trip_mode):
            # combines individual trip list and joint trip list into a dense
            # zone-by-zone matrix of expanded trip counts
            combined_trips = pd.concat(
                [it[(it["trip_mode"] == trip_mode)], jt[(jt["trip_mode"] == trip_mode)]]
            )
            combined_sum = combined_trips.groupby(["orig_taz", "dest_taz"])[
                "eq_cnt"
            ].sum()
            return combined_sum.reindex(OD_full_index, fill_value=0).unstack().values

        def create_zero_passenger_trips(
            trips, deadheading_factor, trip_modes=[1, 2, 3]
        ):
            # Zero-passenger (deadhead) return trips: reverse O-D and scale.
            # .copy() avoids pandas chained-assignment on the filtered slice
            # (the original assigned eq_cnt directly into the .loc view).
            zpv_trips = trips.loc[
                (trips["avAvailable"] == 1) & (trips["trip_mode"].isin(trip_modes))
            ].copy()
            zpv_trips["eq_cnt"] = zpv_trips["eq_cnt"] * deadheading_factor
            zpv_trips = zpv_trips.rename(
                columns={"dest_taz": "orig_taz", "orig_taz": "dest_taz"}
            )
            return zpv_trips

        # create zero passenger trips for auto modes
        if it_full["avAvailable"].sum() > 0:
            it_zpav_trp = create_zero_passenger_trips(
                it_full, zp_cav, trip_modes=[1, 2, 3]
            )
            it_zptnc_trp = create_zero_passenger_trips(it_full, zp_tnc, trip_modes=[9])
            # Combining zero passenger trips to trip files
            it_full = pd.concat(
                [it_full, it_zpav_trp, it_zptnc_trp], ignore_index=True
            ).reset_index(drop=True)

        if jt_full["avAvailable"].sum() > 0:
            jt_zpav_trp = create_zero_passenger_trips(
                jt_full, zp_cav, trip_modes=[1, 2, 3]
            )
            jt_zptnc_trp = create_zero_passenger_trips(jt_full, zp_tnc, trip_modes=[9])
            # Combining zero passenger trips to trip files
            jt_full = pd.concat(
                [jt_full, jt_zpav_trp, jt_zptnc_trp], ignore_index=True
            ).reset_index(drop=True)

        # read properties from config

        mode_name_dict = self.controller.config.household.ctramp_mode_names
        income_segment_config = self.controller.config.household.income_segment

        if income_segment_config["enabled"]:
            # This only affects highway trip tables.

            hh_file = self.controller.config.household.ctramp_hh_file.format(
                iteration=iteration
            )
            hh = pd.read_csv(hh_file, usecols=["hh_id", "income"])
            it_full = it_full.merge(hh, on="hh_id", how="left")
            jt_full = jt_full.merge(hh, on="hh_id", how="left")

            suffixes = income_segment_config["segment_suffixes"]

            it_full["income_seg"] = pd.cut(
                it_full["income"],
                right=False,
                bins=income_segment_config["cutoffs"] + [float("inf")],
                labels=suffixes,
            ).astype(str)

            jt_full["income_seg"] = pd.cut(
                jt_full["income"],
                right=False,
                bins=income_segment_config["cutoffs"] + [float("inf")],
                labels=suffixes,
            ).astype(str)
        else:
            it_full["income_seg"] = ""
            jt_full["income_seg"] = ""
            suffixes = [""]

        # groupby objects for combinations of time period - income segmentation, used for highway modes only
        it_grp = it_full.groupby(["time_period", "income_seg"])
        jt_grp = jt_full.groupby(["time_period", "income_seg"])

        for time_period in time_periods_sorted:
            self.logger.debug(
                f"Producing household demand matrices for period {time_period}"
            )

            highway_out_file = OMXManager(
                self.controller.get_abs_path(
                    self.controller.config.household.highway_demand_file
                )
                .__str__()
                .format(period=time_period, iter=self.controller.iteration),
                "w",
            )
            transit_out_file = OMXManager(
                self.controller.get_abs_path(
                    self.controller.config.household.transit_demand_file
                )
                .__str__()
                .format(period=time_period, iter=self.controller.iteration),
                "w",
            )
            # active_out_file = OMXManager(
            #    self.controller.get_abs_path(self.controller.config.household.active_demand_file).__str__().format(period=time_period), 'w')

            # hsr_trips_file = _omx.open_file(
            #    self.controller.get_abs_path(self.controller.config.household.hsr_demand_file).format(year=self.controller.config.scenario.year, period=time_period))

            # interregional_trips_file = _omx.open_file(
            #   self.controller.get_abs_path(self.controller.config.household.interregional_demand_file).format(year=self.controller.config.scenario.year, period=time_period))

            highway_out_file.open()
            transit_out_file.open()
            # active_out_file.open()

            # Transit and active modes: one matrix per time period per mode
            it = it_full[it_full.time_period == time_period]
            jt = jt_full[jt_full.time_period == time_period]

            for trip_mode in mode_name_dict:
                #                if trip_mode in [9,10]:
                #                    matrix_name =  mode_name_dict[trip_mode]
                #                    self.logger.debug(f"Writing out mode {mode_name_dict[trip_mode]}")
                #                    active_out_file.write_array(numpy_array=combine_trip_lists(it,jt, trip_mode), name = matrix_name)

                if trip_mode == 11:
                    matrix_name = "WLK_TRN_WLK"
                    self.logger.debug(f"Writing out mode WLK_TRN_WLK")
                    # other_trn_trips = np.array(hsr_trips_file[matrix_name])+np.array(interregional_trips_file[matrix_name])
                    transit_out_file.write_array(
                        numpy_array=(combine_trip_lists(it, jt, trip_mode)),
                        name=matrix_name,
                    )

                elif trip_mode in [12, 13]:
                    # Drive-access transit: split by direction (outbound has
                    # the drive leg at the origin, inbound at the destination).
                    it_outbound, it_inbound = it[it.inbound == 0], it[it.inbound == 1]
                    jt_outbound, jt_inbound = jt[jt.inbound == 0], jt[jt.inbound == 1]

                    matrix_name = f"{mode_name_dict[trip_mode].upper()}_TRN_WLK"
                    # other_trn_trips = np.array(hsr_trips_file[matrix_name])+np.array(interregional_trips_file[matrix_name])
                    self.logger.debug(
                        f"Writing out mode {mode_name_dict[trip_mode].upper() + '_TRN_WLK'}"
                    )
                    transit_out_file.write_array(
                        numpy_array=(
                            combine_trip_lists(it_outbound, jt_outbound, trip_mode)
                        ),
                        name=matrix_name,
                    )

                    matrix_name = f"WLK_TRN_{mode_name_dict[trip_mode].upper()}"
                    # other_trn_trips = np.array(hsr_trips_file[matrix_name])+np.array(interregional_trips_file[matrix_name])
                    self.logger.debug(
                        f"Writing out mode {'WLK_TRN_' + mode_name_dict[trip_mode].upper()}"
                    )
                    transit_out_file.write_array(
                        numpy_array=(
                            combine_trip_lists(it_inbound, jt_inbound, trip_mode)
                        ),
                        name=matrix_name,
                    )

            # Highway modes: one matrix per suffix (income class) per time period per mode
            for suffix in suffixes:
                highway_cache = {}

                if (time_period, suffix) in it_grp.groups.keys():
                    it = it_grp.get_group((time_period, suffix))
                else:
                    it = pd.DataFrame(None, columns=it_full.columns)

                if (time_period, suffix) in jt_grp.groups.keys():
                    jt = jt_grp.get_group((time_period, suffix))
                else:
                    jt = pd.DataFrame(None, columns=jt_full.columns)

                for trip_mode in sorted(mode_name_dict):
                    # Python preserves keys in the order they are inserted but
                    # mode_name_dict originates from TOML, which does not guarantee
                    # that the ordering of keys is preserved.  See
                    # https://github.com/toml-lang/toml/issues/162

                    if trip_mode in [
                        1,
                        2,
                        3,
                        4,
                        5,
                        6,
                        7,
                        8,
                        9,
                        10,
                        15,
                        16,
                        17,
                    ]:  # currently hard-coded based on Travel Mode trip mode codes
                        highway_cache[mode_name_dict[trip_mode]] = combine_trip_lists(
                            it, jt, trip_mode
                        )
                        out_mode = f"{mode_name_dict[trip_mode].upper()}"
                        matrix_name = (
                            f"{out_mode}_{suffix}_{time_period.upper()}"
                            if suffix
                            else f"{out_mode}_{time_period.upper()}"
                        )
                        highway_out_file.write_array(
                            numpy_array=highway_cache[mode_name_dict[trip_mode]],
                            name=matrix_name,
                        )

                    elif trip_mode in [15, 16]:
                        # NOTE(review): unreachable -- modes 15 and 16 are
                        # matched by the branch above, so this ridehail split
                        # never executes; confirm which branch should own
                        # modes 15/16 before relying on the split output.
                        # identify the correct mode split factors for da, sr2, sr3
                        self.logger.debug(
                            f"Splitting ridehail trips into shared ride trips"
                        )
                        ridehail_split_factors = defaultdict(float)
                        splits = self.controller.config.household.rideshare_mode_split
                        for key in splits:
                            out_mode_split = self.controller.config.household.__dict__[
                                f"{key}_split"
                            ]
                            for out_mode in out_mode_split:
                                ridehail_split_factors[out_mode] += (
                                    out_mode_split[out_mode] * splits[key]
                                )

                        ridehail_trips = combine_trip_lists(it, jt, trip_mode)
                        for out_mode in ridehail_split_factors:
                            matrix_name = f"{out_mode}_{suffix}" if suffix else out_mode
                            self.logger.debug(f"Writing out mode {out_mode}")
                            # NOTE(review): assumes out_mode was already cached
                            # by the branch above; KeyError otherwise -- confirm.
                            highway_cache[out_mode] += (
                                (ridehail_trips * ridehail_split_factors[out_mode])
                                .astype(float)
                                .round(2)
                            )
                            highway_out_file.write_array(
                                numpy_array=highway_cache[out_mode], name=matrix_name
                            )

            highway_out_file.close()
            transit_out_file.close()
            # active_out_file.close()

    @property
    def num_internal_zones(self):
        """Number of unique internal zones in the landuse file."""
        _index_col = self.controller.config.scenario.landuse_index_column
        df = pd.read_csv(
            self.controller.get_abs_path(self.controller.config.scenario.landuse_file),
            usecols=[_index_col],
        )
        # Fixed: count uniques in the configured index column; the original
        # hard-coded df["TAZ"], which raised KeyError whenever the configured
        # landuse_index_column was not literally "TAZ".
        return len(df[_index_col].unique())

    @property
    def num_total_zones(self):
        """Number of zones in the highway Emmebank scenario (internal + external)."""
        self._emmebank_path = self.controller.get_abs_path(
            self.controller.config.emme.highway_database_path
        )
        self._emmebank = self.controller.emme_manager.emmebank(self._emmebank_path)
        time_period = self.controller.config.time_periods[0].name
        # NOTE(review): get_emme_scenario is not defined on this class or its
        # visible base class -- presumably provided elsewhere; confirm.
        scenario = self.get_emme_scenario(
            self._emmebank.path, time_period
        )  # any scenario id works
        return len(scenario.zone_numbers)

__init__(controller)

Constructor for PrepareHighwayDemand.

Parameters:

Name Type Description Default
controller RunController

Reference to run controller object.

required
Source code in tm2py\components\demand\prepare_demand.py
149
150
151
152
153
154
155
156
157
158
def __init__(self, controller: RunController):
    """Constructor for PrepareHighwayDemand.

    Args:
        controller (RunController): Reference to run controller object.
    """
    super().__init__(controller)
    self.controller = controller
    # Highway assignment configuration (highway.classes drives demand loading).
    self.config = self.controller.config.highway
    self._highway_emmebank = None  # lazily resolved via highway_emmebank property

prepare_household_demand()

Prepares highway and transit household demand matrices from trip lists produced by CT-RAMP.

Source code in tm2py\components\demand\prepare_demand.py
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
@LogStartEnd("Prepare household demand matrices.")
def prepare_household_demand(self):
    """Prepares highway and transit household demand matrices from trip lists produced by CT-RAMP.

    Reads the individual and joint trip lists for the current global
    iteration, tags each trip with a time period and an expanded trip
    count, optionally appends zero-passenger (deadheading) AV/TNC trips,
    then writes one highway and one transit OMX demand file per time
    period (highway matrices additionally segmented by income when
    income segmentation is enabled).
    """
    iteration = self.controller.iteration

    # Create folders if they don't exist
    pathlib.Path(
        self.controller.get_abs_path(
            self.controller.config.household.highway_demand_file
        )
    ).parents[0].mkdir(parents=True, exist_ok=True)
    pathlib.Path(
        self.controller.get_abs_path(
            self.controller.config.household.transit_demand_file
        )
    ).parents[0].mkdir(parents=True, exist_ok=True)
    #    pathlib.Path(self.controller.get_abs_path(self.controller.config.household.active_demand_file)).parents[0].mkdir(parents=True, exist_ok=True)

    # CT-RAMP trip-list file names are templated on the global iteration.
    indiv_trip_file = (
        self.controller.config.household.ctramp_indiv_trip_file.format(
            iteration=iteration
        )
    )
    joint_trip_file = (
        self.controller.config.household.ctramp_joint_trip_file.format(
            iteration=iteration
        )
    )
    it_full, jt_full = pd.read_csv(indiv_trip_file), pd.read_csv(joint_trip_file)

    # Add time period, expanded count
    # Map period name (upper case) -> starting period index from config.
    time_period_start = dict(
        zip(
            [c.name.upper() for c in self.controller.config.time_periods],
            [c.start_period for c in self.controller.config.time_periods],
        )
    )
    # the last time period needs to be filled in because the first period may or may not start at midnight
    time_periods_sorted = sorted(
        time_period_start, key=lambda x: time_period_start[x]
    )  # in upper case
    first_period = time_periods_sorted[0]  # NOTE(review): unused local — candidate for removal
    periods_except_last = time_periods_sorted[:-1]
    breakpoints = [time_period_start[tp] for tp in time_periods_sorted]
    # Bin stop_period into time periods; trips at/after the last breakpoint
    # fall outside the cut bins (NaN) and are assigned to the final period.
    it_full["time_period"] = (
        pd.cut(
            it_full.stop_period,
            breakpoints,
            right=False,
            labels=periods_except_last,
        )
        .cat.add_categories(time_periods_sorted[-1])
        .fillna(time_periods_sorted[-1])
        .astype(str)
    )
    jt_full["time_period"] = (
        pd.cut(
            jt_full.stop_period,
            breakpoints,
            right=False,
            labels=periods_except_last,
        )
        .cat.add_categories(time_periods_sorted[-1])
        .fillna(time_periods_sorted[-1])
        .astype(str)
    )
    # Expanded (sample-weighted) trip count; shared-ride trips are scaled
    # to vehicle-equivalents (0.5 for modes 3-5, 0.35 for modes 6-8 —
    # presumably sr2/sr3 occupancy factors; confirm against mode codes).
    it_full["eq_cnt"] = 1 / it_full.sampleRate
    it_full["eq_cnt"] = np.where(
        it_full["trip_mode"].isin([3, 4, 5]),
        0.5 * it_full["eq_cnt"],
        np.where(
            it_full["trip_mode"].isin([6, 7, 8]),
            0.35 * it_full["eq_cnt"],
            it_full["eq_cnt"],
        ),
    )
    # Joint trips are weighted by the number of participants.
    jt_full["eq_cnt"] = jt_full.num_participants / jt_full.sampleRate
    # Deadheading (zero-passenger vehicle) factors from config.
    zp_cav = self.controller.config.household.OwnedAV_ZPV_factor
    zp_tnc = self.controller.config.household.TNC_ZPV_factor

    # Translate trip-end MAZs to TAZs via the land-use correspondence.
    maz_taz_df = pd.read_csv(
        self.controller.get_abs_path(self.controller.config.scenario.landuse_file),
        usecols=["MAZ", "TAZ"],
    )
    it_full = it_full.merge(
        maz_taz_df, left_on="orig_mgra", right_on="MAZ", how="left"
    ).rename(columns={"TAZ": "orig_taz"})
    it_full = it_full.merge(
        maz_taz_df, left_on="dest_mgra", right_on="MAZ", how="left"
    ).rename(columns={"TAZ": "dest_taz"})
    jt_full = jt_full.merge(
        maz_taz_df, left_on="orig_mgra", right_on="MAZ", how="left"
    ).rename(columns={"TAZ": "orig_taz"})
    jt_full = jt_full.merge(
        maz_taz_df, left_on="dest_mgra", right_on="MAZ", how="left"
    ).rename(columns={"TAZ": "dest_taz"})
    # Collapse trip mode 14 into 13 — presumably both map to the same
    # assignment mode; confirm against ctramp_mode_names.
    it_full["trip_mode"] = np.where(
        it_full["trip_mode"] == 14, 13, it_full["trip_mode"]
    )
    jt_full["trip_mode"] = np.where(
        jt_full["trip_mode"] == 14, 13, jt_full["trip_mode"]
    )

    # Full square OD index so every zone pair appears in the output matrix.
    num_zones = self.num_internal_zones
    OD_full_index = pd.MultiIndex.from_product(
        [range(1, num_zones + 1), range(1, num_zones + 1)]
    )

    def combine_trip_lists(it, jt, trip_mode):
        # combines individual trip list and joint trip list
        # Returns a dense num_zones x num_zones numpy array of expanded
        # trip counts for the given mode (zeros for absent OD pairs).
        combined_trips = pd.concat(
            [it[(it["trip_mode"] == trip_mode)], jt[(jt["trip_mode"] == trip_mode)]]
        )
        combined_sum = combined_trips.groupby(["orig_taz", "dest_taz"])[
            "eq_cnt"
        ].sum()
        return combined_sum.reindex(OD_full_index, fill_value=0).unstack().values

    def create_zero_passenger_trips(
        trips, deadheading_factor, trip_modes=[1, 2, 3]
    ):
        # Build return (deadheading) trips for AV/TNC vehicles: scale the
        # expanded count and swap origin/destination via a column rename.
        # NOTE(review): assigning to a boolean-indexed slice of `trips`
        # triggers pandas SettingWithCopyWarning and may also modify the
        # parent frame — consider .copy() before mutating.
        zpv_trips = trips.loc[
            (trips["avAvailable"] == 1) & (trips["trip_mode"].isin(trip_modes))
        ]
        zpv_trips["eq_cnt"] = zpv_trips["eq_cnt"] * deadheading_factor
        zpv_trips = zpv_trips.rename(
            columns={"dest_taz": "orig_taz", "orig_taz": "dest_taz"}
        )
        return zpv_trips

    # create zero passenger trips for auto modes
    if it_full["avAvailable"].sum() > 0:
        it_zpav_trp = create_zero_passenger_trips(
            it_full, zp_cav, trip_modes=[1, 2, 3]
        )
        it_zptnc_trp = create_zero_passenger_trips(it_full, zp_tnc, trip_modes=[9])
        # Combining zero passenger trips to trip files
        it_full = pd.concat(
            [it_full, it_zpav_trp, it_zptnc_trp], ignore_index=True
        ).reset_index(drop=True)

    if jt_full["avAvailable"].sum() > 0:
        jt_zpav_trp = create_zero_passenger_trips(
            jt_full, zp_cav, trip_modes=[1, 2, 3]
        )
        jt_zptnc_trp = create_zero_passenger_trips(jt_full, zp_tnc, trip_modes=[9])
        # Combining zero passenger trips to trip files
        jt_full = pd.concat(
            [jt_full, jt_zpav_trp, jt_zptnc_trp], ignore_index=True
        ).reset_index(drop=True)

    # read properties from config

    mode_name_dict = self.controller.config.household.ctramp_mode_names
    income_segment_config = self.controller.config.household.income_segment

    if income_segment_config["enabled"]:
        # This only affects highway trip tables.

        # Attach household income and bin it into the configured segments.
        hh_file = self.controller.config.household.ctramp_hh_file.format(
            iteration=iteration
        )
        hh = pd.read_csv(hh_file, usecols=["hh_id", "income"])
        it_full = it_full.merge(hh, on="hh_id", how="left")
        jt_full = jt_full.merge(hh, on="hh_id", how="left")

        suffixes = income_segment_config["segment_suffixes"]

        it_full["income_seg"] = pd.cut(
            it_full["income"],
            right=False,
            bins=income_segment_config["cutoffs"] + [float("inf")],
            labels=suffixes,
        ).astype(str)

        jt_full["income_seg"] = pd.cut(
            jt_full["income"],
            right=False,
            bins=income_segment_config["cutoffs"] + [float("inf")],
            labels=suffixes,
        ).astype(str)
    else:
        it_full["income_seg"] = ""
        jt_full["income_seg"] = ""
        suffixes = [""]

    # groupby objects for combinations of time period - income segmentation, used for highway modes only
    it_grp = it_full.groupby(["time_period", "income_seg"])
    jt_grp = jt_full.groupby(["time_period", "income_seg"])

    for time_period in time_periods_sorted:
        self.logger.debug(
            f"Producing household demand matrices for period {time_period}"
        )

        # Output OMX files for this period (paths templated on period/iter).
        highway_out_file = OMXManager(
            self.controller.get_abs_path(
                self.controller.config.household.highway_demand_file
            )
            .__str__()
            .format(period=time_period, iter=self.controller.iteration),
            "w",
        )
        transit_out_file = OMXManager(
            self.controller.get_abs_path(
                self.controller.config.household.transit_demand_file
            )
            .__str__()
            .format(period=time_period, iter=self.controller.iteration),
            "w",
        )
        # active_out_file = OMXManager(
        #    self.controller.get_abs_path(self.controller.config.household.active_demand_file).__str__().format(period=time_period), 'w')

        # hsr_trips_file = _omx.open_file(
        #    self.controller.get_abs_path(self.controller.config.household.hsr_demand_file).format(year=self.controller.config.scenario.year, period=time_period))

        # interregional_trips_file = _omx.open_file(
        #   self.controller.get_abs_path(self.controller.config.household.interregional_demand_file).format(year=self.controller.config.scenario.year, period=time_period))

        highway_out_file.open()
        transit_out_file.open()
        # active_out_file.open()

        # Transit and active modes: one matrix per time period per mode
        it = it_full[it_full.time_period == time_period]
        jt = jt_full[jt_full.time_period == time_period]

        for trip_mode in mode_name_dict:
            #                if trip_mode in [9,10]:
            #                    matrix_name =  mode_name_dict[trip_mode]
            #                    self.logger.debug(f"Writing out mode {mode_name_dict[trip_mode]}")
            #                    active_out_file.write_array(numpy_array=combine_trip_lists(it,jt, trip_mode), name = matrix_name)

            if trip_mode == 11:
                # Walk-access transit: single direction-agnostic matrix.
                matrix_name = "WLK_TRN_WLK"
                self.logger.debug(f"Writing out mode WLK_TRN_WLK")
                # other_trn_trips = np.array(hsr_trips_file[matrix_name])+np.array(interregional_trips_file[matrix_name])
                transit_out_file.write_array(
                    numpy_array=(combine_trip_lists(it, jt, trip_mode)),
                    name=matrix_name,
                )

            elif trip_mode in [12, 13]:
                # Drive-access transit: outbound trips drive to transit,
                # inbound trips drive from transit, written separately.
                it_outbound, it_inbound = it[it.inbound == 0], it[it.inbound == 1]
                jt_outbound, jt_inbound = jt[jt.inbound == 0], jt[jt.inbound == 1]

                matrix_name = f"{mode_name_dict[trip_mode].upper()}_TRN_WLK"
                # other_trn_trips = np.array(hsr_trips_file[matrix_name])+np.array(interregional_trips_file[matrix_name])
                self.logger.debug(
                    f"Writing out mode {mode_name_dict[trip_mode].upper() + '_TRN_WLK'}"
                )
                transit_out_file.write_array(
                    numpy_array=(
                        combine_trip_lists(it_outbound, jt_outbound, trip_mode)
                    ),
                    name=matrix_name,
                )

                matrix_name = f"WLK_TRN_{mode_name_dict[trip_mode].upper()}"
                # other_trn_trips = np.array(hsr_trips_file[matrix_name])+np.array(interregional_trips_file[matrix_name])
                self.logger.debug(
                    f"Writing out mode {'WLK_TRN_' + mode_name_dict[trip_mode].upper()}"
                )
                transit_out_file.write_array(
                    numpy_array=(
                        combine_trip_lists(it_inbound, jt_inbound, trip_mode)
                    ),
                    name=matrix_name,
                )

        # Highway modes: one matrix per suffix (income class) per time period per mode
        for suffix in suffixes:
            highway_cache = {}

            # EAFP is avoided here: an empty frame stands in when no trips
            # exist for this (period, income segment) combination.
            if (time_period, suffix) in it_grp.groups.keys():
                it = it_grp.get_group((time_period, suffix))
            else:
                it = pd.DataFrame(None, columns=it_full.columns)

            if (time_period, suffix) in jt_grp.groups.keys():
                jt = jt_grp.get_group((time_period, suffix))
            else:
                jt = pd.DataFrame(None, columns=jt_full.columns)

            for trip_mode in sorted(mode_name_dict):
                # Python preserves keys in the order they are inserted but
                # mode_name_dict originates from TOML, which does not guarantee
                # that the ordering of keys is preserved.  See
                # https://github.com/toml-lang/toml/issues/162

                if trip_mode in [
                    1,
                    2,
                    3,
                    4,
                    5,
                    6,
                    7,
                    8,
                    9,
                    10,
                    15,
                    16,
                    17,
                ]:  # currently hard-coded based on Travel Mode trip mode codes
                    highway_cache[mode_name_dict[trip_mode]] = combine_trip_lists(
                        it, jt, trip_mode
                    )
                    out_mode = f"{mode_name_dict[trip_mode].upper()}"
                    matrix_name = (
                        f"{out_mode}_{suffix}_{time_period.upper()}"
                        if suffix
                        else f"{out_mode}_{time_period.upper()}"
                    )
                    highway_out_file.write_array(
                        numpy_array=highway_cache[mode_name_dict[trip_mode]],
                        name=matrix_name,
                    )

                # NOTE(review): modes 15 and 16 are included in the list
                # above, so this elif branch is unreachable as written and
                # the ridehail-to-shared-ride split below never executes.
                # Confirm whether 15/16 should be removed from the list
                # above or this branch deleted.
                elif trip_mode in [15, 16]:
                    # identify the correct mode split factors for da, sr2, sr3
                    self.logger.debug(
                        f"Splitting ridehail trips into shared ride trips"
                    )
                    ridehail_split_factors = defaultdict(float)
                    splits = self.controller.config.household.rideshare_mode_split
                    for key in splits:
                        out_mode_split = self.controller.config.household.__dict__[
                            f"{key}_split"
                        ]
                        for out_mode in out_mode_split:
                            ridehail_split_factors[out_mode] += (
                                out_mode_split[out_mode] * splits[key]
                            )

                    ridehail_trips = combine_trip_lists(it, jt, trip_mode)
                    for out_mode in ridehail_split_factors:
                        # NOTE(review): unlike the branch above, this
                        # matrix name carries no time-period suffix, and
                        # the += assumes highway_cache[out_mode] was
                        # already populated (KeyError otherwise) — verify
                        # if this branch is ever re-enabled.
                        matrix_name = f"{out_mode}_{suffix}" if suffix else out_mode
                        self.logger.debug(f"Writing out mode {out_mode}")
                        highway_cache[out_mode] += (
                            (ridehail_trips * ridehail_split_factors[out_mode])
                            .astype(float)
                            .round(2)
                        )
                        highway_out_file.write_array(
                            numpy_array=highway_cache[out_mode], name=matrix_name
                        )

        highway_out_file.close()
        transit_out_file.close()

run()

Open combined demand OMX files from demand models and prepare for assignment.

Source code in tm2py\components\demand\prepare_demand.py
172
173
174
175
176
177
178
def run(self):
    """Load combined demand OMX files from the demand models and ready them for assignment."""
    # Touch the zero matrix so it exists in the emmebank before assignment.
    self.highway_emmebank.zero_matrix
    for _period in self.controller.time_period_names:
        for _class_config in self.config.classes:
            self._prepare_demand(
                _class_config.name,
                _class_config.description,
                _class_config.demand,
                _period,
            )

PrepareTransitDemand

Bases: EmmeDemand

Import transit demand.

Demand is imported from OMX files based on reference file paths and OMX matrix names in transit assignment config (transit.classes). The demand is averaged using MSA with the current demand matrices (in the Emmebank) if transit.apply_msa_demand is true and controller.iteration > 1.

Source code in tm2py\components\demand\prepare_demand.py
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
class PrepareTransitDemand(EmmeDemand):
    """Import transit demand.

    Demand is imported from OMX files based on reference file paths and OMX
    matrix names in transit assignment config (transit.classes).
    The demand is averaged using MSA with the current demand matrices (in
    the Emmebank) if transit.apply_msa_demand is true and the
    controller.iteration > 1.

    """

    def __init__(self, controller: "RunController"):
        """Constructor for PrepareTransitDemand.

        Args:
            controller: RunController object.
        """
        super().__init__(controller)
        self.controller = controller
        self.config = self.controller.config.transit
        self._transit_emmebank = None

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO

    @property
    def transit_emmebank(self):
        """Lazily resolve and cache the transit Emmebank from the Emme manager."""
        if not self._transit_emmebank:
            self._transit_emmebank = self.controller.emme_manager.transit_emmebank
            self._emmebank = self._transit_emmebank
        return self._transit_emmebank

    @LogStartEnd("Prepare transit demand")
    def run(self):
        """Open combined demand OMX files from demand models and prepare for assignment."""
        self._source_ref_key = "transit_demand_file"
        # Ensure the zero matrix exists in the emmebank before importing demand.
        self.transit_emmebank.zero_matrix
        _time_period_tclass = itertools.product(
            self.controller.time_period_names, self.config.classes
        )
        for _time_period, _tclass in _time_period_tclass:
            self._prepare_demand(
                _tclass.skim_set_id, _tclass.description, _tclass.demand, _time_period
            )

    def _prepare_demand(
        self,
        name: str,
        description: str,
        demand_config: List[Dict[str, Union[str, float]]],
        time_period: str,
    ):
        """Load demand from OMX files and save to Emme matrix for transit assignment.

        Average with previous demand (MSA) if the current iteration > 1 and
        config.transit.apply_msa_demand is True

        Args:
            name (str): the name of the transit assignment class in the OMX files, usually a number
            description (str): the description for the transit assignment class
            demand_config (dict): the list of file cross-reference(s) for the demand to be loaded
                {"source": <name of demand model component>,
                 "name": <OMX key name>,
                 "factor": <factor to apply to demand in this file>}
            time_period (str): the time _time_period ID (name)
        """
        self._scenario = self.transit_emmebank.scenario(time_period)
        num_zones = len(self._scenario.zone_numbers)
        # Sum the demand across all referenced OMX files for this class/period.
        demand = self._read_demand(demand_config[0], time_period, name, num_zones)
        for file_config in demand_config[1:]:
            demand = demand + self._read_demand(
                file_config, time_period, name, num_zones
            )
        demand_name = f"TRN_{name}_{time_period}"
        description = f"{time_period} {description} demand"
        apply_msa = self.config.apply_msa_demand
        self._save_demand(demand_name, demand, description, apply_msa=apply_msa)

    def _read_demand(self, file_config, time_period, skim_set, num_zones):
        """Read one matrix from the cross-referenced source OMX file.

        Args:
            file_config: cross-reference dict with "source" (name of the
                demand model component) and "name" (OMX matrix key)
            time_period: the time period ID (name)
            skim_set: the transit assignment class/skim set identifier
            num_zones: number of zones in the scenario
        """
        # Load demand from the cross-referenced source file: the named demand
        # model component under the key transit_demand_file.
        # The previous warmstart/iteration conditional had two identical
        # branches, so it has been collapsed to a single path.
        source = file_config["source"]
        path = self.controller.get_abs_path(
            self.controller.config[source].transit_demand_file
        ).__str__()
        name = file_config["name"]
        return self._read(
            path.format(period=time_period, iter=self.controller.iteration),
            name,
            num_zones,
        )

__init__(controller)

Constructor for PrepareTransitDemand.

Parameters:

Name Type Description Default
controller 'RunController'

RunController object.

required
Source code in tm2py\components\demand\prepare_demand.py
610
611
612
613
614
615
616
617
618
619
def __init__(self, controller: "RunController"):
    """Initialize PrepareTransitDemand.

    Args:
        controller: the RunController instance driving the model run.
    """
    super().__init__(controller)
    self.controller = controller
    self.config = controller.config.transit
    self._transit_emmebank = None

run()

Open combined demand OMX files from demand models and prepare for assignment.

Source code in tm2py\components\demand\prepare_demand.py
632
633
634
635
636
637
638
639
640
641
642
643
@LogStartEnd("Prepare transit demand")
def run(self):
    """Load combined demand OMX files from the demand models and ready them for assignment."""
    self._source_ref_key = "transit_demand_file"
    # Touch the zero matrix so it exists before demand import.
    self.transit_emmebank.zero_matrix
    for _period in self.controller.time_period_names:
        for _tclass in self.config.classes:
            self._prepare_demand(
                _tclass.skim_set_id,
                _tclass.description,
                _tclass.demand,
                _period,
            )

validate_inputs()

Validate the inputs.

Source code in tm2py\components\demand\prepare_demand.py
621
622
def validate_inputs(self):
    """Check component inputs; no validation is currently required."""
    return None
avg_matrix_msa(prev_avg_matrix, this_iter_matrix, msa_iteration)

Average matrices based on Method of Successive Averages (MSA).

Parameters:

Name Type Description Default
prev_avg_matrix NumpyArray

Previously averaged matrix

required
this_iter_matrix NumpyArray

Matrix for this iteration

required
msa_iteration int

MSA iteration

required

Returns:

Name Type Description
NumpyArray NumpyArray

MSA Averaged matrix for this iteration.

Source code in tm2py\components\demand\prepare_demand.py
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
def avg_matrix_msa(
    prev_avg_matrix: NumpyArray, this_iter_matrix: NumpyArray, msa_iteration: int
) -> NumpyArray:
    """Apply the Method of Successive Averages (MSA) to a pair of matrices.

    Args:
        prev_avg_matrix (NumpyArray): running MSA average from prior iterations
        this_iter_matrix (NumpyArray): matrix produced by the current iteration
        msa_iteration (int): current MSA iteration number

    Returns:
        NumpyArray: updated MSA average (the current-iteration matrix itself
        when msa_iteration < 1).
    """
    if msa_iteration < 1:
        return this_iter_matrix
    step = 1.0 / msa_iteration
    return prev_avg_matrix + step * (this_iter_matrix - prev_avg_matrix)

Household Demand

Personal travel demand generated by household members.

Placeholder docstring for CT-RAMP related components for household residents’ model.

HouseholdModel

Bases: Component

Run household resident model.

Source code in tm2py\components\demand\household.py
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
class HouseholdModel(Component):
    """Run household resident model.

    Drives the CT-RAMP household travel model via Windows batch scripts
    (household manager, matrix manager, resident model), then converts the
    resulting trip lists into assignment demand matrices.
    """

    def validate_inputs(self):
        """Validates inputs for component."""
        pass

    @LogStartEnd()
    def run(self):
        """Run the household resident travel demand model.

        Steps:
            1. Starts household manager.
            2. Starts matrix manager.
            3. Starts resident travel model (CTRAMP).
            4. Cleans up CTRAMP java.
        """
        # NOTE: self.config is bound here, not in __init__; helper methods
        # that read self.config assume run() has been called first.
        self.config = self.controller.config.household
        self._start_household_manager()
        self._start_matrix_manager()
        self._run_resident_model()
        self._stop_java()
        # consume ctramp person trip list and create trip tables for assignment
        self._prepare_demand_for_assignment()
        self._copy_auto_maz_demand()

    def _prepare_demand_for_assignment(self):
        # Build highway/transit assignment trip tables from the CT-RAMP
        # individual and joint trip lists.
        prep_demand = PrepareHighwayDemand(self.controller)
        prep_demand.prepare_household_demand()

    def _start_household_manager(self):
        # Windows batch invocation: set up the CT-RAMP runtime environment,
        # then launch the household manager JVM.
        commands = [
            f"cd /d {self.controller.run_dir}",
            f"CALL {self.controller.run_dir}\\CTRAMP\\runtime\\CTRampEnv.bat",
            "set PATH=%CD%\\CTRAMP\\runtime;C:\\Windows\\System32;%JAVA_PATH%\\bin;"
            "%TPP_PATH%;%PYTHON_PATH%;%PYTHON_PATH%\\condabin;%PYTHON_PATH%\\envs",
            f"CALL {self.controller.run_dir}\\CTRAMP\\runtime\\runHhMgr.cmd %JAVA_PATH% %HOST_IP_ADDRESS%",
        ]
        run_process(commands, name="start_household_manager")

    def _start_matrix_manager(self):
        # Same environment setup as _start_household_manager, then launch
        # the matrix manager JVM.
        commands = [
            f"cd /d {self.controller.run_dir}",
            f"CALL {self.controller.run_dir}\\CTRAMP\\runtime\\CTRampEnv.bat",
            "set PATH=%CD%\\CTRAMP\\runtime;C:\\Windows\\System32;%JAVA_PATH%\\bin;"
            "%TPP_PATH%;%PYTHON_PATH%;%PYTHON_PATH%\\condabin;%PYTHON_PATH%\\envs",
            f"CALL {self.controller.run_dir}\\CTRAMP\\runtime\\runMtxMgr.cmd %HOST_IP_ADDRESS% %JAVA_PATH%",
        ]
        run_process(commands, name="start_matrix_manager")

    def _run_resident_model(self):
        # Hard-coded per-global-iteration CT-RAMP sample rates.
        # NOTE(review): a global iteration outside 1-5 raises KeyError here;
        # confirm the controller never runs more than 5 global iterations.
        sample_rate_iteration = {1: 0.05, 2: 0.5, 3: 1, 4: 0.02, 5: 0.02}
        iteration = self.controller.iteration
        sample_rate = sample_rate_iteration[iteration]
        # Copies the properties file relative to the current working
        # directory — presumably the run directory; verify callers chdir.
        _shutil.copyfile("CTRAMP\\runtime\\mtctm2.properties", "mtctm2.properties")
        commands = [
            f"cd /d {self.controller.run_dir}",
            f"CALL {self.controller.run_dir}\\CTRAMP\\runtime\\CTRampEnv.bat",
            "set PATH=%CD%\\CTRAMP\\runtime;C:\\Windows\\System32;%JAVA_PATH%\\bin;"
            "%TPP_PATH%;%PYTHON_PATH%;%PYTHON_PATH%\\condabin;%PYTHON_PATH%\\envs",
            f"CALL {self.controller.run_dir}\\CTRAMP\\runtime\\runMTCTM2ABM.cmd {sample_rate} {iteration} %JAVA_PATH%",
        ]
        run_process(commands, name="run_resident_model")

    @staticmethod
    def _stop_java():
        # Force-kills every java.exe process on the machine, not just the
        # CT-RAMP managers.
        run_process(['taskkill /im "java.exe" /F'])

    def _copy_auto_maz_demand(self):
        # Copy CT-RAMP MAZ-level auto demand files into the highway
        # assignment's expected locations, one file per period and MAZ group.
        time_period_names = self.time_period_names

        for period in time_period_names:
            for maz_group in [1, 2, 3]:
                output_path = (
                    self.controller.get_abs_path(
                        self.controller.config.highway.maz_to_maz.demand_file
                    )
                    .__str__()
                    .format(
                        period=period, number=maz_group, iter=self.controller.iteration
                    )
                )

                input_path = (
                    self.controller.get_abs_path(
                        self.config.highway_maz_ctramp_output_file
                    )
                    .__str__()
                    .format(period=period, number=maz_group)
                )

                _shutil.copyfile(input_path, output_path)

    def _consolidate_demand_for_assign(self):
        """
        CTRAMP writes out demands in separate omx files, e.g.
        ctramp_output\\auto_@p@_SOV_GP_@p@.mat
        ctramp_output\\auto_@p@_SOV_PAY_@p@.mat
        ctramp_output\\auto_@p@_SR2_GP_@p@.mat
        ctramp_output\\auto_@p@_SR2_HOV_@p@.mat
        ctramp_output\\auto_@p@_SR2_PAY_@p@.mat
        ctramp_output\\auto_@p@_SR3_GP_@p@.mat
        ctramp_output\\auto_@p@_SR3_HOV_@p@.mat
        ctramp_output\\auto_@p@_SR3_PAY_@p@.mat
        ctramp_output\\Nonmotor_@p@_BIKE_@p@.mat
        ctramp_output\\Nonmotor_@p@_WALK_@p@.mat
        ctramp_output\\other_@p@_SCHLBUS_@p@.mat

        Need to combine demands for one period into one omx file.
        """
        # NOTE(review): this method is not invoked from run() in the visible
        # code — confirm whether it is called elsewhere or is dead code.
        time_period_names = self.time_period_names

        # auto TAZ
        for period in time_period_names:
            output_path = (
                self.controller.get_abs_path(self.config.highway_demand_file)
                .__str__()
                .format(period=period, iter=self.controller.iteration)
            )
            output_omx = omx.open_file(output_path, "w")
            for mode_agg in self.config.mode_agg:
                if mode_agg.name == "transit":
                    continue
                for mode in mode_agg.modes:
                    input_path = (
                        self.controller.get_abs_path(
                            self.config.highway_taz_ctramp_output_file
                        )
                        .__str__()
                        .format(period=period, mode_agg=mode_agg.name, mode=mode)
                    )
                    input_omx = omx.open_file(input_path, "r")
                    core_name = mode + "_" + period.upper()
                    output_omx[core_name] = input_omx[core_name][:, :]
                    input_omx.close()

            output_omx.close()

        # auto MAZ
        self._copy_auto_maz_demand()

        # transit TAP
        # for period in time_period_names:
        #    for set in ["set1", "set2", "set3"]:
        #        output_path = (
        #            self.controller.get_abs_path(self.config.transit_demand_file)
        #            .__str__()
        #            .format(period=period, iter=self.controller.iteration, set=set)
        #        )
        #        output_omx = omx.open_file(output_path, "w")
        #        for mode_agg in self.config.mode_agg:
        #            if mode_agg.name != "transit":
        #                continue
        #            for mode in mode_agg.modes:
        #                input_path = (
        #                    self.controller.get_abs_path(
        #                        self.config.transit_tap_ctramp_output_file
        #                    )
        #                    .__str__()
        #                    .format(
        #                        period=period,
        #                        mode_agg=mode_agg.name,
        #                        mode=mode,
        #                        set=set,
        #                    )
        #                )
        #                input_omx = omx.open_file(input_path, "r")
        #                core_name = mode + "_TRN_" + set + "_" + period.upper()
        #                output_omx[core_name] = input_omx[core_name][:, :]
        #                input_omx.close()
        #
        #        output_omx.close()
        # transit TAZ
        for period in time_period_names:
            output_path = (
                self.controller.get_abs_path(self.config.transit_demand_file)
                .__str__()
                .format(period=period, iter=self.controller.iteration)
            )
            output_omx = omx.open_file(output_path, "w")
            for mode_agg in self.config.mode_agg:
                if mode_agg.name != "transit":
                    continue
                for mode in mode_agg.modes:
                    input_path = (
                        self.controller.get_abs_path(
                            self.config.transit_taz_ctramp_output_file
                        )
                        .__str__()
                        .format(
                            period=period,
                            mode_agg=mode_agg.name,
                            mode=mode,
                        )
                    )
                    input_omx = omx.open_file(input_path, "r")
                    core_name = mode + "_TRN_" + period.upper()
                    output_omx[core_name] = input_omx[core_name][:, :]
                    input_omx.close()

            output_omx.close()

run()

Run the household resident travel demand model.

Steps
  1. Starts household manager.
  2. Starts matrix manager.
  3. Starts resident travel model (CTRAMP).
  4. Cleans up CTRAMP java.
Source code in tm2py\components\demand\household.py
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
@LogStartEnd()
def run(self):
    """Run the household resident travel demand model.

    Steps:
        1. Starts household manager.
        2. Starts matrix manager.
        3. Starts resident travel model (CTRAMP).
        4. Cleans up CTRAMP java.
        5. Prepares CTRAMP trip lists as demand matrices for assignment.
        6. Copies auto MAZ demand.
    """
    # Re-read the household config from the controller each run so a
    # reloaded configuration is picked up between iterations.
    self.config = self.controller.config.household
    self._start_household_manager()
    self._start_matrix_manager()
    self._run_resident_model()
    self._stop_java()
    # consume ctramp person trip list and create trip tables for assignment
    self._prepare_demand_for_assignment()
    self._copy_auto_maz_demand()

validate_inputs()

Validates inputs for component.

Source code in tm2py\components\demand\household.py
16
17
18
def validate_inputs(self):
    """Validates inputs for component.

    No validation is currently performed; this is a no-op placeholder
    required by the component interface.
    """

Bases: ConfigItem

Household (residents) model parameters.

Source code in tm2py\config.py
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
@dataclass(frozen=True)
class HouseholdConfig(ConfigItem):
    """Household (residents) model parameters."""

    # Demand file path templates consumed by downstream assignment components.
    highway_demand_file: pathlib.Path
    transit_demand_file: pathlib.Path
    active_demand_file: pathlib.Path
    highway_maz_ctramp_output_file: pathlib.Path
    # Factors applied to ZPV trips — presumably zero-passenger (deadhead)
    # vehicle trips for owned AVs and TNCs; confirm against usage.
    OwnedAV_ZPV_factor: float
    TNC_ZPV_factor: float
    # CTRAMP output trip lists and the directory CTRAMP runs in.
    ctramp_indiv_trip_file: str
    ctramp_joint_trip_file: str
    ctramp_run_dir: pathlib.Path
    # Split fractions for disaggregating ride-hail/taxi demand.
    rideshare_mode_split: Dict[str, float]
    taxi_split: Dict[str, float]
    single_tnc_split: Dict[str, float]
    shared_tnc_split: Dict[str, float]
    # Mapping from CTRAMP numeric mode codes to mode names.
    ctramp_mode_names: Dict[float, str]
    income_segment: Dict[str, Union[float, str, list]]
    ctramp_hh_file: str

Air Passenger Demand

Module containing the AirPassenger class which builds the airport trip matrices.

AirPassenger

Bases: Component

Builds the airport trip matrices.

input: nonres/{year}_{tofrom}{airport}.csv output: five time-of-day-specific OMX files with matrices DA, SR2, SR3

Notes: These are independent of level-of-service.

Note that the reference names, years, file paths and other key details are controlled via the config, air_passenger section. See the AirPassengerConfig doc for details on specifying these inputs.

The following details are based on the default config values.

Creates air passenger vehicle trip tables for the Bay Area’s three major airports, namely SFO, OAK, and SJC. Geoff Gosling, a consultant, created vehicle trip tables segmented by time of day, travel mode, and access/egress direction (i.e. to the airport or from the airport) for years 2007 and 2035. The tables are based on a 2006 Air Passenger survey, which was conducted at SFO and OAK (but not SJC).

The travel modes are as follows

(a) escort (drive alone, shared ride 2, and shared ride 3+) (b) park (da, sr2, & sr3+) (c) rental car (da, sr2, & sr3+) (d) taxi (da, sr2, & sr3+) (e) limo (da, sr2, & sr3+) (f) shared ride van (all assumed to be sr3); (g) hotel shuttle (all assumed to be sr3); and, (h) charter bus (all assumed to be sr3).

The shared ride van, hotel shuttle, and charter bus modes are assumed to have no deadhead travel. The return escort trip is included, as are the deadhead limo and taxi trips.

The scripts reads in csv files adapted from Mr. Gosling’s Excel files, and creates a highway-assignment ready OMX matrix file for each time-of-day interval.

Assumes that no air passengers use HOT lanes (probably not exactly true in certain future year scenarios, but the assumption is made here as a simplification). Simple linear interpolations are used to estimate vehicle demand in years other than 2007 and 2035, including 2015, 2020, 2025, 2030, and 2040.

Transit travel to the airports is not included in these vehicle trip tables.

Input

Year-, access/egress-, and airport-specific database file with 90 columns of data for each TAZ. There are 18 columns for each time-of-day interval as follows: (1) Escort, drive alone (2) Escort, shared ride 2 (3) Escort, shared ride 3+ (4) Park, drive alone (5) Park, shared ride 2 (6) Park, shared ride 3+ (7) Rental car, drive alone (8) Rental car, shared ride 2 (9) Rental car, shared ride 3+ (10) Taxi, drive alone (11) Taxi, shared ride 2 (12) Taxi, shared ride 3+ (13) Limo, drive alone (14) Limo, shared ride 2 (15) Limo, shared ride 3+ (16) Shared ride van, shared ride 3+ (17) Hotel shuttle, shared ride 3+ (18) Charter bus, shared ride 3+

Five time-of-day-specific tables, each containing origin/destination vehicle matrices for the following modes: (1) drive alone (DA) (2) shared ride 2 (SR2) (3) shared ride 3+ (SR3)

Internal properties

_start_year _end_year _mode_groups: _out_names:

Source code in tm2py\components\demand\air_passenger.py
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
class AirPassenger(Component):
    """Builds the airport trip matrices.

    input: nonres/{year}_{tofrom}{airport}.csv
    output: five time-of-day-specific OMX files with matrices DA, SR2, SR3

    Notes:
    These are independent of level-of-service.

    Note that the reference names, years, file paths and other key details
    are controlled via the config, air_passenger section. See the
    AirPassengerConfig doc for details on specifying these inputs.

    The following details are based on the default config values.

    Creates air passenger vehicle trip tables for the Bay Area's three major
    airports, namely SFO, OAK, and SJC.  Geoff Gosling, a consultant, created
    vehicle trip tables segmented by time of day, travel mode, and access/egress
    direction (i.e. to the airport or from the airport) for years 2007 and 2035.
    The tables are based on a 2006 Air Passenger survey, which was conducted
    at SFO and OAK (but not SJC).

    The travel modes are as follows:
        (a) escort (drive alone, shared ride 2, and shared ride 3+)
        (b) park (da, sr2, & sr3+)
        (c) rental car (da, sr2, & sr3+)
        (d) taxi (da, sr2, & sr3+)
        (e) limo (da, sr2, & sr3+)
        (f) shared ride van (all assumed to be sr3);
        (g) hotel shuttle (all assumed to be sr3); and,
        (h) charter bus (all assumed to be sr3).

    The shared ride van, hotel shuttle, and charter bus modes are assumed to
    have no deadhead travel. The return escort trip is included, as are the
    deadhead limo and taxi trips.

    The scripts reads in csv files adapted from Mr. Gosling's Excel files,
    and creates a highway-assignment ready OMX matrix file for each time-of-day
    interval.

    Assumes that no air passengers use HOT lanes (probably not exactly true
    in certain future year scenarios, but the assumption is made here as a
    simplification).  Simple linear interpolations are used to estimate vehicle
    demand in years other than 2007 and 2035, including 2015, 2020, 2025, 2030,
    and 2040.

    Transit travel to the airports is not included in these vehicle trip tables.

    Input:
        Year-, access/egress-, and airport-specific database file with 90 columns
        of data for each TAZ.  There are 18 columns for each time-of-day interval
        as follows:
                (1)   Escort, drive alone
                (2)   Escort, shared ride 2
                (3)   Escort, shared ride 3+
                (4)   Park, drive alone
                (5)   Park, shared ride 2
                (6)   Park, shared ride 3+
                (7)   Rental car, drive alone
                (8)   Rental car, shared ride 2
                (9)   Rental car, shared ride 3+
                (10)  Taxi, drive alone
                (11)  Taxi, shared ride 2
                (12)  Taxi, shared ride 3+
                (13)  Limo, drive alone
                (14)  Limo, shared ride 2
                (15)  Limo, shared ride 3+
                (16)  Shared ride van, shared ride 3+
                (17)  Hotel shuttle, shared ride 3+
                (18)  Charter bus, shared ride 3+

     Output:
     Five time-of-day-specific tables, each containing origin/destination vehicle
     matrices for the following modes:
               (1) drive alone (DA)
               (2) shared ride 2 (SR2)
               (3) shared ride 3+ (SR3)

    Internal properties:
        _demand_classes: cache for the demand_classes property
        _access_mode_groups: cache for the access_mode_groups property
        _class_modes: cache for the class_modes property
    """

    def __init__(self, controller: RunController):
        """Build the airport trip matrices.

        Args:
            controller: parent Controller object
        """
        super().__init__(controller)

        self.config = self.controller.config.air_passenger

        # Reference demand years bracketing the scenario year for interpolation.
        self.start_year = self.config.reference_start_year
        self.end_year = self.config.reference_end_year
        self.scenario_year = self.controller.config.scenario.year

        self.airports = self.controller.config.air_passenger.airport_names

        # Lazily-built caches for the properties below.
        self._demand_classes = None
        self._access_mode_groups = None
        self._class_modes = None

    @property
    def classes(self):
        """Names of the assignable demand classes."""
        return [c.name for c in self.config.demand_aggregation]

    @property
    def demand_classes(self):
        """Demand aggregation configs keyed by class name (cached)."""
        if self._demand_classes is None:
            self._demand_classes = {c.name: c for c in self.config.demand_aggregation}
        return self._demand_classes

    @property
    def access_mode_groups(self):
        """Access mode names keyed by class name (cached)."""
        if self._access_mode_groups is None:
            self._access_mode_groups = {
                c_name: c.access_modes for c_name, c in self.demand_classes.items()
            }
        return self._access_mode_groups

    @property
    def class_modes(self):
        """Output (assignment) mode name keyed by class name (cached)."""
        if self._class_modes is None:
            self._class_modes = {
                c_name: c.mode for c_name, c in self.demand_classes.items()
            }
        return self._class_modes

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO
        pass

    @LogStartEnd()
    def run(self):
        """Run the Air Passenger Demand model to generate the demand matrices.

        Steps:
            1. Load the demand data from the CSV files.
            2. Aggregate the demand data into the assignable classes.
            3. Create the demand matrices by interpolating the demand data.
            4. Write the demand matrices to OMX files.
        """

        input_demand = self._load_air_pax_demand()
        aggr_demand = self._aggregate_demand(input_demand)

        # Linear interpolation between the two reference years to the
        # scenario year.
        demand = interpolate_dfs(
            aggr_demand,
            [self.start_year, self.end_year],
            self.scenario_year,
        )
        self._export_result(demand)

    def _load_air_pax_demand(self) -> pd.DataFrame:
        """Loads demand from the CSV files into single pandas dataframe.

        Uses the following configs to determine the input file names and paths:
        - self.config.air_passenger.input_demand_folder
        - self.config.air_passenger.airport_names
        - self.config.air_passenger.reference_start_year
        - self.config.air_passenger.reference_end_year

        Using the pattern: f"{year}_{direction}{airport}.csv"

        Returns: pandas dataframe grouped by ["ORIG", "DEST"] with one demand
            column per {period}_{access}_{class}_{year} combination, summed
            over airports and directions.
        """

        _start_demand_df = self._get_air_demand_for_year(self.start_year)
        _end_demand_df = self._get_air_demand_for_year(self.end_year)

        # Outer merge keeps OD pairs present in either reference year; the
        # year suffixes keep the two years' demand columns distinct.
        _air_pax_demand_df = pd.merge(
            _start_demand_df,
            _end_demand_df,
            how="outer",
            suffixes=(f"_{self.start_year}", f"_{self.end_year}"),
            on=["ORIG", "DEST"],
        )

        _grouped_air_pax_demand_df = _air_pax_demand_df.groupby(["ORIG", "DEST"]).sum()
        return _grouped_air_pax_demand_df

    def _input_demand_filename(self, airport, year, direction):
        """Absolute path of one input CSV per the configured filename template."""
        _file_name = self.config.input_demand_filename_tmpl.format(
            airport=airport, year=year, direction=direction
        )

        return os.path.join(
            self.get_abs_path(self.config.input_demand_folder), _file_name
        )

    def _get_air_demand_for_year(self, year) -> pd.DataFrame:
        """Creates a dataframe of concatenated data from CSVs for all airport x direction combos.

        Args:
            year (str): year of demand

        Returns:
            pd.DataFrame: concatenation of all CSVs that were read in as a dataframe
        """
        _airport_direction = itertools.product(
            self.airports,
            ["to", "from"],
        )
        # Single concat instead of repeated pairwise concat (avoids
        # quadratic copying as the number of files grows).
        _dfs = [
            pd.read_csv(self._input_demand_filename(airport, year, direction))
            for airport, direction in _airport_direction
        ]
        return pd.concat(_dfs)

    def _aggregate_demand(self, input_demand: pd.DataFrame) -> pd.DataFrame:
        """Aggregate demand across access modes to assignable classes for each year.

        Args:
            input_demand: pandas dataframe with the columns for each combo of:
                {_period}_{_access}_{_group}_{_year}
        """
        aggr_demand = pd.DataFrame()

        _year_tp_group_accessmode = itertools.product(
            [self.start_year, self.end_year],
            self.time_period_names,
            self.access_mode_groups.items(),
        )

        # TODO This should be done entirely in pandas using group-by
        for _year, _period, (_class, _access_modes) in _year_tp_group_accessmode:
            data = input_demand[
                [f"{_period}_{_access}_{_class}_{_year}" for _access in _access_modes]
            ]
            aggr_demand[f"{_period}_{_class}_{_year}"] = data.sum(axis=1)

        return aggr_demand

    def _export_result(self, demand_df: pd.DataFrame):
        """Export resulting model year demand to OMX files by period."""
        path_tmplt = self.get_abs_path(self.config.output_trip_table_directory)
        # Create the output directory itself. The previous
        # os.makedirs(os.path.dirname(path_tmplt)) only created the
        # *parent*, which failed when the configured directory did not
        # already exist and had no trailing separator.
        os.makedirs(path_tmplt, exist_ok=True)

        for _period in self.time_period_names:
            _file_path = os.path.join(
                path_tmplt, self.config.outfile_trip_table_tmp.format(period=_period)
            )
            df_to_omx(
                demand_df,
                {
                    _mode: f"{_period}_{_class}"
                    for _class, _mode in self.class_modes.items()
                },
                _file_path,
                orig_column="ORIG",
                dest_column="DEST",
            )

__init__(controller)

Build the airport trip matrices.

Parameters:

Name Type Description Default
controller RunController

parent Controller object

required
Source code in tm2py\components\demand\air_passenger.py
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
def __init__(self, controller: RunController):
    """Build the airport trip matrices.

    Args:
        controller: parent Controller object
    """
    super().__init__(controller)

    # Air-passenger section of the model configuration.
    self.config = self.controller.config.air_passenger

    # Reference demand years bracketing the scenario year for interpolation.
    self.start_year = self.config.reference_start_year
    self.end_year = self.config.reference_end_year
    self.scenario_year = self.controller.config.scenario.year

    self.airports = self.controller.config.air_passenger.airport_names

    # Lazily-built caches for the demand_classes / access_mode_groups /
    # class_modes properties.
    self._demand_classes = None
    self._access_mode_groups = None
    self._class_modes = None

run()

Run the Air Passenger Demand model to generate the demand matrices.

Steps
  1. Load the demand data from the CSV files.
  2. Aggregate the demand data into the assignable classes.
  3. Create the demand matrices by interpolating the demand data.
  4. Write the demand matrices to OMX files.
Source code in tm2py\components\demand\air_passenger.py
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
@LogStartEnd()
def run(self):
    """Run the Air Passenger Demand model to generate the demand matrices.

    Steps:
        1. Load the demand data from the CSV files.
        2. Aggregate the demand data into the assignable classes.
        3. Create the demand matrices by interpolating the demand data.
        4. Write the demand matrices to OMX files.
    """

    input_demand = self._load_air_pax_demand()
    aggr_demand = self._aggregate_demand(input_demand)

    # Linear interpolation between the two reference years to the
    # scenario year.
    demand = interpolate_dfs(
        aggr_demand,
        [self.start_year, self.end_year],
        self.scenario_year,
    )
    self._export_result(demand)

validate_inputs()

Validate the inputs.

Source code in tm2py\components\demand\air_passenger.py
154
155
156
157
def validate_inputs(self):
    """Validate the inputs."""
    # TODO: input validation not yet implemented; intentionally a no-op.
    pass

Bases: ConfigItem

Air passenger demand aggregation input parameters.

Properties
Source code in tm2py\config.py
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
@dataclass(frozen=True)
class AirPassengerDemandAggregationConfig(ConfigItem):
    """Air passenger demand aggregation input parameters.

    Properties:
        name: (src_group_name) name used for the class group in the input columns
            for the trip tables,
        mode: (result_class_name) name used in the output OMX matrix names, note
            that this should match the expected naming convention in the
            HighwayClassDemandConfig name(s)
        access_modes: list of names used for the access modes in the input
            columns for the trip tables
    """

    # Source group name used in the input trip table columns.
    name: str
    # Result class name used in the output OMX matrix names.
    mode: str
    # Access mode names used in the input trip table columns.
    access_modes: Tuple[str, ...]

Commercial Demand

Commercial vehicle / truck model module.

CommercialVehicleModel

Bases: Component

Commercial Vehicle demand model.

Generates truck demand matrices from
  • land use
  • highway network impedances
  • parameters
Segmented into four truck types

(1) very small trucks (two-axle, four-tire), (2) small trucks (two-axle, six-tire), (3) medium trucks (three-axle), (4) large or combination (four or more axle) trucks.

(1) MAZ csv data file with the employment and household counts.

(2) Skims (3) K-Factors (4)

Notes: (1) Based on the BAYCAST truck model, no significant updates. (2) Combined Chuck’s calibration adjustments into the NAICS-based model coefficients.

Source code in tm2py\components\demand\commercial.py
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
class CommercialVehicleModel(Component):
    """Commercial Vehicle demand model.

    Generates truck demand matrices from:
        - land use
        - highway network impedances
        - parameters

    Segmented into four truck types:
        (1) very small trucks (two-axle, four-tire),
        (2) small trucks (two-axle, six-tire),
        (3) medium trucks (three-axle),
        (4) large or combination (four or more axle) trucks.

    Input:  (1) MAZ csv data file with the employment and household counts.
            (2) Skims
            (3) K-Factors
            (4)
    Output: time-period-specific OMX trip tables per assignable truck class.

    Notes:
    (1) Based on the BAYCAST truck model, no significant updates.
    (2) Combined Chuck's calibration adjustments into the NAICS-based model coefficients.
    """

    def __init__(self, controller: RunController):
        """Constructor for the CommercialVehicleModel component.

        Args:
            controller (RunController): Run controller for model run.
        """
        super().__init__(controller)

        self.config = self.controller.config.truck
        # Sub-models run in sequence by run(); each consumes the prior's output.
        self.sub_components = {
            "trip generation": CommercialVehicleTripGeneration(controller, self),
            "trip distribution": CommercialVehicleTripDistribution(controller, self),
            "time of day": CommercialVehicleTimeOfDay(controller, self),
            "toll choice": CommercialVehicleTollChoice(controller, self),
        }

        # Truck impedance configs keyed by name.
        self.trk_impedances = {imp.name: imp for imp in self.config.impedances}

        # Emme matrix management (lazily evaluated)
        self._matrix_cache = None

        # Interim Results (populated by run())
        self.total_tripends_df = None
        self.daily_demand_dict = None
        self.trkclass_tp_demand_dict = None
        self.trkclass_tp_toll_demand_dict = None

    @property
    def purposes(self):
        """Unique trip purposes across the configured trip generation classes."""
        return list(
            {trk_class.purpose for trk_class in self.config.trip_gen.classes}
        )

    @property
    def classes(self):
        """Names of the configured truck classes."""
        return [trk_class.name for trk_class in self.config.classes]

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO: implement input validation
        pass

    @LogStartEnd()
    def run(self):
        """Run commercial vehicle model.

        Chains the four sub-components (trip generation -> trip distribution
        -> time of day -> toll choice), storing each interim result on the
        instance, then exports the final tolled demand to OMX files.
        """
        self.total_tripends_df = self.sub_components["trip generation"].run()
        self.daily_demand_dict = self.sub_components["trip distribution"].run(
            self.total_tripends_df
        )
        self.trkclass_tp_demand_dict = self.sub_components["time of day"].run(
            self.daily_demand_dict
        )
        self.trkclass_tp_toll_demand_dict = self.sub_components["toll choice"].run(
            self.trkclass_tp_demand_dict
        )
        self._export_results_as_omx(self.trkclass_tp_toll_demand_dict)

    @property
    def emmebank(self):
        """Reference to highway assignment Emmebank.

        TODO
            This should really be in the controller?
            Or part of network.skims?
        """
        self._emmebank = self.controller.emme_manager.highway_emmebank
        return self._emmebank

    @property
    def emme_scenario(self):
        """Return emme scenario from emmebank.

        Use first valid scenario for reference Zone IDs.

        TODO
            This should really be in the controller?
            Or part of network.skims?
        """
        _ref_scenario_name = self.controller.config.time_periods[0].name
        return self.emmebank.scenario(_ref_scenario_name)

    @property
    def matrix_cache(self):
        """Access to MatrixCache to Emmebank for given emme_scenario."""
        if self._matrix_cache is None:
            self._matrix_cache = MatrixCache(self.emme_scenario)
        return self._matrix_cache

    @LogStartEnd(level="DEBUG")
    def _export_results_as_omx(self, class_demand):
        """Export assignable class demands to OMX files by time-of-day."""
        outdir = self.get_abs_path(self.config.output_trip_table_directory)
        # Create the output directory itself; the previous
        # os.makedirs(os.path.dirname(outdir)) only created its parent and
        # missed the final path component when there was no trailing separator.
        os.makedirs(outdir, exist_ok=True)
        for period, matrices in class_demand.items():
            with OMXManager(
                os.path.join(
                    outdir, self.config.outfile_trip_table_tmp.format(period=period)
                ),
                "w",
            ) as output_file:
                for name, data in matrices.items():
                    output_file.write_array(data, name)

emme_scenario property

Return emme scenario from emmebank.

Use first valid scenario for reference Zone IDs.

TODO This should really be in the controller? Or part of network.skims?

emmebank property

Reference to highway assignment Emmebank.

TODO This should really be in the controller? Or part of network.skims?

matrix_cache property

Access to MatrixCache to Emmebank for given emme_scenario.

__init__(controller)

Constructor for the CommercialVehicleTripGeneration component.

Parameters:

Name Type Description Default
controller RunController

Run controller for model run.

required
Source code in tm2py\components\demand\commercial.py
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
def __init__(self, controller: RunController):
    """Constructor for the CommercialVehicleModel component.

    Args:
        controller (RunController): Run controller for model run.
    """
    super().__init__(controller)

    self.config = self.controller.config.truck
    # Sub-models run in sequence by run(); each consumes the prior's output.
    self.sub_components = {
        "trip generation": CommercialVehicleTripGeneration(controller, self),
        "trip distribution": CommercialVehicleTripDistribution(controller, self),
        "time of day": CommercialVehicleTimeOfDay(controller, self),
        "toll choice": CommercialVehicleTollChoice(controller, self),
    }

    # Truck impedance configs keyed by name.
    self.trk_impedances = {imp.name: imp for imp in self.config.impedances}

    # Emme matrix management (lazily evaluated)
    self._matrix_cache = None

    # Interim Results (populated by run())
    self.total_tripends_df = None
    self.daily_demand_dict = None
    self.trkclass_tp_demand_dict = None
    self.trkclass_tp_toll_demand_dict = None

run()

Run commercial vehicle model.

Source code in tm2py\components\demand\commercial.py
124
125
126
127
128
129
130
131
132
133
134
135
136
137
@LogStartEnd()
def run(self):
    """Run commercial vehicle model.

    Chains the four sub-components (trip generation -> trip distribution
    -> time of day -> toll choice), storing each interim result on the
    instance, then exports the final tolled demand to OMX files.
    """
    self.total_tripends_df = self.sub_components["trip generation"].run()
    self.daily_demand_dict = self.sub_components["trip distribution"].run(
        self.total_tripends_df
    )
    self.trkclass_tp_demand_dict = self.sub_components["time of day"].run(
        self.daily_demand_dict
    )
    self.trkclass_tp_toll_demand_dict = self.sub_components["toll choice"].run(
        self.trkclass_tp_demand_dict
    )
    self._export_results_as_omx(self.trkclass_tp_toll_demand_dict)

validate_inputs()

Validate the inputs.

Source code in tm2py\components\demand\commercial.py
120
121
def validate_inputs(self):
    """Validate the inputs."""
    # No-op: input validation is not yet implemented for this component.

CommercialVehicleTimeOfDay

Bases: Subcomponent

Commercial vehicle (truck) Time of Day Split for 4 sizes of truck.

Input: Trip origin and destination matrices by 4 truck sizes Output: 20 trip origin and destination matrices by 4 truck sizes by 5 time periods

Note

The diurnal factors are taken from the BAYCAST-90 model, with adjustments made during calibration to the very small truck values to better match counts.

Source code in tm2py\components\demand\commercial.py
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
class CommercialVehicleTimeOfDay(Subcomponent):
    """Commercial vehicle (truck) time-of-day split for 4 sizes of truck.

    Input:  trip origin/destination matrices by 4 truck sizes
    Output: 20 trip origin/destination matrices (4 truck sizes x 5 time periods)

    Note:
        The diurnal factors are taken from the BAYCAST-90 model, with
        adjustments made during calibration to the very small truck values
        to better match counts.
    """

    def __init__(self, controller: RunController, component: Component):
        """Constructor for the CommercialVehicleTimeOfDay component.

        Args:
            controller (RunController): Run controller for model run.
            component (Component): Parent component of sub-component
        """
        super().__init__(controller, component)

        self.config = self.component.config.time_of_day

        # Key selecting the origin-destination factor from each split entry.
        self.split_factor = "od"
        # Lazy caches backing class_configs / class_period_splits.
        self._class_configs = None
        self._class_period_splits = None

    @property
    def time_periods(self):
        """Time period configurations from the model config."""
        return self.controller.config.time_periods

    @property
    def classes(self):
        """Names of the configured truck classes."""
        return [truck.name for truck in self.config.classes]

    @property
    def class_configs(self):
        """Truck class configurations keyed by class name (cached)."""
        if not self._class_configs:
            self._class_configs = {
                truck.name: truck for truck in self.config.classes
            }
        return self._class_configs

    @property
    def class_period_splits(self):
        """Returns split fraction dictionary mapped to [truck class][time period]."""
        if not self._class_period_splits:
            splits = {}
            for class_name, class_config in self.class_configs.items():
                splits[class_name] = {
                    entry.time_period: entry
                    for entry in class_config.time_period_split
                }
            self._class_period_splits = splits

        return self._class_period_splits

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO
        pass

    @LogStartEnd()
    def run(
        self, daily_demand: Dict[str, NumpyArray]
    ) -> Dict[str, Dict[str, NumpyArray]]:
        """Splits the daily demand by time of day based on factors in the config.

        Uses self.config.truck.classes.{class_name}.time_of_day_split to split
        the daily demand.

        #TODO use TimePeriodSplit
        Args:
            daily_demand: dictionary of truck type name to numpy array of
                truck type daily demand

        Returns:
             Nested dictionary of truck class: time period name => numpy array of demand
        """
        split_demand = defaultdict(dict)

        for truck_class in self.classes:
            class_daily = daily_demand[truck_class]
            period_factors = self.class_period_splits[truck_class]
            for period in self.time_period_names:
                factor = period_factors[period.lower()][self.split_factor]
                split_demand[truck_class][period] = np.around(
                    factor * class_daily, decimals=2
                )

        return split_demand

class_period_splits property

Returns split fraction dictionary mapped to [time period class][time period].

__init__(controller, component)

Constructor for the CommercialVehicleTimeOfDay component.

Parameters:

Name Type Description Default
controller RunController

Run controller for model run.

required
component Component

Parent component of sub-component

required
Source code in tm2py\components\demand\commercial.py
765
766
767
768
769
770
771
772
773
774
775
776
777
778
def __init__(self, controller: RunController, component: Component):
    """Constructor for the CommercialVehicleTimeOfDay component.

    Args:
        controller (RunController): Run controller for model run.
        component (Component): Parent component of sub-component
    """
    super().__init__(controller, component)

    # Time-of-day sub-section of the parent truck component's config.
    self.config = self.component.config.time_of_day

    # Key selecting the origin-destination factor from each split entry.
    self.split_factor = "od"
    # Lazy caches backing the class_configs / class_period_splits properties.
    self._class_configs = None
    self._class_period_splits = None

run(daily_demand)

Splits the daily demand by time of day based on factors in the config.

Uses self.config.truck.classes.{class_name}.time_of_day_split to split the daily demand.

TODO use TimePeriodSplit

Args: daily_demand: dictionary of truck type name to numpy array of truck type daily demand

Returns:

Type Description
Dict[str, Dict[str, NumpyArray]]

Nested dictionary of truck class: time period name => numpy array of demand

Source code in tm2py\components\demand\commercial.py
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
@LogStartEnd()
def run(
    self, daily_demand: Dict[str, NumpyArray]
) -> Dict[str, Dict[str, NumpyArray]]:
    """Splits the daily demand by time of day based on factors in the config.

    Uses self.config.truck.classes.{class_name}.time_of_day_split to split the daily demand.

    #TODO use TimePeriodSplit
    Args:
        daily_demand: dictionary of truck type name to numpy array of
            truck type daily demand

    Returns:
         Nested dictionary of truck class: time period name => numpy array of demand
    """
    trkclass_tp_demand_dict = defaultdict(dict)

    # One matrix per (truck class, time period) combination.
    _class_timeperiod = itertools.product(self.classes, self.time_period_names)

    for _t_class, _tp in _class_timeperiod:
        # Scale the class's daily demand by the period's split fraction,
        # rounding to 2 decimals to match the original model outputs.
        trkclass_tp_demand_dict[_t_class][_tp] = np.around(
            self.class_period_splits[_t_class][_tp.lower()][self.split_factor]
            * daily_demand[_t_class],
            decimals=2,
        )

    return trkclass_tp_demand_dict

validate_inputs()

Validate the inputs.

Source code in tm2py\components\demand\commercial.py
805
806
807
808
def validate_inputs(self):
    """Validate the inputs."""
    # TODO: add validation checks for this component's inputs
    return None

CommercialVehicleTollChoice

Bases: Subcomponent

Commercial vehicle (truck) toll choice.

A binomial choice model for very small, small, medium, and large trucks. A separate value toll paying versus no value toll paying path choice model is applied to each of the twenty time period and vehicle type combinations.

(1) Trip tables by time of day and truck class

(2) Skims providing the time and cost for value toll and non-value toll paths for each; the matrix names in the OMX files are: “{period}_{cls_name}_time” “{period}_{cls_name}_dist” “{period}_{cls_name}_bridgetoll{grp_name}” “{period}_{cls_name}toll_time” “{period}_{cls_name}toll_dist” “{period}_{cls_name}toll_bridgetoll{grp_name}” “{period}_{cls_name}toll_valuetoll{grp_name}” Where period is the assignment period, cls_name is the truck assignment class name (as very small, small and medium truck are assigned as the same class) and grp_name is the truck type name (as the tolls are calculated separately for very small, small and medium).

(1) TOLLCLASS is a code, 1 through 10 are reserved for bridges; 11 and up is

reserved for value toll facilities.

1
2
3
4
5
6
7
    (2)  All costs should be coded in year 2000 cents
    (3)  The 2-axle fee is used for very small trucks
    (4)  The 2-axle fee is used for small trucks
    (5)  The 3-axle fee is used for medium trucks
    (6)  The average of the 5-axle and 6-axle fee is used for large trucks
         (about the midpoint of the fee schedule).
    (7)  The in-vehicle time coefficient is from the work trip mode choice model.
Source code in tm2py\components\demand\commercial.py
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
class CommercialVehicleTollChoice(Subcomponent):
    """Commercial vehicle (truck) toll choice.

    A binomial choice model for very small, small, medium, and large trucks.
    A separate value toll paying versus no value toll paying path choice
    model is applied to each of the twenty time period and vehicle type combinations.

    Input:  (1) Trip tables by time of day and truck class
            (2) Skims providing the time and cost for value toll and non-value toll paths
            for each; the matrix names in the OMX files are:
                "{period}_{cls_name}_time"
                "{period}_{cls_name}_dist"
                "{period}_{cls_name}_bridgetoll{grp_name}"
                "{period}_{cls_name}toll_time"
                "{period}_{cls_name}toll_dist"
                "{period}_{cls_name}toll_bridgetoll{grp_name}"
                "{period}_{cls_name}toll_valuetoll{grp_name}"
            Where period is the assignment period, cls_name is the truck assignment
            class name (as very small, small and medium truck are assigned as the
            same class) and grp_name is the truck type name (as the tolls are
            calculated separately for very small, small and medium).

    Results: a total of forty demand matrices, by time of day, truck type and toll/non-toll.

    Notes:  (1)  TOLLCLASS is a code, 1 through 10 are reserved for bridges; 11 and up is
                 reserved for value toll facilities.
            (2)  All costs should be coded in year 2000 cents
            (3)  The 2-axle fee is used for very small trucks
            (4)  The 2-axle fee is used for small trucks
            (5)  The 3-axle fee is used for medium trucks
            (6)  The average of the 5-axle and 6-axle fee is used for large trucks
                 (about the midpoint of the fee schedule).
            (7)  The in-vehicle time coefficient is from the work trip mode choice model.
    """

    def __init__(self, controller, component):
        """Constructor for Commercial Vehicle Toll Choice.

        Also calls Subclass __init__().

        Args:
            controller: model run controller
            component: parent component
        """
        super().__init__(controller, component)

        self.config = self.component.config.toll_choice

        # The single sub-component: the shared toll-choice calculator.
        _calculator = TollChoiceCalculator(controller, self, self.config)
        self.sub_components = {"toll choice calculator": _calculator}

        # Direct handle to the calculator, configured for truck skims.
        self._toll_choice = _calculator
        self._toll_choice.toll_skim_suffix = "trk"

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO: add validation checks for this component's inputs
        return None

    @LogStartEnd()
    def run(self, trkclass_tp_demand_dict):
        """Split per-period truck demands into nontoll and toll classes.

        Uses OMX skims output from highway assignment: traffic_skims_{period}.omx"""

        demands_by_period = defaultdict(dict)

        for _period in self.time_period_names:
            for _truck_class in self.config.classes:
                _name = _truck_class.name
                _split = self._toll_choice.run(
                    trkclass_tp_demand_dict[_name][_period],
                    _name,
                    _period,
                )
                demands_by_period[_period][_name] = _split["non toll"]
                demands_by_period[_period][f"{_name}toll"] = _split["toll"]

        return demands_by_period

__init__(controller, component)

Constructor for Commercial Vehicle Toll Choice.

Also calls Subclass init().

Parameters:

Name Type Description Default
controller

model run controller

required
component

parent component

required
Source code in tm2py\components\demand\commercial.py
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
def __init__(self, controller, component):
    """Constructor for Commercial Vehicle Toll Choice.

    Also calls Subclass __init__().

    Args:
        controller: model run controller
        component: parent component
    """
    super().__init__(controller, component)

    self.config = self.component.config.toll_choice

    # The single sub-component: the shared toll-choice calculator.
    _calculator = TollChoiceCalculator(controller, self, self.config)
    self.sub_components = {"toll choice calculator": _calculator}

    # Direct handle to the calculator, configured for truck skims.
    self._toll_choice = _calculator
    self._toll_choice.toll_skim_suffix = "trk"

run(trkclass_tp_demand_dict)

Split per-period truck demands into nontoll and toll classes.

Uses OMX skims output from highway assignment: traffic_skims_{period}.omx

Source code in tm2py\components\demand\commercial.py
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
@LogStartEnd()
def run(self, trkclass_tp_demand_dict):
    """Split per-period truck demands into nontoll and toll classes.

    Uses OMX skims output from highway assignment: traffic_skims_{period}.omx"""

    demands_by_period = defaultdict(dict)

    for _period in self.time_period_names:
        for _truck_class in self.config.classes:
            _name = _truck_class.name
            _split = self._toll_choice.run(
                trkclass_tp_demand_dict[_name][_period],
                _name,
                _period,
            )
            demands_by_period[_period][_name] = _split["non toll"]
            demands_by_period[_period][f"{_name}toll"] = _split["toll"]

    return demands_by_period

validate_inputs()

Validate the inputs.

Source code in tm2py\components\demand\commercial.py
900
901
902
903
def validate_inputs(self):
    """Validate the inputs."""
    # TODO: add validation checks for this component's inputs
    return None

CommercialVehicleTripDistribution

Bases: Subcomponent

Commercial vehicle (truck) Trip Distribution for 4 sizes of truck.

The four truck types are

(1) very small trucks (two-axle, four-tire), (2) small trucks (two-axle, six-tire), (3) medium trucks (three-axle), (4) large or combination (four or more axle) trucks.

(1) Trips by 4 truck sizes

(2) highway skims for truck, time, distance, bridgetoll and value toll (3) friction factors lookup table (4) k-factors matrix

A simple gravity model is used to distribute the truck trips, with separate friction factors used for each class of truck.

A blended travel time is used as the impedance measure, specifically the weighted average of the AM travel time (one-third weight) and the midday travel time (two-thirds weight).

Input

Level-of-service matrices for the AM peak period (6 am to 10 am) and midday period (10 am to 3 pm) which contain truck-class specific estimates of congested travel time (in minutes)

A matrix of k-factors, as calibrated by Chuck Purvis. Note the very small truck model does not use k-factors; the small, medium, and large trucks use the same k-factors.

A table of friction factors in text format with the following fields, space separated: - impedance measure (blended travel time); - friction factors for very small trucks; - friction factors for small trucks; - friction factors for medium trucks; and, - friction factors for large trucks.

Notes on distribution steps

load nonres/truck_kfactors_taz.csv load nonres/truckFF.dat Apply friction factors and kfactors to produce balancing matrix apply the gravity models using friction factors from nonres/truckFF.dat (note the very small trucks do not use the K-factors) Can use Emme matrix balancing for this - important note: reference matrices by name and ensure names are unique Trips rounded to 0.01, causes some instability in results

Notes: (1) Based on the BAYCAST truck model, no significant updates. (2) Combined Chuck’s calibration adjustments into the NAICS-based model coefficients.

Source code in tm2py\components\demand\commercial.py
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
class CommercialVehicleTripDistribution(Subcomponent):
    """Commercial vehicle (truck) Trip Distribution for 4 sizes of truck.

    The four truck types are:
        (1) very small trucks (two-axle, four-tire),
        (2) small trucks (two-axle, six-tire),
        (3) medium trucks (three-axle),
        (4) large or combination (four or more axle) trucks.

    Input:  (1) Trips by 4 truck sizes
            (2) highway skims for truck, time, distance, bridgetoll and value toll
            (3) friction factors lookup table
            (4) k-factors matrix
    Output: Trips origin and destination matrices by 4 truck sizes

    A simple gravity model is used to distribute the truck trips, with
    separate friction factors used for each class of truck.

    A blended travel time is used as the impedance measure, specifically the weighted average
    of the AM travel time (one-third weight) and the midday travel time (two-thirds weight).

    Input:
        Level-of-service matrices for the AM peak period (6 am to 10 am) and midday
        period (10 am to 3 pm) which contain truck-class specific estimates of
        congested travel time (in minutes)

        A matrix of k-factors, as calibrated by Chuck Purvis.  Note the very small truck model
        does not use k-factors; the small, medium, and large trucks use the same k-factors.

        A table of friction factors in text format with the following fields, space separated:
        - impedance measure (blended travel time);
        - friction factors for very small trucks;
        - friction factors for small trucks;
        - friction factors for medium trucks; and,
        - friction factors for large trucks.

    Notes on distribution steps:
        load nonres/truck_kfactors_taz.csv
        load nonres/truckFF.dat
        Apply friction factors and kfactors to produce balancing matrix
        apply the gravity models using friction factors from nonres/truckFF.dat
        (note the very small trucks do not use the K-factors)
        Can use Emme matrix balancing for this - important note: reference
        matrices by name and ensure names are unique
        Trips rounded to 0.01, causes some instability in results

    Results: four total daily trips by truck type

    Notes:
    (1) Based on the BAYCAST truck model, no significant updates.
    (2) Combined Chuck's calibration adjustments into the NAICS-based model coefficients.

    """

    def __init__(self, controller: RunController, component: Component):
        """Constructor for the CommercialVehicleTripDistribution component.

        Args:
            controller (RunController): Run controller for model run.
            component (Component): Parent component of sub-component
        """
        super().__init__(controller, component)

        self.config = self.component.config.trip_dist

        # Caches, populated lazily by the corresponding properties/methods.
        self._k_factors = None
        self._blended_skims = {}
        self._friction_factors = None
        self._friction_factor_matrices = {}

        self._class_config = None

    @property
    def class_config(self):
        """Mapping of truck class name to its configuration entry (cached)."""
        if not self._class_config:
            self._class_config = {c.name: c for c in self.config.classes}

        return self._class_config

    @property
    def k_factors(self):
        """Zone-to-zone values of truck K factors.

        Returns:
             NumpyArray: Zone-to-zone values of truck K factors.
        """
        if self._k_factors is None:
            self._k_factors = self._load_k_factors()
        return self._k_factors

    def _load_k_factors(self):
        """Loads k-factors from self.config.truck.k_factors_file csv file.

        Builds a dense zone-by-zone matrix from the sparse I/J/value rows in
        the csv file, then zero-pads it out to the full scenario zone system.

        Returns:
            NumpyArray: Zone-to-zone values of truck K factors.
        """
        data = pd.read_csv(self.get_abs_path(self.config.k_factors_file))
        zones = np.unique(data["I_taz_tm2_v2_2"])
        num_data_zones = len(zones)
        row_index = np.searchsorted(zones, data["I_taz_tm2_v2_2"])
        col_index = np.searchsorted(zones, data["J_taz_tm2_v2_2"])
        k_factors = np.zeros((num_data_zones, num_data_zones))
        k_factors[row_index, col_index] = data["truck_k"]
        # Pad with zeros so the matrix covers every scenario zone, not just
        # the zones present in the input file.
        num_zones = len(self.component.emme_scenario.zone_numbers)
        padding = ((0, num_zones - num_data_zones), (0, num_zones - num_data_zones))
        k_factors = np.pad(k_factors, padding)

        return k_factors

    def blended_skims(self, mode: str):
        """Get blended skim. Creates it if doesn't already exist.

        Args:
            mode (str): Mode for skim

        Returns:
            Blended skim values for the requested mode.
        """
        if mode not in self._blended_skims:
            self._blended_skims[mode] = get_blended_skim(
                self.controller,
                mode=mode,
                blend=self.component.trk_impedances[mode]["time_blend"],
            )
        return self._blended_skims[mode]

    def friction_factor_matrices(
        self, trk_class: str, k_factors: Union[None, NumpyArray] = None
    ) -> NumpyArray:
        """Zone to zone NumpyArray of impedances for a given truck class.

        Args:
            trk_class (str): Truck class abbreviated name
            k_factors (Union[None,NumpyArray]): If not None, gives a zone-by-zone array of
                k-factors--additive impedances to be added on top of friction factors.
                Defaults to None, in which case self.k_factors is used.

        Returns:
            NumpyArray: Zone-by-zone matrix of friction factors
        """
        if trk_class not in self._friction_factor_matrices:
            # Bug fix: the caller-supplied k_factors argument was previously
            # ignored and self.k_factors was always used.
            _k_factors = self.k_factors if k_factors is None else k_factors
            self._friction_factor_matrices[
                trk_class
            ] = self._calculate_friction_factor_matrix(
                trk_class,
                self.class_config[trk_class].impedance,
                _k_factors,
                self.class_config[trk_class].use_k_factors,
            )

        return self._friction_factor_matrices[trk_class]

    @LogStartEnd(level="DEBUG")
    def _calculate_friction_factor_matrix(
        self,
        segment_name,
        blended_skim_name: str,
        k_factors: Union[None, NumpyArray] = None,
        use_k_factors: bool = False,
    ):
        """Calculates friction matrix by interpolating time; optionally multiplying by k_factors.

        Args:
            segment_name: Name of the segment to calculate the friction factors for (i.e. vstruck)
            blended_skim_name (str): Name of blended skim
            k_factors (Union[None,NumpyArray]): Optional k-factors matrix
            use_k_factors (bool): Multiply k_factors into the friction matrix when True
                and k_factors is provided. Defaults to False.

        Returns:
            friction_matrix NumpyArray: friction matrix for a truck class
        """
        # Interpolate friction factors from the lookup table at each
        # zone-pair's blended travel time.
        _friction_matrix = np.interp(
            self.blended_skims(blended_skim_name),
            self.friction_factors["time"].tolist(),
            self.friction_factors[segment_name],
        )

        if use_k_factors and k_factors is not None:
            _friction_matrix = _friction_matrix * k_factors

        return _friction_matrix

    @property
    def friction_factors(self):
        """Table of friction factors for each time band by truck class.

        Returns:
            pd.DataFrame: DataFrame of friction factors read from disk.
        """
        if self._friction_factors is None:
            self._friction_factors = self._read_ffactors()
        return self._friction_factors

    def _read_ffactors(self) -> pd.DataFrame:
        """Load friction factors lookup tables from csv file to dataframe.

        Reads from file: config.truck.friction_factors_file with following assumed column order:
            time: Time
            vsmtrk: Very Small Truck FF
            smltrk: Small Truck FF
            medtrk: Medium Truck FF
            lrgtrk: Large Truck FF
        """
        _file_path = self.get_abs_path(self.config.friction_factors_file)
        return pd.read_csv(_file_path)

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO: add validation checks for this component's inputs
        pass

    @LogStartEnd()
    def run(self, tripends_df) -> Dict[str, NumpyArray]:
        """Run commercial vehicle trip distribution.

        Args:
            tripends_df: dataframe of trip ends with "{trk_class}_productions"
                and "{trk_class}_attractions" columns.

        Returns:
            Dict[str, NumpyArray]: daily demand matrix for each truck class.
        """
        daily_demand_dict = {
            tc: self._distribute_ods(tripends_df, tc) for tc in self.component.classes
        }

        return daily_demand_dict

    @LogStartEnd(level="DEBUG")
    def _distribute_ods(
        self,
        tripends_df: pd.DataFrame,
        trk_class: str,
        orig_factor: float = 0.5,
        dest_factor: float = 0.5,
    ) -> NumpyArray:
        """Distribute a trip ends for a given a truck class.

        Args:
            tripends_df: dataframe with trip ends as "{trk_class}_prod" and{trk_class}_attr".
            trk_class: name of truck class to distribute.
            orig_factor (float, optional): Amount to factor towards origins. Defaults to 0.5.
            dest_factor (float, optional): Amount to factor towards destinations. Defaults to 0.5.

        Returns:
            NumpyArray: Distributed trip ends for given truck class

        Raises:
            ValueError: If orig_factor and dest_factor do not sum to 1.0.
        """
        if orig_factor + dest_factor != 1.0:
            # Bug fix: the message was previously a plain string, so the
            # factor values were never interpolated.
            raise ValueError(
                f"orig_factor ({orig_factor}) and dest_factor ({dest_factor}) must "
                "sum to 1.0"
            )

        _prod_attr_matrix = self._matrix_balancing(
            tripends_df[f"{trk_class}_productions"].to_numpy(),
            tripends_df[f"{trk_class}_attractions"].to_numpy(),
            trk_class,
        )
        # Average the balanced matrix with its transpose to convert
        # production/attraction format to origin/destination format.
        daily_demand = (
            orig_factor * _prod_attr_matrix
            + dest_factor * _prod_attr_matrix.transpose()
        )

        self.logger.log(
            f"{trk_class}, prod sum: {_prod_attr_matrix.sum()}, "
            f"daily sum: {daily_demand.sum()}",
            level="DEBUG",
        )

        return daily_demand

    def _matrix_balancing(
        self,
        orig_totals: NumpyArray,
        dest_totals: NumpyArray,
        trk_class: str,
    ) -> NumpyArray:
        """Distribute origins and destinations based on friction factors for a given truck class.

        Uses the Emme matrix balancing tool, so all inputs are first
        transferred into the Emmebank via the component's matrix cache.

        Args:
            orig_totals: Total demand for origins as a numpy array
            dest_totals: Total demand for destinations as a numpy array
            trk_class (str): Truck class name

        Returns:
            NumpyArray: balanced, controlled-rounded demand matrix.
        """
        matrix_balancing = self.controller.emme_manager.modeller.tool(
            "inro.emme.matrix_calculation.matrix_balancing"
        )
        matrix_round = self.controller.emme_manager.modeller.tool(
            "inro.emme.matrix_calculation.matrix_controlled_rounding"
        )

        # Transfer numpy to emmebank
        _ff_emme_mx_name = self.component.matrix_cache.set_data(
            f"{trk_class}_friction",
            self.friction_factor_matrices(trk_class),
            matrix_type="FULL",
        ).name

        _orig_tots_emme_mx_name = self.component.matrix_cache.set_data(
            f"{trk_class}_prod", orig_totals, matrix_type="ORIGIN"
        ).name

        _dest_tots_emme_mx_name = self.component.matrix_cache.set_data(
            f"{trk_class}_attr", dest_totals, matrix_type="DESTINATION"
        ).name

        # Create a destination matrix for output to live in Emmebank
        _result_emme_mx_name = self.component.matrix_cache.get_or_init_matrix(
            f"{trk_class}_daily_demand"
        ).name

        spec = {
            "od_values_to_balance": _ff_emme_mx_name,
            "origin_totals": _orig_tots_emme_mx_name,
            "destination_totals": _dest_tots_emme_mx_name,
            "allowable_difference": 0.01,
            "max_relative_error": self.config.max_balance_relative_error,
            "max_iterations": self.config.max_balance_iterations,
            "results": {"od_balanced_values": _result_emme_mx_name},
            "performance_settings": {
                "allowed_memory": None,
                "number_of_processors": self.controller.num_processors,
            },
            "type": "MATRIX_BALANCING",
        }
        matrix_balancing(spec, scenario=self.component.emme_scenario)

        # Controlled rounding keeps row/column totals consistent after
        # rounding small demand values.
        matrix_round(
            _result_emme_mx_name,
            _result_emme_mx_name,
            min_demand=0.01,
            values_to_round="ALL_NON_ZERO",
            scenario=self.component.emme_scenario,
        )

        return self.component.matrix_cache.get_data(_result_emme_mx_name)

friction_factors property

Table of friction factors for each time band by truck class.

Returns:

Type Description

pd.DataFrame: DataFrame of friction factors read from disk.

k_factors property

Zone-to-zone values of truck K factors.

Returns:

Name Type Description
NumpyArray

Zone-to-zone values of truck K factors.

__init__(controller, component)

Constructor for the CommercialVehicleTripDistribution component.

Parameters:

Name Type Description Default
controller RunController

Run controller for model run.

required
component Component

Parent component of sub-component

required
Source code in tm2py\components\demand\commercial.py
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
def __init__(self, controller: RunController, component: Component):
    """Constructor for the CommercialVehicleTripDistribution component.

    Args:
        controller (RunController): Run controller for model run.
        component (Component): Parent component of sub-component
    """
    super().__init__(controller, component)

    self.config = self.component.config.trip_dist

    # Caches, populated lazily by the corresponding properties/methods.
    self._class_config = None
    self._k_factors = None
    self._friction_factors = None
    self._blended_skims = {}
    self._friction_factor_matrices = {}

blended_skims(mode)

Get blended skim. Creates it if doesn’t already exist.

Parameters:

Name Type Description Default
mode str

Mode for skim

required

Returns:

Name Type Description
_type_

description

Source code in tm2py\components\demand\commercial.py
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
def blended_skims(self, mode: str):
    """Return the blended skim for ``mode``, building and caching it on first use.

    Args:
        mode (str): Mode for skim

    Returns:
        Blended skim values for the requested mode.
    """
    if mode not in self._blended_skims:
        # The blend weights come from the parent component's truck impedances.
        _blend = self.component.trk_impedances[mode]["time_blend"]
        self._blended_skims[mode] = get_blended_skim(
            self.controller, mode=mode, blend=_blend
        )
    return self._blended_skims[mode]

friction_factor_matrices(trk_class, k_factors=None)

Zone to zone NumpyArray of impedances for a given truck class.

Parameters:

Name Type Description Default
trk_class str

Truck class abbreviated name

required
k_factors Union[None, NumpyArray]

If not None, gives an zone-by-zone array of k-factors–additive impedances to be added on top of friciton factors. Defaults to None.

None

Returns:

Name Type Description
NumpyArray NumpyArray

Zone-by-zone matrix of friction factors

Source code in tm2py\components\demand\commercial.py
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
def friction_factor_matrices(
    self, trk_class: str, k_factors: Union[None, NumpyArray] = None
) -> NumpyArray:
    """Zone to zone NumpyArray of impedances for a given truck class.

    Args:
        trk_class (str): Truck class abbreviated name
        k_factors (Union[None,NumpyArray]): If not None, gives a zone-by-zone array of
            k-factors--additive impedances to be added on top of friction factors.
            Defaults to None, in which case self.k_factors is used.

    Returns:
        NumpyArray: Zone-by-zone matrix of friction factors
    """
    if trk_class not in self._friction_factor_matrices:
        # Bug fix: the caller-supplied k_factors argument was previously
        # ignored and self.k_factors was always used.
        _k_factors = self.k_factors if k_factors is None else k_factors
        self._friction_factor_matrices[
            trk_class
        ] = self._calculate_friction_factor_matrix(
            trk_class,
            self.class_config[trk_class].impedance,
            _k_factors,
            self.class_config[trk_class].use_k_factors,
        )

    return self._friction_factor_matrices[trk_class]

run(tripends_df)

Run commercial vehicle trip distribution.

Source code in tm2py\components\demand\commercial.py
632
633
634
635
636
637
638
639
@LogStartEnd()
def run(self, tripends_df) -> Dict[str, NumpyArray]:
    """Run commercial vehicle trip distribution for every truck class."""
    return {
        _trk_class: self._distribute_ods(tripends_df, _trk_class)
        for _trk_class in self.component.classes
    }

validate_inputs()

Validate the inputs.

Source code in tm2py\components\demand\commercial.py
627
628
629
630
def validate_inputs(self):
    """Validate the inputs."""
    # TODO: add validation checks for this component's inputs
    return None

CommercialVehicleTripGeneration

Bases: Subcomponent

Commercial vehicle (truck) Trip Generation for 4 sizes of truck.

The four truck types are

(1) very small trucks (two-axle, four-tire), (2) small trucks (two-axle, six-tire), (3) medium trucks (three-axle), (4) large or combination (four or more axle) trucks.

Trip generation

Use linear regression models to generate trip ends, balancing attractions to productions. Based on BAYCAST truck model.

The truck trip generation models for small trucks (two-axle, six tire), medium trucks (three-axle), and large or combination (four or more axle) trucks are taken directly from the study: “I-880 Intermodal Corridor Study: Truck Travel in the San Francisco Bay Area”, prepared by Barton Aschman in December 1992. The coefficients are on page 223 of this report.

The very small truck generation model is based on the Phoenix four-tire truck model documented in the TMIP Quick Response Freight Manual.

Note that certain production models previously used SIC-based employment categories. To both maintain consistency with the BAYCAST truck model and update the model to use NAICS-based employment categories, new regression models were estimated relating the NAICS-based employment data with the SIC-based-predicted trips. The goal here is not to create a new truck model, but to mimic the old model with the available data. Please see the excel spreadsheet TruckModel.xlsx for details. The NAICS-based model results replicate the SIC-based model results quite well.

Source code in tm2py\components\demand\commercial.py
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
class CommercialVehicleTripGeneration(Subcomponent):
    """Commercial vehicle (truck) Trip Generation for 4 sizes of truck.

    The four truck types are:
        (1) very small trucks (two-axle, four-tire),
        (2) small trucks (two-axle, six-tire),
        (3) medium trucks (three-axle),
        (4) large or combination (four or more axle) trucks.

    Input:  (1) MAZ csv data file with the employment and household counts.
    Output: Trips by 4 truck sizes

    Trip generation
    ---------------
    Use linear regression models to generate trip ends,
    balancing attractions to productions. Based on BAYCAST truck model.

    The truck trip generation models for small trucks (two-axle, six tire),
    medium trucks (three-axle), and large or combination (four or more axle)
    trucks are taken directly from the study: "I-880 Intermodal Corridor Study:
    Truck Travel in the San Francisco Bay Area", prepared by Barton Aschman in
    December 1992.  The coefficients are on page 223 of this report.

    The very small truck generation model is based on the Phoenix four-tire
    truck model documented in the TMIP Quick Response Freight Manual.

    Note that certain production models previously used SIC-based employment
    categories.  To both maintain consistency with the BAYCAST truck model and
    update the model to use NAICS-based employment categories, new regression
    models were estimated relating the NAICS-based employment data with the
    SIC-based-predicted trips.  The goal here is not to create a new truck
    model, but to mimic the old model with the available data.  Please see
    the Excel spreadsheet TruckModel.xlsx for details.  The NAICS-based model
    results replicate the SIC-based model results quite well.
    """

    def __init__(self, controller: RunController, component: Component):
        """Constructor for the CommercialVehicleTripGeneration component.

        Args:
            controller (RunController): Run controller for model run.
            component (Component): Parent component of sub-component
        """
        super().__init__(controller, component)
        self.config = self.component.config.trip_gen

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO: implement input validation (e.g. required landuse columns)
        pass

    @LogStartEnd()
    def run(self):
        """Run commercial vehicle trip generation.

        Returns:
            pd.DataFrame: balanced production/attraction trip ends,
                aggregated by truck class.
        """
        _landuse_df = self._aggregate_landuse()
        _unbalanced_tripends_df = self._generate_trip_ends(_landuse_df)
        _balanced_tripends_df = self._balance_pa(_unbalanced_tripends_df)
        total_tripends_df = self._aggregate_by_class(_balanced_tripends_df)
        return total_tripends_df

    @LogStartEnd(level="DEBUG")
    def _aggregate_landuse(self) -> pd.DataFrame:
        """Aggregates landuse data from input CSV by MAZ to TAZ and employment groups.

        Aggregated columns:
        TOTEMP, total employment (same regardless of classification system)
        RETEMPN, retail trade employment per the NAICS classification system
        FPSEMPN, financial and professional services employment per NAICS
        HEREMPN, health, educational, and recreational employment per NAICS
        OTHEMPN, other employment per the NAICS classification system
        AGREMPN, agricultural employment per the NAICS classification system
        MWTEMPN, manufacturing, warehousing, and transportation employment per NAICS
        TOTHH, total households

        Returns:
            pd.DataFrame: landuse group totals by TAZ.
        """
        maz_data_file = self.get_abs_path(
            self.controller.config.scenario.maz_landuse_file
        )
        maz_input_data = pd.read_csv(maz_data_file)
        # Keep only MAZs whose TAZ is in the scenario's zone system.
        zones = self.component.emme_scenario.zone_numbers
        maz_input_data = maz_input_data[maz_input_data["TAZ_ORIGINAL"].isin(zones)]
        taz_input_data = maz_input_data.groupby(["TAZ_ORIGINAL"]).sum()
        # groupby already sorts by the key by default; sorting by the index
        # level name here is kept as a defensive no-op.
        taz_input_data = taz_input_data.sort_values(by="TAZ_ORIGINAL")
        # combine categories
        taz_landuse = pd.DataFrame()
        for total_column, sub_categories in _land_use_aggregation.items():
            taz_landuse[total_column] = taz_input_data[sub_categories].sum(axis=1)
        taz_landuse.reset_index(inplace=True)
        return taz_landuse

    @LogStartEnd(level="DEBUG")
    def _generate_trip_ends(self, landuse_df: pd.DataFrame) -> pd.DataFrame:
        """Generate productions and attractions by class based on landuse and truck trip rates.

        Args:
            landuse_df (pd.DataFrame): DataFrame with aggregated landuse data.
                Expected columns for landuse are: AGREMPN, RETEMPN, FPSEMPN, HEREMPN,
                MWTEMPN, OTHEMPN, TOTEMP, TOTHH

        Returns:
            pd.DataFrame: DataFrame with unbalanced production and attraction trip ends.
        """
        tripends_df = pd.DataFrame()

        # For each class, "production_formula" is iterated before
        # "attraction_formula", so the "linked" attraction branch below can
        # copy the production column created in the preceding iteration.
        _class_pa = itertools.product(
            self.config.classes,
            ["production_formula", "attraction_formula"],
        )

        # TODO Do this with multi-indexing rather than relying on column naming

        for _c, _pa in _class_pa:
            _trip_type = _c.purpose
            _trk_class = _c.name
            # "production_formula" -> "production"; "attraction_formula" -> "attraction".
            # Computed unconditionally: the previous conditional assignment
            # could leave the name unbound if a non-"*_formula" key were added.
            _pa_short = _pa.split("_")[0]

            # linked trips (non-garage-based) - attractions (equal productions)
            if (_trip_type == "linked") and (_pa_short == "attraction"):
                tripends_df[f"{_trip_type}_{_trk_class}_{_pa_short}s"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_productions"
                ]
            else:
                _constant = _c[_pa].constant
                _multiplier = _c[_pa].multiplier

                # Pivot the configured rate records into a single row keyed by
                # the landuse column names held in the "property" row.
                land_use_rates = pd.DataFrame(_c[_pa].land_use_rates).T
                land_use_rates = land_use_rates.rename(
                    columns=land_use_rates.loc["property"]
                ).drop("property", axis=0)

                # trips = sum over landuse columns of rate*value, scaled + offset
                _rate_trips_df = landuse_df.mul(land_use_rates.iloc[0])
                _trips_df = _rate_trips_df * _multiplier + _constant

                tripends_df[f"{_trip_type}_{_trk_class}_{_pa_short}s"] = _trips_df.sum(
                    axis=1
                ).round()

        return tripends_df

    @LogStartEnd(level="DEBUG")
    def _balance_pa(self, tripends_df: pd.DataFrame) -> pd.DataFrame:
        """Balance production and attractions.

        Args:
            tripends_df (pd.DataFrame): DataFrame with unbalanced production and attraction
                trip ends.

        Returns:
            pd.DataFrame: DataFrame with balanced production and attraction trip ends.

        Raises:
            ValueError: if a class's ``balance_to`` is neither "productions"
                nor "attractions".
        """

        for _c in self.config.classes:
            _trip_type = _c.purpose
            _trk_class = _c.name
            _balance_to = _c.balance_to

            _tots = {
                "attractions": tripends_df[
                    f"{_trip_type}_{_trk_class}_attractions"
                ].sum(),
                "productions": tripends_df[
                    f"{_trip_type}_{_trk_class}_productions"
                ].sum(),
            }

            # if productions OR attractions are zero, fill one with other
            if not _tots["attractions"]:
                tripends_df[f"{_trip_type}_{_trk_class}_attractions"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_productions"
                ]

            elif not _tots["productions"]:
                tripends_df[f"{_trip_type}_{_trk_class}_productions"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_attractions"
                ]

            # otherwise scale the non-controlled side so totals match
            elif _balance_to == "productions":
                tripends_df[f"{_trip_type}_{_trk_class}_attractions"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_attractions"
                ] * (_tots["productions"] / _tots["attractions"])

            elif _balance_to == "attractions":
                tripends_df[f"{_trip_type}_{_trk_class}_productions"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_productions"
                ] * (_tots["attractions"] / _tots["productions"])
            else:
                raise ValueError(f"{_balance_to} is not a valid balance_to value")
        return tripends_df

    @LogStartEnd(level="DEBUG")
    def _aggregate_by_class(self, tripends_df: pd.DataFrame) -> pd.DataFrame:
        """Sum tripends by class across trip purpose.

        Args:
            tripends_df (pd.DataFrame): DataFrame with balanced production and attraction

        Returns:
            pd.DataFrame: DataFrame with aggregated tripends by truck class.
                One ``{class}_productions`` and one ``{class}_attractions``
                column per truck class.
        """
        agg_tripends_df = pd.DataFrame()

        # NOTE(review): this iterates self.component.classes while
        # _generate_trip_ends iterates self.config.classes -- confirm both are
        # intended (here each element is used directly as a class-name string).
        _class_pa = itertools.product(
            self.component.classes,
            ["productions", "attractions"],
        )

        for _trk_class, _pa in _class_pa:
            _sum_cols = [
                c for c in tripends_df.columns if c.endswith(f"_{_trk_class}_{_pa}")
            ]
            agg_tripends_df[f"{_trk_class}_{_pa}"] = pd.Series(
                tripends_df[_sum_cols].sum(axis=1)
            )

        # BUG FIX: DataFrame.round returns a new frame; the previous code
        # called it without assigning, so the rounding was silently discarded.
        agg_tripends_df = agg_tripends_df.round(decimals=7)

        self.logger.log(agg_tripends_df.describe().to_string(), level="DEBUG")

        return agg_tripends_df

__init__(controller, component)

Constructor for the CommercialVehicleTripGeneration component.

Parameters:

Name Type Description Default
controller RunController

Run controller for model run.

required
component Component

Parent component of sub-component

required
Source code in tm2py\components\demand\commercial.py
222
223
224
225
226
227
228
229
230
def __init__(self, controller: RunController, component: Component):
    """Set up trip-generation sub-component state.

    Args:
        controller (RunController): Active run controller.
        component (Component): Parent component that owns this sub-component.
    """
    super().__init__(controller, component)
    # Shortcut to this sub-component's own section of the parent config.
    self.config = self.component.config.trip_gen

run()

Run commercial vehicle trip distribution.

Source code in tm2py\components\demand\commercial.py
237
238
239
240
241
242
243
244
@LogStartEnd()
def run(self):
    """Produce class-aggregated, balanced commercial vehicle trip ends."""
    # Pipeline: aggregate landuse -> generate trip ends -> balance P/A ->
    # aggregate by truck class.
    tripends = self._generate_trip_ends(self._aggregate_landuse())
    tripends = self._balance_pa(tripends)
    return self._aggregate_by_class(tripends)

validate_inputs()

Validate the inputs.

Source code in tm2py\components\demand\commercial.py
232
233
234
235
def validate_inputs(self):
    """Validate the inputs (not yet implemented)."""
    # TODO: add validation logic
    return None

Bases: ConfigItem

Truck model parameters.

Source code in tm2py\config.py
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
@dataclass(frozen=True)
class TruckConfig(ConfigItem):
    """Truck model parameters."""

    # Truck class definitions; the names here are expected to match the class
    # names configured for each sub-model below (see the disabled validator).
    classes: List[TruckClassConfig]
    impedances: List[ImpedanceConfig]
    trip_gen: TripGenerationConfig
    trip_dist: TripDistributionConfig
    time_of_day: TimeOfDayConfig
    toll_choice: TollChoiceConfig
    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str
    highway_demand_file: str

    # NOTE: The cross-field consistency validator below is intentionally
    # disabled by wrapping it in a bare (unused) string literal -- see the
    # TODO inside. When re-enabled it should assert that the names in
    # `classes` match the class names in each sub-model config.
    """
    @validator("classes")
    def class_consistency(cls, v, values):
        # TODO Can't get to work righ tnow
        _class_names = [c.name for c in v]
        _gen_classes = [c.name for c in values["trip_gen"]]
        _dist_classes = [c.name for c in values["trip_dist"]]
        _time_classes = [c.name for c in values["time_split"]]
        _toll_classes = [c.name for c in values["toll_choice"]]

        assert (
            _class_names == _gen_classes
        ), "truck.classes ({_class_names}) doesn't equal\
            class names in truck.trip_gen ({_gen_classes})."
        assert (
            _class_names == _dist_classes
        ), "truck.classes ({_class_names}) doesn't  equal\
            class names in truck.trip_dist ({_dist_classes})."
        assert (
            _class_names == _time_classes
        ), "truck.classes ({_class_names}) doesn't  equal\
            class names in truck.time_split ({_time_classes})."
        assert (
            _class_names == _toll_classes
        ), "truck.classes ({_class_names}) doesn't equal\
            class names in truck.toll_choice ({_toll_classes})."

        return v
    """

highway_demand_file instance-attribute

@validator(“classes”) def class_consistency(cls, v, values): # TODO Can’t get to work right now _class_names = [c.name for c in v] _gen_classes = [c.name for c in values[“trip_gen”]] _dist_classes = [c.name for c in values[“trip_dist”]] _time_classes = [c.name for c in values[“time_split”]] _toll_classes = [c.name for c in values[“toll_choice”]]

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
14
assert (
    _class_names == _gen_classes
), "truck.classes ({_class_names}) doesn't equal            class names in truck.trip_gen ({_gen_classes})."
assert (
    _class_names == _dist_classes
), "truck.classes ({_class_names}) doesn't  equal            class names in truck.trip_dist ({_dist_classes})."
assert (
    _class_names == _time_classes
), "truck.classes ({_class_names}) doesn't  equal            class names in truck.time_split ({_time_classes})."
assert (
    _class_names == _toll_classes
), "truck.classes ({_class_names}) doesn't equal            class names in truck.toll_choice ({_toll_classes})."

return v

Inter-regional Demand

Module containing Internal <-> External trip model.

ExternalDemand

Bases: Subcomponent

Forecast of daily internal<->external demand based on growth from a base year.

Create a daily matrix that includes internal/external, external/internal, and external/external passenger vehicle travel (based on Census 2000 journey-to-work flows). These trip tables are based on total traffic counts, which include trucks, but trucks are not explicitly segmented from passenger vehicles. This short-coming is a hold-over from BAYCAST and will be addressed in the next model update.

The row and column totals are taken from count station data provided by Caltrans. The BAYCAST 2006 IX matrix is used as the base matrix and scaled to match forecast year growth assumptions. The script generates estimates for the model forecast year; the growth rates were discussed with neighboring MPOs as part of the SB 375 target setting process.

Input: (1) Station-specific assumed growth rates for each forecast year (the lack of external/external movements through the region allows simple factoring of cells without re-balancing); (2) An input base matrix derived from the Census journey-to-work data.

Output: (1) Four-table, forecast-year specific trip tables containing internal/external, external/internal, and external/external vehicle (xxx or person xxx) travel.

Governed by class DemandGrowth Config:

    highway_demand_file:
    input_demand_file:
    input_demand_matrixname_tmpl:
    modes:
    reference_year:
    annual_growth_rate:
    special_gateway_adjust:

Source code in tm2py\components\demand\internal_external.py
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
class ExternalDemand(Subcomponent):
    """Forecast of daily internal<->external demand based on growth from a base year.

    Create a daily matrix that includes internal/external, external/internal,
    and external/external passenger vehicle travel (based on Census 2000 journey-to-work flows).
    These trip tables are based on total traffic counts, which include trucks, but trucks are
    not explicitly segmented from passenger vehicles.  This short-coming is a hold-over from
    BAYCAST and will be addressed in the next model update.

    The row and column totals are taken from count station data provided by Caltrans.  The
    BAYCAST 2006 IX matrix is used as the base matrix and scaled to match forecast year growth
    assumptions. The script generates estimates for the model forecast year; the growth rates
    were discussed with neighboring MPOs as part of the SB 375 target setting process.

     Input:  (1)  Station-specific assumed growth rates for each forecast year (the lack of
                  external/external movements through the region allows simple factoring of
                  cells without re-balancing);
             (2)  An input base matrix derived from the Census journey-to-work data.

     Output: (1) Four-table, forecast-year specific trip tables containing internal/external,
                 external/internal, and external/external vehicle (xxx or person xxx) travel.


    Governed by class DemandGrowth Config:
    ```
        highway_demand_file:
        input_demand_file:
        input_demand_matrixname_tmpl:
        modes:
        reference_year:
        annual_growth_rate:
        special_gateway_adjust:
    ```
    """

    def __init__(self, controller, component):
        """Constructor for the ExternalDemand subcomponent.

        Args:
            controller: Run controller for model run.
            component: Parent component of this sub-component.
        """
        super().__init__(controller, component)
        self.config = self.component.config.demand
        # Loaded lazily via the base_demand property
        self._base_demand = None

    @property
    def year(self):
        """Forecast (scenario) year from the scenario config."""
        return self.controller.config.scenario.year

    @property
    def modes(self):
        """Demand classes (mode names) from the parent component."""
        return self.component.classes

    @property
    def input_demand_file(self):
        """Absolute path to the input base-year demand OMX file."""
        return self.get_abs_path(self.config.input_demand_file)

    @property
    def base_demand(self):
        """Base-year demand matrices by mode, loaded on first access."""
        if self._base_demand is None:
            self._load_base_demand()
        return self._base_demand

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO: implement input validation
        pass

    def _load_base_demand(self):
        """Load reference matrices from .omx to self._base_demand.

        Uses (from this subcomponent's demand config):
            input file: self.config.input_demand_file
            matrix name template: self.config.input_demand_matrixname_tmpl
            modes: self.modes (the parent component's classes)
        """
        _mx_name_tmpl = self.config.input_demand_matrixname_tmpl
        _matrices = {m: _mx_name_tmpl.format(mode=m.upper()) for m in self.modes}

        self._base_demand = omx_to_dict(self.input_demand_file, matrices=_matrices)

    def run(self, base_demand: Dict[str, NumpyArray] = None) -> Dict[str, NumpyArray]:
        """Calculate adjusted demand based on scenario year and growth rates.

        Steps:
        - 1.1 apply special factors to certain gateways based on ID
        - 1.2 apply gateway-specific annual growth rates to results of step 1
           to generate year specific forecast

        Args:
            base_demand: dictionary of input daily demand matrices (numpy
                arrays) by mode; defaults to the lazily-loaded base-year
                matrices when None.

        Returns:
             Dictionary of Numpy matrices of daily PA by class mode
        """
        if base_demand is None:
            base_demand = self.base_demand

        # Build a single adjustment matrix applied to all input matrices:
        # special gateway adjustments by zone index, then compounded annual
        # growth over the elapsed forecast years.
        _num_years = self.year - self.config.reference_year
        # assumes a "da" matrix is always present and all matrices share its
        # shape -- TODO confirm against config.modes
        _adj_matrix = np.ones(base_demand["da"].shape)

        _adj_matrix = create_matrix_factors(
            default_matrix=_adj_matrix,
            matrix_factors=self.config.special_gateway_adjust,
        )

        _adj_matrix = create_matrix_factors(
            default_matrix=_adj_matrix,
            matrix_factors=self.config.annual_growth_rate,
            periods=_num_years,
        )

        return {
            _mode: _demand * _adj_matrix for _mode, _demand in base_demand.items()
        }

run(base_demand=None)

Calculate adjusted demand based on scenario year and growth rates.

Steps: - 1.1 apply special factors to certain gateways based on ID - 1.2 apply gateway-specific annual growth rates to results of step 1 to generate year specific forecast

Parameters:

Name Type Description Default
demand

dictionary of input daily demand matrices (numpy arrays)

required

Returns:

Type Description
Dict[str, NumpyArray]

Dictionary of Numpy matrices of daily PA by class mode

Source code in tm2py\components\demand\internal_external.py
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
def run(self, base_demand: Dict[str, NumpyArray] = None) -> Dict[str, NumpyArray]:
    """Calculate adjusted demand based on scenario year and growth rates.

    Steps:
    - 1.1 apply special factors to certain gateways based on ID
    - 1.2 apply gateway-specific annual growth rates to results of step 1
       to generate year specific forecast

    Args:
        base_demand: dictionary of input daily demand matrices (numpy arrays);
            defaults to the lazily-loaded base-year matrices when None.

    Returns:
         Dictionary of Numpy matrices of daily PA by class mode
    """
    if base_demand is None:
        base_demand = self.base_demand

    years_elapsed = self.year - self.config.reference_year

    # One adjustment matrix is applied to every mode's matrix:
    # first the zone-index-based special gateway factors, then the
    # compounded annual growth factors.
    adjustment = np.ones(base_demand["da"].shape)
    adjustment = create_matrix_factors(
        default_matrix=adjustment,
        matrix_factors=self.config.special_gateway_adjust,
    )
    adjustment = create_matrix_factors(
        default_matrix=adjustment,
        matrix_factors=self.config.annual_growth_rate,
        periods=years_elapsed,
    )

    return {mode: demand * adjustment for mode, demand in base_demand.items()}

ExternalTollChoice

Bases: Subcomponent

Toll choice

Apply a binomial choice model for drive alone, shared ride 2, and shared ride 3 internal/external personal vehicle travel.

(1) Time-period-specific origin/destination matrices of drive alone, shared ride 2,

and share ride 3+ internal/external trip tables.

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
    (2) Skims providing the time and cost for value toll and non-value toll paths for each

        traffic_skims_{period}.omx, where {period} is the time period ID,
        {class} is the class name da, sr2, sr2, with the following matrix names
          Non-value-toll paying time: {period}_{class}_time,
          Non-value-toll distance: {period}_{class}_dist,
          Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
          Value-toll paying time is: {period}_{class}toll_time,
          Value-toll paying distance is: {period}_{class}toll_dist,
          Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
          Value-toll value toll is: {period}_{class}toll_valuetoll_{class},

Output: Five, six-table trip matrices, one for each time period. Two tables for each vehicle class representing value-toll paying path trips and non-value-toll paying path trips

Governed by TollClassConfig:

1
2
3
4
5
6
7
8
```
classes:
value_of_time:
operating_cost_per_mile:
property_to_skim_toll:
property_to_skim_notoll:
utility:
```
Source code in tm2py\components\demand\internal_external.py
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
class ExternalTollChoice(Subcomponent):
    """Toll choice
    -----------
    Apply a binomial choice model for drive alone, shared ride 2, and shared ride 3
    internal/external personal vehicle travel.

    Input:  (1) Time-period-specific origin/destination matrices of drive alone, shared ride 2,
                and share ride 3+ internal/external trip tables.
            (2) Skims providing the time and cost for value toll and non-value toll paths for each

                traffic_skims_{period}.omx, where {period} is the time period ID,
                {class} is the class name da, sr2, sr3, with the following matrix names
                  Non-value-toll paying time: {period}_{class}_time,
                  Non-value-toll distance: {period}_{class}_dist,
                  Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
                  Value-toll paying time is: {period}_{class}toll_time,
                  Value-toll paying distance is: {period}_{class}toll_dist,
                  Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
                  Value-toll value toll is: {period}_{class}toll_valuetoll_{class},

     Output: Five, six-table trip matrices, one for each time period.  Two tables for each vehicle
             class representing value-toll paying path trips and non-value-toll paying path trips

    Governed by TollClassConfig:

        ```
        classes:
        value_of_time:
        operating_cost_per_mile:
        property_to_skim_toll:
        property_to_skim_notoll:
        utility:
        ```
    """

    def __init__(self, controller, component):
        """Constructor for the ExternalTollChoice subcomponent.

        Args:
            controller: Run controller for model run.
            component: Parent component of this sub-component.
        """
        super().__init__(controller, component)

        self.config = self.component.config.toll_choice

        self.sub_components = {
            "toll choice calculator": TollChoiceCalculator(
                controller, component, self.config
            ),
        }

        # shortcut
        self._toll_choice = self.sub_components["toll choice calculator"]
        # NOTE(review): "trk" suffix looks truck-specific although this
        # component handles personal-vehicle IX travel -- confirm intended.
        self._toll_choice.toll_skim_suffix = "trk"

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO: implement input validation
        pass

    @LogStartEnd()
    def run(
        self, period_demand: Dict[str, Dict[str, NumpyArray]]
    ) -> Dict[str, Dict[str, NumpyArray]]:
        """Binary toll / non-toll choice model by class.

        input: result of _ix_time_of_day
        skims:
            traffic_skims_{period}.omx, where {period} is the time period ID,
            {class} is the class name da, sr2, sr3, with the following matrix names
              Non-value-toll paying time: {period}_{class}_time,
              Non-value-toll distance: {period}_{class}_dist,
              Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
              Value-toll paying time is: {period}_{class}toll_time,
              Value-toll paying distance is: {period}_{class}toll_dist,
              Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
              Value-toll value toll is: {period}_{class}toll_valuetoll_{class},

        STEPS:
        3.1: For each time of day, for each da, sr2, sr3, calculate
             - utility of toll and nontoll
             - probability of toll / nontoll
             - split demand into toll and nontoll matrices

        Raises:
            ValueError: if a configured time period (in any casing) is not a
                key of ``period_demand``.
        """

        _time_class_combos = itertools.product(
            self.time_period_names, self.component.classes
        )

        class_demands = defaultdict(dict)
        for _time_period, _class in _time_class_combos:
            # Normalize the period key's casing to whatever period_demand uses.
            if _time_period in period_demand:
                pass  # already matches as-is
            elif _time_period.lower() in period_demand:
                _time_period = _time_period.lower()
            elif _time_period.upper() in period_demand:
                _time_period = _time_period.upper()
            else:
                raise ValueError(
                    f"Period {_time_period} not an available time period.\
                    Available periods are:  {period_demand.keys()}"
                )

            _split_demand = self._toll_choice.run(
                period_demand[_time_period][_class], _class, _time_period
            )

            class_demands[_time_period][_class] = _split_demand["non toll"]
            class_demands[_time_period][f"{_class}toll"] = _split_demand["toll"]
        return class_demands

run(period_demand)

Binary toll / non-toll choice model by class.

input: result of `_ix_time_of_day`. skims: `traffic_skims_{period}.omx`, where {period} is the time period ID, {class} is the class name da, sr2, sr3, with the following matrix names: Non-value-toll paying time: `{period}_{class}_time`, Non-value-toll distance: `{period}_{class}_dist`, Non-value-toll bridge toll: `{period}_{class}_bridgetoll_{class}`, Value-toll paying time: `{period}_{class}toll_time`, Value-toll paying distance: `{period}_{class}toll_dist`, Value-toll bridge toll: `{period}_{class}toll_bridgetoll_{class}`, Value-toll value toll: `{period}_{class}toll_valuetoll_{class}`

STEPS: 3.1: For each time of day, for each da, sr2, sr3, calculate - utility of toll and nontoll - probability of toll / nontoll - split demand into toll and nontoll matrices

Source code in tm2py\components\demand\internal_external.py
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
@LogStartEnd()
def run(
    self, period_demand: Dict[str, Dict[str, NumpyArray]]
) -> Dict[str, Dict[str, NumpyArray]]:
    """Binary toll / non-toll choice model by class.

    input: result of _ix_time_of_day
    skims:
        traffic_skims_{period}.omx, where {period} is the time period ID,
        {class} is the class name da, sr2, sr2, with the following matrix names
          Non-value-toll paying time: {period}_{class}_time,
          Non-value-toll distance: {period}_{class}_dist,
          Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
          Value-toll paying time is: {period}_{class}toll_time,
          Value-toll paying distance is: {period}_{class}toll_dist,
          Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
          Value-toll value toll is: {period}_{class}toll_valuetoll_{class},

    STEPS:
    3.1: For each time of day, for each da, sr2, sr3, calculate
         - utility of toll and nontoll
         - probability of toll / nontoll
         - split demand into toll and nontoll matrices

    """

    combos = itertools.product(self.time_period_names, self.component.classes)

    class_demands = defaultdict(dict)
    for _time_period, _class in combos:
        # Match the configured period name against period_demand's keys,
        # accepting as-is, lower-case, or upper-case spellings (in that order).
        _key = next(
            (
                k
                for k in (_time_period, _time_period.lower(), _time_period.upper())
                if k in period_demand
            ),
            None,
        )
        if _key is None:
            raise ValueError(
                f"Period {_time_period} not an available time period.\
                Available periods are:  {period_demand.keys()}"
            )
        _time_period = _key

        split = self._toll_choice.run(
            period_demand[_time_period][_class], _class, _time_period
        )

        class_demands[_time_period][_class] = split["non toll"]
        class_demands[_time_period][f"{_class}toll"] = split["toll"]
    return class_demands

InternalExternal

Bases: Component

Develop Internal <-> External trip tables from land use and impedances.

  1. Grow demand from base year using static rates ::ExternalDemand
  2. Split by time of day using static factors ::TimePeriodSplit
  3. Apply basic toll binomial choice model: ::ExternalTollChoice
Governed by InternalExternalConfig
Source code in tm2py\components\demand\internal_external.py
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
class InternalExternal(Component):
    """Build Internal <-> External trip tables from land use and impedances.

    The component chains three sub-models, run in order:
      1. ExternalDemand: grow demand from the base year using static rates.
      2. TimePeriodSplit: split daily demand by time of day with static factors.
      3. ExternalTollChoice: apply a basic binomial toll choice model.

    Governed by InternalExternalConfig:
        highway_demand_file:
        input_demand_file:
        input_demand_matrixname_tmpl:
        modes:
        reference_year:
        annual_growth_rate: List[MatrixFactorConfig]
        time_of_day: TimeOfDayConfig
        toll_choice: TollChoiceConfig
        special_gateway_adjust: Optional[List[MatrixFactorConfig]]
    """

    def __init__(self, controller: "RunController"):
        super().__init__(controller)
        self.config = self.controller.config.internal_external

        # Sub-models keyed by stage name; run() executes them in this order.
        self.sub_components = {
            "demand forecast": ExternalDemand(controller, self),
            "time of day": TimePeriodSplit(
                controller, self, self.config.time_of_day.classes[0].time_period_split
            ),
            "toll choice": ExternalTollChoice(controller, self),
        }

    @property
    def classes(self):
        """Assignment class (mode) names from the component configuration."""
        return self.config.modes

    def validate_inputs(self):
        """Validate inputs to component."""
        ## TODO
        pass

    @LogStartEnd()
    def run(self):
        """Run internal/external travel demand component."""

        forecast = self.sub_components["demand forecast"]
        tod_split = self.sub_components["time of day"]
        toll_choice = self.sub_components["toll choice"]
        self._export_results(toll_choice.run(tod_split.run(forecast.run())))

    @LogStartEnd()
    def _export_results(self, demand: Dict[str, Dict[str, NumpyArray]]):
        """Export assignable class demands to OMX files by time-of-day."""
        out_dir = self.get_abs_path(self.config.output_trip_table_directory)
        os.makedirs(out_dir, exist_ok=True)
        for time_period, tables in demand.items():
            out_path = os.path.join(
                out_dir,
                self.config.outfile_trip_table_tmp.format(period=time_period),
            )
            with OMXManager(out_path, "w") as omx_file:
                for table_name, table in tables.items():
                    omx_file.write_array(table, table_name)

run()

Run internal/external travel demand component.

Source code in tm2py\components\demand\internal_external.py
67
68
69
70
71
72
73
74
@LogStartEnd()
def run(self):
    """Run internal/external travel demand component."""

    forecast = self.sub_components["demand forecast"]
    tod_split = self.sub_components["time of day"]
    toll_choice = self.sub_components["toll choice"]
    # Each stage feeds the next; final class demands are exported to OMX.
    self._export_results(toll_choice.run(tod_split.run(forecast.run())))

validate_inputs()

Validate inputs to component.

Source code in tm2py\components\demand\internal_external.py
62
63
64
65
def validate_inputs(self):
    """Validate inputs to component."""
    ## TODO: input validation not yet implemented; intentionally a no-op
    pass

Bases: ConfigItem

Internal <-> External model parameters.

Source code in tm2py\config.py
514
515
516
517
518
519
520
521
522
523
524
@dataclass(frozen=True)
class InternalExternalConfig(ConfigItem):
    """Internal <-> External model parameters."""

    # directory for the exported per-period trip tables (resolved via get_abs_path)
    output_trip_table_directory: pathlib.Path
    # filename template for the per-period trip table, formatted with period=<name>
    outfile_trip_table_tmp: str
    # NOTE(review): not referenced in the code shown here -- confirm consumer
    highway_demand_file: str
    # assignment class / mode names
    modes: List[str]
    # base-year demand growth settings
    demand: DemandGrowth
    # time-of-day split factors
    time_of_day: TimeOfDayConfig
    # binomial toll choice model parameters
    toll_choice: TollChoiceConfig

Visitor Demand

Visitor module.

Highway Network Components

Module for highway network preparation steps.

Creates required attributes and populates input values needed for highway assignments. The toll values, VDFs, per-class cost (tolls+operating costs), modes and skim link attributes are calculated.

The following keys and tables are used from the config

highway.tolls.file_path: relative path to input toll file highway.tolls.src_vehicle_group_names: names used in tolls file for toll class values highway.tolls.dst_vehicle_group_names: corresponding names used in network attributes toll classes highway.tolls.valuetoll_start_tollbooth_code: index to split point bridge tolls (< this value) from distance value tolls (>= this value) highway.classes: the list of assignment classes, see the notes under highway_assign for detailed explanation highway.capclass_lookup: the lookup table mapping the link @capclass setting to capacity (@capacity), free_flow_speed (@free_flow_speed) and critical_speed (used to calculate @ja for akcelik type functions) highway.generic_highway_mode_code: unique (with other mode_codes) single character used to label entire auto network in Emme highway.maz_to_maz.mode_code: unique (with other mode_codes) single character used to label MAZ local auto network including connectors

The following link attributes are created (overwritten) and are subsequently used in the highway assignments. - “@flow_XX”: link PCE flows per class, where XX is the class name in the config - “@maz_flow”: Assigned MAZ-to-MAZ flow

The following attributes are calculated
  • vdf: volume delay function to use
  • “@capacity”: total link capacity
  • “@ja”: akcelik delay parameter
  • “@hov_length”: length with HOV lanes
  • “@toll_length”: length with tolls
  • “@bridgetoll_YY”: the bridge toll for class subgroup YY
  • “@valuetoll_YY”: the “value”, non-bridge toll for class subgroup YY
  • “@cost_YY”: total cost for class YY

PrepareNetwork

Bases: Component

Highway network preparation.

Source code in tm2py\components\network\highway\highway_network.py
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
class PrepareNetwork(Component):
    """Highway network preparation.

    Creates and populates the link/node extra attributes required by the
    highway assignment: tolls, capacities and VDF parameters, per-class
    modes and costs, skim lengths, and reliability attributes. The updated
    network is published back to each time-period scenario.
    """

    def __init__(self, controller: "RunController"):
        """Constructor for PrepareNetwork.

        Args:
            controller (RunController): Reference to run controller object.
        """
        super().__init__(controller)
        self.config = self.controller.config.highway
        self._emme_manager = self.controller.emme_manager
        # lazily initialized via the highway_emmebank / highway_scenarios properties
        self._highway_emmebank = None
        self._highway_scenarios = None

    @LogStartEnd("Prepare network attributes and modes")
    def run(self):
        """Run network preparation step for each time-period scenario."""
        for time_period in self.time_period_names:
            with self.controller.emme_manager.logbook_trace(
                f"prepare for highway assignment {time_period}"
            ):
                scenario = self.highway_emmebank.scenario(time_period)
                self._create_class_attributes(scenario, time_period)
                network = scenario.get_network()
                self._set_tolls(network, time_period)
                self._set_vdf_attributes(network, time_period)
                self._set_link_modes(network)
                self._calc_link_skim_lengths(network)
                self._calc_link_class_costs(network)
                self._calc_interchange_distance(network)
                self._calc_link_static_reliability(network)
                scenario.publish_network(network)

    @property
    def highway_emmebank(self):
        """Highway Emme bank, fetched from the Emme manager on first use."""
        if not self._highway_emmebank:
            self._highway_emmebank = self.controller.emme_manager.highway_emmebank
        return self._highway_emmebank

    @property
    def highway_scenarios(self):
        """Mapping of time period name to Emme scenario, built on first use."""
        if self._highway_scenarios is None:
            self._highway_scenarios = {
                tp: self.highway_emmebank.scenario(tp) for tp in self.time_period_names
            }
        return self._highway_scenarios

    def validate_inputs(self):
        """Validate inputs files are correct, raise if an error is found.

        Raises:
            FileNotFoundError: if the tolls file does not exist.
            FileFormatError: if the tolls file is missing required columns.
        """
        toll_file_path = self.get_abs_path(self.config.tolls.file_path)
        if not os.path.exists(toll_file_path):
            self.logger.log(
                f"Tolls file (config.highway.tolls.file_path) does not exist: {toll_file_path}",
                level="ERROR",
            )
            raise FileNotFoundError(f"Tolls file does not exist: {toll_file_path}")
        src_veh_groups = self.config.tolls.src_vehicle_group_names
        # expected columns: fac_index plus toll<period>_<vehicle group>
        columns = ["fac_index"]
        for time_period in self.controller.config.time_periods:
            for vehicle in src_veh_groups:
                columns.append(f"toll{time_period.name.lower()}_{vehicle}")
        with open(toll_file_path, "r", encoding="UTF8") as toll_file:
            header = set(h.strip() for h in next(toll_file).split(","))
            missing = []
            for column in columns:
                if column not in header:
                    missing.append(column)
                    self.logger.log(
                        f"Tolls file missing column: {column}", level="ERROR"
                    )
        if missing:
            raise FileFormatError(
                f"Tolls file missing {len(missing)} columns: {', '.join(missing)}"
            )

    def _create_class_attributes(self, scenario: EmmeScenario, time_period: str):
        """Create required network attributes including per-class cost and flow attributes.

        Args:
            scenario: Emme scenario to create the attributes in.
            time_period: time period name used in attribute descriptions.
        """
        create_attribute = self.controller.emme_manager.tool(
            "inro.emme.data.extra_attribute.create_extra_attribute"
        )
        attributes = {
            "LINK": [
                ("@capacity", "total link capacity"),
                ("@ja", "akcelik delay parameter"),
                ("@maz_flow", "Assigned MAZ-to-MAZ flow"),
                ("@hov_length", "length with HOV lanes"),
                ("@toll_length", "length with tolls"),
                ("@intdist_down", "dist to the closest d-stream interchange"),
                ("@intdist_up", "dist from the closest upstream int"),
                ("@static_rel", "static reliability"),
                ("@reliability", "link total reliability"),
                ("@reliability_sq", "link total reliability variance"),
                # NOTE(review): description duplicates @reliability's -- confirm intended
                ("@auto_time", "link total reliability"),
            ],
            "NODE": [
                ("@interchange", "interchange"),
            ],
        }
        # toll field attributes by bridge and value and toll definition
        dst_veh_groups = self.config.tolls.dst_vehicle_group_names
        for dst_veh in dst_veh_groups:
            for toll_type in "bridge", "value":
                attributes["LINK"].append(
                    (
                        f"@{toll_type}toll_{dst_veh}",
                        f"{toll_type} toll value for {dst_veh}",
                    )
                )
        # results for link cost and assigned flow
        for assign_class in self.config.classes:
            attributes["LINK"].append(
                (
                    f"@cost_{assign_class.name.lower()}",
                    f'{time_period} {assign_class["description"]} total costs'[:40],
                )
            )
            attributes["LINK"].append(
                (
                    f"@flow_{assign_class.name.lower()}",
                    f'{time_period} {assign_class["description"]} link volume'[:40],
                )
            )
        for domain, attrs in attributes.items():
            for name, desc in attrs:
                create_attribute(domain, name, desc, overwrite=True, scenario=scenario)

    def _set_tolls(self, network: EmmeNetwork, time_period: str):
        """Set the tolls in the network from the toll reference file.

        Bridge tolls (tollbooth code below valuetoll_start_tollbooth_code) are
        fixed per crossing; value tolls are per-mile and scaled by link length.
        Both are stored in cents (input dollars * 100).
        """
        toll_index = self._get_toll_indices()
        src_veh_groups = self.config.tolls.src_vehicle_group_names
        dst_veh_groups = self.config.tolls.dst_vehicle_group_names
        valuetoll_start_tollbooth_code = (
            self.config.tolls.valuetoll_start_tollbooth_code
        )
        for link in network.links():
            if link["@tollbooth"] <= 0:
                continue
            # lookup key encodes tollbooth, toll segment and use class
            # BUG FIX: previously the value-toll branch reused the index
            # computed for an earlier bridge-toll link (stale lookup key);
            # the key is now computed once per link before branching.
            index = int(
                link["@tollbooth"] * 1000
                + link["@tollseg"] * 10
                + link["@useclass"]
            )
            data_row = toll_index.get(index)
            if data_row is None:
                self.logger.warn(
                    f"set tolls failed index lookup {index}, link {link.id}",
                    indent=True,
                )
                continue  # tolls will remain at zero
            if link["@tollbooth"] < valuetoll_start_tollbooth_code:
                # set bridgetoll: fixed amount per crossing
                for src_veh, dst_veh in zip(src_veh_groups, dst_veh_groups):
                    link[f"@bridgetoll_{dst_veh}"] = (
                        float(data_row[f"toll{time_period.lower()}_{src_veh}"]) * 100
                    )
            else:
                # set valuetoll: per-mile rate scaled by link length
                for src_veh, dst_veh in zip(src_veh_groups, dst_veh_groups):
                    link[f"@valuetoll_{dst_veh}"] = (
                        float(data_row[f"toll{time_period.lower()}_{src_veh}"])
                        * link.length
                        * 100
                    )

    def _get_toll_indices(self) -> Dict[int, Dict[str, str]]:
        """Get the mapping of toll lookup table from the toll reference file.

        Returns:
            Mapping from fac_index (int) to the row of column name -> value.
        """
        toll_file_path = self.get_abs_path(self.config.tolls.file_path)
        self.logger.debug(f"toll_file_path {toll_file_path}", indent=True)
        tolls = {}
        with open(toll_file_path, "r", encoding="UTF8") as toll_file:
            header = [h.strip() for h in next(toll_file).split(",")]
            for line in toll_file:
                data = dict(zip(header, line.split(",")))
                tolls[int(data["fac_index"])] = data
        return tolls

    def _set_vdf_attributes(self, network: EmmeNetwork, time_period: str):
        """Set capacity, VDF and critical speed on links."""
        capacity_map = {}
        critical_speed_map = {}
        for row in self.config.capclass_lookup:
            if row.get("capacity") is not None:
                capacity_map[row["capclass"]] = row.get("capacity")
            if row.get("critical_speed") is not None:
                critical_speed_map[row["capclass"]] = row.get("critical_speed")
        # capacity factor by time period; assumes period names are upper-case
        tp_mapping = {
            tp.name.upper(): tp.highway_capacity_factor
            for tp in self.controller.config.time_periods
        }
        period_capacity_factor = tp_mapping[time_period]
        # VDF numbers using the akcelik form, which needs the @ja parameter
        akcelik_vdfs = [3, 4, 5, 7, 8, 10, 11, 12, 13, 14]
        for link in network.links():
            cap_lanehour = capacity_map[link["@capclass"]]
            link["@capacity"] = cap_lanehour * period_capacity_factor * link["@lanes"]
            link.volume_delay_func = int(link["@ft"])
            # re-mapping links with type 99 to type 7 "local road of minor importance"
            if link.volume_delay_func == 99:
                link.volume_delay_func = 7
            # num_lanes not used directly, but set for reference
            link.num_lanes = max(min(9.9, link["@lanes"]), 1.0)
            if link.volume_delay_func in akcelik_vdfs and link["@free_flow_speed"] > 0:
                dist = link.length
                critical_speed = critical_speed_map[link["@capclass"]]
                t_c = dist / critical_speed
                t_o = dist / link["@free_flow_speed"]
                link["@ja"] = 16 * (t_c - t_o) ** 2

    def _set_link_modes(self, network: EmmeNetwork):
        """Set the link modes based on the per-class 'excluded_links' set."""
        # first reset link modes (script run more than once)
        # "generic_highway_mode_code" must already be created (in import to Emme script)
        auto_mode = {network.mode(self.config.generic_highway_mode_code)}
        used_modes = {
            network.mode(assign_class.mode_code) for assign_class in self.config.classes
        }
        used_modes.add(network.mode(self.config.maz_to_maz.mode_code))
        for link in network.links():
            link.modes -= used_modes
            if link["@drive_link"]:
                link.modes |= auto_mode
        for mode in used_modes:
            if mode is not None:
                network.delete_mode(mode)

        # Create special access/egress mode for MAZ connectors
        maz_access_mode = network.create_mode(
            "AUX_AUTO", self.config.maz_to_maz.mode_code
        )
        maz_access_mode.description = "MAZ access"
        # create modes from class spec
        # (duplicate mode codes allowed provided the excluded_links is the same)
        mode_excluded_links = {}
        for assign_class in self.config.classes:
            if assign_class.mode_code in mode_excluded_links:
                if (
                    assign_class.excluded_links
                    != mode_excluded_links[assign_class.mode_code]
                ):
                    ex_links1 = mode_excluded_links[assign_class.mode_code]
                    ex_links2 = assign_class.excluded_links
                    raise Exception(
                        f"config error: highway.classes, duplicated mode codes "
                        f"('{assign_class.mode_code}') with different excluded "
                        f"links: {ex_links1} and {ex_links2}"
                    )
                continue
            mode = network.create_mode("AUX_AUTO", assign_class.mode_code)
            mode.description = assign_class.name
            mode_excluded_links[mode.id] = assign_class.excluded_links

        dst_veh_groups = self.config.tolls.dst_vehicle_group_names
        for link in network.links():
            modes = set(m.id for m in link.modes)
            # MAZ connector links only get the MAZ access mode
            if link.i_node["@maz_id"] + link.j_node["@maz_id"] > 0:
                modes.add(maz_access_mode.id)
                link.modes = modes
                continue
            if not link["@drive_link"]:
                continue
            exclude_links_map = {
                "is_sr": link["@useclass"] in [2, 3],
                "is_sr2": link["@useclass"] == 2,
                "is_sr3": link["@useclass"] == 3,
                "is_auto_only": link["@useclass"] in [2, 3, 4],
            }
            for dst_veh in dst_veh_groups:
                exclude_links_map[f"is_toll_{dst_veh}"] = (
                    link[f"@valuetoll_{dst_veh}"] > 0
                )
            self._apply_exclusions(
                self.config.maz_to_maz.excluded_links,
                maz_access_mode.id,
                modes,
                exclude_links_map,
            )
            for assign_class in self.config.classes:
                self._apply_exclusions(
                    assign_class.excluded_links,
                    assign_class.mode_code,
                    modes,
                    exclude_links_map,
                )
            link.modes = modes

    @staticmethod
    def _apply_exclusions(
        excluded_links_criteria: List[str],
        mode_code: str,
        modes_set: Set[str],
        link_values: Dict[str, bool],
    ):
        """Add mode_code to modes_set unless any exclusion criterion matches."""
        for criteria in excluded_links_criteria:
            if link_values[criteria]:
                return
        modes_set.add(mode_code)

    def _calc_link_skim_lengths(self, network: EmmeNetwork):
        """Calculate the length attributes used in the highway skims."""
        valuetoll_start_tollbooth_code = (
            self.config.tolls.valuetoll_start_tollbooth_code
        )
        for link in network.links():
            # distance in hov lanes / facilities
            if 2 <= link["@useclass"] <= 3:
                link["@hov_length"] = link.length
            else:
                link["@hov_length"] = 0
            # distance on non-bridge toll facilities
            # NOTE(review): strict '>' here vs '>=' in _set_tolls -- confirm
            # whether the boundary tollbooth code should count as toll length
            if link["@tollbooth"] > valuetoll_start_tollbooth_code:
                link["@toll_length"] = link.length
            else:
                link["@toll_length"] = 0

    def _calc_link_class_costs(self, network: EmmeNetwork):
        """Calculate the per-class link cost from the tolls and operating costs."""
        for assign_class in self.config.classes:
            cost_attr = f"@cost_{assign_class.name.lower()}"
            op_cost = assign_class["operating_cost_per_mile"]
            toll_factor = assign_class.get("toll_factor")
            if toll_factor is None:
                toll_factor = 1.0
            toll_attrs = assign_class["toll"]
            for link in network.links():
                # BUG FIX: previously a bare 'except:' left toll_value
                # undefined (NameError) or stale from a prior link; missing
                # toll attributes are now treated as a zero toll instead.
                try:
                    toll_value = sum(link[toll_attr] for toll_attr in toll_attrs)
                except (KeyError, AttributeError):
                    toll_value = 0.0
                link[cost_attr] = link.length * op_cost + toll_value * toll_factor

    def _calc_interchange_distance(self, network: EmmeNetwork):
        """Calculate upstream and downstream interchange distance (for reliability).

        Labels interchange nodes from the input interchange nodes file, then
        computes @intdist_down / @intdist_up for freeway links (ft 1-2 with
        mode 'c').
        """
        # input interchange nodes file
        # This is a file inherited from https://app.box.com/folder/148342877307, as implemented in the tm2.1
        interchange_nodes_file = self.get_abs_path(self.config.interchange_nodes_file)
        interchange_nodes_df = pd.read_csv(interchange_nodes_file)
        interchange_nodes_df = interchange_nodes_df[interchange_nodes_df.intx > 0]
        # set for O(1) membership tests (was a list: O(n) per node)
        interchange_points = set(interchange_nodes_df["N"].tolist())
        network.create_attribute("NODE", "is_interchange")
        for node in network.nodes():
            if node["#node_id"] in interchange_points:
                node.is_interchange = True
                node["@interchange"] = node.is_interchange

        mode_c = network.mode("c")
        for link in network.links():
            if link["@ft"] in [1, 2] and mode_c in link.modes:
                link["@intdist_down"] = PrepareNetwork.interchange_distance(
                    link, "DOWNSTREAM"
                )
                link["@intdist_up"] = PrepareNetwork.interchange_distance(
                    link, "UPSTREAM"
                )

        network.delete_attribute("NODE", "is_interchange")

    @staticmethod
    def interchange_distance(orig_link, direction):
        """Shortest-path distance from orig_link to the nearest interchange node.

        Args:
            orig_link: Emme link to measure from (search starts at its midpoint).
            direction: "DOWNSTREAM" or "UPSTREAM" search direction.

        Returns:
            Distance in link length units (half of orig_link plus path cost),
            or 99 if no interchange is reachable (start/end of highway).
        """
        visited = set([])
        visited_add = visited.add
        back_links = {}
        heap = []
        if direction == "DOWNSTREAM":
            get_links = lambda lnk: lnk.j_node.outgoing_links()
            check_far_node = lambda lnk: lnk.j_node.is_interchange
        elif direction == "UPSTREAM":
            get_links = lambda lnk: lnk.i_node.incoming_links()
            check_far_node = lambda lnk: lnk.i_node.is_interchange
        else:
            # previously fell through to an UnboundLocalError
            raise ValueError(f"direction must be DOWNSTREAM or UPSTREAM: {direction}")
        # Shortest path search for nearest interchange node along freeway;
        # heap entries include "#link_id" as a tie-breaker so Link objects
        # are never compared directly.
        for link in get_links(orig_link):
            _heapq.heappush(heap, (link["length"], link["#link_id"], link))
        interchange_found = False

        # Check first node
        if check_far_node(orig_link):
            interchange_found = True
            link_cost = 0.0

        try:
            while not interchange_found:
                link_cost, link_id, link = _heapq.heappop(heap)
                if link in visited:
                    continue
                visited_add(link)
                if check_far_node(link):
                    interchange_found = True
                    break
                get_links_return = get_links(link)
                for next_link in get_links_return:
                    if next_link in visited:
                        continue
                    next_cost = link_cost + next_link["length"]
                    _heapq.heappush(heap, (next_cost, next_link["#link_id"], next_link))
        except TypeError:
            # TypeError if the link type objects are compared in the tuples
            # case where the path cost are the same
            raise Exception("Path cost are the same, cannot compare Link objects")
        except IndexError:
            # IndexError if heap is empty
            # case where start / end of highway, dist = 99
            return 99
        return orig_link["length"] / 2.0 + link_cost

    def _calc_link_static_reliability(self, network: EmmeNetwork):
        """Set @static_rel (static reliability) on links.

        Consists of lane factor, interchange distance and speed factor,
        differentiated by freeway, arterial, and others.
        """
        # Static reliability parameters
        # freeway coefficients
        freeway_rel = {
            "intercept": 0.1078,
            "speed>70": 0.01393,
            "upstream": 0.011,
            "downstream": 0.0005445,
        }
        # arterial/ramp/other coefficients
        road_rel = {
            "intercept": 0.0546552,
            "lanes": {1: 0.0, 2: 0.0103589, 3: 0.0361211, 4: 0.0446958, 5: 0.0},
            "speed": {
                "<35": 0,
                35: 0.0075674,
                40: 0.0091012,
                45: 0.0080996,
                50: -0.0022938,
                ">50": -0.0046211,
            },
        }
        for link in network.links():
            # if freeway apply freeway parameters to this link
            if (link["@ft"] in [1, 2]) and (link["@lanes"] > 0):
                high_speed_factor = (
                    freeway_rel["speed>70"] if link["@free_flow_speed"] >= 70 else 0
                )
                upstream_factor = freeway_rel["upstream"] * 1 / link["@intdist_up"]
                downstream_factor = (
                    freeway_rel["downstream"] * 1 / link["@intdist_down"]
                )
                link["@static_rel"] = (
                    freeway_rel["intercept"]
                    + high_speed_factor
                    + upstream_factor
                    + downstream_factor
                )
            # arterial/ramp/other apply road parameters
            elif (link["@ft"] < 8) and (link["@lanes"] > 0):
                lane_factor = road_rel["lanes"].get(link["@lanes"], 0)
                speed_bin = link["@free_flow_speed"]
                if speed_bin < 35:
                    speed_bin = "<35"
                elif speed_bin > 50:
                    speed_bin = ">50"
                speed_factor = road_rel["speed"][speed_bin]
                link["@static_rel"] = road_rel["intercept"] + lane_factor + speed_factor
            else:
                link["@static_rel"] = 0

__init__(controller)

Constructor for PrepareNetwork.

Parameters:

Name Type Description Default
controller RunController

Reference to run controller object.

required
Source code in tm2py\components\network\highway\highway_network.py
66
67
68
69
70
71
72
73
74
75
76
def __init__(self, controller: "RunController"):
    """Constructor for PrepareNetwork.

    Args:
        controller (RunController): Reference to run controller object.
    """
    super().__init__(controller)
    # highway section of the run configuration
    self.config = self.controller.config.highway
    self._emme_manager = self.controller.emme_manager
    # lazily initialized via the highway_emmebank / highway_scenarios properties
    self._highway_emmebank = None
    self._highway_scenarios = None

run()

Run network preparation step.

Source code in tm2py\components\network\highway\highway_network.py
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
@LogStartEnd("Prepare network attributes and modes")
def run(self):
    """Prepare each time-period scenario's network for highway assignment."""
    for period in self.time_period_names:
        trace_name = f"prepare for highway assignment {period}"
        with self.controller.emme_manager.logbook_trace(trace_name):
            scenario = self.highway_emmebank.scenario(period)
            self._create_class_attributes(scenario, period)
            network = scenario.get_network()
            self._set_tolls(network, period)
            self._set_vdf_attributes(network, period)
            # remaining steps operate on the network alone, in this order
            for prep_step in (
                self._set_link_modes,
                self._calc_link_skim_lengths,
                self._calc_link_class_costs,
                self._calc_interchange_distance,
                self._calc_link_static_reliability,
            ):
                prep_step(network)
            scenario.publish_network(network)

validate_inputs()

Validate inputs files are correct, raise if an error is found.

Source code in tm2py\components\network\highway\highway_network.py
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
def validate_inputs(self):
    """Check the tolls input file exists and contains the required columns.

    Raises:
        FileNotFoundError: if the configured tolls file is missing.
        FileFormatError: if any required toll column is absent from the
            file header (all missing columns are logged first).
    """
    toll_path = self.get_abs_path(self.config.tolls.file_path)
    if not os.path.exists(toll_path):
        self.logger.log(
            f"Tolls file (config.highway.tolls.file_path) does not exist: {toll_path}",
            level="ERROR",
        )
        raise FileNotFoundError(f"Tolls file does not exist: {toll_path}")
    # One toll column is required per (time period, source vehicle group),
    # plus the facility index column.
    required = ["fac_index"] + [
        f"toll{period.name.lower()}_{vehicle}"
        for period in self.controller.config.time_periods
        for vehicle in self.config.tolls.src_vehicle_group_names
    ]
    with open(toll_path, "r", encoding="UTF8") as toll_file:
        header = {h.strip() for h in next(toll_file).split(",")}
        missing = [column for column in required if column not in header]
        for column in missing:
            self.logger.log(
                f"Tolls file missing column: {column}", level="ERROR"
            )
    if missing:
        raise FileFormatError(
            f"Tolls file missing {len(missing)} columns: {', '.join(missing)}"
        )

Highway assignment and skim component.

Performs equilibrium traffic assignment and generates resulting skims. The assignment is configured using the “highway” table in the source config. See the config documentation for details. The traffic assignment runs according to the list of assignment classes under highway.classes.

Other relevant parameters from the config are: - emme.num_processors: number of processors as integer or “MAX” or “MAX-N” - time_periods[].emme_scenario_id: Emme scenario number to use for each period - time_periods[].highway_capacity_factor

The Emme network must have the following attributes available:

Link - attributes: - “length” in feet - “vdf”, volume delay function (volume delay functions must also be setup) - “@useclass”, vehicle-class restrictions classification, auto-only, HOV only - “@free_flow_time”, the free flow time (in minutes) - “@tollXX_YY”, the toll for period XX and class subgroup (see truck class) named YY, used together with @tollbooth to generate @bridgetoll_YY and @valuetoll_YY - “@maz_flow”, the background traffic MAZ-to-MAZ SP assigned flow from highway_maz, if controller.iteration > 0 - modes: must be set on links and match the specified mode codes in the traffic config

Network results - attributes: - @flow_XX: link PCE flows per class, where XX is the class name in the config - timau: auto travel time - volau: total assigned flow in PCE

Notes: - Output matrices are in miles, minutes, and cents (2010 dollars) and are stored as real values; - Intrazonal distance/time is one half the distance/time to the nearest neighbor; - Intrazonal bridge and value tolls are assumed to be zero

AssignmentClass

Highway assignment class, represents data from config and conversion to Emme specs.

Source code in tm2py\components\network\highway\highway_assign.py
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
class AssignmentClass:
    """Highway assignment class, represents data from config and conversion to Emme specs."""

    def __init__(self, class_config, time_period, iteration, reliability, warmstart):
        """Constructor of Highway Assignment class.

        Args:
            class_config: per-class entry from config.highway.classes; must
                support item access (``["name"]``, ``.get("skims")``) as well
                as attribute access (``.mode_code``, ``.value_of_time``).
            time_period (str): time period name, used in demand and skim
                matrix names.
            iteration (int): current global iteration number.
            reliability (bool): include reliability in path analysis or not.
                If true, reliability is included in path analysis using link field.
                If false, reliability is not included in path analysis, reliability skim is overwritten as 0.
            warmstart (bool): True if assigning warmstart demand
        """
        self.class_config = class_config
        self.time_period = time_period
        self.iteration = iteration
        self.skim_reliability = reliability
        self.warmstart = warmstart
        # Lower-cased once here; all attribute and matrix names derive from it.
        self.name = class_config["name"].lower()
        self.skims = class_config.get("skims", [])

    @property
    def emme_highway_class_spec(self) -> EmmeHighwayClassSpec:
        """Construct and return Emme traffic assignment class specification.

        Converted from input config (highway.classes), see Emme Help for
        SOLA traffic assignment for specification details.
        Adds time_period as part of demand and skim matrix names.

        Returns:
            A nested dictionary corresponding to the expected Emme traffic
            class specification used in the SOLA assignment.
        """
        # In global iteration 0 with no warmstart demand there is nothing to
        # assign yet, so use the scalar zero matrix as demand.
        if self.iteration == 0:
            if not self.warmstart:
                demand_matrix = 'ms"zero"'
            else:
                demand_matrix = f'mf"{self.time_period}_{self.name}"'
        else:
            demand_matrix = f'mf"{self.time_period}_{self.name}"'
        class_spec = {
            "mode": self.class_config.mode_code,
            "demand": demand_matrix,
            "generalized_cost": {
                "link_costs": f"@cost_{self.name.lower()}",  # cost in $0.01
                # $/hr -> min/$0.01
                "perception_factor": 0.6 / self.class_config.value_of_time,
            },
            "results": {
                "link_volumes": f"@flow_{self.name.lower()}",
                "od_travel_times": {
                    "shortest_paths": f"mf{self.time_period}_{self.name}_time"
                },
            },
            "path_analyses": self.emme_class_analysis,
        }
        return class_spec

    @property
    def emme_highway_class_spec_wo_pa(self) -> EmmeHighwayClassSpec:
        """Construct and return Emme traffic assignment class specification.

        Identical to ``emme_highway_class_spec`` except that no
        "path_analyses" entry is included (no skims are generated).

        Converted from input config (highway.classes), see Emme Help for
        SOLA traffic assignment for specification details.
        Adds time_period as part of demand and skim matrix names.

        Returns:
            A nested dictionary corresponding to the expected Emme traffic
            class specification used in the SOLA assignment.
        """
        if self.iteration == 0:
            if not self.warmstart:
                demand_matrix = 'ms"zero"'
            else:
                demand_matrix = f'mf"{self.time_period}_{self.name}"'
        else:
            demand_matrix = f'mf"{self.time_period}_{self.name}"'
        class_spec = {
            "mode": self.class_config.mode_code,
            "demand": demand_matrix,
            "generalized_cost": {
                "link_costs": f"@cost_{self.name.lower()}",  # cost in $0.01
                # $/hr -> min/$0.01
                "perception_factor": 0.6 / self.class_config.value_of_time,
            },
            "results": {
                "link_volumes": f"@flow_{self.name.lower()}",
                "od_travel_times": {
                    "shortest_paths": f"mf{self.time_period}_{self.name}_time"
                },
            },
        }
        return class_spec

    @property
    def emme_class_analysis(self) -> List[EmmeHighwayAnalysisSpec]:
        """Construct and return a list of path analyses specs which generate the required skims.

        Returns:
            A list of nested dictionaries corresponding to the Emme path analysis
            (per-class) specification used in the SOLA assignment.
        """
        class_analysis = []
        # Cost analysis is added first when "time" is requested; the time-skim
        # post-processing assumes total link costs is the first analysis.
        if "time" in self.skims:
            class_analysis.append(
                self.emme_analysis_spec(
                    f"@cost_{self.name}".lower(),
                    f"mf{self.time_period}_{self.name}_cost",
                )
            )
        for skim_type in self.skims:
            if skim_type == "time":
                continue
            # if not skimming reliability in all global iterations
            if not self.skim_reliability:
                if skim_type in ["rlbty", "autotime"]:
                    continue
            # if skimming reliability
            # reliability is only skimmed in global iteration 0 and 1
            if self.iteration > 1:
                if skim_type == "rlbty":
                    continue
                if skim_type == "autotime":
                    continue
            if "_" in skim_type:
                # NOTE: skim names with more than one underscore would raise
                # ValueError here (two-value unpack).
                skim_type, group = skim_type.split("_")
                matrix_name = f"mf{self.time_period}_{self.name}_{skim_type}_{group}"
            else:
                group = ""
                matrix_name = f"mf{self.time_period}_{self.name}_{skim_type}"
            class_analysis.append(
                self.emme_analysis_spec(
                    self.skim_analysis_link_attribute(skim_type, group),
                    matrix_name,
                )
            )
        return class_analysis

    @property
    def skim_matrices(self) -> List[str]:
        """Returns: List of skim matrix names for this class.

        Mirrors the matrix naming in ``emme_class_analysis`` (without the
        "mf" prefix), including both the time and cost matrices when "time"
        is requested.
        """
        skim_matrices = []
        if "time" in self.skims:
            skim_matrices.extend(
                [
                    f"{self.time_period}_{self.name}_time",
                    f"{self.time_period}_{self.name}_cost",
                ]
            )
        for skim_type in self.skims:
            if skim_type == "time":
                continue
            if "_" in skim_type:
                skim_type, group = skim_type.split("_")
                skim_matrices.append(
                    f"{self.time_period}_{self.name}_{skim_type}_{group}"
                )
            else:
                group = ""
                skim_matrices.append(f"{self.time_period}_{self.name}_{skim_type}")
        return skim_matrices

    @staticmethod
    def emme_analysis_spec(link_attr: str, matrix_name: str) -> EmmeHighwayAnalysisSpec:
        """Returns Emme highway class path analysis spec.

        See Emme Help for SOLA assignment for full specification details.
        Args:
            link_attr: input link attribute for which to sum values along the paths
            matrix_name: full matrix name to store the result of the path analysis

        Returns:
            The nested dictionary specification which will generate the skim
            of link attribute values.
        """
        analysis_spec = {
            "link_component": link_attr,
            "turn_component": None,
            "operator": "+",
            "selection_threshold": {"lower": None, "upper": None},
            "path_to_od_composition": {
                "considered_paths": "ALL",
                "multiply_path_proportions_by": {
                    "analyzed_demand": False,
                    "path_value": True,
                },
            },
            "results": {
                "od_values": matrix_name,
                "selected_link_volumes": None,
                "selected_turn_volumes": None,
            },
        }
        return analysis_spec

    @staticmethod
    def skim_analysis_link_attribute(skim: str, group: str) -> str:
        """Return the link attribute name for the specified skim type and group.

        Args:
            skim: name of skim requested, one of dist, hovdist, tolldist,
                freeflowtime, bridgetoll, valuetoll, rlbty, or autotime
            group: subgroup name for the bridgetoll or valuetoll, corresponds to one of
                the names from config.highway.tolls.dst_vehicle_group_names;
                unused (may be empty) for the other skim types
        Returns:
            A string of the link attribute name used in the analysis.
        """
        lookup = {
            "dist": "length",  # NOTE: length must be in miles
            "hovdist": "@hov_length",
            "tolldist": "@toll_length",
            "freeflowtime": "@free_flow_time",
            "bridgetoll": f"@bridgetoll_{group}",
            "valuetoll": f"@valuetoll_{group}",
            "rlbty": "@reliability_sq",
            "autotime": "@auto_time",
        }
        return lookup[skim]

emme_class_analysis property

Construct and return a list of path analyses specs which generate the required skims.

Returns:

Type Description
List[EmmeHighwayAnalysisSpec]

A list of nested dictionaries corresponding to the Emme path analysis

List[EmmeHighwayAnalysisSpec]

(per-class) specification used in the SOLA assignment.

emme_highway_class_spec property

Construct and return Emme traffic assignment class specification.

Converted from input config (highway.classes), see Emme Help for SOLA traffic assignment for specification details. Adds time_period as part of demand and skim matrix names.

Returns:

Type Description
EmmeHighwayClassSpec

A nested dictionary corresponding to the expected Emme traffic

EmmeHighwayClassSpec

class specification used in the SOLA assignment.

emme_highway_class_spec_wo_pa property

Construct and return Emme traffic assignment class specification.

Converted from input config (highway.classes), see Emme Help for SOLA traffic assignment for specification details. Adds time_period as part of demand and skim matrix names.

Returns:

Type Description
EmmeHighwayClassSpec

A nested dictionary corresponding to the expected Emme traffic

EmmeHighwayClassSpec

class specification used in the SOLA assignment.

skim_matrices property

__init__(class_config, time_period, iteration, reliability, warmstart)

Constructor of Highway Assignment class.

Parameters:

Name Type Description Default
class_config _type_

description

required
time_period _type_

description

required
iteration _type_

description

required
reliability bool

include reliability in path analysis or not. If true, reliability is included in path analysis using link field. If false, reliability is not included in path analysis, reliability skim is overwritten as 0.

required
warmstart bool

True if assigning warmstart demand

required
Source code in tm2py\components\network\highway\highway_assign.py
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
def __init__(self, class_config, time_period, iteration, reliability, warmstart):
    """Constructor of Highway Assignment class.

    Args:
        class_config: per-class entry from config.highway.classes; must
            support item access (``["name"]``, ``.get("skims")``) as well
            as attribute access used by the spec properties.
        time_period (str): time period name, used in demand and skim
            matrix names.
        iteration (int): current global iteration number.
        reliability (bool): include reliability in path analysis or not.
            If true, reliability is included in path analysis using link field.
            If false, reliability is not included in path analysis, reliability skim is overwritten as 0.
        warmstart (bool): True if assigning warmstart demand
    """
    self.class_config = class_config
    self.time_period = time_period
    self.iteration = iteration
    self.skim_reliability = reliability
    self.warmstart = warmstart
    # Lower-cased once here; all attribute and matrix names derive from it.
    self.name = class_config["name"].lower()
    self.skims = class_config.get("skims", [])

emme_analysis_spec(link_attr, matrix_name) staticmethod

Returns Emme highway class path analysis spec.

See Emme Help for SOLA assignment for full specification details. Args: link_attr: input link attribute for which to sum values along the paths matrix_name: full matrix name to store the result of the path analysis

Returns:

Type Description
EmmeHighwayAnalysisSpec

The nested dictionary specification which will generate the skim

EmmeHighwayAnalysisSpec

of link attribute values.

Source code in tm2py\components\network\highway\highway_assign.py
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
@staticmethod
def emme_analysis_spec(link_attr: str, matrix_name: str) -> EmmeHighwayAnalysisSpec:
    """Returns Emme highway class path analysis spec.

    See Emme Help for SOLA assignment for full specification details.
    Args:
        link_attr: input link attribute for which to sum values along the paths
        matrix_name: full matrix name to store the result of the path analysis

    Returns:
        The nested dictionary specification which will generate the skim
        of link attribute values.
    """
    analysis_spec = {
        "link_component": link_attr,
        "turn_component": None,
        "operator": "+",
        "selection_threshold": {"lower": None, "upper": None},
        "path_to_od_composition": {
            "considered_paths": "ALL",
            "multiply_path_proportions_by": {
                "analyzed_demand": False,
                "path_value": True,
            },
        },
        "results": {
            "od_values": matrix_name,
            "selected_link_volumes": None,
            "selected_turn_volumes": None,
        },
    }
    return analysis_spec

Return the link attribute name for the specified skim type and group.

Parameters:

Name Type Description Default
skim str

name of skim requested, one of dist, hovdist, tolldist, freeflowtime, bridgetoll, or valuetoll

required
group str

subgroup name for the bridgetoll or valuetoll, corresponds to one of the names from config.highway.tolls.dst_vehicle_group_names

required
Source code in tm2py\components\network\highway\highway_assign.py
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
@staticmethod
def skim_analysis_link_attribute(skim: str, group: str) -> str:
    """Return the link attribute name for the specified skim type and group.

    Args:
        skim: name of skim requested, one of dist, hovdist, tolldist, freeflowtime,
            bridgetoll, or valuetoll
        group: subgroup name for the bridgetoll or valuetoll, corresponds to one of
            the names from config.highway.tolls.dst_vehicle_group_names
    Returns:
        A string of the link attribute name used in the analysis.
    """
    lookup = {
        "dist": "length",  # NOTE: length must be in miles
        "hovdist": "@hov_length",
        "tolldist": "@toll_length",
        "freeflowtime": "@free_flow_time",
        "bridgetoll": f"@bridgetoll_{group}",
        "valuetoll": f"@valuetoll_{group}",
        "rlbty": "@reliability_sq",
        "autotime": "@auto_time",
    }
    return lookup[skim]

HighwayAssignment

Bases: Component

Highway assignment and skims. Args: controller: parent RunController object

Source code in tm2py\components\network\highway\highway_assign.py
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
class HighwayAssignment(Component):
    """Highway assignment and skims.
    Args:
        controller: parent RunController object
    """

    def __init__(self, controller: RunController):
        """Constructor for HighwayAssignment components.

        Args:
            controller (RunController): Reference to current run controller.
        """
        super().__init__(controller)

        # Highway section of the run configuration (config.highway).
        self.config = self.controller.config.highway

        # Per-period working state; populated/reset in run() and _setup().
        self._matrix_cache = None
        self._skim_matrices = []
        self._class_config = None
        self._scenario = None
        self._highway_emmebank = None

    @property
    def highway_emmebank(self):
        """Lazily-resolved highway emmebank from the Emme manager."""
        if not self._highway_emmebank:
            self._highway_emmebank = self.controller.emme_manager.highway_emmebank
        return self._highway_emmebank

    @property
    def classes(self):
        """List of highway assignment class names from config.highway.classes."""
        # self.hwy_classes
        return [c.name for c in self.config.classes]

    @property
    def class_config(self):
        """Mapping of class name -> class config entry (built once, cached)."""
        # self.hwy_class_configs
        if not self._class_config:
            self._class_config = {c.name: c for c in self.config.classes}

        return self._class_config

    def validate_inputs(self):
        """Validate inputs files are correct, raise if an error is found."""
        # TODO
        pass

    @LogStartEnd("Highway assignment and skims", level="STATUS")
    def run(self):
        """Run highway assignment."""
        demand = PrepareHighwayDemand(self.controller)
        if self.controller.iteration == 0:
            # Property access for its side effect — presumably ensures the
            # zero demand matrix exists in the emmebank; TODO confirm.
            self.highway_emmebank.zero_matrix
            if self.controller.config.warmstart.warmstart:
                if self.controller.config.warmstart.use_warmstart_demand:
                    demand.run()
        else:
            demand.run()

        calculate_reliability = self.config.reliability

        for time in self.time_period_names:
            scenario = self.highway_emmebank.scenario(time)
            with self._setup(scenario, time):
                iteration = self.controller.iteration
                warmstart = self.controller.config.warmstart.warmstart
                assign_classes = [
                    AssignmentClass(
                        c, time, iteration, calculate_reliability, warmstart
                    )
                    for c in self.config.classes
                ]
                if iteration > 0:
                    self._copy_maz_flow(scenario)
                else:
                    self._reset_background_traffic(scenario)
                self._create_skim_matrices(scenario, assign_classes)
                # calculate highway reliability in global iteration 0 and 1 only
                # this requires the assignment to be run twice
                # NOTE: bitwise & on two bools behaves as logical "and" here.
                if (iteration <= 1) & (calculate_reliability):
                    # set path analysis to False to avoid skimming
                    assign_spec = self._get_assignment_spec(
                        assign_classes, path_analysis=False
                    )

                    with self.logger.log_start_end(
                        "Run SOLA assignment without path analyses", level="INFO"
                    ):
                        assign = self.controller.emme_manager.tool(
                            "inro.emme.traffic_assignment.sola_traffic_assignment"
                        )
                        assign(assign_spec, scenario, chart_log_interval=1)

                    # calculates link-level LOS-based reliability
                    net_calc = NetworkCalculator(self.controller, scenario)

                    exf_pars = scenario.emmebank.extra_function_parameters
                    vdfs = [
                        f
                        for f in scenario.emmebank.functions()
                        if f.type == "VOLUME_DELAY"
                    ]
                    for function in vdfs:
                        expression = function.expression
                        # Substitute extra-function parameter placeholders
                        # (el1..el4) with their configured values.
                        for el in ["el1", "el2", "el3", "el4"]:
                            expression = expression.replace(el, getattr(exf_pars, el))
                        if "@static_rel" in expression:
                            # split function into time component and reliability component
                            # (assumes the VDF expression has the exact form
                            # "<time>*(1+@static_rel+<reliability>").
                            time_expr, reliability_expr = expression.split(
                                "*(1+@static_rel+"
                            )
                            net_calc(
                                "@auto_time",
                                time_expr,
                                {"link": "vdf=%s" % function.id[2:]},
                            )
                            net_calc(
                                "@reliability",
                                "(@static_rel+" + reliability_expr,
                                {"link": "vdf=%s" % function.id[2:]},
                            )
                            net_calc(
                                "@reliability_sq", "@reliability**2", {"link": "all"}
                            )

                assign_spec = self._get_assignment_spec(
                    assign_classes, path_analysis=True
                )
                with self.logger.log_start_end(
                    "Run SOLA assignment with path analyses",
                    level="INFO",
                ):
                    assign = self.controller.emme_manager.tool(
                        "inro.emme.traffic_assignment.sola_traffic_assignment"
                    )
                    assign(assign_spec, scenario, chart_log_interval=1)

                # Subtract non-time costs from gen cost to get the raw travel time
                for emme_class_spec in assign_spec["classes"]:
                    self._calc_time_skim(emme_class_spec)
                # Set intra-zonal for time and dist to be 1/2 nearest neighbour
                for class_config in self.config.classes:
                    self._set_intrazonal_values(
                        time,
                        class_config["name"],
                        class_config["skims"],
                    )
                self._export_skims(scenario, time)
                if self.logger.debug_enabled:
                    self._log_debug_report(scenario, time)

    @_context
    def _setup(self, scenario: EmmeScenario, time_period: str):
        """Setup and teardown for Emme Matrix cache and list of skim matrices.

        The matrix cache and skim-matrix list are always cleared on exit,
        even if the assignment raises.

        Args:
            scenario: Emme scenario object
            time_period: time period name
        """
        self._matrix_cache = MatrixCache(scenario)
        self._skim_matrices = []
        msg = f"Highway assignment for period {time_period}"
        with self.logger.log_start_end(msg, level="STATUS"):
            try:
                yield
            finally:
                self._matrix_cache.clear()
                self._matrix_cache = None
                self._skim_matrices = []

    def _copy_maz_flow(self, scenario: EmmeScenario):
        """Copy maz_flow from MAZ demand assignment to ul1 for background traffic.

        Args:
            scenario: Emme scenario object
        """
        self.logger.log(
            "Copy @maz_flow to ul1 for background traffic", indent=True, level="DETAIL"
        )
        net_calc = NetworkCalculator(self.controller, scenario)
        net_calc("ul1", "@maz_flow")

    def _reset_background_traffic(self, scenario: EmmeScenario):
        """Set ul1 for background traffic to 0 (no maz-maz flow).

        Args:
            scenario: Emme scenario object
        """
        self.logger.log(
            "Set ul1 to 0 for background traffic", indent=True, level="DETAIL"
        )
        net_calc = NetworkCalculator(self.controller, scenario)
        net_calc("ul1", "0")

    def _create_skim_matrices(
        self, scenario: EmmeScenario, assign_classes: List[AssignmentClass]
    ):
        """Create matrices to store skim results in Emme database.

        Also add the matrices to list of self._skim_matrices.

        Args:
            scenario: Emme scenario object
            assign_classes: list of AssignmentClass objects
        """
        create_matrix = self.controller.emme_manager.tool(
            "inro.emme.data.matrix.create_matrix"
        )

        with self.logger.log_start_end("Creating skim matrices", level="DETAIL"):
            for klass in assign_classes:
                for matrix_name in klass.skim_matrices:
                    matrix = scenario.emmebank.matrix(f'mf"{matrix_name}"')
                    if not matrix:
                        matrix = create_matrix(
                            "mf", matrix_name, scenario=scenario, overwrite=True
                        )
                        self.logger.debug(
                            f"Create matrix name: {matrix_name}, id: {matrix.id}"
                        )
                    # if not skimming reliability, set reliability matrices to 0
                    if not self.config.reliability:
                        if ("rlbty" in matrix_name) | ("autotime" in matrix_name):
                            data = self._matrix_cache.get_data(matrix_name)
                            # NOTE: sets values for external zones as well
                            data = 0 * data
                            self._matrix_cache.set_data(matrix_name, data)

                    self._skim_matrices.append(matrix)

    def _get_assignment_spec(
        self, assign_classes: List[AssignmentClass], path_analysis=True
    ) -> EmmeTrafficAssignmentSpec:
        """Generate template Emme SOLA assignment specification.

        Args:
            assign_classes: list of AssignmentClass objects
            path_analysis: if False, build class specs without path analyses
                (no skims are generated by the assignment).

        Returns
            Emme specification for SOLA traffic assignment

        """
        relative_gaps = self.config.relative_gaps
        # get the corresponding relative gap for the current iteration
        relative_gap = None
        if relative_gaps and isinstance(relative_gaps, tuple):
            for item in relative_gaps:
                if item["global_iteration"] == self.controller.iteration:
                    relative_gap = item["relative_gap"]
                    break
            if relative_gap is None:
                # NOTE(review): message below contains a typo ("specifify") —
                # fix in source (runtime string, not changed here).
                raise ValueError(
                    f"RelativeGapConfig: Must specifify a value for global iteration {self.controller.iteration}"
                )
        max_iterations = self.config.max_iterations
        # NOTE: mazmazvol as background traffic in link.data1 ("ul1")
        base_spec = {
            "type": "SOLA_TRAFFIC_ASSIGNMENT",
            "background_traffic": {
                "link_component": "ul1",
                "turn_component": None,
                "add_transit_vehicles": False,
            },
            "classes": [klass.emme_highway_class_spec for klass in assign_classes],
            "stopping_criteria": {
                "max_iterations": max_iterations,
                "best_relative_gap": 0.0,
                "relative_gap": relative_gap,
                "normalized_gap": 0.0,
            },
            "performance_settings": {
                "number_of_processors": self.controller.num_processors
            },
        }
        if not path_analysis:
            base_spec["classes"] = [
                klass.emme_highway_class_spec_wo_pa for klass in assign_classes
            ]
        return base_spec

    def _calc_time_skim(self, emme_class_spec: EmmeHighwayClassSpec):
        """Calculate the real time skim =gen_cost-per_fac*link_costs.

        Args:
            emme_class_spec: dictionary of the per-class spec sub-section from the
                Emme SOLA assignment spec, classes list
        """
        od_travel_times = emme_class_spec["results"]["od_travel_times"][
            "shortest_paths"
        ]
        if od_travel_times is not None:
            # Total link costs is always the first analysis
            cost = emme_class_spec["path_analyses"][0]["results"]["od_values"]
            factor = emme_class_spec["generalized_cost"]["perception_factor"]
            gencost_data = self._matrix_cache.get_data(od_travel_times)
            cost_data = self._matrix_cache.get_data(cost)
            time_data = gencost_data - (factor * cost_data)
            self._matrix_cache.set_data(od_travel_times, time_data)

    def _set_intrazonal_values(
        self, time_period: str, class_name: str, skims: List[str]
    ):
        """Set the intrazonal values to 1/2 nearest neighbour for time and distance skims.

        Args:
            time_period: time period name (from config)
            class_name: highway class name (from config)
            skims: list of requested skims (from config)
        """
        for skim_name in skims:
            if skim_name in ["time", "dist", "freeflowtime", "hovdist", "tolldist"]:
                matrix_name = f"mf{time_period}_{class_name}_{skim_name}"
                self.logger.debug(f"Setting intrazonals to 0.5*min for {matrix_name}")
                data = self._matrix_cache.get_data(matrix_name)
                # NOTE: sets values for external zones as well
                # Exclude the diagonal from the row minimum by setting it to
                # inf first, then take half the nearest-neighbour value.
                np.fill_diagonal(data, np.inf)
                data[np.diag_indices_from(data)] = 0.5 * np.nanmin(data, 1)
                self._matrix_cache.set_data(matrix_name, data)

    def _export_skims(self, scenario: EmmeScenario, time_period: str):
        """Export skims to OMX files by period.

        Args:
            scenario: Emme scenario object
            time_period: time period name
        """
        # NOTE: skims in separate file by period
        self.logger.debug(
            "_export_skims: self.config.output_skim_path:{}".format(
                self.config.output_skim_path
            )
        )
        omx_file_path = self.get_abs_path(
            self.config.output_skim_path
            / self.config.output_skim_filename_tmpl.format(time_period=time_period)
        )
        self.logger.debug(
            f"export {len(self._skim_matrices)} skim matrices to {omx_file_path}"
        )
        os.makedirs(os.path.dirname(omx_file_path), exist_ok=True)
        with OMXManager(
            omx_file_path, "w", scenario, matrix_cache=self._matrix_cache
        ) as omx_file:
            omx_file.write_matrices(self._skim_matrices)

    def _log_debug_report(self, scenario: EmmeScenario, time_period: str):
        """Log min/max/mean/sum summary statistics for each skim matrix.

        Args:
            scenario: Emme scenario object
            time_period: time period name
        """
        num_zones = len(scenario.zone_numbers)
        num_cells = num_zones * num_zones
        self.logger.debug(f"Highway skim summary for period {time_period}")
        self.logger.debug(
            f"Number of zones: {num_zones}. Number of O-D pairs: {num_cells}. "
            "Values outside -9999999, 9999999 are masked in summaries."
        )
        self.logger.debug(
            "name                            min       max      mean           sum"
        )
        for matrix in self._skim_matrices:
            values = self._matrix_cache.get_data(matrix)
            data = np.ma.masked_outside(values, -9999999, 9999999)
            stats = (
                f"{matrix.name:25} {data.min():9.4g} {data.max():9.4g} "
                f"{data.mean():9.4g} {data.sum(): 13.7g}"
            )
            self.logger.debug(stats)

__init__(controller)

Constructor for HighwayAssignment components.

Parameters:

Name Type Description Default
controller RunController

Reference to current run controller.

required
Source code in tm2py\components\network\highway\highway_assign.py
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
def __init__(self, controller: RunController):
    """Constructor for HighwayAssignment components.

    Args:
        controller (RunController): Reference to current run controller.
    """
    super().__init__(controller)

    # Highway-specific section of the run configuration.
    self.config = self.controller.config.highway

    # Lazily-populated working state for the assignment run.
    self._highway_emmebank = None
    self._scenario = None
    self._class_config = None
    self._matrix_cache = None
    self._skim_matrices = []

run()

Run highway assignment.

Source code in tm2py\components\network\highway\highway_assign.py
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
@LogStartEnd("Highway assignment and skims", level="STATUS")
def run(self):
    """Run highway assignment."""
    demand = PrepareHighwayDemand(self.controller)
    if self.controller.iteration == 0:
        self.highway_emmebank.zero_matrix
        if self.controller.config.warmstart.warmstart:
            if self.controller.config.warmstart.use_warmstart_demand:
                demand.run()
    else:
        demand.run()

    calculate_reliability = self.config.reliability

    for time in self.time_period_names:
        scenario = self.highway_emmebank.scenario(time)
        with self._setup(scenario, time):
            iteration = self.controller.iteration
            warmstart = self.controller.config.warmstart.warmstart
            assign_classes = [
                AssignmentClass(
                    c, time, iteration, calculate_reliability, warmstart
                )
                for c in self.config.classes
            ]
            if iteration > 0:
                self._copy_maz_flow(scenario)
            else:
                self._reset_background_traffic(scenario)
            self._create_skim_matrices(scenario, assign_classes)
            # calculate highway reliability in global iteration 0 and 1 only
            # this requires the assignment to be run twice
            if (iteration <= 1) & (calculate_reliability):
                # set path analysis to False to avoid skimming
                assign_spec = self._get_assignment_spec(
                    assign_classes, path_analysis=False
                )

                with self.logger.log_start_end(
                    "Run SOLA assignment without path analyses", level="INFO"
                ):
                    assign = self.controller.emme_manager.tool(
                        "inro.emme.traffic_assignment.sola_traffic_assignment"
                    )
                    assign(assign_spec, scenario, chart_log_interval=1)

                # calucaltes link level LOS based reliability
                net_calc = NetworkCalculator(self.controller, scenario)

                exf_pars = scenario.emmebank.extra_function_parameters
                vdfs = [
                    f
                    for f in scenario.emmebank.functions()
                    if f.type == "VOLUME_DELAY"
                ]
                for function in vdfs:
                    expression = function.expression
                    for el in ["el1", "el2", "el3", "el4"]:
                        expression = expression.replace(el, getattr(exf_pars, el))
                    if "@static_rel" in expression:
                        # split function into time component and reliability component
                        time_expr, reliability_expr = expression.split(
                            "*(1+@static_rel+"
                        )
                        net_calc(
                            "@auto_time",
                            time_expr,
                            {"link": "vdf=%s" % function.id[2:]},
                        )
                        net_calc(
                            "@reliability",
                            "(@static_rel+" + reliability_expr,
                            {"link": "vdf=%s" % function.id[2:]},
                        )
                        net_calc(
                            "@reliability_sq", "@reliability**2", {"link": "all"}
                        )

            assign_spec = self._get_assignment_spec(
                assign_classes, path_analysis=True
            )
            with self.logger.log_start_end(
                "Run SOLA assignment with path analyses",
                level="INFO",
            ):
                assign = self.controller.emme_manager.tool(
                    "inro.emme.traffic_assignment.sola_traffic_assignment"
                )
                assign(assign_spec, scenario, chart_log_interval=1)

            # Subtract non-time costs from gen cost to get the raw travel time
            for emme_class_spec in assign_spec["classes"]:
                self._calc_time_skim(emme_class_spec)
            # Set intra-zonal for time and dist to be 1/2 nearest neighbour
            for class_config in self.config.classes:
                self._set_intrazonal_values(
                    time,
                    class_config["name"],
                    class_config["skims"],
                )
            self._export_skims(scenario, time)
            if self.logger.debug_enabled:
                self._log_debug_report(scenario, time)

validate_inputs()

Validate inputs files are correct, raise if an error is found.

Source code in tm2py\components\network\highway\highway_assign.py
126
127
128
129
def validate_inputs(self):
    """Validate inputs files are correct, raise if an error is found."""
    # TODO: implement input validation for the highway assignment component.

Bases: ConfigItem

Highway assignment and skims parameters.

Properties
Source code in tm2py\config.py
 926
 927
 928
 929
 930
 931
 932
 933
 934
 935
 936
 937
 938
 939
 940
 941
 942
 943
 944
 945
 946
 947
 948
 949
 950
 951
 952
 953
 954
 955
 956
 957
 958
 959
 960
 961
 962
 963
 964
 965
 966
 967
 968
 969
 970
 971
 972
 973
 974
 975
 976
 977
 978
 979
 980
 981
 982
 983
 984
 985
 986
 987
 988
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
@dataclass(frozen=True)
class HighwayConfig(ConfigItem):
    """Highway assignment and skims parameters.

    Properties:
        generic_highway_mode_code: single character unique mode ID for entire
            highway network (no excluded_links)
        relative_gaps: relative gaps for assignment convergence, specific to global iteration, see HighwayRelativeGapConfig
        max_iterations: maximum iterations stopping criteria
        area_type_buffer_dist_miles: used to in calculation to categorize link @areatype
            The area type is determined based on the average density of nearby
            (within this buffer distance) MAZs, using (pop+jobs*2.5)/acres
        drive_access_output_skim_path: relative path for drive access to transit skims
        output_skim_path: relative path template from run dir for OMX output skims
        output_skim_filename_tmpl: template for OMX filename for a time period. Must include
            {time_period} in the string and end in '.omx'.
        output_skim_matrixname_tmpl: template for matrix names within OMX output skims.
            Should include {time_period}, {mode}, and {property}
        tolls: input toll specification, see HighwayTollsConfig
        maz_to_maz: maz-to-maz shortest path assignment and skim specification,
            see HighwayMazToMazConfig
        classes: highway assignment multi-class setup and skim specification,
            see HighwayClassConfig
        capclass_lookup: index cross-reference table from the link @capclass value
            to the free-flow speed, capacity, and critical speed values
        interchange_nodes_file: relative path to the interchange nodes file, this is
            used for calculating highway reliability
        apply_msa_demand: average highway demand with previous iterations'. Default to True.
        reliability: bool to skim highway reliability. Default to true. If true, assignment
            will be run twice in global iterations 0 (warmstart) and 1, to calculate reliability,
            assignment will be run only once in global iterations 2 and 3,
            reliability skim will stay the same as global iteration 1.
            If false, reliability will not be calculated nor skimmed in all global
            iterations, and the resulting reliability skims will be 0.
    """

    generic_highway_mode_code: str = Field(min_length=1, max_length=1)
    relative_gaps: Tuple[HighwayRelativeGapConfig, ...] = Field()
    max_iterations: int = Field(ge=0)
    area_type_buffer_dist_miles: float = Field(gt=0)
    drive_access_output_skim_path: Optional[str] = Field(default=None)
    output_skim_path: pathlib.Path = Field()
    output_skim_filename_tmpl: str = Field()
    output_skim_matrixname_tmpl: str = Field()
    tolls: HighwayTollsConfig = Field()
    maz_to_maz: HighwayMazToMazConfig = Field()
    classes: Tuple[HighwayClassConfig, ...] = Field()
    capclass_lookup: Tuple[HighwayCapClassConfig, ...] = Field()
    interchange_nodes_file: str = Field()
    apply_msa_demand: bool = True
    reliability: bool = Field(default=True)

    @validator("output_skim_filename_tmpl")
    def valid_skim_template(cls, value):
        """Validate skim template has correct {} and extension."""
        assert (
            "{time_period" in value
        ), f"-> output_skim_filename_tmpl must have {{time_period}}, found {value}."
        assert (
            value[-4:].lower() == ".omx"
        ), f"-> 'output_skim_filename_tmpl must end in '.omx', found {value[-4:].lower() }"
        return value

    @validator("output_skim_matrixname_tmpl")
    def valid_skim_matrix_name_template(cls, value):
        """Validate skim matrix template has correct {}."""
        # f-strings so the offending value is interpolated into the message
        assert (
            "{time_period" in value
        ), f"-> 'output_skim_matrixname_tmpl must have {{time_period}}, found {value}."
        assert (
            "{property" in value
        ), f"-> 'output_skim_matrixname_tmpl must have {{property}}, found {value}."
        assert (
            "{mode" in value
        ), f"-> 'output_skim_matrixname_tmpl must have {{mode}}, found {value}."
        return value

    @validator("capclass_lookup")
    def unique_capclass_numbers(cls, value):
        """Validate list of capclass_lookup has unique .capclass values."""
        capclass_ids = [i.capclass for i in value]
        error_msg = "-> capclass value must be unique in list"
        assert len(capclass_ids) == len(set(capclass_ids)), error_msg
        return value

    @validator("classes", pre=True)
    def unique_class_names(cls, value):
        """Validate list of classes has unique .name values."""
        class_names = [highway_class["name"] for highway_class in value]
        error_msg = "-> name value must be unique in list"
        assert len(class_names) == len(set(class_names)), error_msg
        return value

    @validator("classes")
    def validate_class_mode_excluded_links(cls, value, values):
        """Validate list of classes has unique .mode_code or .excluded_links match."""
        # validate if any mode IDs are used twice, that they have the same excluded links sets
        mode_excluded_links = {values["generic_highway_mode_code"]: set([])}
        for i, highway_class in enumerate(value):
            # maz_to_maz.mode_code must be unique
            if "maz_to_maz" in values:
                assert (
                    highway_class["mode_code"] != values["maz_to_maz"]["mode_code"]
                ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code"
            # make sure that if any mode IDs are used twice, they have the same excluded links sets
            if highway_class.mode_code in mode_excluded_links:
                ex_links1 = highway_class["excluded_links"]
                ex_links2 = mode_excluded_links[highway_class["mode_code"]]
                error_msg = (
                    f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') "
                    f"with different excluded links: {ex_links1} and {ex_links2}"
                )
                assert ex_links1 == ex_links2, error_msg
            mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links
        return value

    @validator("classes")
    def validate_class_keyword_lists(cls, value, values):
        """Validate classes .skims, .toll, and .excluded_links values."""
        if "tolls" not in values:
            return value
        avail_skims = [
            "time",
            "dist",
            "hovdist",
            "tolldist",
            "freeflowtime",
            "rlbty",
            "autotime",
        ]
        available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"]
        avail_toll_attrs = []
        for name in values["tolls"].dst_vehicle_group_names:
            toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"]
            avail_skims.extend(toll_types)
            avail_toll_attrs.extend(["@" + name for name in toll_types])
            available_link_sets.append(f"is_toll_{name}")

        # validate class skim name list and toll attribute against toll setup
        def check_keywords(class_num, key, val, available):
            extra_keys = set(val) - set(available)
            error_msg = (
                f" -> {class_num} -> {key}: unrecognized {key} name(s): "
                f"{','.join(extra_keys)}.  Available names are: {', '.join(available)}"
            )
            assert not extra_keys, error_msg

        for i, highway_class in enumerate(value):
            check_keywords(i, "skim", highway_class["skims"], avail_skims)
            check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs)
            check_keywords(
                i,
                "excluded_links",
                highway_class["excluded_links"],
                available_link_sets,
            )
        return value

unique_capclass_numbers(value)

Validate list of capclass_lookup has unique .capclass values.

Source code in tm2py\config.py
1003
1004
1005
1006
1007
1008
1009
@validator("capclass_lookup")
def unique_capclass_numbers(cls, value):
    """Validate list of capclass_lookup has unique .capclass values."""
    capclass_ids = [i.capclass for i in value]
    error_msg = "-> capclass value must be unique in list"
    assert len(capclass_ids) == len(set(capclass_ids)), error_msg
    return value

unique_class_names(value)

Validate list of classes has unique .name values.

Source code in tm2py\config.py
1011
1012
1013
1014
1015
1016
1017
@validator("classes", pre=True)
def unique_class_names(cls, value):
    """Validate list of classes has unique .name values."""
    class_names = [highway_class["name"] for highway_class in value]
    error_msg = "-> name value must be unique in list"
    assert len(class_names) == len(set(class_names)), error_msg
    return value

valid_skim_matrix_name_template(value)

Validate skim matrix template has correct {}.

Source code in tm2py\config.py
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
@validator("output_skim_matrixname_tmpl")
def valid_skim_matrix_name_template(value):
    """Validate skim matrix template has correct {}."""
    assert (
        "{time_period" in value
    ), "-> 'output_skim_matrixname_tmpl must have {time_period}, found {value}."
    assert (
        "{property" in value
    ), "-> 'output_skim_matrixname_tmpl must have {property}, found {value}."
    assert (
        "{mode" in value
    ), "-> 'output_skim_matrixname_tmpl must have {mode}, found {value}."
    return value

valid_skim_template(value)

Validate skim template has correct {} and extension.

Source code in tm2py\config.py
978
979
980
981
982
983
984
985
986
987
@validator("output_skim_filename_tmpl")
def valid_skim_template(value):
    """Validate skim template has correct {} and extension."""
    assert (
        "{time_period" in value
    ), f"-> output_skim_filename_tmpl must have {{time_period}}', found {value}."
    assert (
        value[-4:].lower() == ".omx"
    ), f"-> 'output_skim_filename_tmpl must end in '.omx', found {value[-4:].lower() }"
    return value

validate_class_keyword_lists(value, values)

Validate classes .skims, .toll, and .excluded_links values.

Source code in tm2py\config.py
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
@validator("classes")
def validate_class_keyword_lists(cls, value, values):
    """Validate classes .skims, .toll, and .excluded_links values."""
    if "tolls" not in values:
        return value
    avail_skims = [
        "time",
        "dist",
        "hovdist",
        "tolldist",
        "freeflowtime",
        "rlbty",
        "autotime",
    ]
    available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"]
    avail_toll_attrs = []
    for name in values["tolls"].dst_vehicle_group_names:
        toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"]
        avail_skims.extend(toll_types)
        avail_toll_attrs.extend(["@" + name for name in toll_types])
        available_link_sets.append(f"is_toll_{name}")

    # validate class skim name list and toll attribute against toll setup
    def check_keywords(class_num, key, val, available):
        extra_keys = set(val) - set(available)
        error_msg = (
            f" -> {class_num} -> {key}: unrecognized {key} name(s): "
            f"{','.join(extra_keys)}.  Available names are: {', '.join(available)}"
        )
        assert not extra_keys, error_msg

    for i, highway_class in enumerate(value):
        check_keywords(i, "skim", highway_class["skims"], avail_skims)
        check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs)
        check_keywords(
            i,
            "excluded_links",
            highway_class["excluded_links"],
            available_link_sets,
        )
    return value

Validate list of classes has unique .mode_code or .excluded_links match.

Source code in tm2py\config.py
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
@validator("classes")
def validate_class_mode_excluded_links(cls, value, values):
    """Validate list of classes has unique .mode_code or .excluded_links match."""
    # validate if any mode IDs are used twice, that they have the same excluded links sets
    mode_excluded_links = {values["generic_highway_mode_code"]: set([])}
    for i, highway_class in enumerate(value):
        # maz_to_maz.mode_code must be unique
        if "maz_to_maz" in values:
            assert (
                highway_class["mode_code"] != values["maz_to_maz"]["mode_code"]
            ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code"
        # make sure that if any mode IDs are used twice, they have the same excluded links sets
        if highway_class.mode_code in mode_excluded_links:
            ex_links1 = highway_class["excluded_links"]
            ex_links2 = mode_excluded_links[highway_class["mode_code"]]
            error_msg = (
                f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') "
                f"with different excluded links: {ex_links1} and {ex_links2}"
            )
            assert ex_links1 == ex_links2, error_msg
        mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links
    return value

Bases: ConfigItem

Highway assignment class definition.

Note that excluded_links, skims and toll attribute names include vehicle groups (“{vehicle}”) which reference the list of highway.toll.dst_vehicle_group_names (see HighwayTollsConfig). The default example model config uses: “da”, “sr2”, “sr3”, “vsm”, sml”, “med”, “lrg”

Example single class config

name = “da” description= “drive alone” mode_code= “d” [[highway.classes.demand]] source = “household” name = “SOV_GP_{period}” [[highway.classes.demand]] source = “air_passenger” name = “da” [[highway.classes.demand]] source = “internal_external” name = “da” excluded_links = [“is_toll_da”, “is_sr2”], value_of_time = 18.93, # $ / hr operating_cost_per_mile = 17.23, # cents / mile toll = [“@bridgetoll_da”] skims = [“time”, “dist”, “freeflowtime”, “bridgetoll_da”],

Properties
Source code in tm2py\config.py
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
@dataclass(frozen=True)
class HighwayClassConfig(ConfigItem):
    """Highway assignment class definition.

    Note that excluded_links, skims and toll attribute names include
    vehicle groups ("{vehicle}") which reference the list of
    highway.toll.dst_vehicle_group_names (see HighwayTollsConfig).
    The default example model config uses:
    "da", "sr2", "sr3", "vsm", "sml", "med", "lrg"

    Example single class config:
        name = "da"
        description= "drive alone"
        mode_code= "d"
        [[highway.classes.demand]]
            source = "household"
            name = "SOV_GP_{period}"
        [[highway.classes.demand]]
            source = "air_passenger"
            name = "da"
        [[highway.classes.demand]]
            source = "internal_external"
            name = "da"
        excluded_links = ["is_toll_da", "is_sr2"],
        value_of_time = 18.93,  # $ / hr
        operating_cost_per_mile = 17.23,  # cents / mile
        toll = ["@bridgetoll_da"]
        skims = ["time", "dist", "freeflowtime", "bridgetoll_da"],

    Properties:
        name: short (up to 10 character) unique reference name for the class.
            used in attribute and matrix names
        description: longer text used in attribute and matrix descriptions
        mode_code: single character mode, used to generate link.modes to
            identify subnetwork, generated from "excluded_links" keywords.
            Should be unique in list of classes, unless multiple classes
            have identical excluded_links specification. Cannot be the
            same as used for highway.maz_to_maz.mode_code.
        value_of_time: value of time for this class in $ / hr
        operating_cost_per_mile: vehicle operating cost in cents / mile
        demand: list of OMX file and matrix keyname references,
            see ClassDemandConfig
        excluded_links: list of keywords to identify links to exclude from
            this class' available subnetwork (generate link.modes)
            Options are:
                - "is_sr": is reserved for shared ride (@useclass in 2,3)
                - "is_sr2": is reserved for shared ride 2+ (@useclass == 2)
                - "is_sr3": is reserved for shared ride 3+ (@useclass == 3)
                - "is_auto_only": is reserved for autos (non-truck) (@useclass != 1)
                - "is_toll_{vehicle}": has a value (non-bridge) toll for the {vehicle} toll group
        toll: list of additional toll cost link attribute (values stored in cents),
            summed, one of "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}"
        toll_factor: optional, factor to apply to toll values in cost calculation
        pce: optional, passenger car equivalent to convert assigned demand in
            PCE units to vehicles for total assigned vehicle calculations
        skims: list of skim matrices to generate
            Options are:
                "time": pure travel time in minutes
                "dist": distance in miles
                "hovdist": distance on HOV facilities (is_sr2 or is_sr3)
                "tolldist": distance on toll facilities
                    (@tollbooth > highway.tolls.valuetoll_start_tollbooth_code)
                "freeflowtime": free flow travel time in minutes
                "bridgetoll_{vehicle}": bridge tolls, {vehicle} refers to toll group
                "valuetoll_{vehicle}": other, non-bridge tolls, {vehicle} refers to toll group
    """

    name: str = Field(min_length=1, max_length=10)
    veh_group_name: str = Field(min_length=1, max_length=10)
    description: Optional[str] = Field(default="")
    mode_code: str = Field(min_length=1, max_length=1)
    value_of_time: float = Field(gt=0)
    operating_cost_per_mile: float = Field(ge=0)
    pce: Optional[float] = Field(default=1.0, gt=0)
    # Note that excluded_links, skims, and tolls validated under HighwayConfig to include
    # highway.toll.dst_vehicle_group_names names
    excluded_links: Tuple[str, ...] = Field()
    skims: Tuple[str, ...] = Field()
    toll: Tuple[str, ...] = Field()
    toll_factor: Optional[float] = Field(default=None, gt=0)
    demand: Tuple[ClassDemandConfig, ...] = Field()

Bases: ConfigItem

Highway assignment and skim input tolls and related parameters.

Properties
Source code in tm2py\config.py
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
@dataclass(frozen=True)
class HighwayTollsConfig(ConfigItem):
    """Highway assignment and skim input tolls and related parameters.

    Properties:
        file_path: source relative file path for the highway tolls index CSV
        valuetoll_start_tollbooth_code: tollbooth separates links with "bridge" tolls
            (index < this value) vs. "value" tolls. These toll attributes
            can then be referenced separately in the highway.classes[].tolls
            list
        src_vehicle_group_names: name used for the vehicle toll CSV column IDs,
            of the form "toll{period}_{vehicle}"
        dst_vehicle_group_names: list of names used in destination network
            for the corresponding vehicle group. Length of list must be the same
            as src_vehicle_group_names. Used for toll related attributes and
            resulting skim matrices. Cross-referenced in list of highway.classes[],
            valid keywords for:
                excluded_links: "is_toll_{vehicle}"
                tolls: "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}"
                skims: "bridgetoll_{vehicle}", "valuetoll_{vehicle}"
    """

    file_path: pathlib.Path = Field()
    valuetoll_start_tollbooth_code: int = Field(gt=1)
    src_vehicle_group_names: Tuple[str, ...] = Field()
    dst_vehicle_group_names: Tuple[str, ...] = Field()

    @validator("dst_vehicle_group_names", always=True)
    def dst_vehicle_group_names_length(cls, value, values):
        """Validate dst_vehicle_group_names has same length as src_vehicle_group_names."""
        # skip the cross-check if src_vehicle_group_names failed its own validation
        if "src_vehicle_group_names" in values:
            assert len(value) == len(
                values["src_vehicle_group_names"]
            ), "dst_vehicle_group_names must be same length as src_vehicle_group_names"
            # 4-character limit keeps derived attribute/matrix names within bounds
            assert all(
                [len(v) <= 4 for v in value]
            ), "dst_vehicle_group_names must be 4 characters or less"
        return value

dst_vehicle_group_names_length(value, values)

Validate dst_vehicle_group_names has same length as src_vehicle_group_names.

Source code in tm2py\config.py
841
842
843
844
845
846
847
848
849
850
851
@validator("dst_vehicle_group_names", always=True)
def dst_vehicle_group_names_length(cls, value, values):
    """Validate dst_vehicle_group_names has same length as src_vehicle_group_names."""
    if "src_vehicle_group_names" in values:
        assert len(value) == len(
            values["src_vehicle_group_names"]
        ), "dst_vehicle_group_names must be same length as src_vehicle_group_names"
        assert all(
            [len(v) <= 4 for v in value]
        ), "dst_vehicle_group_names must be 4 characters or less"
    return value

Bases: ConfigItem

Grouping of counties for assignment and demand files.

Properties
Source code in tm2py\config.py
867
868
869
870
871
872
873
874
875
876
877
@dataclass(frozen=True)
class DemandCountyGroupConfig(ConfigItem):
    """Grouping of counties for assignment and demand files.

    Properties:
        number: id number for this group, must be unique
        counties: list of one or more county names
    """

    # unique group id; referenced by demand file name templates
    number: int = Field()
    counties: Tuple[COUNTY_NAMES, ...] = Field()

Assigns and skims MAZ-to-MAZ demand along shortest generalized cost path.

MAZ to MAZ demand is read in from separate OMX matrices as defined under the config table highway.maz_to_maz.demand_county_groups,

The demand is expected to be short distance (e.g. <0.5 miles), or within the same TAZ. The demand is grouped into bins of origin -> all destinations, by distance (straight-line) to furthest destination. This limits the size of the shortest path calculated to the minimum required. The bin edges have been predefined after testing as (in miles): [0.0, 0.9, 1.2, 1.8, 2.5, 5.0, 10.0, max_dist]

Input: Emme network with: Link attributes: - time attribute, either timau (resulting VDF congested time) or @free_flow_time. Node attributes: @maz_id, x, y, and #node_county. Demand matrices under highway.maz_to_maz.demand_file, which may include placeholders, e.g. auto_{period}/MAZ_AUTO_{number}_{period}.omx

Output: The resulting MAZ-MAZ flows are saved in link @maz_flow which is used as background traffic in the equilibrium Highway assignment.

AssignMAZSPDemand

Bases: Component

MAZ-to-MAZ shortest-path highway assignment.

Calculates shortest path between MAZs with demand in the Emme network and assigns flow.

Source code in tm2py\components\network\highway\highway_maz.py
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
class AssignMAZSPDemand(Component):
    """MAZ-to-MAZ shortest-path highway assignment.

    Calculates shortest path between MAZs with demand in the Emme network
    and assigns flow.
    """

    # skip Too many instance attributes recommendation, it is OK as is
    # pylint: disable=R0902

    def __init__(self, controller: RunController):
        """MAZ-to-MAZ shortest-path highway assignment.

        Args:
            controller: parent Controller object
        """

        super().__init__(controller)
        self.config = self.controller.config.highway.maz_to_maz
        self._debug = False

        # bins: performance parameter: crow-fly distance bins
        #       to limit shortest path calculation by origin to furthest destination
        #       semi-exposed for performance testing
        self._bin_edges = _default_bin_edges

        # Lazily-loaded Emme Properties
        self._highway_emmebank = None
        self._eb_dir = None

        # Internal attributes to track data through the sequence of steps
        self._scenario = None
        # cache of county name -> list of MAZ nodes, built on first use
        self._mazs = None
        # origin node -> list of {"orig", "dest", "dem", "dist"} records
        self._demand = _defaultdict(list)
        # largest crow-fly O-D distance seen (feet), used for binning
        self._max_dist = 0
        self._network = None
        self._root_index = None
        self._leaf_index = None

    @property
    def highway_emmebank(self):
        """Highway Emmebank, loaded lazily from the Emme manager."""
        if self._highway_emmebank is None:
            self._highway_emmebank = self.controller.emme_manager.highway_emmebank
        return self._highway_emmebank

    @property
    def eb_dir(self):
        """Directory containing the highway Emmebank (used for temp path files)."""
        if self._eb_dir is None:
            self._eb_dir = os.path.dirname(self.highway_emmebank.path)
        return self._eb_dir

    def validate_inputs(self):
        """Validate inputs files are correct, raise if an error is found."""
        # TODO
        pass

    @LogStartEnd()
    def run(self):
        """Run MAZ-to-MAZ shortest path assignment."""

        county_groups = {}
        for group in self.config.demand_county_groups:
            county_groups[group.number] = group.counties
        for time in self.time_period_names:
            self._scenario = self.highway_emmebank.scenario(time)
            with self._setup(time):
                self._prepare_network()
                for i, names in county_groups.items():
                    maz_ids = self._get_county_mazs(names)
                    if len(maz_ids) == 0:
                        self.logger.log(
                            f"warning: no mazs for counties {', '.join(names)}"
                        )
                        continue
                    self._process_demand(time, i, maz_ids)
                demand_bins = self._group_demand()
                for i, demand_group in enumerate(demand_bins):
                    self._find_roots_and_leaves(demand_group["demand"])
                    self._set_link_cost_maz()
                    self._run_shortest_path(time, i, demand_group["dist"])
                    self._assign_flow(time, i, demand_group["demand"])

    @_context
    def _setup(self, time: str):
        """Context setup / teardown, initializes internal attributes.

        Creates the temporary Emme attributes used by the assignment and,
        unless debugging, clears internal state and deletes the generated
        shortest-path files on exit.

        Args:
            time: name of the time period
        """
        self._mazs = None
        self._demand = _defaultdict(list)
        self._max_dist = 0
        self._network = None
        self._root_index = None
        self._leaf_index = None
        attributes = [
            ("LINK", "@link_cost", "total cost MAZ-MAZ"),
            ("LINK", "@link_cost_maz", "cost MAZ-MAZ, unused MAZs blocked"),
            ("NODE", "@maz_root", "Flag for MAZs which are roots"),
            ("NODE", "@maz_leaf", "Flag for MAZs which are leaves"),
        ]
        for domain, name, desc in attributes:
            self.logger.log(f"Create temp {domain} attr: {name}, {desc}", level="TRACE")
        with self.controller.emme_manager.temp_attributes_and_restore(
            self._scenario, attributes
        ):
            try:
                with self.logger.log_start_end(
                    f"MAZ assign for period {time} scenario {self._scenario}"
                ):
                    yield
            finally:
                if not self._debug:
                    self._mazs = None
                    self._demand = None
                    self._network = None
                    self._root_index = None
                    self._leaf_index = None
                    # delete sp path files
                    for bin_no in range(len(self._bin_edges)):
                        file_path = os.path.join(self.eb_dir, f"sp_{time}_{bin_no}.ebp")
                        if os.path.exists(file_path):
                            os.remove(file_path)

    def _prepare_network(self):
        """Calculate link cost (travel time + bridge tolls + operating cost) and load network.

        Reads Emme network from disk for later node lookups. Optimized to only load
        attribute values of interest, additional attributes must be added in
        order to be read from disk.
        """
        # prefer congested time (timau) when a prior assignment has run
        if self._scenario.has_traffic_results:
            time_attr = "(@free_flow_time.max.timau)"
        else:
            time_attr = "@free_flow_time"
        self.logger.log(f"Calculating link costs using time {time_attr}", level="DEBUG")
        vot = self.config.value_of_time
        op_cost = self.config.operating_cost_per_mile
        net_calc = NetworkCalculator(self.controller, self._scenario)
        report = net_calc(
            "@link_cost", f"{time_attr} + 0.6 / {vot} * (length * {op_cost})"
        )
        self.logger.log("Link cost calculation report", level="TRACE")
        self.logger.log_dict(report, level="TRACE")
        self._network = self.controller.emme_manager.get_network(
            self._scenario, {"NODE": ["@maz_id", "x", "y", "#node_county"], "LINK": []}
        )
        self._network.create_attribute("LINK", "temp_flow")

    def _get_county_mazs(self, counties: List[str]) -> List[EmmeNode]:
        """Get all MAZ nodes which are located in one of these counties.

        Uses the node attribute #node_county to identify the node location.
        Name must be an exact match. Caches the mapping of county names
        to nodes so nodes are processed only once.

        Args:
            counties: list of county names

        Returns:
            List of MAZ nodes (Emme Node) which are in these counties.
        """
        self.logger.log(
            f"Processing county MAZs for {', '.join(counties)}", level="DETAIL"
        )
        network = self._network
        # NOTE: every maz must have a valid #node_county
        if self._mazs is None:
            self._mazs = _defaultdict(list)
            for node in network.nodes():
                if node["@maz_id"]:
                    self._mazs[node["#node_county"]].append(node)
        mazs = []
        for county in counties:
            mazs.extend(self._mazs[county])
        # highway emme network does not include the 5 inaccessible MAZs, but the
        # trip table is indexed by the full MAZ list, so plain-dict placeholders
        # are inserted to keep the demand indexing aligned
        # (assumes no demand to/from these MAZs — they have no x/y coordinates)
        # https://app.asana.com/0/12291104512575/1199091221400653/f
        if "San Francisco" in counties:
            mazs.extend(
                [
                    {"@maz_id": 10186},
                    {"@maz_id": 16084},
                    {"@maz_id": 111432},
                    {"@maz_id": 111433},
                ]
            )
        if "Contra Costa" in counties:
            mazs.extend([{"@maz_id": 411178}])
        self.logger.log(f"Num MAZs {len(mazs)}", level="DEBUG")
        return sorted(mazs, key=lambda n: n["@maz_id"])

    def _process_demand(self, time: str, index: int, maz_ids: List[EmmeNode]):
        """Loads the demand from file and groups by origin node.

        Sets the demand to self._demand for later processing, grouping the demand in
        a dictionary by origin node (Emme Node object) to list of dictionaries
        {"orig": orig_node, "dest": dest_node, "dem": demand, "dist": dist}

        Args:
            time: time period name
            index: group index of the demand file, used to find the file by name
            maz_ids: indexed list of MAZ ID nodes for the county group
                (active counties for this demand file)
        """
        self.logger.log(
            f"Process demand for time period {time} index {index}", level="DETAIL"
        )
        data = self._read_demand_array(time, index)
        origins, destinations = data.nonzero()
        self.logger.log(
            f"non-zero origins {len(origins)} destinations {len(destinations)}",
            level="DEBUG",
        )
        total_demand = 0
        for orig, dest in zip(origins, destinations):
            # skip intra-maz demand
            if orig == dest:
                continue
            # skip demand whose zone index is outside the network MAZ list
            # (mismatch between network county tags and MAZ SE data)
            if orig > len(maz_ids) - 1:
                self.logger.log(
                    f"Network MAZ @maz_id={orig} #county_name does not match its county name in the input MAZ SE data.",
                    level="DEBUG",
                )
                continue
            if dest > len(maz_ids) - 1:
                self.logger.log(
                    f"Network MAZ @maz_id={dest} #county_name does not match its county name in the input MAZ SE data.",
                    level="DEBUG",
                )
                continue
            orig_node = maz_ids[orig]
            dest_node = maz_ids[dest]
            # crow-fly distance in coordinate units (feet)
            dist = _sqrt(
                (dest_node.x - orig_node.x) ** 2 + (dest_node.y - orig_node.y) ** 2
            )
            if (dist / 5280) > self.config.max_distance:
                self.logger.log(
                    f"MAZ demand from {orig} to {dest} is over {self.config.max_distance} miles, do not assign",
                    level="DEBUG",
                )
                continue
            if dist > self._max_dist:
                self._max_dist = dist
            demand = data[orig][dest]
            total_demand += demand
            self._demand[orig_node].append(
                {
                    "orig": orig_node,
                    "dest": dest_node,
                    "dem": demand,
                    "dist": dist,
                }
            )
        self.logger.log(f"Max distance found {self._max_dist}", level="DEBUG")
        self.logger.log(f"Total inter-zonal demand {total_demand}", level="DEBUG")

    def _read_demand_array(self, time: str, index: int) -> NumpyArray:
        """Load the demand from file with the specified time and index name.

        Args:
            time: time period name
            index: group index of the demand file, used to find the file by name

        Returns:
            Demand matrix as a numpy array.
        """
        file_path_tmplt = self.get_abs_path(self.config.demand_file)
        omx_file_path = self.get_abs_path(
            file_path_tmplt.format(
                period=time, number=index, iter=self.controller.iteration
            )
        )
        self.logger.log(f"Reading demand from {omx_file_path}", level="DEBUG")
        # context manager closes the OMX file on exit
        with OMXManager(omx_file_path, "r") as omx_file:
            demand_array = omx_file.read(f"MAZ_AUTO_{index}_{time}")
        return demand_array

    def _group_demand(
        self,
    ) -> List[Dict[str, Union[float, List[Dict[str, Union[float, EmmeNode]]]]]]:
        """Process the demand loaded from files \
            and create groups based on the origin to the furthest destination with demand.

        Returns:
            List of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}

        """
        self.logger.log("Grouping demand in distance buckets", level="DETAIL")
        # group demand from same origin into distance bins by furthest
        # distance destination to limit shortest path search radius
        bin_edges = self._bin_edges[:]
        if bin_edges[-1] < self._max_dist / 5280.0:
            bin_edges.append(self._max_dist / 5280.0)

        demand_groups = [{"dist": edge, "demand": []} for edge in bin_edges[1:]]
        for data in self._demand.values():
            max_dist = max(entry["dist"] for entry in data) / 5280.0
            for group in demand_groups:
                if max_dist < group["dist"]:
                    group["demand"].extend(data)
                    break
        for group in demand_groups:
            self.logger.log(
                f"bin dist {group['dist']}, size {len(group['demand'])}", level="DEBUG"
            )
        # Filter out groups without any demand
        demand_groups = [group for group in demand_groups if group["demand"]]
        return demand_groups

    def _find_roots_and_leaves(self, demand: List[Dict[str, Union[float, EmmeNode]]]):
        """Label available MAZ root nodes and leaf nodes for the path calculation.

        The MAZ nodes which are found as origins in the demand are "activated"
        by setting @maz_root to non-zero, and similarly the leaves have @maz_leaf
        set to non-zero.

        Args:
            demand: list of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}
        """
        network = self._network
        # NOTE(review): LINK attribute "maz_cost" is initialized here but not
        # referenced elsewhere in this class — confirm it is still needed
        attrs_to_init = [("NODE", ["@maz_root", "@maz_leaf"]), ("LINK", ["maz_cost"])]
        for domain, attrs in attrs_to_init:
            for name in attrs:
                if name in network.attributes(domain):
                    network.delete_attribute(domain, name)
                network.create_attribute(domain, name)
        root_maz_ids = {}
        leaf_maz_ids = {}
        for data in demand:
            o_node, d_node = data["orig"], data["dest"]
            root_maz_ids[o_node.number] = o_node["@maz_root"] = o_node["@maz_id"]
            leaf_maz_ids[d_node.number] = d_node["@maz_leaf"] = d_node["@maz_id"]
        # map node number -> root / leaf position in the shortest-path output
        self._root_index = {p: i for i, p in enumerate(sorted(root_maz_ids.keys()))}
        self._leaf_index = {q: i for i, q in enumerate(sorted(leaf_maz_ids.keys()))}
        self.controller.emme_manager.copy_attribute_values(
            self._network, self._scenario, {"NODE": ["@maz_root", "@maz_leaf"]}
        )

    def _set_link_cost_maz(self):
        """Set link cost used in the shortest path forbidden using unavailable connectors.

        Copy the pre-calculated cost @link_cost to @link_cost_maz,
        setting value to 1e20 on connectors to unused zone leaves / from
        unused roots.
        """
        # forbid egress from MAZ nodes which are not demand roots /
        #        access to MAZ nodes which are not demand leafs
        net_calc = NetworkCalculator(self.controller, self._scenario)
        net_calc.add_calc("@link_cost_maz", "@link_cost")
        net_calc.add_calc("@link_cost_maz", "1e20", "@maz_root=0 and !@maz_id=0")
        net_calc.add_calc("@link_cost_maz", "1e20", "@maz_leafj=0 and !@maz_idj=0")
        net_calc.run()

    @LogStartEnd(level="DETAIL")
    def _run_shortest_path(self, time: str, bin_no: int, max_radius: float):
        """Run the shortest path tool to generate paths between the marked nodes.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment
            max_radius: max unit coordinate distance to limit search tree
        """
        shortest_paths_tool = self.controller.emme_manager.tool(
            "inro.emme.network_calculation.shortest_path"
        )
        max_radius = max_radius * 5280 + 100  # add some buffer for rounding error
        ext = "ebp" if _USE_BINARY else "txt"
        file_name = f"sp_{time}_{bin_no}.{ext}"

        spec = {
            "type": "SHORTEST_PATH",
            "modes": [self.config.mode_code],
            "root_nodes": "@maz_root",
            "leaf_nodes": "@maz_leaf",
            "link_cost": "@link_cost_maz",
            "path_constraints": {
                "max_radius": max_radius,
                "uturn_allowed": False,
                "through_leaves": False,
                "through_centroids": False,
                "exclude_forbidden_turns": False,
            },
            "results": {
                "skim_output": {
                    "file": "",
                    "format": "TEXT",
                    "return_numpy": False,
                    "analyses": [],
                },
                "path_output": {
                    "format": "BINARY" if _USE_BINARY else "TEXT",
                    "file": os.path.join(self.eb_dir, file_name),
                },
            },
            "performance_settings": {
                "number_of_processors": self.controller.num_processors,
                "direction": "FORWARD",
                "method": "STANDARD",
            },
        }
        shortest_paths_tool(spec, self._scenario)

    def _assign_flow(
        self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]]
    ):
        """Assign the demand along the paths generated from the shortest path tool.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment
            demand: list of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}
        """
        if _USE_BINARY:
            self._assign_flow_binary(time, bin_no, demand)
        else:
            self._assign_flow_text(time, bin_no, demand)

    def _assign_flow_text(
        self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]]
    ):
        """Assign the demand along the paths generated from the shortest path tool.

        The paths are read from a text format file, see Emme help for details.
        Demand is summed in self._network (in memory) using temp_flow attribute
        and written to scenario (Emmebank / disk) @maz_flow.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment
            demand: list of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}
        """
        paths = self._load_text_format_paths(time, bin_no)
        not_assigned, assigned = 0, 0
        for data in demand:
            orig, dest, dem = data["orig"].number, data["dest"].number, data["dem"]
            path = paths.get(orig, {}).get(dest)
            if path is None:
                # no path found, disconnected zone
                not_assigned += dem
                continue
            i_node = orig
            for j_node in path:
                link = self._network.link(i_node, j_node)
                link["temp_flow"] += dem
                i_node = j_node
            assigned += dem
        self.logger.log(f"ASSIGN bin {bin_no}: total: {len(demand)}", level="DEBUG")
        self.logger.log(
            f"assigned: {assigned}, not assigned: {not_assigned}", level="DEBUG"
        )

        # write to @maz_flow to match the docstring and the binary-path variant
        # (was "data1", which left @maz_flow unset when using the text format)
        self.controller.emme_manager.copy_attribute_values(
            self._network,
            self._scenario,
            {"LINK": ["temp_flow"]},
            {"LINK": ["@maz_flow"]},
        )

    def _load_text_format_paths(
        self, time: str, bin_no: int
    ) -> Dict[int, Dict[int, List[int]]]:
        """Load all paths from text file and return as nested dictionary.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment

        Returns:
            All paths as a nested dictionary, path = paths[origin][destination],
            using the node IDs as integers.
        """
        paths = _defaultdict(dict)
        with open(
            os.path.join(self.eb_dir, f"sp_{time}_{bin_no}.txt"),
            "r",
            encoding="utf8",
        ) as paths_file:
            # each line is: root node, intermediate nodes..., leaf node
            for line in paths_file:
                nodes = [int(x) for x in line.split()]
                paths[nodes[0]][nodes[-1]] = nodes[1:]
        return paths

    def _assign_flow_binary(
        self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]]
    ):
        """Assign the demand along the paths generated from the shortest path tool.

        The paths are read from a binary format file, see Emme help for details.
        Demand is summed in self._network (in memory) using temp_flow attribute
        and written to scenario (Emmebank / disk) @maz_flow.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment
            demand: list of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}
        """
        file_name = f"sp_{time}_{bin_no}.ebp"
        with open(os.path.join(self.eb_dir, file_name), "rb") as paths_file:
            # read set of path pointers by Orig-Dest sequence from file
            offset, leaves_nb, path_indicies = self._get_path_indices(paths_file)
            assigned = 0
            not_assigned = 0
            # header entries are 64-bit (8-byte) values
            bytes_read = offset * 8
            # for all orig-dest pairs with demand, load path from file
            for data in demand:
                # get file position based on orig-dest index
                start, end = self._get_path_location(
                    data["orig"].number, data["dest"].number, leaves_nb, path_indicies
                )
                # no path found, disconnected zone
                if start == end:
                    not_assigned += data["dem"]
                    continue
                # node IDs are 32-bit (4-byte) values following the 8-byte header
                paths_file.seek(start * 4 + offset * 8)
                self._assign_path_flow(paths_file, start, end, data["dem"])
                assigned += data["dem"]
                bytes_read += (end - start) * 4
        self.controller.emme_manager.copy_attribute_values(
            self._network,
            self._scenario,
            {"LINK": ["temp_flow"]},
            {"LINK": ["@maz_flow"]},
        )
        self.logger.log(
            f"ASSIGN bin {bin_no}, total {len(demand)}, assign "
            f"{assigned}, not assign {not_assigned}, bytes {bytes_read}",
            level="DEBUG",
        )

    @staticmethod
    def _get_path_indices(paths_file: BinaryIO) -> "tuple[int, int, _array.array]":
        """Get the path header indices.

        See the Emme Shortest path tool doc for additional details on reading
        this file.

        Args:
            paths_file: binary file access to the generated paths file

        Returns:
            2 ints + array of ints: offset, leaves_nb, path_indicies
            offset: starting index to read the paths
            leaves_nb: number of leaves in the shortest path file
            path_indicies: array of the start index for each root, leaf path in paths_file.
        """
        # read first 4 integers from file (Q=64-bit unsigned integers)
        header = _array.array("Q")
        header.fromfile(paths_file, 4)
        roots_nb, leaves_nb = header[2:4]
        # Load sequence of path indices (positions by orig-dest index),
        # pointing to list of path node IDs in file
        path_indicies = _array.array("Q")
        path_indicies.fromfile(paths_file, roots_nb * leaves_nb + 1)
        offset = roots_nb * leaves_nb + 1 + 4
        return offset, leaves_nb, path_indicies

    def _get_path_location(
        self,
        orig: int,
        dest: int,
        leaves_nb: int,
        path_indicies: _array.array,
    ) -> "tuple[int, int]":
        """Get the location in the paths_file to read.

        Args:
            orig: node number (ID) of the origin MAZ to query the path
            dest: node number (ID) of the destination MAZ to query the path
            leaves_nb: number of leaves
            path_indicies: array of the start index for each root, leaf path in paths_file.

        Returns:
            Two integers, start, end
            start: starting index to read Node ID bytes from paths_file
            end: ending index to read bytes from paths_file
        """
        p_index = self._root_index[orig]
        q_index = self._leaf_index[dest]
        index = p_index * leaves_nb + q_index
        start = path_indicies[index]
        end = path_indicies[index + 1]
        return start, end

    def _assign_path_flow(
        self, paths_file: BinaryIO, start: int, end: int, demand: float
    ):
        """Add demand to link temp_flow for the path.

        Args:
            paths_file: binary file access to read path from
            start: starting index to read Node ID bytes from paths_file
            end: ending index to read bytes from paths_file
            demand: flow demand to add on link
        """
        # load sequence of Node IDs which define the path (L=32-bit unsigned integers)
        path = _array.array("L")
        path.fromfile(paths_file, end - start)
        # process path to sequence of links and add flow
        path_iter = iter(path)
        i_node = next(path_iter)
        for j_node in path_iter:
            link = self._network.link(i_node, j_node)
            link["temp_flow"] += demand
            i_node = j_node

__init__(controller)

MAZ-to-MAZ shortest-path highway assignment.

Parameters:

Name Type Description Default
controller RunController

parent Controller object

required
Source code in tm2py\components\network\highway\highway_maz.py
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
def __init__(self, controller: RunController):
    """MAZ-to-MAZ shortest-path highway assignment.

    Args:
        controller: parent Controller object
    """

    super().__init__(controller)
    self.config = self.controller.config.highway.maz_to_maz
    self._debug = False

    # bins: performance parameter: crow-fly distance bins
    #       to limit shortest path calculation by origin to furthest destination
    #       semi-exposed for performance testing
    self._bin_edges = _default_bin_edges

    # Lazily-loaded Emme Properties
    self._highway_emmebank = None
    self._eb_dir = None

    # Internal attributes to track data through the sequence of steps
    self._scenario = None
    self._mazs = None
    self._demand = _defaultdict(lambda: [])
    self._max_dist = 0
    self._network = None
    self._root_index = None
    self._leaf_index = None

run()

Run MAZ-to-MAZ shortest path assignment.

Source code in tm2py\components\network\highway\highway_maz.py
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
@LogStartEnd()
def run(self):
    """Run MAZ-to-MAZ shortest path assignment."""

    county_groups = {}
    for group in self.config.demand_county_groups:
        county_groups[group.number] = group.counties
    for time in self.time_period_names:
        self._scenario = self.highway_emmebank.scenario(time)
        with self._setup(time):
            self._prepare_network()
            for i, names in county_groups.items():
                maz_ids = self._get_county_mazs(names)
                if len(maz_ids) == 0:
                    self.logger.log(
                        f"warning: no mazs for counties {', '.join(names)}"
                    )
                    continue
                self._process_demand(time, i, maz_ids)
            demand_bins = self._group_demand()
            for i, demand_group in enumerate(demand_bins):
                self._find_roots_and_leaves(demand_group["demand"])
                self._set_link_cost_maz()
                self._run_shortest_path(time, i, demand_group["dist"])
                self._assign_flow(time, i, demand_group["demand"])

validate_inputs()

Validate inputs files are correct, raise if an error is found.

Source code in tm2py\components\network\highway\highway_maz.py
111
112
113
114
def validate_inputs(self):
    """Validate inputs files are correct, raise if an error is found."""
    # TODO
    pass

SkimMAZCosts

Bases: Component

MAZ-to-MAZ shortest-path skim of time, distance and toll.

Source code in tm2py\components\network\highway\highway_maz.py
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
class SkimMAZCosts(Component):
    """MAZ-to-MAZ shortest-path skim of time, distance and toll."""

    def __init__(self, controller: RunController):
        """MAZ-to-MAZ shortest-path skim of time, distance and toll.

        Args:
            controller: parent RunController object
        """
        super().__init__(controller)
        self.config = self.controller.config.highway.maz_to_maz
        # TODO add config requirement that skim_period must be a valid time period
        # Lazily-initialized state: see the scenario / highway_emmebank
        # properties and _prepare_network.
        self._scenario = None
        self._network = None
        self._highway_emmebank = None

    @property
    def highway_emmebank(self):
        """Highway Emme database, fetched from the Emme manager on first access."""
        if self._highway_emmebank is None:
            self._highway_emmebank = self.controller.emme_manager.highway_emmebank
        return self._highway_emmebank

    @property
    def scenario(self):
        """Emme scenario for config.skim_period, loaded on first access."""
        if self._scenario is None:
            self._scenario = self.highway_emmebank.scenario(self.config.skim_period)
        return self._scenario

    def validate_inputs(self):
        """Validate inputs files are correct, raise if an error is found."""
        # TODO: implement input validation checks (currently a no-op stub)
        pass

    @LogStartEnd()
    def run(self):
        """Run shortest path skims for all available MAZ-to-MAZ O-D pairs.

        Runs a shortest path builder for each county, using a maz_skim_cost
        to limit the search. The valid gen cost (time + cost), distance and toll (drive alone)
        are written to CSV at the output_skim_file path:
        FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL

        The following config inputs are used directly in this component. Note also
        that the network mode_code is prepared in the highway_network component
        using the excluded_links.

        config.highway.maz_to_maz:
            skim_period: name of the period used for the skim, must match one of the
                defined config.time_periods
            demand_county_groups: used for the list of counties, creates a list out
                of all listed counties under [].counties
            output_skim_file: relative path to save the skims
            value_of_time: value of time used to convert tolls and auto operating cost
            operating_cost_per_mile: auto operating cost
            max_skim_cost: max cost value used to limit the shortest path search
            mode_code:
        """

        # prepare output file and write header
        output = self.get_abs_path(self.config.output_skim_file)
        os.makedirs(os.path.dirname(output), exist_ok=True)
        with open(output, "w", encoding="utf8") as output_file:
            output_file.write("FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL\n")
        counties = []
        for group in self.config.demand_county_groups:
            counties.extend(group.counties)
        with self._setup():
            self._prepare_network()
            for county in counties:
                num_roots = self._mark_roots(county)
                if num_roots == 0:
                    # no MAZ roots in this county: nothing to skim
                    continue
                sp_values = self._run_shortest_path()
                self._export_results(sp_values)

    @_context
    def _setup(self):
        """Creates the temp attributes used in the component."""
        attributes = [
            ("LINK", "@link_cost", "total cost MAZ-MAZ"),
            ("NODE", "@maz_root", "selected roots (origins)"),
        ]
        with self.controller.emme_manager.temp_attributes_and_restore(
            self.scenario, attributes
        ):
            try:
                yield
            finally:
                self._network = None  # clear network obj ref to free memory

    @LogStartEnd(level="DEBUG")
    def _prepare_network(self):
        """Calculates the link cost in @link_cost and loads the network to self._network."""
        # Use the scenario property (not self._scenario directly) so the
        # scenario is loaded on demand even if this method runs first.
        net_calc = NetworkCalculator(self.controller, self.scenario)
        if self.scenario.has_traffic_results:
            time_attr = "(@free_flow_time.max.timau)"
        else:
            time_attr = "@free_flow_time"
        self.logger.log(f"Time attribute {time_attr}", level="DEBUG")
        vot = self.config.value_of_time
        op_cost = self.config.operating_cost_per_mile
        # 0.6 presumably converts cents/mile over $/hr into minutes
        # (60 min/hr / 100 cents/$) -- TODO confirm against config units
        net_calc("@link_cost", f"{time_attr} + 0.6 / {vot} * (length * {op_cost})")
        self._network = self.controller.emme_manager.get_network(
            self.scenario, {"NODE": ["@maz_id", "#node_county"]}
        )

    def _mark_roots(self, county: str) -> int:
        """Mark the available roots in the county."""
        count_roots = 0
        for node in self._network.nodes():
            if node["@maz_id"] > 0 and node["#node_county"] == county:
                node["@maz_root"] = node["@maz_id"]
                count_roots += 1
            else:
                node["@maz_root"] = 0
        # push the updated @maz_root values back to the Emme scenario
        values = self._network.get_attribute_values("NODE", ["@maz_root"])
        self.scenario.set_attribute_values("NODE", ["@maz_root"], values)
        return count_roots

    @LogStartEnd(level="DETAIL")
    def _run_shortest_path(self) -> Dict[str, NumpyArray]:
        """Run shortest paths tool and return dictionary of skim results name, numpy arrays.

        O-D pairs are limited by a max cost value from config.highway.maz_to_maz.max_skim_cost,
        from roots marked by @maz_root to all available leaves at @maz_id.

        Returns:
            A dictionary with keys "COST", "DISTANCE", and "BRIDGETOLL", and numpy
            arrays of SP values for available O-D pairs
        """
        shortest_paths_tool = self.controller.emme_manager.tool(
            "inro.emme.network_calculation.shortest_path"
        )
        max_cost = float(self.config.max_skim_cost)
        spec = {
            "type": "SHORTEST_PATH",
            "modes": [self.config.mode_code],
            "root_nodes": "@maz_root",
            "leaf_nodes": "@maz_id",
            "link_cost": "@link_cost",
            "path_constraints": {
                "max_cost": max_cost,
                "uturn_allowed": False,
                "through_leaves": False,
                "through_centroids": False,
                "exclude_forbidden_turns": False,
            },
            "results": {
                "skim_output": {
                    "return_numpy": True,
                    "analyses": [
                        {
                            "component": "SHORTEST_PATH_COST",
                            "operator": "+",
                            "name": "COST",
                            "description": "",
                        },
                        {
                            "component": "length",
                            "operator": "+",
                            "name": "DISTANCE",
                            "description": "",
                        },
                        {
                            "component": "@bridgetoll_da",
                            "operator": "+",
                            "name": "BRIDGETOLL",
                            "description": "",
                        },
                    ],
                    "format": "OMX",
                }
            },
            "performance_settings": {
                "number_of_processors": self.controller.num_processors,
                "direction": "FORWARD",
                "method": "STANDARD",
            },
        }
        sp_values = shortest_paths_tool(spec, self.scenario)
        return sp_values

    def _export_results(self, sp_values: Dict[str, NumpyArray]):
        """Write matrix skims to CSV.

        The matrices are filtered to omit rows for which the COST is
        < 0 or > 1e19 (Emme uses 1e20 to indicate inaccessible zone pairs).

        sp_values: dictionary of matrix costs, with the three keys
            "COST", "DISTANCE", and "BRIDGETOLL" and Numpy arrays of values
        """
        # get list of MAZ IDS
        roots = [
            node["@maz_root"] for node in self._network.nodes() if node["@maz_root"]
        ]
        leaves = [node["@maz_id"] for node in self._network.nodes() if node["@maz_id"]]
        # build dataframe with output data and to/from MAZ ids:
        # rows are ordered root-major to match the flattened skim arrays
        root_ids = np.repeat(roots, len(leaves))
        leaf_ids = leaves * len(roots)
        result_df = pd.DataFrame(
            {
                "FROM_ZONE": root_ids,
                "TO_ZONE": leaf_ids,
                "COST": sp_values["COST"].flatten(),
                "DISTANCE": sp_values["DISTANCE"].flatten(),
                "BRIDGETOLL": sp_values["BRIDGETOLL"].flatten(),
            }
        )
        # drop 0's / 1e20
        result_df = result_df.query("COST > 0 & COST < 1e19")
        # write remaining values to text file
        # FROM_ZONE,TO_ZONE,COST,DISTANCE,BRIDGETOLL
        output = self.get_abs_path(self.config.output_skim_file)
        with open(output, "a", newline="", encoding="utf8") as output_file:
            result_df.to_csv(output_file, header=False, index=False)

__init__(controller)

MAZ-to-MAZ shortest-path skim of time, distance and toll.

Parameters:

Name Type Description Default
controller RunController

parent RunController object

required
Source code in tm2py\components\network\highway\highway_maz.py
676
677
678
679
680
681
682
683
684
685
686
687
def __init__(self, controller: RunController):
    """MAZ-to-MAZ shortest-path skim of time, distance and toll.

    Args:
        controller: parent RunController object
    """
    super().__init__(controller)
    self.config = self.controller.config.highway.maz_to_maz
    # TODO add config requirement that skim_period must be a valid time period
    self._scenario = None  # lazily loaded via the scenario property
    self._network = None  # lazily loaded by _prepare_network
    self._highway_emmebank = None  # lazily loaded via the highway_emmebank property

run()

Run shortest path skims for all available MAZ-to-MAZ O-D pairs.

Runs a shortest path builder for each county, using a maz_skim_cost to limit the search. The valid gen cost (time + cost), distance and toll (drive alone) are written to CSV at the output_skim_file path: FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL

The following config inputs are used directly in this component. Note also that the network mode_code is prepared in the highway_network component using the excluded_links.

config.highway.maz_to_maz: skim_period: name of the period used for the skim, must match one of the defined config.time_periods demand_county_groups: used for the list of counties, creates a list out of all listed counties under [].counties output_skim_file: relative path to save the skims value_of_time: value of time used to convert tolls and auto operating cost operating_cost_per_mile: auto operating cost max_skim_cost: max cost value used to limit the shortest path search mode_code:

Source code in tm2py\components\network\highway\highway_maz.py
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
@LogStartEnd()
def run(self):
    """Run shortest path skims for all available MAZ-to-MAZ O-D pairs.

    Runs a shortest path builder for each county, using a maz_skim_cost
    to limit the search. The valid gen cost (time + cost), distance and toll (drive alone)
    are written to CSV at the output_skim_file path:
    FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL

    The following config inputs are used directly in this component. Note also
    that the network mode_code is prepared in the highway_network component
    using the excluded_links.

    config.highway.maz_to_maz:
        skim_period: name of the period used for the skim, must match one of the
            defined config.time_periods
        demand_county_groups: used for the list of counties, creates a list out
            of all listed counties under [].counties
        output_skim_file: relative path to save the skims
        value_of_time: value of time used to convert tolls and auto operating cost
        operating_cost_per_mile: auto operating cost
        max_skim_cost: max cost value used to limit the shortest path search
        mode_code:
    """

    # prepare output file and write header
    output = self.get_abs_path(self.config.output_skim_file)
    os.makedirs(os.path.dirname(output), exist_ok=True)
    with open(output, "w", encoding="utf8") as output_file:
        output_file.write("FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL\n")
    counties = []
    for group in self.config.demand_county_groups:
        counties.extend(group.counties)
    with self._setup():
        self._prepare_network()
        for county in counties:
            num_roots = self._mark_roots(county)
            if num_roots == 0:
                # no MAZ roots in this county: nothing to skim
                continue
            sp_values = self._run_shortest_path()
            self._export_results(sp_values)

validate_inputs()

Validate inputs files are correct, raise if an error is found.

Source code in tm2py\components\network\highway\highway_maz.py
701
702
703
704
def validate_inputs(self):
    """Validate inputs files are correct, raise if an error is found."""
    # TODO: implement input validation checks (currently a no-op stub)
    pass

Bases: ConfigItem

Highway MAZ to MAZ shortest path assignment and skim parameters.

Properties
Source code in tm2py\config.py
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
@dataclass(frozen=True)
class HighwayMazToMazConfig(ConfigItem):
    """Highway MAZ to MAZ shortest path assignment and skim parameters.

    Properties:
        mode_code: single character mode, used to generate link.modes to
            identify subnetwork, generated from "excluded_links" keywords,
            plus including MAZ connectors.
        value_of_time: value of time for this class in $ / hr
        operating_cost_per_mile: vehicle operating cost in cents / mile
        max_distance: maximum distance limit for MAZ-to-MAZ paths
            (presumably in network length units -- TODO confirm usage)
        max_skim_cost: max shortest path distance to search for MAZ-to-MAZ
            skims, in generalized cost units (includes operating cost
            converted to minutes)
        excluded_links: list of keywords to identify links to exclude from
            MAZ-to-MAZ paths, see HighwayClassConfig.excluded_links
        demand_file: relative path to find the input demand files
            can use placeholders for {period} and {number}, where the
            {period} is the time_period.name (see TimePeriodConfig)
            and {number} is the demand_county_groups[].number
            (see DemandCountyGroupConfig)
            e.g.: auto_{period}_MAZ_AUTO_{number}_{period}.omx
        demand_county_groups: list of demand county groups
            (see DemandCountyGroupConfig)
        skim_period: period name to use for the shortest path skims, must
            match one of the names listed in the time_periods
        output_skim_file: relative path to resulting MAZ-to-MAZ skims
    """

    mode_code: str = Field(min_length=1, max_length=1)
    value_of_time: float = Field(gt=0)
    operating_cost_per_mile: float = Field(ge=0)
    max_distance: float = Field(gt=0)
    max_skim_cost: float = Field(gt=0)
    excluded_links: Tuple[str, ...] = Field()
    demand_file: pathlib.Path = Field()
    demand_county_groups: Tuple[DemandCountyGroupConfig, ...] = Field()
    skim_period: str = Field()
    output_skim_file: pathlib.Path = Field()

    @validator("demand_county_groups")
    def unique_group_numbers(cls, value):
        """Validate list of demand_county_groups has unique .number values."""
        group_ids = [group.number for group in value]
        if len(group_ids) != len(set(group_ids)):
            # raise rather than assert so validation still runs under `python -O`;
            # pydantic converts ValueError to ValidationError just as it does
            # AssertionError, so the caller-visible behavior is unchanged
            raise ValueError("-> number value must be unique")
        return value

unique_group_numbers(value)

Validate list of demand_county_groups has unique .number values.

Source code in tm2py\config.py
918
919
920
921
922
923
@validator("demand_county_groups")
def unique_group_numbers(cls, value):
    """Validate list of demand_county_groups has unique .number values."""
    group_ids = [group.number for group in value]
    # NOTE(review): `assert` is stripped under `python -O`; raising ValueError
    # would be safer and is treated identically by pydantic validators
    assert len(group_ids) == len(set(group_ids)), "-> number value must be unique"
    return value

Transit Network Components

Transit assignment module.

TransitAssignment

Bases: Component

Run transit assignment.

Source code in tm2py\components\network\transit\transit_assign.py
 401
 402
 403
 404
 405
 406
 407
 408
 409
 410
 411
 412
 413
 414
 415
 416
 417
 418
 419
 420
 421
 422
 423
 424
 425
 426
 427
 428
 429
 430
 431
 432
 433
 434
 435
 436
 437
 438
 439
 440
 441
 442
 443
 444
 445
 446
 447
 448
 449
 450
 451
 452
 453
 454
 455
 456
 457
 458
 459
 460
 461
 462
 463
 464
 465
 466
 467
 468
 469
 470
 471
 472
 473
 474
 475
 476
 477
 478
 479
 480
 481
 482
 483
 484
 485
 486
 487
 488
 489
 490
 491
 492
 493
 494
 495
 496
 497
 498
 499
 500
 501
 502
 503
 504
 505
 506
 507
 508
 509
 510
 511
 512
 513
 514
 515
 516
 517
 518
 519
 520
 521
 522
 523
 524
 525
 526
 527
 528
 529
 530
 531
 532
 533
 534
 535
 536
 537
 538
 539
 540
 541
 542
 543
 544
 545
 546
 547
 548
 549
 550
 551
 552
 553
 554
 555
 556
 557
 558
 559
 560
 561
 562
 563
 564
 565
 566
 567
 568
 569
 570
 571
 572
 573
 574
 575
 576
 577
 578
 579
 580
 581
 582
 583
 584
 585
 586
 587
 588
 589
 590
 591
 592
 593
 594
 595
 596
 597
 598
 599
 600
 601
 602
 603
 604
 605
 606
 607
 608
 609
 610
 611
 612
 613
 614
 615
 616
 617
 618
 619
 620
 621
 622
 623
 624
 625
 626
 627
 628
 629
 630
 631
 632
 633
 634
 635
 636
 637
 638
 639
 640
 641
 642
 643
 644
 645
 646
 647
 648
 649
 650
 651
 652
 653
 654
 655
 656
 657
 658
 659
 660
 661
 662
 663
 664
 665
 666
 667
 668
 669
 670
 671
 672
 673
 674
 675
 676
 677
 678
 679
 680
 681
 682
 683
 684
 685
 686
 687
 688
 689
 690
 691
 692
 693
 694
 695
 696
 697
 698
 699
 700
 701
 702
 703
 704
 705
 706
 707
 708
 709
 710
 711
 712
 713
 714
 715
 716
 717
 718
 719
 720
 721
 722
 723
 724
 725
 726
 727
 728
 729
 730
 731
 732
 733
 734
 735
 736
 737
 738
 739
 740
 741
 742
 743
 744
 745
 746
 747
 748
 749
 750
 751
 752
 753
 754
 755
 756
 757
 758
 759
 760
 761
 762
 763
 764
 765
 766
 767
 768
 769
 770
 771
 772
 773
 774
 775
 776
 777
 778
 779
 780
 781
 782
 783
 784
 785
 786
 787
 788
 789
 790
 791
 792
 793
 794
 795
 796
 797
 798
 799
 800
 801
 802
 803
 804
 805
 806
 807
 808
 809
 810
 811
 812
 813
 814
 815
 816
 817
 818
 819
 820
 821
 822
 823
 824
 825
 826
 827
 828
 829
 830
 831
 832
 833
 834
 835
 836
 837
 838
 839
 840
 841
 842
 843
 844
 845
 846
 847
 848
 849
 850
 851
 852
 853
 854
 855
 856
 857
 858
 859
 860
 861
 862
 863
 864
 865
 866
 867
 868
 869
 870
 871
 872
 873
 874
 875
 876
 877
 878
 879
 880
 881
 882
 883
 884
 885
 886
 887
 888
 889
 890
 891
 892
 893
 894
 895
 896
 897
 898
 899
 900
 901
 902
 903
 904
 905
 906
 907
 908
 909
 910
 911
 912
 913
 914
 915
 916
 917
 918
 919
 920
 921
 922
 923
 924
 925
 926
 927
 928
 929
 930
 931
 932
 933
 934
 935
 936
 937
 938
 939
 940
 941
 942
 943
 944
 945
 946
 947
 948
 949
 950
 951
 952
 953
 954
 955
 956
 957
 958
 959
 960
 961
 962
 963
 964
 965
 966
 967
 968
 969
 970
 971
 972
 973
 974
 975
 976
 977
 978
 979
 980
 981
 982
 983
 984
 985
 986
 987
 988
 989
 990
 991
 992
 993
 994
 995
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
class TransitAssignment(Component):
    """Run transit assignment."""

    def __init__(self, controller: "RunController"):
        """Constructor for TransitAssignment.

        Args:
            controller: RunController object.
        """
        super().__init__(controller)
        # Transit-specific portion of the model configuration.
        self.config = self.controller.config.transit
        # Sub-components executed as part of this component, keyed by a
        # descriptive name (see run()).
        self.sub_components = {
            "prepare transit demand": PrepareTransitDemand(controller),
        }
        # Helper used to push updated auto travel times onto the transit
        # network before each assignment (see run()).
        self.transit_network = PrepareTransitNetwork(controller)
        self._demand_matrix = None  # FIXME
        self._num_processors = self.controller.emme_manager.num_processors
        # Lazily-populated caches; _transit_emmebank is filled by the
        # transit_emmebank property on first access.
        self._time_period = None
        self._scenario = None
        self._transit_emmebank = None

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO: not yet implemented; currently a no-op placeholder.

    @property
    def transit_emmebank(self):
        """Transit Emmebank from the Emme manager, cached on first access."""
        # Lazy initialization: falsy check treats the initial None as "not yet cached".
        if not self._transit_emmebank:
            self._transit_emmebank = self.controller.emme_manager.transit_emmebank
        return self._transit_emmebank

    @LogStartEnd("Transit assignments")
    def run(self):
        """Run transit assignments."""

        if self.controller.iteration == 0:
            self.transit_emmebank.zero_matrix
            if self.controller.config.warmstart.warmstart:
                if self.controller.config.warmstart.use_warmstart_demand:
                    self.sub_components["prepare transit demand"].run()
            else:
                # give error message to user about not warmstarting transit
                raise Exception(
                    f"ERROR: transit has to be warmstarted, please either specify use_warmstart_skim or use_warmstart_demand"
                )
        else:
            self.sub_components["prepare transit demand"].run()

        for time_period in self.time_period_names:
            # update auto times
            print("updating auto time in transit network")
            self.transit_network.update_auto_times(time_period)

            if self.controller.iteration == 0:
                # iteration = 0 : run uncongested transit assignment
                use_ccr = False
                congested_transit_assignment = False
                print("running uncongested transit assignment with warmstart demand")
                self.run_transit_assign(
                    time_period, use_ccr, congested_transit_assignment
                )
            elif (self.controller.iteration == 1) & (self.controller.config.warmstart.use_warmstart_skim):
                # iteration = 1 and use_warmstart_skim = True : run uncongested transit assignment
                use_ccr = False
                congested_transit_assignment = False
                self.run_transit_assign(
                    time_period, use_ccr, congested_transit_assignment
                )               
            else:
                # iteration >= 1 and use_warmstart_skim = False : run congested transit assignment
                use_ccr = self.config.use_ccr
                if time_period in ["EA", "EV", "MD"]:
                    congested_transit_assignment = False
                else:
                    congested_transit_assignment = (
                        self.config.congested_transit_assignment
                    )

                self.run_transit_assign(
                    time_period, use_ccr, congested_transit_assignment
                )

            # output_summaries
            if self.config.output_stop_usage_path is not None:
                network, class_stop_attrs = self._calc_connector_flows(time_period)
                self._export_connector_flows(network, class_stop_attrs, time_period)
            if self.config.output_transit_boardings_path is not None:
                self._export_boardings_by_line(time_period)
            if self.config.output_transit_segment_path is not None:
                self._export_transit_segment(time_period)
            if self.config.output_station_to_station_flow_path is not None:
                self._export_boardings_by_station(time_period)
            if self.config.output_transfer_at_station_path is not None:
                self._export_transfer_at_stops(time_period)

    @LogStartEnd("Transit assignments for a time period")
    def run_transit_assign(
        self, time_period: str, use_ccr: bool, congested_transit_assignment: bool
    ):
        if use_ccr:
            self._run_ccr_assign(time_period)
        elif congested_transit_assignment:
            self._run_congested_assign(time_period)
        else:
            self._run_extended_assign(time_period)

    def _apply_peaking_factor(self, time_period: str, ea_df=None):
        """Apply peaking factors to transit line headways for a time period.

        Emme computes line capacity as 60 * duration * vehicle.total_capacity
        / line.headway, so peaking is applied by scaling the headway rather
        than the capacity. The original headway is preserved in @orig_hdw.

        - "am": PNR-access lines get a factor derived from AM parking capacity
          minus EA boardings; other lines use congested.am_peaking_factor.
          NOTE(review): ea_df is indexed unconditionally here, so the default
          ea_df=None would raise — callers appear required to pass the EA
          boardings frame for the AM period; confirm.
        - "pm": non-PNR lines use congested.pm_peaking_factor.
        - "ea": no headway change; EA boardings per line are tallied and
          written to the boardings CSV (period "ea_pnr") for later AM use.

        Args:
            time_period: time period name abbreviation
            ea_df: DataFrame with columns "line_name_am" and "boardings"
                (EA PNR boardings mapped to AM line names); required for "am".
        """
        _emme_scenario = self.transit_emmebank.scenario(time_period)
        _network = _emme_scenario.get_network()
        _duration = self.time_period_durations[time_period.lower()]

        if time_period.lower() == "am":
            for line in _network.transit_lines():
                # keep the unfactored headway for reporting
                line["@orig_hdw"] = line.headway
                line_name = line.id
                line_veh = line.vehicle
                line_hdw = line.headway
                line_cap = 60 * _duration * line_veh.total_capacity / line_hdw
                if line_name in ea_df["line_name_am"].to_list():
                    ea_boardings = ea_df.loc[
                        ea_df["line_name_am"] == line_name, "boardings"
                    ].values[0]
                else:
                    ea_boardings = 0
                pnr_peaking_factor = (
                    line_cap - ea_boardings
                ) / line_cap  # subtract ea boardings from am parking capacity
                non_pnr_peaking_factor = self.config.congested.am_peaking_factor
                # in Emme transit assignment, the capacity is computed for each transit line as: 60 * _duration * vehicle.total_capacity / line.headway
                # so instead of applying peaking factor to calculated capacity, we can divide line.headway by this peaking factor
                # if ea number of parkers exceed the am parking capacity, set the headway to a very large number
                if pnr_peaking_factor > 0:
                    pnr_line_hdw = line_hdw / pnr_peaking_factor
                else:
                    pnr_line_hdw = 999
                non_pnr_line_hdw = line_hdw * non_pnr_peaking_factor
                # PNR egress lines are left unchanged; PNR access lines get the
                # parking-capacity factor; everything else gets the AM factor
                if ("pnr" in line_name) and ("egr" in line_name):
                    continue
                elif ("pnr" in line_name) and ("acc" in line_name):
                    line.headway = pnr_line_hdw
                else:
                    line.headway = non_pnr_line_hdw

        if time_period.lower() == "pm":
            for line in _network.transit_lines():
                line["@orig_hdw"] = line.headway
                line_name = line.id
                line_hdw = line.headway
                non_pnr_peaking_factor = self.config.congested.pm_peaking_factor
                non_pnr_line_hdw = line_hdw * non_pnr_peaking_factor
                # PNR lines keep their headway in the PM period
                if "pnr" in line_name:
                    continue
                else:
                    line.headway = non_pnr_line_hdw

        if time_period.lower() == "ea":
            line_name = []
            boards = []
            ea_pnr_df = pd.DataFrame()
            for line in _network.transit_lines():
                boardings = 0
                for segment in line.segments(include_hidden=True):
                    boardings += segment.transit_boardings
                line_name.append(line.id)
                boards.append(boardings)
            ea_pnr_df["line_name"] = line_name
            ea_pnr_df["boardings"] = boards
            ea_pnr_df["line_name_am"] = ea_pnr_df["line_name"].str.replace(
                "EA", "AM"
            )  # will subtract ea boardings from am parking capacity
            path_boardings = self.get_abs_path(
                self.config.output_transit_boardings_path
            )
            ea_pnr_df.to_csv(path_boardings.format(period="ea_pnr"), index=False)

        # push the modified headways (and saved originals) back to the scenario
        _update_attributes = {"TRANSIT_LINE": ["@orig_hdw", "headway"]}
        self.controller.emme_manager.copy_attribute_values(
            _network, _emme_scenario, _update_attributes
        )

    def _transit_classes(self, time_period) -> List[TransitAssignmentClass]:
        """Build the TransitAssignmentClass objects for one time period.

        When fares are in use, a mapping from each line's #src_mode to the set
        of (journey-level) mode IDs actually used is supplied to each class.

        Args:
            time_period: time period name abbreviation

        Returns:
            List of TransitAssignmentClass, one per configured class.
        """
        manager = self.controller.emme_manager
        if self.config.use_fares:
            fare_modes = _defaultdict(lambda: set([]))
            partial_net = self.transit_emmebank.scenario(
                time_period
            ).get_partial_network(["TRANSIT_LINE"], include_attributes=False)
            manager.copy_attribute_values(
                self.transit_emmebank.scenario(time_period),
                partial_net,
                {"TRANSIT_LINE": ["#src_mode"]},
            )
            for line in partial_net.transit_lines():
                fare_modes[line["#src_mode"]].add(line.mode.id)
        else:
            fare_modes = None
        # Emme assignment spec templates live next to the Emme project
        spec_dir = os.path.join(
            self.get_abs_path(
                os.path.dirname(self.controller.config.emme.project_path)
            ),
            "Specifications",
        )
        return [
            TransitAssignmentClass(
                class_config,
                self.config,
                time_period,
                self.controller.iteration,
                self._num_processors,
                fare_modes,
                spec_dir,
            )
            for class_config in self.config.classes
        ]

    def _run_ccr_assign(self, time_period: str) -> None:
        """Run capacitated (CCR: crowding/capacity/reliability) transit assignment
        for a time period, then update segment CCR penalties.

        The Emme capacitated assignment takes custom cost functions as Python
        source strings; the helper function sources are concatenated with
        inspect.getsource so they are available inside Emme's sandbox.

        Args:
            time_period: time period name
        """
        _duration = self.time_period_durations[time_period.lower()]
        _ccr_weights = self.config.ccr_weights
        _eawt_weights = self.config.eawt_weights
        _mode_config = {
            mode_config.mode_id: mode_config for mode_config in self.config.modes
        }
        _emme_scenario = self.transit_emmebank.scenario(time_period)
        transit_classes = self._transit_classes(time_period)

        assign_transit = self.controller.emme_manager.tool(
            "inro.emme.transit_assignment.capacitated_transit_assignment"
        )
        _tclass_specs = [tclass.emme_transit_spec for tclass in transit_classes]
        _tclass_names = [tclass.name for tclass in transit_classes]

        _cost_func = {
            "segment": {
                "type": "CUSTOM",
                "python_function": func_returns_crowded_segment_cost(
                    _duration, _ccr_weights
                ),
                "congestion_attribute": "us3",
                "orig_func": False,
            },
            "headway": {
                "type": "CUSTOM",
                # perceived-headway function plus the sources of the helpers it calls
                "python_function": func_returns_calc_updated_perceived_headway(
                    _duration,
                    _eawt_weights,
                    _mode_config,
                    use_fares=self.config.use_fares,
                )
                + "\n"
                + textwrap.dedent(inspect.getsource(calc_extra_wait_time))
                + "\n"
                + textwrap.dedent(inspect.getsource(calc_adjusted_headway))
                + "\n"
                + textwrap.dedent(inspect.getsource(calc_total_offs))
                + "\n"
                + textwrap.dedent(inspect.getsource(calc_offs_thru_segment)),
            },
            "assignment_period": _duration,
        }

        _stop_criteria = {
            "max_iterations": self.config.ccr_stop_criteria.max_iterations,
            "relative_difference": self.config.ccr_stop_criteria.relative_difference,
            "percent_segments_over_capacity": self.config.ccr_stop_criteria.percent_segments_over_capacity,
        }
        assign_transit(
            _tclass_specs,
            congestion_function=_cost_func,
            stopping_criteria=_stop_criteria,
            class_names=_tclass_names,
            scenario=_emme_scenario,
            log_worksheets=False,
        )

        # Recompute @eawt / @capacity_penalty on segments after assignment;
        # the assignment's own cost function only uses them during convergence.
        self._calc_segment_ccr_penalties(time_period)

    def _run_congested_assign(self, time_period: str) -> None:
        """Run congested transit assignment for a time period.

        Stop criteria are selected from the config entry matching the current
        global iteration; per-period max iterations are looked up within it.

        Args:
            time_period: time period name

        Raises:
            ValueError: if no stop criteria are configured for the current
                global iteration.
        """
        _duration = self.time_period_durations[time_period.lower()]
        _congested_weights = self.config.congested_weights
        _emme_scenario = self.transit_emmebank.scenario(time_period)
        transit_classes = self._transit_classes(time_period)

        assign_transit = self.controller.emme_manager.tool(
            "inro.emme.transit_assignment.congested_transit_assignment"
        )
        _tclass_specs = [tclass.emme_transit_spec for tclass in transit_classes]
        _tclass_names = [tclass.name for tclass in transit_classes]

        _cost_func = {
            "type": "CUSTOM",
            "python_function": func_returns_segment_congestion(
                _duration,
                _emme_scenario,
                _congested_weights,
                use_fares=self.config.use_fares,
            ),
            "congestion_attribute": "us3",
            "orig_func": False,
            "assignment_period": _duration,
        }

        stop_criteria_settings = self.config.congested.stop_criteria
        # get the corresponding stop criteria for the global iteration
        _stop_criteria = None
        for item in stop_criteria_settings:
            if item["global_iteration"] == self.controller.iteration:
                _stop_criteria = {
                    # max iterations are configured per time period
                    "max_iterations": [
                        entry.max_iteration
                        for entry in item.max_iterations
                        if entry.time_period.lower() == time_period.lower()
                    ][0],
                    "normalized_gap": item.normalized_gap,
                    "relative_gap": item.relative_gap,
                }
        if _stop_criteria is None:
            raise ValueError(
                f"transit.congested.stop_criteria: Must specify stop criteria for global iteration {self.controller.iteration}"
            )
        assign_transit(
            _tclass_specs,
            congestion_function=_cost_func,
            stopping_criteria=_stop_criteria,
            class_names=_tclass_names,
            scenario=_emme_scenario,
            log_worksheets=False,
        )

    def _run_extended_assign(self, time_period: str) -> None:
        """Run the uncongested (extended) transit assignment for a time period.

        Args:
            time_period: time period name
        """
        extended_assign = self.controller.emme_manager.modeller.tool(
            "inro.emme.transit_assignment.extended_transit_assignment"
        )
        scenario = self.transit_emmebank.scenario(time_period)

        # The first class replaces any prior volumes; every subsequent class
        # accumulates on top of them.
        for class_index, tclass in enumerate(self._transit_classes(time_period)):
            extended_assign(
                tclass.emme_transit_spec,
                class_name=tclass.name,
                add_volumes=class_index > 0,
                scenario=scenario,
            )

    def _get_network_with_boardings(
        self, emme_scenario: "EmmeScenario"
    ) -> "EmmeNetwork":
        """Load a partial network with transit boardings by line and segment.

        Args:
            emme_scenario: Emme scenario to read from.

        Returns:
            EmmeNetwork carrying line description, #src_mode and segment
            transit boardings.
        """
        partial_net = emme_scenario.get_partial_network(
            ["TRANSIT_LINE", "TRANSIT_SEGMENT"], include_attributes=False
        )
        wanted_attrs = {
            "TRANSIT_LINE": ["description", "#src_mode"],
            "TRANSIT_SEGMENT": ["transit_boardings"],
        }
        self.controller.emme_manager.copy_attribute_values(
            emme_scenario, partial_net, wanted_attrs
        )
        return partial_net

    def _export_boardings_by_line(self, time_period: str) -> None:
        """Export total boardings by line to config.transit.output_transit_boardings_path.

        Writes one CSV per time period with per-line boardings, hourly
        capacity, mode and fare-system identifiers.

        args:
            time_period (str): time period abbreviation
        """
        scenario = self.transit_emmebank.scenario(time_period)
        network = scenario.get_network()

        out_path = self.get_abs_path(self.config.output_transit_boardings_path)
        os.makedirs(os.path.dirname(out_path), exist_ok=True)

        header = [
            "line_name",
            "description",
            "total_boarding",
            "total_hour_cap",
            "tm2_mode",
            "line_mode",
            "headway",
            "fare_system",
        ]
        with open(
            out_path.format(period=time_period.lower()),
            "w",
            encoding="utf8",
        ) as out_file:
            out_file.write(",".join(header))
            out_file.write("\n")
            for line in network.transit_lines():
                # sum boardings over all segments, including the hidden one
                total_boardings = sum(
                    seg.transit_boardings
                    for seg in line.segments(include_hidden=True)
                )
                hourly_cap = 60 * line.vehicle.total_capacity / line.headway
                # with fares applied, the original mode is kept in #src_mode
                mode = line["#src_mode"] if self.config.use_fares else line.mode
                row = [
                    line.id,
                    line["#description"],
                    total_boardings,
                    hourly_cap,
                    line["#mode"],
                    mode,
                    line.headway,
                    line["#faresystem"],
                ]
                out_file.write(",".join(str(x) for x in row))
                out_file.write("\n")

    def _calc_connector_flows(
        self, time_period: str
    ) -> Tuple["EmmeNetwork", Dict[str, str]]:
        """Calculate connector (aux transit) volumes by assignment class.

        args:
            time_period (str): time period abbreviation

        returns:
            EmmeNetwork with aux_transit_volumes, and a mapping
            {<transit_class_name>: <link extra attribute name>}.
        """
        manager = self.controller.emme_manager
        scenario = self.transit_emmebank.scenario(time_period)
        results_tool = manager.tool(
            "inro.emme.transit_assignment.extended.network_results"
        )
        create_attr = manager.tool(
            "inro.emme.data.extra_attribute.create_extra_attribute"
        )
        attrs_by_class = {}
        for tclass in self.config.classes:
            # Emme extra attribute names are capped at 20 characters
            attr_name = f"@aux_vol_{tclass.name}".lower()
            create_attr("LINK", attr_name, overwrite=True, scenario=scenario)
            results_tool(
                {
                    "type": "EXTENDED_TRANSIT_NETWORK_RESULTS",
                    "on_links": {"aux_transit_volumes": attr_name},
                },
                class_name=tclass.name,
                scenario=scenario,
            )
            attrs_by_class[tclass.name] = attr_name

        # load links only, plus just the attributes needed downstream
        network = scenario.get_partial_network(["LINK"], include_attributes=True)
        manager.copy_attribute_values(
            scenario,
            network,
            {
                "LINK": attrs_by_class.values(),
                "NODE": ["@taz_id", "#node_id"],
            },
        )
        return network, attrs_by_class

    def _export_connector_flows(
        self, network: EmmeNetwork, class_stop_attrs: Dict[str, str], time_period: str
    ):
        """Export boardings and alightings by class, stop (connector) and TAZ.

        args:
            network: loaded network carrying aux transit volumes
            class_stop_attrs: {class name: link extra attribute name}
            time_period: time period abbreviation
        """
        out_path = self.get_abs_path(self.config.output_stop_usage_path)
        os.makedirs(os.path.dirname(out_path), exist_ok=True)
        with open(
            out_path.format(period=time_period.lower()), "w", encoding="utf8"
        ) as out_file:
            out_file.write(",".join(["mode", "taz", "stop", "boardings", "alightings"]))
            out_file.write("\n")
            for zone in network.centroids():
                taz_id = int(zone["@taz_id"])
                # outgoing connectors: boardings forward, alightings on reverse
                for link in zone.outgoing_links():
                    stop_id = link.j_node["#node_id"]
                    for name, attr_name in class_stop_attrs.items():
                        if link.reverse_link:
                            alightings = link.reverse_link[attr_name]
                        else:
                            alightings = 0.0
                        out_file.write(
                            f"{name}, {taz_id}, {stop_id}, {link[attr_name]}, {alightings}\n"
                        )
                # incoming connectors without a reverse link: alightings only
                for link in zone.incoming_links():
                    if link.reverse_link:  # already exported
                        continue
                    stop_id = link.i_node["#node_id"]
                    for name, attr_name in class_stop_attrs.items():
                        out_file.write(
                            f"{name}, {taz_id}, {stop_id}, 0.0, {link[attr_name]}\n"
                        )

    def _export_transit_segment(self, time_period: str):
        """Export per-segment assignment results to a CSV per time period.

        For each configured class, creates and fills extra attributes for
        initial, direct-transfer and auxiliary-transfer boardings, then writes
        one row per transit segment with times, volumes, capacities and the
        per-access-mode boardings.

        NOTE(review): the output columns read hard-coded attribute names
        (@iboard_pnr_trn_wlk, etc.), so this assumes the configured class
        names are exactly pnr_trn_wlk, wlk_trn_pnr, knr_trn_wlk, wlk_trn_knr,
        wlk_trn_wlk — confirm against the transit class config.

        Args:
            time_period: time period name abbreviation
        """
        # add total boardings by access mode
        _emme_manager = self.controller.emme_manager
        _emme_scenario = self.transit_emmebank.scenario(time_period)
        network_results = _emme_manager.tool(
            "inro.emme.transit_assignment.extended.network_results"
        )
        create_extra = _emme_manager.tool(
            "inro.emme.data.extra_attribute.create_extra_attribute"
        )
        for tclass in self.config.classes:
            # one attribute per class for each boarding type
            initial_board_attr_name = f"@iboard_{tclass.name}".lower()
            direct_xboard_attr_name = f"@dboard_{tclass.name}".lower()
            auxiliary_xboard_attr_name = f"@aboard_{tclass.name}".lower()
            create_extra(
                "TRANSIT_SEGMENT",
                initial_board_attr_name,
                overwrite=True,
                scenario=_emme_scenario,
            )
            create_extra(
                "TRANSIT_SEGMENT",
                direct_xboard_attr_name,
                overwrite=True,
                scenario=_emme_scenario,
            )
            create_extra(
                "TRANSIT_SEGMENT",
                auxiliary_xboard_attr_name,
                overwrite=True,
                scenario=_emme_scenario,
            )
            spec = {
                "type": "EXTENDED_TRANSIT_NETWORK_RESULTS",
                "on_segments": {
                    "initial_boardings": initial_board_attr_name,
                    "transfer_boardings_direct": direct_xboard_attr_name,
                    "transfer_boardings_indirect": auxiliary_xboard_attr_name,
                },
            }
            network_results(spec, class_name=tclass.name, scenario=_emme_scenario)

        network = _emme_scenario.get_network()
        path_boardings = self.get_abs_path(self.config.output_transit_segment_path)
        with open(path_boardings.format(period=time_period.lower()), "w") as f:
            f.write(
                ",".join(
                    [
                        "line",
                        "stop_name",
                        "i_node",
                        "j_node",
                        "dwt",
                        "ttf",
                        "voltr",
                        "board",
                        "con_time",
                        "uncon_time",
                        "mode",
                        "src_mode",
                        "mdesc",
                        "hdw",
                        "orig_hdw",
                        "speed",
                        "vauteq",
                        "vcaps",
                        "vcapt",
                        "initial_board_ptw",
                        "initial_board_wtp",
                        "initial_board_ktw",
                        "initial_board_wtk",
                        "initial_board_wtw",
                        "direct_transfer_board_ptw",
                        "direct_transfer_board_wtp",
                        "direct_transfer_board_ktw",
                        "direct_transfer_board_wtk",
                        "direct_transfer_board_wtw",
                        "auxiliary_transfer_board_ptw",
                        "auxiliary_transfer_board_wtp",
                        "auxiliary_transfer_board_ktw",
                        "auxiliary_transfer_board_wtk",
                        "auxiliary_transfer_board_wtw",
                    ]
                )
            )
            f.write("\n")

            for line in network.transit_lines():
                for segment in line.segments(include_hidden=True):
                    if self.config.use_fares:
                        mode = segment.line["#src_mode"]
                    else:
                        mode = segment.line.mode
                    # when peaking factors were applied, @orig_hdw holds the
                    # unfactored headway (see _apply_peaking_factor)
                    if self.config.congested.use_peaking_factor and (
                        time_period.lower() in ["am", "pm"]
                    ):
                        orig_headway = segment.line["@orig_hdw"]
                    else:
                        orig_headway = segment.line.headway
                    f.write(
                        ",".join(
                            [
                                str(x)
                                for x in [
                                    segment.id,
                                    '"{0}"'.format(segment["#stop_name"]),
                                    segment.i_node,
                                    segment.j_node,
                                    segment.dwell_time,
                                    segment.transit_time_func,
                                    segment.transit_volume,
                                    segment.transit_boardings,
                                    segment.transit_time,
                                    segment["@trantime_seg"],
                                    segment.line.mode,
                                    mode,
                                    segment.line.mode.description,
                                    segment.line.headway,
                                    orig_headway,
                                    segment.line.speed,
                                    segment.line.vehicle.auto_equivalent,
                                    segment.line.vehicle.seated_capacity,
                                    segment.line.vehicle.total_capacity,
                                    segment["@iboard_pnr_trn_wlk"],
                                    segment["@iboard_wlk_trn_pnr"],
                                    segment["@iboard_knr_trn_wlk"],
                                    segment["@iboard_wlk_trn_knr"],
                                    segment["@iboard_wlk_trn_wlk"],
                                    segment["@dboard_pnr_trn_wlk"],
                                    segment["@dboard_wlk_trn_pnr"],
                                    segment["@dboard_knr_trn_wlk"],
                                    segment["@dboard_wlk_trn_knr"],
                                    segment["@dboard_wlk_trn_wlk"],
                                    segment["@aboard_pnr_trn_wlk"],
                                    segment["@aboard_wlk_trn_pnr"],
                                    segment["@aboard_knr_trn_wlk"],
                                    segment["@aboard_wlk_trn_knr"],
                                    segment["@aboard_wlk_trn_wlk"],
                                ]
                            ]
                        )
                    )
                    f.write("\n")

    def _export_boardings_by_station(self, time_period: str):
        """Run station-to-station analysis per class for selected operators.

        args:
            time_period (str): time period abbreviation
        """
        manager = self.controller.emme_manager
        scenario = self.transit_emmebank.scenario(time_period)
        network = scenario.get_network()
        sta2sta_tool = manager.tool(
            "inro.emme.transit_assignment.extended.station_to_station_analysis"
        )
        spec = {
            "type": "EXTENDED_TRANSIT_STATION_TO_STATION_ANALYSIS",
            "transit_line_selections": {
                "first_boarding": "mode=h",
                "last_alighting": "mode=h",
            },
            "analyzed_demand": None,
        }

        # map to used modes in apply fares case
        fare_modes = _defaultdict(lambda: set([]))
        for line in network.transit_lines():
            if self.config.use_fares:
                fare_modes[line["#src_mode"]].add(line.mode.id)
            else:
                fare_modes[line.mode.id].add(line.mode.id)

        operator_dict = {
            # mode: network_selection
            "bart": "h",
            "caltrain": "r",
        }

        for tclass in self.config.classes:
            for op_name, mode_code in operator_dict.items():
                demand_matrix = "mfTRN_%s_%s" % (tclass.name, time_period)
                out_template = self.get_abs_path(
                    self.config.output_station_to_station_flow_path
                )

                # boarding and alighting restricted to the operator's modes
                selection = "mode=" + ",".join(list(fare_modes[mode_code]))
                spec["transit_line_selections"]["first_boarding"] = selection
                spec["transit_line_selections"]["last_alighting"] = selection
                spec["analyzed_demand"] = demand_matrix

                sta2sta_tool(
                    specification=spec,
                    output_file=out_template.format(
                        operator=op_name,
                        tclass=tclass.name,
                        period=time_period.lower(),
                    ),
                    scenario=scenario,
                    append_to_output_file=False,
                    class_name=tclass.name,
                )

    def _export_transfer_at_stops(self, time_period: str):
        """Export transfers-at-stops results for configured station nodes.

        For each class and each configured station, runs the Emme
        transfers-at-stops analysis and writes it to the configured path.

        Args:
            time_period: time period name abbreviation
        """
        _emme_manager = self.controller.emme_manager
        _emme_scenario = self.transit_emmebank.scenario(time_period)
        network = _emme_scenario.get_network()
        transfers_at_stops = _emme_manager.tool(
            "inro.emme.transit_assignment.extended.apps.transfers_at_stops"
        )

        # mapping of station name -> #node_id from config; inverted for lookup
        stop_location = self.config.output_transfer_at_station_node_ids
        stop_location_val_key = {val: key for key, val in stop_location.items()}

        # NOTE(review): this rebinds entries of the config mapping in place,
        # replacing configured #node_id values with Emme node IDs — it mutates
        # controller config state; confirm that is intended.
        for node in network.nodes():
            if stop_location_val_key.get(node["#node_id"]):
                stop_location[stop_location_val_key[node["#node_id"]]] = node.id

        for tclass in self.config.classes:
            for stop_name, stop_id in stop_location.items():
                demand_matrix = "mfTRN_%s_%s" % (tclass.name, time_period)
                output_file_name = self.get_abs_path(
                    self.config.output_transfer_at_station_path
                )
                output_path = output_file_name.format(
                    tclass=tclass.name, stop=stop_name, period=time_period.lower()
                )

                transfers_at_stops(
                    selection=f"i={stop_id}",
                    export_path=output_path,
                    scenario=_emme_scenario,
                    class_name=tclass.name,
                    analyzed_demand=demand_matrix,
                )

    def _add_ccr_vars_to_scenario(self, emme_scenario: "EmmeScenario") -> None:
        """Add Extra Added Wait Time and Capacity Penalty to emme scenario.

        Creates (overwriting any existing definition) the @eawt and
        @capacity_penalty extra attributes on transit segments so that
        CCR results can be stored on the scenario.

        Args:
            emme_scenario : EmmeScenario
        """
        create_extra = self.controller.emme_manager.tool(
            "inro.emme.data.extra_attribute.create_extra_attribute"
        )
        # (attribute name, description) pairs to create on transit segments
        segment_attributes = (
            ("@eawt", "extra added wait time"),
            ("@capacity_penalty", "capacity penalty at boarding"),
        )
        for attr_name, description in segment_attributes:
            create_extra(
                "TRANSIT_SEGMENT",
                attr_name,
                description,
                overwrite=True,
                scenario=emme_scenario,
            )

    def _get_network_with_ccr_scenario_attributes(self, emme_scenario):
        """Load a partial network carrying the attributes needed for CCR calcs.

        Ensures @eawt / @capacity_penalty exist on the scenario, then loads
        a transit-segment partial network and copies onto it the segment,
        vehicle, and line attribute values required by the CCR penalty
        calculations. A (initially unset) `capacity` attribute is created
        on transit lines for later population.

        Returns:
            Emme network object with attribute values copied from scenario.
        """
        self._add_ccr_vars_to_scenario(emme_scenario)

        line_attrs = ["headway"]
        if self.config.use_fares:
            # fare-based networks carry the original mode in #src_mode
            line_attrs.append("#src_mode")
        attributes = {
            "TRANSIT_SEGMENT": ["@phdwy", "transit_volume", "transit_boardings"],
            "TRANSIT_VEHICLE": ["seated_capacity", "total_capacity"],
            "TRANSIT_LINE": line_attrs,
        }

        # load network object from scenario (on disk) and copy some attributes
        network = emme_scenario.get_partial_network(
            ["TRANSIT_SEGMENT"], include_attributes=False
        )
        network.create_attribute("TRANSIT_LINE", "capacity")
        self.emme_manager.copy_attribute_values(emme_scenario, network, attributes)
        return network

    def _calc_segment_ccr_penalties(self, time_period):
        """Calculate extra average wait time (@eawt) and @capacity_penalty on the segments.

        Loads the time-period scenario network with CCR attributes attached,
        computes each line's period capacity from vehicle total capacity,
        headway, and the period duration, then evaluates the per-segment
        extra added wait time and the capacity penalty, and saves both
        back to the scenario (on disk).

        Args:
            time_period: time period name abbreviation (e.g. "AM").
        """
        _emme_scenario = self.transit_emmebank.scenario(time_period)
        _network = self._get_network_with_ccr_scenario_attributes(_emme_scenario)

        _eawt_weights = self.config.eawt_weights
        # map mode ID -> mode config for lookup inside calc_extra_wait_time
        _mode_config = {
            mode_config.mode_id: mode_config for mode_config in self.config.modes
        }

        _duration = self.time_period_durations[time_period.lower()]
        for line in _network.transit_lines():
            line.capacity = time_period_capacity(
                line.vehicle.total_capacity, line.headway, _duration
            )

        # QUESTION: document origin of this param.
        _hdwy_fraction = 0.5  # fixed in assignment spec
        for segment in _network.transit_segments():
            segment["@eawt"] = calc_extra_wait_time(
                segment,
                segment.line.capacity,
                _eawt_weights,
                _mode_config,
                use_fares=self.config.use_fares,
            )
            # penalty: excess of perceived headway over (eawt + scheduled
            # headway), floored at zero, weighted by the headway fraction
            segment["@capacity_penalty"] = (
                max(segment["@phdwy"] - segment["@eawt"] - segment.line.headway, 0)
                * _hdwy_fraction
            )
        # copy (save) results back from the network to the scenario (on disk)
        _ccr_attributes = {"TRANSIT_SEGMENT": ["@eawt", "@capacity_penalty"]}
        self.emme_manager.copy_attribute_values(
            _network, _emme_scenario, _ccr_attributes
        )

__init__(controller)

Constructor for TransitAssignment.

Parameters:

Name Type Description Default
controller 'RunController'

RunController object.

required
Source code in tm2py\components\network\transit\transit_assign.py
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
def __init__(self, controller: "RunController"):
    """Constructor for TransitAssignment.

    Args:
        controller: RunController object.
    """
    super().__init__(controller)
    # transit-specific section of the full model configuration
    self.config = self.controller.config.transit
    self.sub_components = {
        "prepare transit demand": PrepareTransitDemand(controller),
    }
    self.transit_network = PrepareTransitNetwork(controller)
    self._demand_matrix = None  # FIXME
    self._num_processors = self.controller.emme_manager.num_processors
    # lazily-populated per-run state
    self._time_period = None
    self._scenario = None
    self._transit_emmebank = None

run()

Run transit assignments.

Source code in tm2py\components\network\transit\transit_assign.py
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
@LogStartEnd("Transit assignments")
def run(self):
    """Run transit assignments.

    Iteration 0 requires a warmstart (warmstart demand and/or skims);
    on later iterations demand is prepared from the model's own demand
    components. For each time period, auto times are updated in the
    transit network, the appropriate assignment variant (uncongested,
    congested, or CCR) is run, and configured summaries are exported.

    Raises:
        ValueError: if iteration 0 is reached without warmstart enabled.
    """
    if self.controller.iteration == 0:
        # ensure the zero matrix exists in the transit emmebank
        self.transit_emmebank.zero_matrix
        if self.controller.config.warmstart.warmstart:
            if self.controller.config.warmstart.use_warmstart_demand:
                self.sub_components["prepare transit demand"].run()
        else:
            # transit must be warmstarted at iteration 0
            raise ValueError(
                "ERROR: transit has to be warmstarted, please either specify "
                "use_warmstart_skim or use_warmstart_demand"
            )
    else:
        self.sub_components["prepare transit demand"].run()

    for time_period in self.time_period_names:
        # update auto times
        print("updating auto time in transit network")
        self.transit_network.update_auto_times(time_period)

        if self.controller.iteration == 0:
            # iteration = 0 : run uncongested transit assignment
            use_ccr = False
            congested_transit_assignment = False
            print("running uncongested transit assignment with warmstart demand")
        elif (
            self.controller.iteration == 1
            and self.controller.config.warmstart.use_warmstart_skim
        ):
            # iteration = 1 and use_warmstart_skim : still uncongested
            use_ccr = False
            congested_transit_assignment = False
        else:
            # iteration >= 1 without warmstart skims: congested assignment;
            # off-peak periods are always assigned uncongested
            use_ccr = self.config.use_ccr
            if time_period in ["EA", "EV", "MD"]:
                congested_transit_assignment = False
            else:
                congested_transit_assignment = (
                    self.config.congested_transit_assignment
                )

        self.run_transit_assign(time_period, use_ccr, congested_transit_assignment)

        # output_summaries
        if self.config.output_stop_usage_path is not None:
            network, class_stop_attrs = self._calc_connector_flows(time_period)
            self._export_connector_flows(network, class_stop_attrs, time_period)
        if self.config.output_transit_boardings_path is not None:
            self._export_boardings_by_line(time_period)
        if self.config.output_transit_segment_path is not None:
            self._export_transit_segment(time_period)
        if self.config.output_station_to_station_flow_path is not None:
            self._export_boardings_by_station(time_period)
        if self.config.output_transfer_at_station_path is not None:
            self._export_transfer_at_stops(time_period)

validate_inputs()

Validate the inputs.

Source code in tm2py\components\network\transit\transit_assign.py
422
423
def validate_inputs(self):
    """Validate the inputs.

    No transit-specific validation is implemented yet; this method is
    present to satisfy the component interface.
    """

TransitAssignmentClass

Transit assignment class, represents data from config and conversion to Emme specs.

Internal properties
Source code in tm2py\components\network\transit\transit_assign.py
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
1664
1665
1666
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
1716
1717
1718
1719
1720
1721
1722
1723
1724
1725
1726
1727
1728
1729
1730
1731
1732
1733
1734
1735
1736
1737
1738
1739
1740
1741
1742
1743
1744
1745
1746
1747
1748
1749
1750
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
1822
1823
1824
1825
1826
1827
1828
1829
1830
1831
1832
1833
1834
1835
1836
1837
1838
1839
1840
1841
1842
1843
1844
1845
1846
1847
1848
1849
1850
1851
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864
1865
1866
1867
1868
1869
1870
1871
1872
1873
1874
1875
1876
1877
1878
1879
1880
1881
1882
1883
1884
1885
1886
1887
1888
1889
1890
1891
1892
1893
1894
1895
1896
1897
1898
1899
1900
1901
1902
1903
1904
1905
1906
1907
1908
1909
1910
1911
1912
1913
1914
1915
1916
1917
1918
1919
1920
1921
1922
1923
1924
1925
1926
1927
1928
1929
1930
1931
1932
1933
1934
1935
1936
1937
1938
1939
1940
1941
1942
1943
1944
1945
1946
1947
1948
1949
1950
1951
1952
1953
1954
1955
1956
1957
1958
1959
1960
1961
1962
1963
1964
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1991
1992
1993
1994
1995
1996
1997
1998
1999
2000
2001
2002
2003
2004
2005
2006
2007
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019
2020
2021
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037
2038
2039
2040
2041
2042
2043
2044
2045
2046
2047
2048
2049
2050
2051
2052
2053
2054
2055
2056
2057
2058
2059
2060
2061
2062
2063
2064
2065
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
2082
2083
2084
2085
2086
2087
2088
2089
2090
2091
2092
2093
2094
2095
2096
2097
2098
2099
2100
2101
2102
2103
2104
2105
2106
2107
2108
2109
2110
2111
2112
2113
2114
2115
2116
2117
2118
2119
2120
2121
2122
2123
2124
2125
2126
2127
2128
2129
2130
2131
2132
2133
2134
2135
2136
2137
2138
2139
2140
2141
2142
2143
2144
2145
2146
2147
2148
2149
2150
2151
2152
2153
2154
class TransitAssignmentClass:
    """Transit assignment class, represents data from config and conversion to Emme specs.

    Internal properties:
        _name: the class name loaded from config (not to be changed)
        _class_config: the transit class config (TransitClassConfig)
        _transit_config: the root transit assignment config (TransitConfig)
        _time_period: the time period name
        _iteration: the current iteration
        _num_processors: the number of processors to use, loaded from config
        _fare_modes: the mapping from the generated fare mode ID to the original
            source mode ID
        _spec_dir: directory to find the generated journey levels tables from
            the apply fares step
    """

    # disable too many instance attributes and arguments recommendations
    # pylint: disable=R0902, R0913

    def __init__(
        self,
        tclass_config: TransitClassConfig,
        config: TransitConfig,
        time_period: str,
        iteration: int,
        num_processors: int,
        fare_modes: Dict[str, Set[str]],
        spec_dir: str,
    ):
        """Assignment class constructor.

        Args:
            tclass_config: the transit class config (TransitClassConfig)
            config: the root transit assignment config (TransitConfig)
            time_period: the time period name
            iteration: the current iteration
            num_processors: the number of processors to use, loaded from config
            fare_modes: the mapping from the generated fare mode ID to the original
                source mode ID
            spec_dir: directory to find the generated journey levels tables from
                the apply fares step
        """
        # store inputs as private attributes; exposed via properties below
        self._name = tclass_config.name
        self._class_config = tclass_config
        self._config = config
        self._time_period = time_period
        self._iteration = iteration
        self._num_processors = num_processors
        self._fare_modes = fare_modes
        self._spec_dir = spec_dir

    @property
    def name(self) -> str:
        """The assignment class name (from config, read-only)."""
        return self._name

    @property
    def emme_transit_spec(self) -> EmmeTransitSpec:
        """Return Emme Extended transit assignment specification.

        Converted from input config (transit.classes, with some parameters from
        transit table), see also Emme Help for
        Extended transit assignment for specification details.

        The base spec uses network extra attributes for headway fraction,
        boarding penalty, and in-vehicle perception; fare costs and
        connector walk times are layered on below when enabled in config.
        """
        spec = {
            "type": "EXTENDED_TRANSIT_ASSIGNMENT",
            "modes": self._modes,
            "demand": self._demand_matrix,
            "waiting_time": {
                "effective_headways": self._config.effective_headway_source,
                "headway_fraction": "@hdw_fraction",
                "perception_factor": self._config.initial_wait_perception_factor,
                "spread_factor": 1.0,
            },
            # no global boarding penalty; per-line penalty applied below
            "boarding_cost": {"global": {"penalty": 0, "perception_factor": 1}},
            "boarding_time": {
                "on_lines": {
                    "penalty": "@iboard_penalty",
                    "perception_factor": 1,
                }
            },
            "in_vehicle_cost": None,
            "in_vehicle_time": {"perception_factor": "@invehicle_factor"},
            "aux_transit_time": {
                "perception_factor": 1
            },  # walk and drive perception factors are specified in mode definition "speed_or_time_factor"
            "aux_transit_cost": None,
            "journey_levels": self._journey_levels,
            "flow_distribution_between_lines": {"consider_total_impedance": True},
            "flow_distribution_at_origins": {
                "fixed_proportions_on_connectors": None,
                "choices_at_origins": "OPTIMAL_STRATEGY",
            },
            "flow_distribution_at_regular_nodes_with_aux_transit_choices": {
                "choices_at_regular_nodes": "OPTIMAL_STRATEGY"
            },
            "circular_lines": {"stay": False},
            "connector_to_connector_path_prohibition": None,
            "od_results": {"total_impedance": None},
            "performance_settings": {"number_of_processors": self._num_processors},
        }
        # Fares: board / in-vehicle costs perceived in minutes per dollar
        if self._config.use_fares:
            fare_perception = 60 / self._config.value_of_time
            spec["boarding_cost"] = {
                "on_segments": {
                    "penalty": "@board_cost",
                    "perception_factor": fare_perception,
                }
            }
            spec["in_vehicle_cost"] = {
                "penalty": "@invehicle_cost",
                "perception_factor": fare_perception,
            }
        # Optional aux_transit_cost, used for walk time on connectors,
        #          set if override_connector_times is on
        if self._config.get("override_connector_times", False):
            spec["aux_transit_cost"] = {
                "penalty": f"@walk_time_{self.name.lower()}",
                "perception_factor": self._config.walk_perception_factor,
            }
        return spec

    @property
    def _demand_matrix(self) -> str:
        """Quoted Emme matrix ID for this class's demand in this time period."""
        return 'mf"TRN_{}_{}"'.format(
            self._class_config.skim_set_id, self._time_period
        )

    def _get_used_mode_ids(self, modes: List[TransitModeConfig]) -> List[str]:
        """Get list of assignment Mode IDs from input list of Emme mode objects.

        Accounts for fare table (mapping from input mode ID to auto-generated
        set of mode IDs for fare transition table (fares.far input) by applyfares
        component.
        """
        if not self._config.use_fares:
            return [mode.mode_id for mode in modes]
        # with fares on, transit modes expand to their generated fare-mode IDs
        used_ids = set()
        for mode in modes:
            if mode.assign_type == "TRANSIT":
                used_ids.update(self._fare_modes[mode.mode_id])
            else:
                used_ids.add(mode.mode_id)
        return list(used_ids)

    @property
    def _modes(self) -> List[str]:
        """List of modes IDs (str) to use in assignment for this class."""
        wanted_types = self._class_config.mode_types
        selected = [mode for mode in self._config.modes if mode.type in wanted_types]
        return self._get_used_mode_ids(selected)

    @property
    def _transit_modes(self) -> List[str]:
        """List of transit modes IDs (str) to use in assignment for this class."""
        wanted_types = self._class_config.mode_types
        selected = [
            mode
            for mode in self._config.modes
            if mode.type in wanted_types and mode.assign_type == "TRANSIT"
        ]
        return self._get_used_mode_ids(selected)

    @property
    def fare_perception(self) -> float:
        """Fare perception factor in minutes per dollar (60 / value of time)."""
        return 60 / self._config.value_of_time

    @property
    def headway_fraction(self) -> float:
        """Fixed headway fraction used in the assignment spec."""
        return 0.5

    @property
    def _journey_levels(self) -> EmmeTransitJourneyLevelSpec:
        modes = self._transit_modes
        effective_headway_source = self._config.effective_headway_source
        if self._config.use_fares:
            fare_perception = self.fare_perception
            file_name = f"{self._time_period}_ALLPEN_journey_levels.ems"
            with open(
                os.path.join(self._spec_dir, file_name), "r", encoding="utf8"
            ) as jl_spec:
                journey_levels = _json.load(jl_spec)["journey_levels"]

            if self.name == "PNR_TRN_WLK":
                new_journey_levels = copy.deepcopy(journey_levels)

                for i in range(0, len(new_journey_levels)):
                    jls = new_journey_levels[i]
                    for level in jls["transition_rules"]:
                        level["next_journey_level"] = level["next_journey_level"] + 1
                    jls["transition_rules"].extend(
                        [
                            {"mode": "e", "next_journey_level": i + 2},
                            {
                                "mode": "D",
                                "next_journey_level": len(new_journey_levels) + 2,
                            },
                            {"mode": "w", "next_journey_level": i + 2},
                            {
                                "mode": "p",
                                "next_journey_level": len(new_journey_levels) + 2,
                            },
                        ]
                    )
                # level 0: drive access
                transition_rules_drive_access = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_drive_access:
                    level["next_journey_level"] = len(new_journey_levels) + 2
                transition_rules_drive_access.extend(
                    [
                        {
                            "mode": "e",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {"mode": "D", "next_journey_level": 0},
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {"mode": "p", "next_journey_level": 1},
                    ]
                )
                # level 1: use transit
                transition_rules_pnr = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_pnr:
                    level["next_journey_level"] = 2
                transition_rules_pnr.extend(
                    [
                        {
                            "mode": "e",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {"mode": "p", "next_journey_level": 1},
                    ]
                )
                # level len(new_journey_levels)+2: every mode is prohibited
                transition_rules_prohibit = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_prohibit:
                    level["next_journey_level"] = len(new_journey_levels) + 2
                transition_rules_prohibit.extend(
                    [
                        {
                            "mode": "e",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "p",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                    ]
                )
                new_journey_levels.insert(
                    0,
                    {
                        "description": "drive access",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_drive_access,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    },
                )
                new_journey_levels.insert(
                    1,
                    {
                        "description": "pnr",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_pnr,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    },
                )
                new_journey_levels.append(
                    {
                        "description": "prohibit",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_prohibit,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    }
                )
                for level in new_journey_levels[2:-1]:
                    level["waiting_time"] = {
                        "headway_fraction": "@hdw_fraction",
                        "effective_headways": effective_headway_source,
                        "spread_factor": 1,
                        "perception_factor": "@wait_pfactor",
                    }
                    level["boarding_time"] = {
                        "on_lines": {
                            "penalty": "@xboard_penalty",
                            "perception_factor": 1,
                        },
                        "at_nodes": {
                            "penalty": "@xboard_nodepen",
                            "perception_factor": 1,
                        },
                    }
                # add in the correct value of time parameter
                for level in new_journey_levels:
                    if level["boarding_cost"]:
                        level["boarding_cost"]["on_segments"][
                            "perception_factor"
                        ] = fare_perception

            elif self.name == "WLK_TRN_PNR":
                new_journey_levels = copy.deepcopy(journey_levels)

                for i in range(0, len(new_journey_levels)):
                    jls = new_journey_levels[i]
                    jls["destinations_reachable"] = False
                    jls["transition_rules"].extend(
                        [
                            {
                                "mode": "a",
                                "next_journey_level": len(new_journey_levels) + 2,
                            },
                            {
                                "mode": "D",
                                "next_journey_level": len(new_journey_levels) + 2,
                            },
                            {"mode": "w", "next_journey_level": i + 1},
                            {
                                "mode": "p",
                                "next_journey_level": len(new_journey_levels) + 1,
                            },
                        ]
                    )
                # level 0: walk access
                transition_rules_walk_access = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_walk_access:
                    level["next_journey_level"] = 1
                transition_rules_walk_access.extend(
                    [
                        {"mode": "a", "next_journey_level": 0},
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "p",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                    ]
                )
                # level len(new_journey_levels)+1: drive home
                transition_rules_drive_home = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_drive_home:
                    level["next_journey_level"] = len(new_journey_levels) + 2
                transition_rules_drive_home.extend(
                    [
                        {
                            "mode": "a",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 1,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "p",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                    ]
                )
                # level len(new_journey_levels)+2: every mode is prohibited
                transition_rules_prohibit = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_prohibit:
                    level["next_journey_level"] = len(new_journey_levels) + 2
                transition_rules_prohibit.extend(
                    [
                        {
                            "mode": "a",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "p",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                    ]
                )
                new_journey_levels.insert(
                    0,
                    {
                        "description": "walk access",
                        "destinations_reachable": True,
                        "transition_rules": transition_rules_walk_access,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    },
                )
                new_journey_levels.append(
                    {
                        "description": "drive home",
                        "destinations_reachable": True,
                        "transition_rules": transition_rules_drive_home,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    }
                )
                new_journey_levels.append(
                    {
                        "description": "prohibit",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_prohibit,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    }
                )
                for level in new_journey_levels[1:-2]:
                    level["waiting_time"] = {
                        "headway_fraction": "@hdw_fraction",
                        "effective_headways": effective_headway_source,
                        "spread_factor": 1,
                        "perception_factor": "@wait_pfactor",
                    }
                    level["boarding_time"] = {
                        "on_lines": {
                            "penalty": "@xboard_penalty",
                            "perception_factor": 1,
                        },
                        "at_nodes": {
                            "penalty": "@xboard_nodepen",
                            "perception_factor": 1,
                        },
                    }
                # add in the correct value of time parameter
                for level in new_journey_levels:
                    if level["boarding_cost"]:
                        level["boarding_cost"]["on_segments"][
                            "perception_factor"
                        ] = fare_perception

            elif self.name == "KNR_TRN_WLK":
                new_journey_levels = copy.deepcopy(journey_levels)

                for i in range(0, len(new_journey_levels)):
                    jls = new_journey_levels[i]
                    for level in jls["transition_rules"]:
                        level["next_journey_level"] = level["next_journey_level"] + 1
                    jls["transition_rules"].extend(
                        [
                            {"mode": "e", "next_journey_level": i + 2},
                            {
                                "mode": "D",
                                "next_journey_level": len(new_journey_levels) + 2,
                            },
                            {"mode": "w", "next_journey_level": i + 2},
                            {
                                "mode": "k",
                                "next_journey_level": len(new_journey_levels) + 2,
                            },
                        ]
                    )
                # level 0: drive access
                transition_rules_drive_access = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_drive_access:
                    level["next_journey_level"] = len(new_journey_levels) + 2
                transition_rules_drive_access.extend(
                    [
                        {
                            "mode": "e",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {"mode": "D", "next_journey_level": 0},
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {"mode": "k", "next_journey_level": 1},
                    ]
                )
                # level 1: use transit
                transition_rules_knr = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_knr:
                    level["next_journey_level"] = 2
                transition_rules_knr.extend(
                    [
                        {
                            "mode": "e",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {"mode": "k", "next_journey_level": 1},
                    ]
                )
                # level len(new_journey_levels)+2: every mode is prohibited
                transition_rules_prohibit = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_prohibit:
                    level["next_journey_level"] = len(new_journey_levels) + 2
                transition_rules_prohibit.extend(
                    [
                        {
                            "mode": "e",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "k",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                    ]
                )
                new_journey_levels.insert(
                    0,
                    {
                        "description": "drive access",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_drive_access,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    },
                )
                new_journey_levels.insert(
                    1,
                    {
                        "description": "knr",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_knr,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    },
                )
                new_journey_levels.append(
                    {
                        "description": "prohibit",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_prohibit,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    }
                )
                for level in new_journey_levels[2:-1]:
                    level["waiting_time"] = {
                        "headway_fraction": "@hdw_fraction",
                        "effective_headways": effective_headway_source,
                        "spread_factor": 1,
                        "perception_factor": "@wait_pfactor",
                    }
                    level["boarding_time"] = {
                        "on_lines": {
                            "penalty": "@xboard_penalty",
                            "perception_factor": 1,
                        },
                        "at_nodes": {
                            "penalty": "@xboard_nodepen",
                            "perception_factor": 1,
                        },
                    }
                # add in the correct value of time parameter
                for level in new_journey_levels:
                    if level["boarding_cost"]:
                        level["boarding_cost"]["on_segments"][
                            "perception_factor"
                        ] = fare_perception

            elif self.name == "WLK_TRN_KNR":
                new_journey_levels = copy.deepcopy(journey_levels)

                for i in range(0, len(new_journey_levels)):
                    jls = new_journey_levels[i]
                    jls["destinations_reachable"] = False
                    jls["transition_rules"].extend(
                        [
                            {
                                "mode": "a",
                                "next_journey_level": len(new_journey_levels) + 2,
                            },
                            {
                                "mode": "D",
                                "next_journey_level": len(new_journey_levels) + 2,
                            },
                            {"mode": "w", "next_journey_level": i + 1},
                            {
                                "mode": "k",
                                "next_journey_level": len(new_journey_levels) + 1,
                            },
                        ]
                    )
                # level 0: walk access
                transition_rules_walk_access = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_walk_access:
                    level["next_journey_level"] = 1
                transition_rules_walk_access.extend(
                    [
                        {"mode": "a", "next_journey_level": 0},
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "k",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                    ]
                )
                # level len(new_journey_levels)+1: drive home
                transition_rules_drive_home = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_drive_home:
                    level["next_journey_level"] = len(new_journey_levels) + 2
                transition_rules_drive_home.extend(
                    [
                        {
                            "mode": "a",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 1,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "k",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                    ]
                )
                # level len(new_journey_levels)+2: every mode is prohibited
                transition_rules_prohibit = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                for level in transition_rules_prohibit:
                    level["next_journey_level"] = len(new_journey_levels) + 2
                transition_rules_prohibit.extend(
                    [
                        {
                            "mode": "a",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "D",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                        {
                            "mode": "k",
                            "next_journey_level": len(new_journey_levels) + 2,
                        },
                    ]
                )
                new_journey_levels.insert(
                    0,
                    {
                        "description": "walk access",
                        "destinations_reachable": True,
                        "transition_rules": transition_rules_walk_access,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    },
                )
                new_journey_levels.append(
                    {
                        "description": "drive home",
                        "destinations_reachable": True,
                        "transition_rules": transition_rules_drive_home,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    }
                )
                new_journey_levels.append(
                    {
                        "description": "prohibit",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_prohibit,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    }
                )
                for level in new_journey_levels[1:-2]:
                    level["waiting_time"] = {
                        "headway_fraction": "@hdw_fraction",
                        "effective_headways": effective_headway_source,
                        "spread_factor": 1,
                        "perception_factor": "@wait_pfactor",
                    }
                    level["boarding_time"] = {
                        "on_lines": {
                            "penalty": "@xboard_penalty",
                            "perception_factor": 1,
                        },
                        "at_nodes": {
                            "penalty": "@xboard_nodepen",
                            "perception_factor": 1,
                        },
                    }
                # add in the correct value of time parameter
                for level in new_journey_levels:
                    if level["boarding_cost"]:
                        level["boarding_cost"]["on_segments"][
                            "perception_factor"
                        ] = fare_perception

            elif self.name == "WLK_TRN_WLK":
                new_journey_levels = copy.deepcopy(journey_levels)

                for i in range(0, len(new_journey_levels)):
                    jls = new_journey_levels[i]
                    jls["transition_rules"].extend(
                        [
                            {"mode": "e", "next_journey_level": i + 1},
                            {"mode": "w", "next_journey_level": i + 1},
                            {
                                "mode": "a",
                                "next_journey_level": i + 1,
                            },
                        ]
                    )
                # level 0: only allow walk access and walk auxilary
                # must use the trasit modes to get onto the next level,
                transition_rules_walk = copy.deepcopy(
                    journey_levels[0]["transition_rules"]
                )
                transition_rules_walk.extend(
                    [
                        {
                            "mode": "e",
                            "next_journey_level": 0,
                        },
                        {
                            "mode": "w",
                            "next_journey_level": 0,
                        },
                        {"mode": "a", "next_journey_level": 0},
                    ]
                )
                new_journey_levels.insert(
                    0,
                    {
                        "description": "base",
                        "destinations_reachable": False,
                        "transition_rules": transition_rules_walk,
                        "waiting_time": None,
                        "boarding_time": None,
                        "boarding_cost": None,
                    },
                )
                for level in new_journey_levels[1:]:
                    level["waiting_time"] = {
                        "headway_fraction": "@hdw_fraction",
                        "effective_headways": effective_headway_source,
                        "spread_factor": 1,
                        "perception_factor": "@wait_pfactor",
                    }
                    level["boarding_time"] = {
                        "on_lines": {
                            "penalty": "@xboard_penalty",
                            "perception_factor": 1,
                        },
                        "at_nodes": {
                            "penalty": "@xboard_nodepen",
                            "perception_factor": 1,
                        },
                    }
                # add in the correct value of time parameter
                for level in new_journey_levels:
                    if level["boarding_cost"]:
                        level["boarding_cost"]["on_segments"][
                            "perception_factor"
                        ] = fare_perception

            with open(
                os.path.join(
                    self._spec_dir,
                    "%s_%s_journey_levels.ems" % (self._time_period, self.name),
                ),
                "w",
            ) as jl_spec_file:
                spec = {
                    "type": "EXTENDED_TRANSIT_ASSIGNMENT",
                    "journey_levels": new_journey_levels,
                }
                _json.dump(spec, jl_spec_file, indent=4)

        else:
            new_journey_levels = [
                {
                    "description": "",
                    "destinations_reachable": True,
                    "transition_rules": [
                        {"mode": m, "next_journey_level": 1} for m in modes
                    ],
                },
                {
                    "description": "",
                    "destinations_reachable": True,
                    "transition_rules": [
                        {"mode": m, "next_journey_level": 1} for m in modes
                    ],
                    "waiting_time": {
                        "headway_fraction": "@hdw_fraction",
                        "effective_headways": effective_headway_source,
                        "spread_factor": 1,
                        "perception_factor": "@wait_pfactor",
                    },
                },
            ]
            for level in new_journey_levels[1:]:
                level["boarding_time"] = {
                    "on_lines": {"penalty": "@xboard_penalty", "perception_factor": 1},
                    "at_nodes": {"penalty": "@xboard_nodepen", "perception_factor": 1},
                }

        return new_journey_levels

emme_transit_spec property

Return Emme Extended transit assignment specification.

Converted from input config (transit.classes, with some parameters from transit table), see also Emme Help for Extended transit assignment for specification details.

name property

The class name.

__init__(tclass_config, config, time_period, iteration, num_processors, fare_modes, spec_dir)

Assignment class constructor.

Parameters:

Name Type Description Default
tclass_config TransitClassConfig

the transit class config (TransitClassConfig)

required
config TransitConfig

the root transit assignment config (TransitConfig)

required
time_period str

the time period name

required
iteration int

the current iteration

required
num_processors int

the number of processors to use, loaded from config

required
fare_modes Dict[str, Set[str]]

the mapping from the generated fare mode ID to the original source mode ID

required
spec_dir str

directory to find the generated journey levels tables from the apply fares step

required
Source code in tm2py\components\network\transit\transit_assign.py
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
def __init__(
    self,
    tclass_config: TransitClassConfig,
    config: TransitConfig,
    time_period: str,
    iteration: int,
    num_processors: int,
    fare_modes: Dict[str, Set[str]],
    spec_dir: str,
):
    """Construct an assignment class from its configuration and run context.

    Args:
        tclass_config: the transit class config (TransitClassConfig)
        config: the root transit assignment config (TransitConfig)
        time_period: the time period name
        iteration: the current iteration
        num_processors: the number of processors to use, loaded from config
        fare_modes: the mapping from the generated fare mode ID to the original
            source mode ID
        spec_dir: directory to find the generated journey levels tables from
            the apply fares step
    """
    # Configuration objects and derived identity.
    self._class_config = tclass_config
    self._config = config
    self._name = tclass_config.name
    # Run context.
    self._time_period = time_period
    self._iteration = iteration
    self._num_processors = num_processors
    # Fare inputs and location of generated journey-level tables.
    self._fare_modes = fare_modes
    self._spec_dir = spec_dir

calc_adjusted_headway(segment, segment_capacity)

Headway adjusted for segment crowding (source and theory not yet documented).

TODO: add documentation about source and theory behind this.

Parameters:

Name Type Description Default
segment

Emme transit segment object

required
segment_capacity float

description

required

Returns:

Name Type Description
float float

Adjusted headway

Source code in tm2py\components\network\transit\transit_assign.py
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
def calc_adjusted_headway(segment, segment_capacity: float) -> float:
    """Return the segment headway adjusted for crowding.

    The previous headway (``@phdwy``) is inflated by a growth factor driven by
    the ratio of boardings to remaining capacity, capped at
    ``max_hdwy_growth``; the result is bounded below by the line's scheduled
    headway and above by ``max_headway``.

    TODO: add documentation about source and theory behind this.

    Args:
        segment: Emme transit segment object
        segment_capacity (float): segment capacity for the time period
            (same units as transit volume) -- semantics set by caller

    Returns:
        float: Adjusted headway
    """
    # TODO add to params
    max_hdwy_growth = 1.5  # cap on the growth factor applied to @phdwy
    max_headway = 999.98  # hard upper bound on any headway value
    # QUESTION FOR INRO: what is the difference between segment["@phdwy"] and line.headway?
    # is one the perceived headway?
    _transit_volume = segment.transit_volume
    _transit_boardings = segment.transit_boardings
    _previous_headway = segment["@phdwy"]
    _current_headway = segment.line.headway
    # Capacity left after through riders, credited with this segment's boardings.
    _available_capacity = max(
        segment_capacity - _transit_volume + _transit_boardings, 0
    )

    # The +1 in numerator and denominator keeps the growth factor finite and
    # defined when the segment is at or over capacity. Use the named constant
    # max_hdwy_growth (previously the literal 1.5 was duplicated here).
    adjusted_headway = min(
        max_headway,
        _previous_headway
        * min((_transit_boardings + 1) / (_available_capacity + 1), max_hdwy_growth),
    )
    # Never report a headway better than the line's scheduled headway.
    adjusted_headway = max(_current_headway, adjusted_headway)

    return adjusted_headway

calc_extra_wait_time(segment, segment_capacity, eawt_weights, mode_config, use_fares=False)

Calculate extra added wait time based on…

TODO document fully.

Parameters:

Name Type Description Default
segment _type_

Emme transit segment object.

required
segment_capacity float

description

required
eawt_weights

extra added wait time weights

required
mode_config dict

mode character to mode config

required
use_fares bool

description. Defaults to False.

False

Returns:

Name Type Description
_type_

description

Source code in tm2py\components\network\transit\transit_assign.py
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
def calc_extra_wait_time(
    segment,
    segment_capacity: float,
    eawt_weights,
    mode_config: dict,
    use_fares: bool = False,
):
    """Calculate the extra added wait time (EAWT) for a transit segment.

    # TODO document fully.

    Args:
        segment (_type_): Emme transit segment object.
        segment_capacity (float): _description_
        eawt_weights: extra added wait time weights
        mode_config: mode character to mode config
        use_fares (bool, optional): _description_. Defaults to False.

    Returns:
        _type_: _description_
    """
    line = segment.line
    # Guard against near-zero headways blowing up the inverse-headway term.
    effective_headway = line.headway if line.headway >= 0.1 else 9999
    total_offs = calc_total_offs(line)
    offs_so_far = calc_offs_thru_segment(segment)

    # TODO Document and add params to config. Have no idea what source is here.
    base_eawt = (
        eawt_weights.constant
        + eawt_weights.weight_inverse_headway * (1 / effective_headway)
        + eawt_weights.vcr * (segment.transit_volume / segment_capacity)
        + eawt_weights.exit_proportion * (offs_so_far / total_offs)
    )

    # With fares, modes were remapped, so the original mode lives in #src_mode.
    mode_id = line["#src_mode"] if use_fares else line.mode.id
    # An empty mode id means no mode-specific scaling.
    eawt_factor = 1 if mode_id == "" else mode_config[mode_id]["eawt_factor"]

    return base_eawt * eawt_factor

calc_offs_thru_segment(segment)

Total alightings on a line up to and including the given segment.

Parameters:

Name Type Description Default
segment _type_

description

required

Returns:

Name Type Description
float float

description

Source code in tm2py\components\network\transit\transit_assign.py
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
def calc_offs_thru_segment(segment) -> float:
    """Total alightings on the line up to and including this segment.

    For each consecutive pair of segments, the alightings at the downstream
    segment are the drop in through-volume plus the boardings there:
    ``prev.transit_volume - this.transit_volume + this.transit_boardings``.
    Returns 0 when the slice contains fewer than two segments.

    Args:
        segment (_type_): Emme transit segment object

    Returns:
        float: total alightings through this segment
    """
    # SIJIA TODO check that it should be [:segment.number+1] . Not sure if 0-indexed in emme or 1-indexed?
    # list(...) replaces the previous redundant `[seg for seg in iter(...)]`.
    segments_thru_this_segment = list(segment.line.segments(True))[
        : segment.number + 1
    ]
    # Sum pairwise alightings without materializing an intermediate list.
    return sum(
        prev_seg.transit_volume - this_seg.transit_volume + this_seg.transit_boardings
        for prev_seg, this_seg in zip(
            segments_thru_this_segment[:-1], segments_thru_this_segment[1:]
        )
    )

calc_total_offs(line)

Calculate total alightings for a line.

Parameters:

Name Type Description Default
line _type_

description

required
Source code in tm2py\components\network\transit\transit_assign.py
196
197
198
199
200
201
202
203
204
205
206
207
208
def calc_total_offs(line) -> float:
    """Calculate total alightings for a line.

    Args:
        line (_type_): _description_
    """
    # NOTE This was done previously using:
    # total_offs += prev_seg.transit_volume - seg.transit_volume + seg.transit_boardings
    # but offs should equal ons for a whole line, so this seems simpler
    total_boardings = sum(seg.transit_boardings for seg in line.segments(True))
    # Floor near-zero totals to avoid a downstream divide-by-zero.
    return 9999 if total_boardings < 0.001 else total_boardings

func_returns_calc_updated_perceived_headway(time_period_duration, eawt_weights, mode_config, use_fares)

Function that returns the source of the `calc_headway` function for Emme assignment, with parameters pre-loaded; it acts like `functools.partial`, since Emme does not accept partial functions.

Source code in tm2py\components\network\transit\transit_assign.py
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
def func_returns_calc_updated_perceived_headway(
    time_period_duration, eawt_weights, mode_config, use_fares
):
    """Return the *source code* of a ``calc_headway`` function for Emme.

    Emme's assignment API accepts a function's source text rather than a
    callable, so ``functools.partial`` cannot be used to pre-bind parameters.
    Instead, the inner function below carries ``str.format``-style ``{...}``
    placeholders; its source is extracted with ``inspect.getsource``,
    dedented, and the placeholders substituted with the given arguments
    before the text is handed to Emme.

    Args:
        time_period_duration: time period duration in minutes.
        eawt_weights: extra added wait time weights.
        mode_config: mode character to mode config mapping.
        use_fares: if true, the generated code uses fare (#src_mode) lookups.

    Returns:
        str: formatted source code of ``calc_headway``.
    """

    # NOTE: calc_headway is never executed here -- only its source text is
    # returned after placeholder substitution, so the `{...}` expressions in
    # its body are format placeholders, not set literals.
    def calc_headway(transit_volume, transit_boardings, headway, capacity, segment):
        """Calculate perceived (???) headway updated by ... and extra added wait time.

        # TODO Document more fully.

        Args:
            time_period_duration(float): time period duration in minutes
            segment: Emme Transit segment object
            eawt_weights:
            mode_config:
            use_fares (bool): if true, will use fares

        Returns:
            _type_: _description_
        """
        # QUESTION FOR INRO: Kevin separately put segment.line.headway and headway as an arg.
        # Would they be different? Why?
        # TODO: Either can we label the headways so it is clear what is diff about them or just use single value?

        from tm2py.config import (
            CcrWeightsConfig,
            EawtWeightsConfig,
            TransitClassConfig,
            TransitConfig,
            TransitModeConfig,
        )

        _segment_capacity = capacity

        vcr = transit_volume / _segment_capacity

        _extra_added_wait_time = calc_extra_wait_time(
            segment,
            _segment_capacity,
            {eawt_weights},
            {mode_config},
            {use_fares},
        )

        _adjusted_headway = calc_adjusted_headway(
            segment,
            _segment_capacity,
        )

        return _adjusted_headway + _extra_added_wait_time

    # NOTE(review): `vcr` inside calc_headway is computed but unused in the
    # generated code -- presumably left for debugging; confirm before removing.
    # NOTE(review): format() substitutes each placeholder with the str() of the
    # bound value -- confirm those substitutions form valid expressions in the
    # Emme execution context.
    return textwrap.dedent(inspect.getsource(calc_headway)).format(
        time_period_duration=time_period_duration,
        eawt_weights=eawt_weights,
        mode_config=mode_config,
        use_fares=use_fares,
    )

func_returns_crowded_segment_cost(time_period_duration, weights)

Function that returns the source of the `calc_segment_cost` function for Emme assignment, with parameters pre-loaded; it acts like `functools.partial`, since Emme does not accept partial functions.

Source code in tm2py\components\network\transit\transit_assign.py
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
def func_returns_crowded_segment_cost(time_period_duration, weights: CcrWeightsConfig):
    """Return the *source code* of a ``calc_segment_cost`` function for Emme.

    Emme's assignment API accepts a function's source text rather than a
    callable, so ``functools.partial`` cannot be used to pre-bind parameters.
    Instead, the inner function below carries ``str.format``-style ``{...}``
    placeholders; its source is extracted with ``inspect.getsource``,
    dedented, and the placeholders substituted with the given arguments
    before the text is handed to Emme.

    Args:
        time_period_duration: time period duration in minutes.
        weights (CcrWeightsConfig): crowding (CCR) weight parameters.

    Returns:
        str: formatted source code of ``calc_segment_cost``.
    """

    # NOTE: calc_segment_cost is never executed here -- only its source text
    # is returned after placeholder substitution, so the `{...}` expressions
    # in its body are format placeholders, not set literals.
    def calc_segment_cost(transit_volume: float, capacity, segment) -> float:
        """Calculates crowding factor for a segment.

        Toronto implementation limited factor between 1.0 and 10.0.
        For use with Emme Capacitated assignment normalize by subtracting 1

        Args:
            time_period_duration(float): time period duration in minutes
            weights (_type_): transit capacity weights
            segment_pax (float): transit passengers for the segment for the time period
            segment: emme line segment

        Returns:
            float: crowding factor for a segment
        """

        from tm2py.config import (
            CcrWeightsConfig,
            EawtWeightsConfig,
            TransitClassConfig,
            TransitConfig,
            TransitModeConfig,
        )

        if transit_volume == 0:
            return 0.0

        line = segment.line

        seated_capacity = (
            line.vehicle.seated_capacity * {time_period_duration} * 60 / line.headway
        )

        seated_pax = min(transit_volume, seated_capacity)
        standing_pax = max(transit_volume - seated_pax, 0)

        seated_cost = {weights}.min_seat + ({weights}.max_seat - {weights}.min_seat) * (
            transit_volume / capacity
        ) ** {weights}.power_seat

        standing_cost = {weights}.min_stand + (
            {weights}.max_stand - {weights}.min_stand
        ) * (transit_volume / capacity) ** {weights}.power_stand

        crowded_cost = (seated_cost * seated_pax + standing_cost * standing_pax) / (
            transit_volume + 0.01
        )

        normalized_crowded_cost = max(crowded_cost - 1, 0)

        return normalized_crowded_cost

    # NOTE(review): `{weights}` is substituted with str(weights) -- this must
    # render as a valid expression in the Emme execution context; confirm.
    return textwrap.dedent(inspect.getsource(calc_segment_cost)).format(
        time_period_duration=time_period_duration, weights=weights
    )

func_returns_segment_congestion(time_period_duration, scenario, weights, use_fares=False)

Function that returns the source of the calc_segment_cost function for Emme assignment, with its parameters preloaded; it acts like functools.partial, since Emme does not accept partial functions.

Source code in tm2py\components\network\transit\transit_assign.py
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
def func_returns_segment_congestion(
    time_period_duration,
    scenario,
    weights: CongestedWeightsConfig,
    use_fares: bool = False,
):
    """Return the source text of calc_segment_cost for Emme congested assignment.

    Emme does not accept functools.partial, so this bakes the parameters
    into the returned function source via str.format: the inner function is
    a template whose brace-delimited placeholders are filled in below.

    Args:
        time_period_duration: time period duration in minutes; inlined into
            the returned source.
        scenario: Emme scenario; used to refresh the #src_mode attribute
            when use_fares is True.
        weights (CongestedWeightsConfig): congestion weight parameters;
            inlined into the returned source.
        use_fares (bool): if True, the generated function reads the line
            mode from the #src_mode attribute instead of the mode ID.
            Defaults to False.

    Returns:
        str: python source of calc_segment_cost with parameters inlined.
    """
    if use_fares:
        # Round-trip the #src_mode values so the attribute is loaded and
        # current in the scenario before the generated function reads it.
        values = scenario.get_attribute_values("TRANSIT_LINE", ["#src_mode"])
        scenario.set_attribute_values("TRANSIT_LINE", ["#src_mode"], values)

    def calc_segment_cost(transit_volume: float, capacity, segment) -> float:
        """Calculates the congestion factor for a transit segment.

        Template source: the brace-delimited placeholders are replaced with
        literal values via str.format before Emme compiles this function.
        For use with Emme congested assignment the cost is normalized by
        subtracting 1.

        Args:
            transit_volume (float): transit passengers on the segment
            capacity: segment capacity for the time period
            segment: emme line segment

        Returns:
            float: congestion factor for the segment, 0.0 when no volume
        """

        from tm2py.config import (
            CongestedWeightsConfig,
            TransitClassConfig,
            TransitConfig,
            TransitModeConfig,
        )

        if transit_volume <= 0:
            return 0.0

        line = segment.line

        if {use_fares}:
            mode_char = line["#src_mode"]
        else:
            mode_char = line.mode.id

        if mode_char in ["p"]:
            congestion = 0.25 * ((transit_volume / capacity) ** 10)
        else:
            seated_capacity = (
                line.vehicle.seated_capacity
                * {time_period_duration}
                * 60
                / line.headway
            )

            seated_pax = min(transit_volume, seated_capacity)
            standing_pax = max(transit_volume - seated_pax, 0)

            seated_cost = {weights}.min_seat + (
                {weights}.max_seat - {weights}.min_seat
            ) * (transit_volume / capacity) ** {weights}.power_seat

            standing_cost = {weights}.min_stand + (
                {weights}.max_stand - {weights}.min_stand
            ) * (transit_volume / capacity) ** {weights}.power_stand

            crowded_cost = (seated_cost * seated_pax + standing_cost * standing_pax) / (
                transit_volume
            )

            congestion = max(crowded_cost, 1) - 1.0

        return congestion

    # Fill the placeholders and hand Emme the finished source string.
    return textwrap.dedent(inspect.getsource(calc_segment_cost)).format(
        time_period_duration=time_period_duration, weights=weights, use_fares=use_fares
    )

time_period_capacity(vehicle_capacity, headway, time_period_duration)

Compute the total line capacity for a whole time period.

Parameters:

Name Type Description Default
vehicle_capacity float

Vehicle capacity per hour. For vehicles with multiple cars (i.e. trainsets), should be the capacity of all of them that are traveling together.

required
headway float

Vehicle (or train sets) per hour.

required
time_period_duration float

duration of the time period in minutes

required

Returns:

Name Type Description
float float

capacity for the whole time period

Source code in tm2py\components\network\transit\transit_assign.py
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
def time_period_capacity(
    vehicle_capacity: float, headway: float, time_period_duration: float
) -> float:
    """Total capacity a transit line offers over a whole time period.

    Args:
        vehicle_capacity (float): Vehicle capacity per hour. For vehicles with multiple
            cars (i.e. trainsets), should be the capacity of all of them that are
            traveling together.
        headway (float): Vehicle (or train sets) per hour.
        time_period_duration (float): duration of the time period in minutes

    Returns:
        float: capacity for the whole time period
    """
    # Scale the per-vehicle capacity by how many vehicles serve the period.
    period_capacity = vehicle_capacity * time_period_duration * 60 / headway
    return period_capacity

Transit skims module.

TransitSkim

Bases: Component

Transit skim calculation methods.

Source code in tm2py\components\network\transit\transit_skim.py
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
class TransitSkim(Component):
    """Transit skim calculation methods."""

    def __init__(self, controller: "RunController"):
        """Constructor for TransitSkim class.

        Args:
            controller: The RunController instance.
        """
        super().__init__(controller)
        self.config = self.controller.config.transit
        self._emmebank = None

        # Lazily-built caches; populated on first property access.
        self._networks = None
        self._scenarios = None
        self._matrix_cache = None
        self._skim_properties = None
        # One empty slot per (time period, transit class, skim property) combo.
        self._skim_matrices = {}
        for combo in itertools.product(
            self.time_period_names, self.config.classes, self.skim_properties
        ):
            self._skim_matrices[combo] = None

    def validate_inputs(self):
        """Check model inputs for validity.

        Currently a no-op placeholder.
        """
        # TODO: implement input validation

    @property
    def emmebank(self):
        """Transit Emmebank, fetched from the Emme manager on first access and cached."""
        if not self._emmebank:
            self._emmebank = self.controller.emme_manager.transit_emmebank
        return self._emmebank

    @property
    def scenarios(self):
        """Map of time-period name to Emme scenario, built lazily."""
        if self._scenarios is None:
            self._scenarios = {}
            for _tp in self.time_period_names:
                self._scenarios[_tp] = self.emmebank.scenario(_tp)
        return self._scenarios

    @property
    def networks(self):
        """Map of time-period name to partial Emme network, built lazily."""
        if self._networks is None:
            self._networks = {}
            for _tp in self.time_period_names:
                # Only transit segments are needed for skimming; attribute
                # values are not loaded here.
                self._networks[_tp] = self.scenarios[_tp].get_partial_network(
                    ["TRANSIT_SEGMENT"], include_attributes=False
                )
        return self._networks

    @property
    def matrix_cache(self):
        """Map of time-period name to MatrixCache for that period's scenario."""
        if self._matrix_cache is None:
            self._matrix_cache = {}
            for _tp in self.time_period_names:
                self._matrix_cache[_tp] = MatrixCache(self.scenarios[_tp])
        return self._matrix_cache

    @LogStartEnd("Transit skims")
    def run(self):
        """Run transit skims."""
        self.emmebank_skim_matrices(
            self.time_period_names, self.config.classes, self.skim_properties
        )
        with self.logger.log_start_end(f"period transit skims"):
            for _time_period in self.time_period_names:
                with self.controller.emme_manager.logbook_trace(
                    f"Transit skims for period {_time_period}"
                ):
                    for _transit_class in self.config.classes:
                        self.run_skim_set(_time_period, _transit_class)
                        self._export_skims(_time_period, _transit_class)
                    if self.logger.debug_enabled:
                        self._log_debug_report(_time_period)

    @property
    def skim_matrices(self):
        """Mapping of skim matrix identifiers to Emme matrices (None until created).

        NOTE(review): __init__ seeds this dict with (period, class, property)
        tuple keys, while emmebank_skim_matrices stores string-name keys —
        verify which keying is intended.
        """
        return self._skim_matrices

    @property
    def skim_properties(self):
        """List of Skim Property named tuples: name, description.

        Built lazily: a fixed base list, plus one in-vehicle-time entry per
        assignable transit mode, plus optional CCR and congested-assignment
        extras depending on configuration.

        TODO put these in config.
        """
        if self._skim_properties is None:
            # TODO config
            self._skim_properties = []

            _basic_skims = [
                ("IWAIT", "first wait time"),
                ("XWAIT", "transfer wait time"),
                ("WAIT", "total wait time"),
                ("FARE", "fare"),
                ("BOARDS", "num boardings"),
                ("WAUX", "auxiliary walk time"),
                ("DTIME", "access and egress drive time"),
                ("DDIST", "access and egress drive distance"),
                ("WACC", "access walk time"),
                ("WEGR", "egress walk time"),
                ("IVT", "total in-vehicle time"),
                ("IN_VEHICLE_COST", "in-vehicle cost"),
                ("CROWD", "Crowding penalty"),
            ]
            self._skim_properties += [
                Skimproperty(_name, _desc) for _name, _desc in _basic_skims
            ]
            for mode in self.config.modes:
                if (mode.assign_type == "TRANSIT") and (mode.type != "PNR_dummy"):
                    desc = mode.description or mode.name
                    self._skim_properties.append(
                        Skimproperty(
                            f"IVT{mode.name}",
                            # keep within 40 chars (Emme matrix description
                            # limit — TODO confirm)
                            f"{desc} in-vehicle travel time"[:40],
                        )
                    )
            if self.config.use_ccr:
                self._skim_properties.extend(
                    [
                        Skimproperty("LINKREL", "Link reliability"),
                        Skimproperty("EAWT", "Extra added wait time"),
                        Skimproperty("CAPPEN", "Capacity penalty"),
                    ]
                )
            if self.config.congested_transit_assignment:
                self._skim_properties.extend(
                    [
                        Skimproperty("TRIM", "used to trim demands"),
                        Skimproperty("XBOATIME", "transfer boarding time penalty"),
                        Skimproperty("DTOLL", "drive access or egress toll price"),
                    ]
                )
        return self._skim_properties

    def emmebank_skim_matrices(
        self,
        time_periods: List[str] = None,
        transit_classes=None,
        skim_properties: List[Skimproperty] = None,
    ) -> dict:
        """Gets skim matrices from emmebank, or lazily creates them if they don't already exist.

        Args:
            time_periods: subset of time period names to process. Defaults to all.
            transit_classes: subset of transit class configs. Defaults to all.
            skim_properties: subset of skim properties. Defaults to all.

        Returns:
            dict: matrix name -> Emme matrix for the requested combinations.

        Raises:
            ValueError: if any argument is not a subset of the configured values.
        """
        create_matrix = self.controller.emme_manager.tool(
            "inro.emme.data.matrix.create_matrix"
        )
        if time_periods is None:
            time_periods = self.time_period_names
        if not set(time_periods).issubset(set(self.time_period_names)):
            raise ValueError(
                f"time_periods ({time_periods}) must be subset of time_period_names ({self.time_period_names})."
            )

        if transit_classes is None:
            transit_classes = self.config.classes
        # Fixed copy-pasted message that referred to time_periods and to a
        # nonexistent self.config.transit_classes attribute.
        if not set(transit_classes).issubset(set(self.config.classes)):
            raise ValueError(
                f"transit_classes ({transit_classes}) must be subset of configured classes ({self.config.classes})."
            )

        if skim_properties is None:
            skim_properties = self.skim_properties
        if not set(skim_properties).issubset(set(self.skim_properties)):
            raise ValueError(
                f"skim_properties ({skim_properties}) must be subset of available skim properties ({self.skim_properties})."
            )

        _tp_tclass_skprop = itertools.product(
            time_periods, transit_classes, skim_properties
        )
        _tp_tclass_skprop_list = []

        for _tp, _tclass, _skprop in _tp_tclass_skprop:
            _name = f"{_tp}_{_tclass.name}_{_skprop.name}"
            _desc = f"{_tp} {_tclass.description}: {_skprop.desc}"
            # Reuse an existing matrix when present, otherwise create it.
            _matrix = self.scenarios[_tp].emmebank.matrix(f'mf"{_name}"')
            if not _matrix:
                _matrix = create_matrix(
                    "mf", _name, _desc, scenario=self.scenarios[_tp], overwrite=True
                )
            else:
                _matrix.description = _desc

            self._skim_matrices[_name] = _matrix
            _tp_tclass_skprop_list.append(_name)

        # Return only the requested subset of the cached matrices.
        _requested = set(_tp_tclass_skprop_list)
        skim_matrices = {
            k: v for k, v in self._skim_matrices.items() if k in _requested
        }
        return skim_matrices

    def run_skim_set(self, time_period: str, transit_class: str):
        """Run the transit skim calculations for a given time period and assignment class.

        Results are stored in transit emmebank.

        Note: transit_class is a transit class config object (its .name is
        used downstream), despite the str annotation.

        Steps:
            1. determine if using transit capacity constraint
            2. skim walk, wait time, boardings, and fares
            3. skim in vehicle time by mode
            4. mask transfers above max amount
            5. mask if doesn't have required modes
        """
        use_ccr = False
        # NOTE(review): assigned but unused in this method — verify intent.
        congested_transit_assignment = self.config.congested_transit_assignment
        if self.controller.iteration >= 1:
            # CCR is only applied after the first global iteration.
            use_ccr = self.config.use_ccr
        with self.controller.emme_manager.logbook_trace(
            "First and total wait time, number of boardings, "
            "fares, and total and transfer walk time"
        ):
            self.skim_walk_wait_boards_fares(time_period, transit_class)
        with self.controller.emme_manager.logbook_trace("In-vehicle time by mode"):
            self.skim_invehicle_time_by_mode(time_period, transit_class, use_ccr)
        with self.controller.emme_manager.logbook_trace(
            "Drive distance and time",
            "Walk auxiliary time, walk access time and walk egress time",
        ):
            self.skim_drive_walk(time_period, transit_class)
        with self.controller.emme_manager.logbook_trace("Calculate crowding"):
            self.skim_crowding(time_period, transit_class)
        if use_ccr:
            with self.controller.emme_manager.logbook_trace("CCR related skims"):
                self.skim_reliability_crowding_capacity(time_period, transit_class)

    def skim_walk_wait_boards_fares(self, time_period: str, transit_class: str):
        """Skim wait, walk, board, and fares for a given time period and transit assignment class.

        Skim the first and total wait time, number of boardings, (transfers + 1)
        fares, total walk time, total in-vehicle time.

        Note: transit_class is a transit class config object (its .name is
        used), despite the str annotation.
        """
        _tp_tclass = f"{time_period}_{transit_class.name}"
        _network = self.networks[time_period]
        # All transit and auxiliary-transit mode IDs in this period's network.
        _transit_mode_ids = [
            m.id for m in _network.modes() if m.type in ["TRANSIT", "AUX_TRANSIT"]
        ]
        spec = {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "actual_first_waiting_times": f'mf"{_tp_tclass}_IWAIT"',
            "actual_total_waiting_times": f'mf"{_tp_tclass}_WAIT"',
            "by_mode_subset": {
                "modes": _transit_mode_ids,
                "avg_boardings": f'mf"{_tp_tclass}_BOARDS"',
            },
        }
        if self.config.use_fares:
            # Fare component matrices are only produced when fares are modeled.
            spec["by_mode_subset"].update(
                {
                    "actual_in_vehicle_costs": f'mf"{_tp_tclass}_IN_VEHICLE_COST"',
                    "actual_total_boarding_costs": f'mf"{_tp_tclass}_FARE"',
                }
            )

        self.controller.emme_manager.matrix_results(
            spec,
            class_name=transit_class.name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

        # Derived matrices: transfer wait, boarding adjustment, total fare.
        self._calc_xfer_wait(time_period, transit_class.name)
        self._calc_boardings(time_period, transit_class.name)
        if self.config.use_fares:
            self._calc_fares(time_period, transit_class.name)

    def _calc_xfer_walk(self, time_period, transit_class_name):
        """Skim transfer-walk auxiliary transit time into the XFERWALK matrix."""
        walk_mode_ids = [m.mode_id for m in self.config.modes if m.type == "WALK"]
        _tp_tclass = f"{time_period}_{transit_class_name}"
        walk_spec = {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": walk_mode_ids,
                "actual_aux_transit_times": f'mf"{_tp_tclass}_XFERWALK"',
            },
        }
        self.controller.emme_manager.matrix_results(
            walk_spec,
            class_name=transit_class_name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    def _calc_xfer_wait(self, time_period, transit_class_name):
        """Calculate transfer wait from total wait time and initial wait time and add to Emmebank.

        Computes XWAIT = max(WAIT - IWAIT, 0), restricted to cells where
        WAIT lies in [0, 9999999].

        TODO convert this type of calculation to numpy
        """
        tp_tclass = f"{time_period}_{transit_class_name}"
        spec = {
            "type": "MATRIX_CALCULATION",
            "constraint": {
                "by_value": {
                    "od_values": f'mf"{tp_tclass}_WAIT"',
                    "interval_min": 0,
                    "interval_max": 9999999,
                    "condition": "INCLUDE",
                }
            },
            "result": f'mf"{tp_tclass}_XWAIT"',
            # .max.0 floors the difference at zero in Emme expression syntax.
            "expression": f'(mf"{tp_tclass}_WAIT" - mf"{tp_tclass}_IWAIT").max.0',
        }

        self.controller.emme_manager.matrix_calculator(
            spec,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    def _calc_boardings(self, time_period: str, transit_class_name: str):
        """Calculate # boardings from # of transfers and add to Emmebank.

        Only applies to PNR_TRN_WLK / WLK_TRN_PNR classes: subtracts one
        boarding, floored at zero — presumably to drop the park-and-ride
        dummy boarding; TODO confirm.

        TODO convert this type of calculation to numpy
        """
        _tp_tclass = f"{time_period}_{transit_class_name}"
        if ("PNR_TRN_WLK" in _tp_tclass) or ("WLK_TRN_PNR" in _tp_tclass):
            spec = {
                "type": "MATRIX_CALCULATION",
                "constraint": {
                    "by_value": {
                        "od_values": f'mf"{_tp_tclass}_BOARDS"',
                        "interval_min": 0,
                        "interval_max": 9999999,
                        "condition": "INCLUDE",
                    }
                },
                # CHECK should this be BOARDS or similar, not xfers?
                "result": f'mf"{_tp_tclass}_BOARDS"',
                "expression": f'(mf"{_tp_tclass}_BOARDS" - 1).max.0',
            }

            self.controller.emme_manager.matrix_calculator(
                spec,
                scenario=self.scenarios[time_period],
                num_processors=self.controller.num_processors,
            )

    def _calc_fares(self, time_period: str, transit_class_name: str):
        """Sum in-vehicle cost and boarding cost into the FARE matrix in the Emmebank.

        TODO convert this type of calculation to numpy
        """
        prefix = f"{time_period}_{transit_class_name}"
        fare_spec = {
            "type": "MATRIX_CALCULATION",
            "constraint": None,
            "result": f'mf"{prefix}_FARE"',
            "expression": f'(mf"{prefix}_FARE" + mf"{prefix}_IN_VEHICLE_COST")',
        }
        self.controller.emme_manager.matrix_calculator(
            fare_spec,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    @staticmethod
    def _segments_with_modes(_network, _modes: Union[Collection[str], str]):
        """Collect the segments of every transit line whose mode is in _modes.

        Returns one entry per matching line, each being that line's
        segments() iterator.
        """
        mode_ids = list(_modes)
        matching_segments = []
        for line in _network.transit_lines():
            if line.mode.id in mode_ids:
                matching_segments.append(line.segments())
        return matching_segments

    def _invehicle_time_by_mode_ccr(
        self, time_period: str, transit_class: str, mode_combinations
    ) -> List[str]:
        """Calculate in-vehicle travel time by mode using CCR and store results in Emmebank.

        Args:
            time_period: time period abbreviation
            transit_class: transit class config object (its .name is used)
            mode_combinations: iterable of (mode name, mode id collection) pairs

        Returns:
            List of matrix names in Emmebank to sum together to get total in-vehicle travel time.
        """

        _network = self.networks[time_period]
        _scenario = self.scenarios[time_period]
        _tp_tclass = f"{time_period}_{transit_class.name}"
        _total_ivtt_expr = []
        create_temps = self.controller.emme_manager.temp_attributes_and_restore
        temp_attrs = [["TRANSIT_SEGMENT", "@mode_timtr", "base time by mode"]]
        with create_temps(_scenario, temp_attrs):
            for _mode_name, _modes in mode_combinations:
                _network.create_attribute("TRANSIT_SEGMENT", "@mode_timtr")
                _li_segs_with_mode = TransitSkim._segments_with_modes(_network, _modes)
                # set temp attribute @mode_timtr to contain the non-congested in-vehicle
                # times for segments of the mode of interest
                for line_segment in _li_segs_with_mode:
                    for segment in line_segment:
                        segment["@mode_timtr"] = segment["@base_timtr"]
                # Push the values into the scenario (presumably so the strategy
                # analysis below can read them), then drop the attribute from
                # the in-memory network for the next loop iteration.
                # NOTE(review): original author also questioned this copy/delete
                # sequence — verify intent.
                self.controller.emme_manager.copy_attribute_values(
                    self.networks[time_period],
                    _scenario,
                    {"TRANSIT_SEGMENT": ["@mode_timtr"]},
                )
                self.networks[time_period].delete_attribute(
                    "TRANSIT_SEGMENT", "@mode_timtr"
                )
                _ivtt_matrix_name = f'mf"{_tp_tclass}_IVT{_mode_name}"'
                _total_ivtt_expr.append(_ivtt_matrix_name)
                self._run_strategy_analysis(
                    time_period,
                    transit_class,
                    {"in_vehicle": "@mode_timtr"},
                    f"IVT{_mode_name}",
                )
        return _total_ivtt_expr

    def _invehicle_time_by_mode_no_ccr(
        self, time_period: str, transit_class: str, mode_combinations
    ) -> List[str]:
        """Calculate in-vehicle travel time by mode without CCR and store results in Emmebank.

        Args:
            time_period: time period abbreviation
            transit_class: transit class config object (its .name is used)
            mode_combinations: iterable of (mode name, mode id list) pairs

        Returns: List of matrix names in Emmebank to sum together to get total in-vehicle travel time.

        """
        prefix = f"{time_period}_{transit_class.name}"
        ivtt_matrix_names = []
        for mode_name, mode_ids in mode_combinations:
            matrix_name = f'mf"{prefix}_IVT{mode_name}"'
            ivtt_matrix_names.append(matrix_name)
            self.controller.emme_manager.matrix_results(
                {
                    "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
                    "by_mode_subset": {
                        "modes": mode_ids,
                        "actual_in_vehicle_times": matrix_name,
                    },
                },
                class_name=transit_class.name,
                scenario=self.scenarios[time_period],
                num_processors=self.controller.num_processors,
            )
        return ivtt_matrix_names

    def skim_invehicle_time_by_mode(
        self, time_period: str, transit_class: str, use_ccr: bool = False
    ) -> None:
        """Skim in-vehicle by mode for a time period and transit class and store results in Emmebank.

        Args:
            time_period (str): time period abbreviation
            transit_class (str): transit class name
            use_ccr (bool): if True, will use crowding, capacity, and reliability (ccr).
                Defaults to False

        """
        mode_combinations = self._get_emme_mode_ids(transit_class, time_period)
        ivt_calculator = (
            self._invehicle_time_by_mode_ccr
            if use_ccr
            else self._invehicle_time_by_mode_no_ccr
        )
        total_ivtt_expr = ivt_calculator(time_period, transit_class, mode_combinations)
        # Sum total IVT across all modes into the combined matrix.
        self._calc_total_ivt(time_period, transit_class, total_ivtt_expr)

    def _calc_total_ivt(
        self, time_period: str, transit_class: str, total_ivtt_expr: list[str]
    ) -> None:
        """Sum the per-mode IVT matrices into the total IVT matrix in the Emmebank.

        Args:
            time_period (str): time period abbreviation
            transit_class (str): transit class name
            total_ivtt_expr (list[str]): List of matrix names in Emmebank which have IVT to sum to get total.
        """
        result_name = f'mf"{time_period}_{transit_class.name}_IVT"'
        calc_spec = {
            "type": "MATRIX_CALCULATION",
            "constraint": None,
            "result": result_name,
            "expression": "+".join(total_ivtt_expr),
        }
        self.controller.emme_manager.matrix_calculator(
            calc_spec,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    def skim_drive_walk(self, time_period: str, transit_class: str) -> None:
        """Skim drive time/distance and walk access/egress/auxiliary times.

        Populates the DTIME, DDIST, WAUX, WACC and WEGR matrices for the
        given time period and transit class, then converts the raw skims
        into actual times (dividing out perception factors and converting
        walk distance to walk time).
        """
        _tp_tclass = f"{time_period}_{transit_class.name}"
        # _network = self.networks[time_period]

        # drive time here is perception factor*(drive time + toll penalty),
        # will calculate the actual drive time and subtract toll penalty in the following steps
        spec1 = {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": ["D"],
                "actual_aux_transit_times": f'mf"{_tp_tclass}_DTIME"',
                "distance": f'mf"{_tp_tclass}_DDIST"',
            },
        }
        # skim walk distance in walk time matrices first,
        # will calculate the actual walk time and overwrite the distance in the following steps
        spec2 = {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": ["w"],
                "distance": f'mf"{_tp_tclass}_WAUX"',
            },
        }
        spec3 = {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": ["a"],
                "distance": f'mf"{_tp_tclass}_WACC"',
            },
        }
        spec4 = {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": ["e"],
                "distance": f'mf"{_tp_tclass}_WEGR"',
            },
        }

        self.controller.emme_manager.matrix_results(
            spec1,
            class_name=transit_class.name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )
        self.controller.emme_manager.matrix_results(
            spec2,
            class_name=transit_class.name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )
        self.controller.emme_manager.matrix_results(
            spec3,
            class_name=transit_class.name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )
        self.controller.emme_manager.matrix_results(
            spec4,
            class_name=transit_class.name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

        drive_perception_factor = self.config.drive_perception_factor
        walk_speed = self.config.walk_speed
        vot = self.config.value_of_time
        # divide drive time by mode specific perception factor to get the actual time
        # for walk time, use walk distance/walk speed
        # because the mode specific perception factors are hardcoded in the mode definition
        spec_list = [
            {
                "type": "MATRIX_CALCULATION",
                "constraint": None,
                "result": f'mf"{_tp_tclass}_DTIME"',
                "expression": f'mf"{_tp_tclass}_DTIME"/{drive_perception_factor}',
            },
            # NOTE(review): this entry recomputes DTIME as itself (a no-op);
            # the toll-penalty subtraction mentioned above appears to be
            # missing — verify intended behavior.
            {
                "type": "MATRIX_CALCULATION",
                "constraint": None,
                "result": f'mf"{_tp_tclass}_DTIME"',
                "expression": f'mf"{_tp_tclass}_DTIME"',
            },
            # walk_speed is per hour; /60 converts to per-minute so the
            # result is walk time in minutes.
            {
                "type": "MATRIX_CALCULATION",
                "constraint": None,
                "result": f'mf"{_tp_tclass}_WAUX"',
                "expression": f'mf"{_tp_tclass}_WAUX"/({walk_speed}/60)',
            },
            {
                "type": "MATRIX_CALCULATION",
                "constraint": None,
                "result": f'mf"{_tp_tclass}_WACC"',
                "expression": f'mf"{_tp_tclass}_WACC"/({walk_speed}/60)',
            },
            {
                "type": "MATRIX_CALCULATION",
                "constraint": None,
                "result": f'mf"{_tp_tclass}_WEGR"',
                "expression": f'mf"{_tp_tclass}_WEGR"/({walk_speed}/60)',
            },
        ]
        self.controller.emme_manager.matrix_calculator(
            spec_list,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    def skim_penalty_toll(self, time_period: str, transit_class: str) -> None:
        """Skim transfer boarding-time penalties and drive tolls.

        Runs a strategy analysis to capture the transfer boarding penalty
        (@xboard_nodepen) into the XBOATIME matrix, subtracts the PNR lot
        boarding penalty for PNR classes, and skims the drive toll
        (@drive_toll) over the drive portion of drive-access/egress paths
        into the DTOLL matrix.

        Args:
            time_period (str): Time period name abbreviation
            transit_class: transit class config object (name attribute used)
        """
        # transfer boarding time penalty
        self._run_strategy_analysis(
            time_period, transit_class, {"boarding": "@xboard_nodepen"}, "XBOATIME"
        )

        _tp_tclass = f"{time_period}_{transit_class.name}"
        if ("PNR_TRN_WLK" in _tp_tclass) or ("WLK_TRN_PNR" in _tp_tclass):
            spec = {  # subtract PNR boarding from total transfer boarding time penalty
                "type": "MATRIX_CALCULATION",
                "constraint": {
                    "by_value": {
                        "od_values": f'mf"{_tp_tclass}_XBOATIME"',
                        "interval_min": 0,
                        "interval_max": 9999999,
                        "condition": "INCLUDE",
                    }
                },
                "result": f'mf"{_tp_tclass}_XBOATIME"',
                "expression": f'(mf"{_tp_tclass}_XBOATIME" - 1).max.0',
            }

            self.controller.emme_manager.matrix_calculator(
                spec,
                scenario=self.scenarios[time_period],
                num_processors=self.controller.num_processors,
            )

        # drive toll: the drive leg is at the access end for PNR/KNR access
        # classes and at the egress end for PNR/KNR egress classes
        if ("PNR_TRN_WLK" in _tp_tclass) or ("KNR_TRN_WLK" in _tp_tclass):
            _portion_of_path = "ORIGIN_TO_INITIAL_BOARDING"
        elif ("WLK_TRN_PNR" in _tp_tclass) or ("WLK_TRN_KNR" in _tp_tclass):
            _portion_of_path = "FINAL_ALIGHTING_TO_DESTINATION"
        else:
            _portion_of_path = None
        if _portion_of_path is not None:
            self._run_path_analysis(
                time_period,
                transit_class,
                _portion_of_path,
                {"aux_transit": "@drive_toll"},
                "DTOLL",
            )

    def _get_emme_mode_ids(
        self, transit_class, time_period
    ) -> List[Tuple[str, List[str]]]:
        """Get the Emme mode IDs used in the assignment.

        Loads the #src_mode attribute on lines if fares are used, and the
        @base_timtr on segments if ccr is used.

        Args:
            transit_class: transit class config object (mode_types used)
            time_period: time period name abbreviation

        Returns:
            List of tuples of two items, the original mode name (from config)
            to a list of mode IDs used in the Emme assignment. This list
            will be one item if fares are not used, but will contain the fare
            modes used in the journey levels mode-to-mode transfer table
            generated from Apply fares.
        """
        if self.config.use_fares:
            self.controller.emme_manager.copy_attribute_values(
                self.scenarios[time_period],
                self.networks[time_period],
                {"TRANSIT_LINE": ["#src_mode"]},
            )
        if self.config.use_ccr:
            self.controller.emme_manager.copy_attribute_values(
                self.scenarios[time_period],
                self.networks[time_period],
                {"TRANSIT_SEGMENT": ["@base_timtr"]},
            )
        valid_modes = [
            mode
            for mode in self.config.modes
            if mode.type in transit_class.mode_types
            and mode.assign_type == "TRANSIT"
            and mode.type != "PNR_dummy"
        ]
        if self.config.use_fares:
            # map to used modes in apply fares case
            fare_modes = defaultdict(set)
            for line in self.networks[time_period].transit_lines():
                fare_modes[line["#src_mode"]].add(line.mode.id)
            # drop config modes with no matching fare modes (original used a
            # redundant len(list(...)) > 0 check with a double conversion)
            emme_mode_ids = [
                (mode.name, list(fare_modes[mode.mode_id]))
                for mode in valid_modes
                if fare_modes[mode.mode_id]
            ]
        else:
            emme_mode_ids = [(mode.name, [mode.mode_id]) for mode in valid_modes]
        return emme_mode_ids

    def skim_reliability_crowding_capacity(
        self, time_period: str, transit_class
    ) -> None:
        """Generate skim results for CCR assignment and stores results in Emmebank.

        Generates the following:
        1. Link Unreliability: LINKREL
        2. Crowding penalty: CROWD
        3. Extra added wait time: EAWT
        4. Capacity penalty: CAPPEN

        Args:
            time_period (str): time period abbreviation
            transit_class: transit class
        """
        # (trip component, source attribute, output matrix suffix) per skim
        _ccr_skims = (
            ("in_vehicle", "ul1", "LINKREL"),  # link unreliability
            ("in_vehicle", "@ccost", "CROWD"),  # crowding penalty
            ("boarding", "@eawt", "EAWT"),  # extra added wait time
            ("boarding", "@capacity_penalty", "CAPPEN"),  # capacity penalty
        )
        for _component, _attribute, _suffix in _ccr_skims:
            self._run_strategy_analysis(
                time_period, transit_class, {_component: _attribute}, _suffix
            )

    def skim_crowding(self, time_period: str, transit_class) -> None:
        """Skim the crowding penalty (@ccost) into the CROWD matrix.

        Args:
            time_period: time period name abbreviation
            transit_class: transit class config object (name attribute used)
        """
        # Crowding penalty
        self._run_strategy_analysis(
            time_period, transit_class, {"in_vehicle": "@ccost"}, "CROWD"
        )

    def _run_strategy_analysis(
        self,
        time_period: str,
        transit_class,
        components: Dict[str, str],
        matrix_name_suffix: str,
    ):
        """Runs strategy analysis in Emme and stores results in emmebank.

        Args:
            time_period (str): Time period name abbreviation
            transit_class: transit class config object (name attribute used)
            components (Dict[str, str]): trip components to analyze, mapping
                component name (e.g. "boarding", "in_vehicle") to the Emme
                attribute holding the value to skim (e.g. "@eawt", "ul1")
            matrix_name_suffix (str): Appended to time period and transit class name to create output matrix name.
        """
        _tp_tclass = f"{time_period}_{transit_class.name}"
        _matrix_name = f'mf"{_tp_tclass}_{matrix_name_suffix}"'
        strategy_analysis = self.controller.emme_manager.tool(
            "inro.emme.transit_assignment.extended.strategy_based_analysis"
        )

        # sum component values along each sub-path ("+") and average across
        # sub-strategies, writing the O-D result to _matrix_name
        spec = {
            "trip_components": components,
            "sub_path_combination_operator": "+",
            "sub_strategy_combination_operator": "average",
            "selected_demand_and_transit_volumes": {
                "sub_strategies_to_retain": "ALL",
                "selection_threshold": {"lower": -999999, "upper": 999999},
            },
            "analyzed_demand": f"mfTRN_{transit_class.name}_{time_period}",
            "constraint": None,
            "results": {"strategy_values": _matrix_name},
            "type": "EXTENDED_TRANSIT_STRATEGY_ANALYSIS",
        }
        strategy_analysis(
            spec,
            class_name=transit_class.name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    def _run_path_analysis(
        self,
        time_period: str,
        transit_class,
        portion_of_path: str,
        components: Dict[str, str],
        matrix_name_suffix: str,
    ):
        """Runs path analysis in Emme and stores results in emmebank.

        Args:
            time_period (str): Time period name abbreviation
            transit_class: transit class config object (name attribute used)
            portion_of_path (str): Emme path-portion selector, e.g.
                "ORIGIN_TO_INITIAL_BOARDING" or "FINAL_ALIGHTING_TO_DESTINATION"
            components (Dict[str, str]): trip components to analyze, mapping
                component name (e.g. "aux_transit") to the Emme attribute
                holding the value to skim (e.g. "@drive_toll")
            matrix_name_suffix (str): Appended to time period and transit class name to create output matrix name.
        """
        _tp_tclass = f"{time_period}_{transit_class.name}"
        _matrix_name = f'mf"{_tp_tclass}_{matrix_name_suffix}"'
        path_analysis = self.controller.emme_manager.tool(
            "inro.emme.transit_assignment.extended.path_based_analysis"
        )

        # sum component values over the selected path portion ("+") and
        # average across paths, writing the O-D result to _matrix_name
        spec = {
            "portion_of_path": portion_of_path,
            "trip_components": components,
            "path_operator": "+",
            "path_selection_threshold": {"lower": -999999, "upper": 999999},
            "path_to_od_aggregation": {
                "operator": "average",
                "aggregated_path_values": _matrix_name,
            },
            "analyzed_demand": None,
            "constraint": None,
            "type": "EXTENDED_TRANSIT_PATH_ANALYSIS",
        }
        path_analysis(
            spec,
            class_name=transit_class.name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    def mask_if_not_required_modes(self, time_period: str, transit_class) -> None:
        """
        Enforce the `required_mode_combo` parameter by setting IVTs to 0 if don't have required modes.

        Sums the in-vehicle-time skims of all transit modes of each required
        mode type, then multiplies the per-type sums together: the product is
        positive only for O-D pairs with in-vehicle time on every required
        mode type, and that product is used to mask the skim set.

        Args:
            time_period (str): Time period name abbreviation
            transit_class: transit class config object (required_mode_combo
                and name attributes used)
        """
        if not transit_class.required_mode_combo:
            return

        # per required mode type, sum the IVT skims of its transit modes
        _ivt_skims = {}
        for mode in transit_class.required_mode_combo:
            for transit_mode in (m for m in self.config.modes if m.type == mode):
                _data = self.matrix_cache[time_period].get_data(
                    f'mf"{time_period}_{transit_class.name}_{transit_mode.name}IVTT"'
                )
                # membership test on the dict itself (not .keys())
                if mode in _ivt_skims:
                    _ivt_skims[mode] += _data
                else:
                    _ivt_skims[mode] = _data

        # multiply all IVT skims together and see if they are greater than zero
        has_all = None
        for value in _ivt_skims.values():
            has_all = value if has_all is None else np.multiply(has_all, value)

        self._mask_skim_set(time_period, transit_class, has_all)

    def mask_above_max_transfers(self, time_period: str, transit_class):
        """Reset skims to 0 if number of transfers is greater than max_transfers.

        Args:
            time_period (str): Time period name abbreviation
            transit_class: transit class config object (name attribute used)
        """
        # number of transfers per O-D pair from the XFERS skim
        transfer_counts = self.matrix_cache[time_period].get_data(
            f'mf"{time_period}_{transit_class.name}_XFERS"'
        )
        within_limit = transfer_counts <= self.config.max_transfers
        self._mask_skim_set(time_period, transit_class, within_limit)

    def _mask_skim_set(self, time_period: str, transit_class, mask_array: NumpyArray):
        """Mask a skim set (set of skims for a given time period and transit class) based on an array.

        Cells whose mask value is > 0 (and finite) are kept; all others are
        set to zero.

        TODO add in checks for mask_array dimensions and values

        Args:
            time_period (str): Time period name abbreviation
            transit_class: transit class config object
            mask_array (NumpyArray): per-O-D values; cells with value > 0 keep
                their skim values, all other cells are zeroed
        """
        # BUG FIX: the original computed np.greater(mask_array, 0) and then
        # overwrote that boolean mask with np.less(<booleans>, inf), which is
        # True everywhere — so the mask was all-True and nothing was ever
        # zeroed. Combine both conditions on the original values instead:
        # keep cells that are positive AND finite.
        mask_array = np.greater(mask_array, 0) & np.isfinite(mask_array)
        # iterate values only; the dict key was unused in the original
        for skim in self.emmebank_skim_matrices(
            time_periods=[time_period], transit_classes=[transit_class]
        ).values():
            skim_data = self.matrix_cache[time_period].get_data(skim.name)
            self.matrix_cache[time_period].set_data(skim.name, skim_data * mask_array)

    def _export_skims(self, time_period: str, transit_class: str):
        """Export skims to OMX files by period."""
        # NOTE: skims in separate file by period
        _skim_dir = self.get_abs_path(self.config.output_skim_path)
        _omx_path = os.path.join(
            _skim_dir,
            self.config.output_skim_filename_tmpl.format(
                time_period=time_period, tclass=transit_class.name
            ),
        )
        os.makedirs(os.path.dirname(_omx_path), exist_ok=True)

        # all skim matrices for this time period / transit class combination
        _matrices = self.emmebank_skim_matrices(
            time_periods=[time_period], transit_classes=[transit_class]
        )

        with OMXManager(
            _omx_path,
            "w",
            self.scenarios[time_period],
            matrix_cache=self.matrix_cache[time_period],
            mask_max_value=1e7,
            growth_factor=1,
        ) as _omx_file:
            _omx_file.write_matrices(_matrices)

    def _log_debug_report(self, _time_period):
        """Log min/max/mean/sum summary statistics for the period's skims."""
        _num_zones = len(self.scenarios[_time_period].zone_numbers)
        _num_cells = _num_zones * _num_zones
        self.logger.log(
            f"Transit impedance summary for period {_time_period}", level="DEBUG"
        )
        self.logger.log(
            f"Number of zones: {_num_zones}. Number of O-D pairs: {_num_cells}. "
            "Values outside -9999999, 9999999 are masked in summaries.",
            level="DEBUG",
        )
        self.logger.log(
            "name                            min       max      mean           sum",
            level="DEBUG",
        )

        # one summary row per skim matrix of this period
        for _name in self.emmebank_skim_matrices(time_periods=[_time_period]):
            matrix_name = f'mf"{_name}"'
            data = np.ma.masked_outside(
                self.matrix_cache[_time_period].get_data(matrix_name),
                -9999999,
                9999999,
            )
            self.logger.log(
                f"{matrix_name:25} {data.min():9.4g} {data.max():9.4g} "
                f"{data.mean():9.4g} {data.sum(): 13.7g}",
                level="DEBUG",
            )

    @staticmethod
    def _copy_attribute_values(src, dst, attributes):
        """Copy attribute values from src to dst, per network domain.

        Args:
            src: object providing get_attribute_values(domain, attrs)
            dst: object providing set_attribute_values(domain, attrs, values)
            attributes: mapping of domain name to list of attribute names
        """
        for _domain, _attr_names in attributes.items():
            dst.set_attribute_values(
                _domain, _attr_names, src.get_attribute_values(_domain, _attr_names)
            )

skim_properties property

List of Skim Property named tuples: name, description.

TODO put these in config.

__init__(controller)

Constructor for TransitSkim class.

Parameters:

Name Type Description Default
controller 'RunController'

The RunController instance.

required
Source code in tm2py\components\network\transit\transit_skim.py
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
def __init__(self, controller: "RunController"):
    """Constructor for TransitSkim class.

    Args:
        controller: The RunController instance.
    """
    super().__init__(controller)
    # transit section of the run configuration
    self.config = self.controller.config.transit
    self._emmebank = None

    # caches set to None here; presumably filled lazily by their
    # corresponding properties — TODO confirm against the property defs
    self._networks = None
    self._scenarios = None
    self._matrix_cache = None
    self._skim_properties = None
    # NOTE(review): keys here are (time_period, class, property) tuples, but
    # emmebank_skim_matrices later stores entries under string names
    # ("{tp}_{class}_{prop}"), so these tuple-keyed placeholders are never
    # matched — verify intended key scheme
    self._skim_matrices = {
        k: None
        for k in itertools.product(
            self.time_period_names,
            self.config.classes,
            self.skim_properties,
        )
    }

emmebank_skim_matrices(time_periods=None, transit_classes=None, skim_properties=None)

Gets skim matrices from emmebank, or lazily creates them if they don’t already exist.

Source code in tm2py\components\network\transit\transit_skim.py
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
def emmebank_skim_matrices(
    self,
    time_periods: List[str] = None,
    transit_classes=None,
    skim_properties: Skimproperty = None,
) -> dict:
    """Gets skim matrices from emmebank, or lazily creates them if they don't already exist.

    Args:
        time_periods: list of time period name abbreviations; defaults to all.
        transit_classes: list of transit class configs; defaults to all.
        skim_properties: list of skim properties; defaults to all.

    Returns:
        Dict mapping "{period}_{class}_{property}" names to Emme matrix
        objects for the requested subset.

    Raises:
        ValueError: if an argument is not a subset of the configured values.
    """
    create_matrix = self.controller.emme_manager.tool(
        "inro.emme.data.matrix.create_matrix"
    )
    if time_periods is None:
        time_periods = self.time_period_names
    if not set(time_periods).issubset(set(self.time_period_names)):
        raise ValueError(
            f"time_periods ({time_periods}) must be subset of time_period_names ({self.time_period_names})."
        )

    if transit_classes is None:
        transit_classes = self.config.classes
    # BUG FIX: message previously said "time_periods" and referenced the
    # non-existent self.config.transit_classes (AttributeError while raising)
    if not set(transit_classes).issubset(set(self.config.classes)):
        raise ValueError(
            f"transit_classes ({transit_classes}) must be subset of configured classes ({self.config.classes})."
        )

    if skim_properties is None:
        skim_properties = self.skim_properties
    # BUG FIX: message previously said "time_periods"
    if not set(skim_properties).issubset(set(self.skim_properties)):
        raise ValueError(
            f"skim_properties ({skim_properties}) must be subset of skim_properties ({self.skim_properties})."
        )

    _requested_names = []
    for _tp, _tclass, _skprop in itertools.product(
        time_periods, transit_classes, skim_properties
    ):
        # (removed dead `a = 1` statement from the original)
        _name = f"{_tp}_{_tclass.name}_{_skprop.name}"
        _desc = f"{_tp} {_tclass.description}: {_skprop.desc}"
        _matrix = self.scenarios[_tp].emmebank.matrix(f'mf"{_name}"')
        if not _matrix:
            _matrix = create_matrix(
                "mf", _name, _desc, scenario=self.scenarios[_tp], overwrite=True
            )
        else:
            _matrix.description = _desc

        self._skim_matrices[_name] = _matrix
        _requested_names.append(_name)

    # set membership instead of the original's O(n) `in list(...)` scan
    _requested = set(_requested_names)
    return {k: v for k, v in self._skim_matrices.items() if k in _requested}

mask_above_max_transfers(time_period, transit_class)

Reset skims to 0 if number of transfers is greater than max_transfers.

Parameters:

Name Type Description Default
time_period str

Time period name abbreviation

required
transit_class _type_

description

required
Source code in tm2py\components\network\transit\transit_skim.py
888
889
890
891
892
893
894
895
896
897
898
899
900
def mask_above_max_transfers(self, time_period: str, transit_class):
    """Reset skims to 0 if number of transfers is greater than max_transfers.

    Args:
        time_period (str): Time period name abbreviation
        transit_class: transit class config object (name attribute used)
    """
    # number of transfers per O-D pair from the XFERS skim
    transfer_counts = self.matrix_cache[time_period].get_data(
        f'mf"{time_period}_{transit_class.name}_XFERS"'
    )
    within_limit = transfer_counts <= self.config.max_transfers
    self._mask_skim_set(time_period, transit_class, within_limit)

mask_if_not_required_modes(time_period, transit_class)

Enforce the required_mode_combo parameter by setting IVTs to 0 if don’t have required modes.

Parameters:

Name Type Description Default
time_period str

Time period name abbreviation

required
transit_class _type_

description

required
Source code in tm2py\components\network\transit\transit_skim.py
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
def mask_if_not_required_modes(self, time_period: str, transit_class) -> None:
    """
    Enforce the `required_mode_combo` parameter by setting IVTs to 0 if don't have required modes.

    Sums the in-vehicle-time skims of all transit modes of each required
    mode type, then multiplies the per-type sums together: the product is
    positive only for O-D pairs with in-vehicle time on every required
    mode type, and that product is used to mask the skim set.

    Args:
        time_period (str): Time period name abbreviation
        transit_class: transit class config object (required_mode_combo
            and name attributes used)
    """
    if not transit_class.required_mode_combo:
        return

    # per required mode type, sum the IVT skims of its transit modes
    _ivt_skims = {}
    for mode in transit_class.required_mode_combo:
        for transit_mode in (m for m in self.config.modes if m.type == mode):
            _data = self.matrix_cache[time_period].get_data(
                f'mf"{time_period}_{transit_class.name}_{transit_mode.name}IVTT"'
            )
            # membership test on the dict itself (not .keys())
            if mode in _ivt_skims:
                _ivt_skims[mode] += _data
            else:
                _ivt_skims[mode] = _data

    # multiply all IVT skims together and see if they are greater than zero
    has_all = None
    for value in _ivt_skims.values():
        has_all = value if has_all is None else np.multiply(has_all, value)

    self._mask_skim_set(time_period, transit_class, has_all)

run()

Run transit skims.

Source code in tm2py\components\network\transit\transit_skim.py
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
@LogStartEnd("Transit skims")
def run(self):
    """Run transit skims for every time period and transit class.

    Ensures all skim matrices exist in the emmebank, then runs the skim
    set and exports OMX files per period/class, logging a debug summary
    per period when debug logging is enabled.
    """
    self.emmebank_skim_matrices(
        self.time_period_names, self.config.classes, self.skim_properties
    )
    # fix: dropped the extraneous f-prefix on a placeholder-less string
    with self.logger.log_start_end("period transit skims"):
        for _time_period in self.time_period_names:
            with self.controller.emme_manager.logbook_trace(
                f"Transit skims for period {_time_period}"
            ):
                for _transit_class in self.config.classes:
                    self.run_skim_set(_time_period, _transit_class)
                    self._export_skims(_time_period, _transit_class)
                if self.logger.debug_enabled:
                    self._log_debug_report(_time_period)

run_skim_set(time_period, transit_class)

Run the transit skim calculations for a given time period and assignment class.

Results are stored in transit emmebank.

Steps:
  1. determine if using transit capacity constraint
  2. skim walk, wait time, boardings, and fares
  3. skim in vehicle time by mode
  4. mask transfers above max amount
  5. mask if doesn’t have required modes
Source code in tm2py\components\network\transit\transit_skim.py
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
def run_skim_set(self, time_period: str, transit_class: str):
    """Run the transit skim calculations for a given time period and assignment class.

    Results are stored in transit emmebank.

    Steps:
        1. determine if using transit capacity constraint
        2. skim walk, wait time, boardings, and fares
        3. skim in vehicle time by mode
        4. mask transfers above max amount
        5. mask if doesn't have required modes
    """
    # CCR skims only apply from the first global iteration onward
    # (removed unused local `congested_transit_assignment`)
    use_ccr = self.config.use_ccr if self.controller.iteration >= 1 else False
    with self.controller.emme_manager.logbook_trace(
        "First and total wait time, number of boardings, "
        "fares, and total and transfer walk time"
    ):
        self.skim_walk_wait_boards_fares(time_period, transit_class)
    with self.controller.emme_manager.logbook_trace("In-vehicle time by mode"):
        self.skim_invehicle_time_by_mode(time_period, transit_class, use_ccr)
    with self.controller.emme_manager.logbook_trace(
        "Drive distance and time",
        "Walk auxiliary time, walk access time and walk egress time",
    ):
        self.skim_drive_walk(time_period, transit_class)
    with self.controller.emme_manager.logbook_trace("Calculate crowding"):
        self.skim_crowding(time_period, transit_class)
    if use_ccr:
        with self.controller.emme_manager.logbook_trace("CCR related skims"):
            self.skim_reliability_crowding_capacity(time_period, transit_class)

skim_crowding(time_period, transit_class)

Source code in tm2py\components\network\transit\transit_skim.py
764
765
766
767
768
769
def skim_crowding(self, time_period: str, transit_class) -> None:
    """Skim the crowding penalty (@ccost) into the CROWD matrix.

    Args:
        time_period: time period name abbreviation
        transit_class: transit class config object (name attribute used)
    """
    # Crowding penalty
    self._run_strategy_analysis(
        time_period, transit_class, {"in_vehicle": "@ccost"}, "CROWD"
    )

skim_drive_walk(time_period, transit_class)

Source code in tm2py\components\network\transit\transit_skim.py
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
def skim_drive_walk(self, time_period: str, transit_class: str) -> None:
    """Skim drive time/distance and walk access/auxiliary/egress times.

    Skims the perceived drive time and drive distance (mode D) and the
    walk distances (modes w, a, e), then converts them to actual times:
    drive time is divided by the drive perception factor, and walk
    distances are divided by walk speed (converted to distance per
    minute), because the mode-specific perception factors are hardcoded
    in the mode definitions.

    Args:
        time_period (str): time period name abbreviation
        transit_class (str): transit class config object (name used)
    """
    _tp_tclass = f"{time_period}_{transit_class.name}"

    # drive time here is perception factor*(drive time + toll penalty),
    # will calculate the actual drive time and substract toll penalty in the following steps
    # walk distances are first skimmed into the walk time matrices and
    # converted to times by the matrix calculations below
    _result_specs = [
        {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": ["D"],
                "actual_aux_transit_times": f'mf"{_tp_tclass}_DTIME"',
                "distance": f'mf"{_tp_tclass}_DDIST"',
            },
        },
        {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": ["w"],
                "distance": f'mf"{_tp_tclass}_WAUX"',
            },
        },
        {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": ["a"],
                "distance": f'mf"{_tp_tclass}_WACC"',
            },
        },
        {
            "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
            "by_mode_subset": {
                "modes": ["e"],
                "distance": f'mf"{_tp_tclass}_WEGR"',
            },
        },
    ]
    for _spec in _result_specs:
        self.controller.emme_manager.matrix_results(
            _spec,
            class_name=transit_class.name,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    drive_perception_factor = self.config.drive_perception_factor
    walk_speed = self.config.walk_speed
    # (removed unused local `vot` from the original)
    # divide drive time by mode specific perception factor to get the actual time
    # for walk time, use walk distance/walk speed
    # NOTE: removed the original's second calculation, which was a no-op
    # (it assigned DTIME to itself)
    spec_list = [
        {
            "type": "MATRIX_CALCULATION",
            "constraint": None,
            "result": f'mf"{_tp_tclass}_DTIME"',
            "expression": f'mf"{_tp_tclass}_DTIME"/{drive_perception_factor}',
        },
        {
            "type": "MATRIX_CALCULATION",
            "constraint": None,
            "result": f'mf"{_tp_tclass}_WAUX"',
            "expression": f'mf"{_tp_tclass}_WAUX"/({walk_speed}/60)',
        },
        {
            "type": "MATRIX_CALCULATION",
            "constraint": None,
            "result": f'mf"{_tp_tclass}_WACC"',
            "expression": f'mf"{_tp_tclass}_WACC"/({walk_speed}/60)',
        },
        {
            "type": "MATRIX_CALCULATION",
            "constraint": None,
            "result": f'mf"{_tp_tclass}_WEGR"',
            "expression": f'mf"{_tp_tclass}_WEGR"/({walk_speed}/60)',
        },
    ]
    self.controller.emme_manager.matrix_calculator(
        spec_list,
        scenario=self.scenarios[time_period],
        num_processors=self.controller.num_processors,
    )

skim_invehicle_time_by_mode(time_period, transit_class, use_ccr=False)

Skim in-vehicle by mode for a time period and transit class and store results in Emmebank.

Parameters:

Name Type Description Default
time_period str

time period abbreviation

required
transit_class str

transit class name

required
use_ccr bool

if True, will use crowding, capacity, and reliability (ccr). Defaults to False

False
Source code in tm2py\components\network\transit\transit_skim.py
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
def skim_invehicle_time_by_mode(
    self, time_period: str, transit_class: str, use_ccr: bool = False
) -> None:
    """Skim in-vehicle by mode for a time period and transit class and store results in Emmebank.

    Args:
        time_period (str): time period abbreviation
        transit_class (str): transit class name
        use_ccr (bool): if True, will use crowding, capacity, and reliability (ccr).
            Defaults to False

    """
    mode_combinations = self._get_emme_mode_ids(transit_class, time_period)
    # select the CCR or non-CCR in-vehicle-time implementation
    _ivtt_by_mode = (
        self._invehicle_time_by_mode_ccr
        if use_ccr
        else self._invehicle_time_by_mode_no_ccr
    )
    total_ivtt_expr = _ivtt_by_mode(time_period, transit_class, mode_combinations)
    # sum total ivtt across all modes
    self._calc_total_ivt(time_period, transit_class, total_ivtt_expr)

skim_penalty_toll(time_period, transit_class)

Source code in tm2py\components\network\transit\transit_skim.py
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
def skim_penalty_toll(self, time_period: str, transit_class: str) -> None:
    """Skim transfer boarding-time penalties and drive tolls.

    Runs a strategy analysis to capture the transfer boarding penalty
    (@xboard_nodepen) into the XBOATIME matrix, subtracts the PNR lot
    boarding penalty for PNR classes, and skims the drive toll
    (@drive_toll) over the drive portion of drive-access/egress paths
    into the DTOLL matrix.

    Args:
        time_period (str): Time period name abbreviation
        transit_class: transit class config object (name attribute used)
    """
    # transfer boarding time penalty
    self._run_strategy_analysis(
        time_period, transit_class, {"boarding": "@xboard_nodepen"}, "XBOATIME"
    )

    _tp_tclass = f"{time_period}_{transit_class.name}"
    if ("PNR_TRN_WLK" in _tp_tclass) or ("WLK_TRN_PNR" in _tp_tclass):
        spec = {  # subtract PNR boarding from total transfer boarding time penalty
            "type": "MATRIX_CALCULATION",
            "constraint": {
                "by_value": {
                    "od_values": f'mf"{_tp_tclass}_XBOATIME"',
                    "interval_min": 0,
                    "interval_max": 9999999,
                    "condition": "INCLUDE",
                }
            },
            "result": f'mf"{_tp_tclass}_XBOATIME"',
            "expression": f'(mf"{_tp_tclass}_XBOATIME" - 1).max.0',
        }

        self.controller.emme_manager.matrix_calculator(
            spec,
            scenario=self.scenarios[time_period],
            num_processors=self.controller.num_processors,
        )

    # drive toll: the drive leg is at the access end for PNR/KNR access
    # classes and at the egress end for PNR/KNR egress classes
    if ("PNR_TRN_WLK" in _tp_tclass) or ("KNR_TRN_WLK" in _tp_tclass):
        _portion_of_path = "ORIGIN_TO_INITIAL_BOARDING"
    elif ("WLK_TRN_PNR" in _tp_tclass) or ("WLK_TRN_KNR" in _tp_tclass):
        _portion_of_path = "FINAL_ALIGHTING_TO_DESTINATION"
    else:
        _portion_of_path = None
    if _portion_of_path is not None:
        self._run_path_analysis(
            time_period,
            transit_class,
            _portion_of_path,
            {"aux_transit": "@drive_toll"},
            "DTOLL",
        )

skim_reliability_crowding_capacity(time_period, transit_class)

Generate skim results for CCR assignment and stores results in Emmebank.

Generates the following: 1. Link Unreliability (LINKREL); 2. Crowding penalty (CROWD); 3. Extra added wait time (EAWT); 4. Capacity penalty (CAPPEN).

Parameters:

Name Type Description Default
time_period str

time period abbreviation

required
transit_class

transit class

required
Source code in tm2py\components\network\transit\transit_skim.py
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
def skim_reliability_crowding_capacity(
    self, time_period: str, transit_class
) -> None:
    """Generate skim results for CCR assignment and stores results in Emmebank.

    Generates the following:
    1. Link Unreliability: LINKREL
    2. Crowding penalty: CROWD
    3. Extra added wait time: EAWT
    4. Capacity penalty: CAPPEN

    Args:
        time_period (str): time period abbreviation
        transit_class: transit class
    """
    # (trip part in the analysis spec, Emme attribute skimmed, skim name)
    _ccr_analyses = (
        ("in_vehicle", "ul1", "LINKREL"),  # link unreliability
        ("in_vehicle", "@ccost", "CROWD"),  # crowding penalty
        ("boarding", "@eawt", "EAWT"),  # extra added wait time
        ("boarding", "@capacity_penalty", "CAPPEN"),  # capacity penalty
    )
    for _trip_part, _attribute, _skim_name in _ccr_analyses:
        self._run_strategy_analysis(
            time_period, transit_class, {_trip_part: _attribute}, _skim_name
        )

skim_walk_wait_boards_fares(time_period, transit_class)

Skim wait, walk, board, and fares for a given time period and transit assignment class.

Skim the first and total wait time, number of boardings, (transfers + 1) fares, total walk time, total in-vehicle time.

Source code in tm2py\components\network\transit\transit_skim.py
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
def skim_walk_wait_boards_fares(self, time_period: str, transit_class):
    """Skim wait, walk, board, and fares for a given time period and transit assignment class.

    Skim the first and total wait time, number of boardings, (transfers + 1)
    fares, total walk time, total in-vehicle time.

    Args:
        time_period (str): time period abbreviation
        transit_class: transit class config object; must expose a ``.name``
            attribute (the previous ``str`` annotation was incorrect since
            ``transit_class.name`` is accessed below).
    """
    # matrix-name prefix, e.g. "<time_period>_<class name>"
    _tp_tclass = f"{time_period}_{transit_class.name}"
    _network = self.networks[time_period]
    # IDs of all transit and auxiliary-transit modes in this period's network
    _transit_mode_ids = [
        m.id for m in _network.modes() if m.type in ["TRANSIT", "AUX_TRANSIT"]
    ]
    # Emme extended transit matrix results spec: first wait (IWAIT),
    # total wait (WAIT), and average boardings (BOARDS) over the mode subset
    spec = {
        "type": "EXTENDED_TRANSIT_MATRIX_RESULTS",
        "actual_first_waiting_times": f'mf"{_tp_tclass}_IWAIT"',
        "actual_total_waiting_times": f'mf"{_tp_tclass}_WAIT"',
        "by_mode_subset": {
            "modes": _transit_mode_ids,
            "avg_boardings": f'mf"{_tp_tclass}_BOARDS"',
        },
    }
    if self.config.use_fares:
        # also skim in-vehicle costs and total boarding costs (fares)
        spec["by_mode_subset"].update(
            {
                "actual_in_vehicle_costs": f'mf"{_tp_tclass}_IN_VEHICLE_COST"',
                "actual_total_boarding_costs": f'mf"{_tp_tclass}_FARE"',
            }
        )

    self.controller.emme_manager.matrix_results(
        spec,
        class_name=transit_class.name,
        scenario=self.scenarios[time_period],
        num_processors=self.controller.num_processors,
    )

    # derived skims: transfer wait, boardings, and (when fares are enabled) fares
    self._calc_xfer_wait(time_period, transit_class.name)
    self._calc_boardings(time_period, transit_class.name)
    if self.config.use_fares:
        self._calc_fares(time_period, transit_class.name)

validate_inputs()

Validate inputs.

Source code in tm2py\components\network\transit\transit_skim.py
53
54
55
56
def validate_inputs(self):
    """Validate inputs.

    Currently a placeholder that performs no checks.
    """
    # TODO add input validation

Bases: ConfigItem

Transit mode definition (see also mode in the Emme API).

Source code in tm2py\config.py
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
@dataclass(frozen=True)
class TransitModeConfig(ConfigItem):
    """Transit mode definition (see also mode in the Emme API).

    Several optional fields are conditionally required depending on
    assign_type; the validators below enforce those requirements.
    """

    type: Literal[
        "WALK",
        "ACCESS",
        "EGRESS",
        "LOCAL",
        "PREMIUM",
        "DRIVE",
        "PNR_dummy",
        "KNR_dummy",
    ]
    assign_type: Literal["TRANSIT", "AUX_TRANSIT"]
    mode_id: str = Field(min_length=1, max_length=1)
    name: str = Field(max_length=10)
    description: Optional[str] = ""
    in_vehicle_perception_factor: Optional[float] = Field(default=None, ge=0)
    speed_or_time_factor: Optional[str] = Field(default="")
    initial_boarding_penalty: Optional[float] = Field(default=None, ge=0)
    transfer_boarding_penalty: Optional[float] = Field(default=None, ge=0)
    headway_fraction: Optional[float] = Field(default=None, ge=0)
    transfer_wait_perception_factor: Optional[float] = Field(default=None, ge=0)
    eawt_factor: Optional[float] = Field(default=1)

    # NOTE: all validators take (cls, value, values) for consistency;
    # @validator applies classmethod itself, so no explicit @classmethod
    # decorator is used (stacking one on top prevents pydantic v1 from
    # discovering the validator).

    @validator("in_vehicle_perception_factor", always=True)
    def in_vehicle_perception_factor_valid(cls, value, values):
        """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("speed_or_time_factor", always=True)
    def speed_or_time_factor_valid(cls, value, values):
        """Validate speed_or_time_factor exists if assign_type is AUX_TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT":
            assert value is not None, "must be specified when assign_type==AUX_TRANSIT"
        return value

    @validator("initial_boarding_penalty", always=True)
    def initial_boarding_penalty_valid(cls, value, values):
        """Validate initial_boarding_penalty exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("transfer_boarding_penalty", always=True)
    def transfer_boarding_penalty_valid(cls, value, values):
        """Validate transfer_boarding_penalty exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("headway_fraction", always=True)
    def headway_fraction_valid(cls, value, values):
        """Validate headway_fraction exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("transfer_wait_perception_factor", always=True)
    def transfer_wait_perception_factor_valid(cls, value, values):
        """Validate transfer_wait_perception_factor exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("mode_id")
    def mode_id_valid(cls, value):
        """Validate mode_id is exactly one character."""
        assert len(value) == 1, "mode_id must be one character"
        return value

headway_fraction_valid(value, values)

Validate headway_fraction exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1139
1140
1141
1142
1143
1144
@validator("headway_fraction", always=True)
def headway_fraction_valid(value, values):
    """Validate headway_fraction exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

in_vehicle_perception_factor_valid(value, values)

Validate in_vehicle_perception_factor exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1111
1112
1113
1114
1115
1116
@validator("in_vehicle_perception_factor", always=True)
def in_vehicle_perception_factor_valid(cls, value, values):
    """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

initial_boarding_penalty_valid(value, values)

Validate initial_boarding_penalty exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1125
1126
1127
1128
1129
1130
@validator("initial_boarding_penalty", always=True)
def initial_boarding_penalty_valid(value, values):
    """Validate initial_boarding_penalty exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

mode_id_valid(value) classmethod

Validate mode_id.

Source code in tm2py\config.py
1153
1154
1155
1156
1157
1158
@classmethod
@validator("mode_id")
def mode_id_valid(cls, value):
    """Validate mode_id."""
    assert len(value) == 1, "mode_id must be one character"
    return value

speed_or_time_factor_valid(value, values)

Validate speed_or_time_factor exists if assign_type is AUX_TRANSIT.

Source code in tm2py\config.py
1118
1119
1120
1121
1122
1123
@validator("speed_or_time_factor", always=True)
def speed_or_time_factor_valid(cls, value, values):
    """Validate speed_or_time_factor exists if assign_type is AUX_TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT":
        assert value is not None, "must be specified when assign_type==AUX_TRANSIT"
    return value

transfer_boarding_penalty_valid(value, values)

Validate transfer_boarding_penalty exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1132
1133
1134
1135
1136
1137
@validator("transfer_boarding_penalty", always=True)
def transfer_boarding_penalty_valid(value, values):
    """Validate transfer_boarding_penalty exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

transfer_wait_perception_factor_valid(value, values)

Validate transfer_wait_perception_factor exists if assign_type is TRANSIT.

Source code in tm2py\config.py
1146
1147
1148
1149
1150
1151
@validator("transfer_wait_perception_factor", always=True)
def transfer_wait_perception_factor_valid(value, values):
    """Validate transfer_wait_perception_factor exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

Bases: ConfigItem

Transit assignment parameters.

Source code in tm2py\config.py
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
@dataclass(frozen=True)
class TransitConfig(ConfigItem):
    """Transit assignment parameters.

    Note: use_ccr is deprecated and must be False (see validator below).
    """

    modes: Tuple[TransitModeConfig, ...]
    classes: Tuple[TransitClassConfig, ...]
    journey_levels: TransitJourneyLevelsConfig
    apply_msa_demand: bool
    value_of_time: float
    walk_speed: float
    transit_speed: float
    effective_headway_source: str
    initial_wait_perception_factor: float
    transfer_wait_perception_factor: float
    # fixed: walk_perception_factor was declared twice; the duplicate
    # annotation was redundant (same name, same type) and has been removed
    walk_perception_factor: float
    walk_perception_factor_cbd: float
    drive_perception_factor: float
    max_transfers: int
    use_fares: bool
    fare_2015_to_2000_deflator: float
    fares_path: pathlib.Path
    fare_matrix_path: pathlib.Path
    fare_max_transfer_distance_miles: float
    override_connector_times: bool
    use_ccr: bool
    ccr_stop_criteria: Optional[AssignmentStoppingCriteriaConfig]
    ccr_weights: CcrWeightsConfig
    eawt_weights: EawtWeightsConfig
    congested_transit_assignment: bool
    congested: CongestedAssnConfig
    congested_weights: CongestedWeightsConfig
    output_skim_path: pathlib.Path
    output_skim_filename_tmpl: str = Field()
    output_skim_matrixname_tmpl: str = Field()
    output_stop_usage_path: Optional[str] = Field(default=None)
    output_transit_boardings_path: Optional[str] = Field(default=None)
    output_transit_segment_path: Optional[str] = Field(default=None)
    output_station_to_station_flow_path: Optional[str] = Field(default=None)
    output_transfer_at_station_path: Optional[str] = Field(default=None)
    timed_transfer_nodes: Tuple[int, ...] = Field()
    output_transfer_at_station_node_ids: Dict[str, int] = Field()
    # fixed: annotated Optional[float] since the default is None
    max_ccr_iterations: Optional[float] = None
    split_connectors_to_prevent_walk: bool = False
    input_connector_access_times_path: Optional[str] = Field(default=None)
    input_connector_egress_times_path: Optional[str] = Field(default=None)
    vehicles: Optional[TransitVehicleConfig] = Field(
        default_factory=TransitVehicleConfig
    )

    @validator("use_ccr")
    def deprecate_capacitated_assignment(cls, value, values):
        """Validate use_ccr is false."""
        assert (
            not value
        ), "capacitated transit assignment is deprecated, please set use_ccr to false"
        return value

deprecate_capacitated_assignment(value, values)

Validate use_ccr is false.

Source code in tm2py\config.py
1389
1390
1391
1392
1393
1394
1395
@validator("use_ccr")
def deprecate_capacitated_assignment(cls, value, values):
    """Validate use_ccr is false."""
    assert (
        not value
    ), "capacitated transit assignment is deprecated, please set use_ccr to false"
    return value

Active Network Components

To come.

Emme Wrappers

Emme components module.

Bases: ConfigItem

Emme-specific parameters.

Properties
Source code in tm2py\config.py
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
@dataclass(frozen=True)
class EmmeConfig(ConfigItem):
    """Emme-specific parameters.

    Properties:
        all_day_scenario_id: scenario ID to use for all day
            (initial imported) scenario with all time period data
        project_path: relative path from run_dir to Emme desktop project (.emp)
        highway_database_path: relative path to highway Emmebank
        active_north_database_path:  relative paths to active mode Emmebank for north bay
        active_south_database_path:  relative paths to active mode Emmebank for south bay
        transit_database_path: relative path to transit Emmebank
        num_processors: the number of processors to use in Emme procedures,
            either as an integer, or value MAX, MAX-N. Typically recommend
            using MAX-1 (on desktop systems) or MAX-2 (on servers with many
            logical processors) to leave capacity for background / other tasks.
    """

    all_day_scenario_id: int
    project_path: pathlib.Path
    highway_database_path: pathlib.Path
    active_north_database_path: pathlib.Path
    active_south_database_path: pathlib.Path
    transit_database_path: pathlib.Path
    # pattern accepts exactly "MAX", "MAX-<n>" (leave n processors free),
    # or an explicit non-negative integer count
    num_processors: str = Field(pattern=r"^MAX$|^MAX-\d+$|^\d+$")

Errata

Logging module.

Note the general definition of logging levels as used in tm2py:

highly detailed information which would rarely be of interest
except for detailed debugging by a developer

LogCache

Bases: LogFormatter

Caches all messages for later recording in an on-error logfile.

Properties
  • file_path: the absolute file path to write to
Source code in tm2py\logger.py
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
class LogCache(LogFormatter):
    """Caches all messages for later recording in an on-error logfile.

    Properties:
        - file_path: the absolute file path to write to
    """

    def __init__(self, file_path: str):
        """Constructor for LogCache object.

        Args:
            file_path (str): the absolute file path to write to.
        """
        super().__init__(level=0)
        self.file_path = file_path
        self._msg_cache = []

    def open(self):
        """Initialize log file (remove any existing file at file_path)."""
        if os.path.exists(self.file_path):
            os.remove(self.file_path)

    def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
        """Format and store text for later recording.

        Args:
            text (str): text to log
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts
            timestamp (str): formatted datetime as a string or None
        """
        formatted = self._format_text(text, level, indent, timestamp)
        self._msg_cache.append((level, formatted))

    def write_cache(self):
        """Write all cached messages."""
        with open(self.file_path, "w", encoding="utf8") as out_file:
            out_file.writelines(
                f"{LEVELS_INT_TO_STR[level]:6} {text}\n"
                for level, text in self._msg_cache
            )
        self.clear()

    def clear(self):
        """Clear message cache."""
        self._msg_cache = []

__init__(file_path)

Constructor for LogCache object.

Parameters:

Name Type Description Default
file_path str

the absolute file path to write to.

required
Source code in tm2py\logger.py
528
529
530
531
532
533
534
535
536
def __init__(self, file_path: str):
    """Constructor for LogCache object.

    Args:
        file_path (str): the absolute file path to write to.
    """
    super().__init__(level=0)
    self.file_path = file_path
    self._msg_cache = []

clear()

Clear message cache.

Source code in tm2py\logger.py
563
564
565
def clear(self):
    """Clear message cache."""
    self._msg_cache = []

log(text, level, indent, timestamp)

Format and store text for later recording.

Parameters:

Name Type Description Default
text str

text to log

required
level int

logging level

required
indent bool

if true indent text based on the number of open contexts

required
timestamp str

formatted datetime as a string or None

required
Source code in tm2py\logger.py
543
544
545
546
547
548
549
550
551
552
553
554
def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
    """Format and store text for later recording.

    Args:
        text (str): text to log
        level (int): logging level
        indent (bool): if true indent text based on the number of open contexts
        timestamp (str): formatted datetime as a string or None
    """
    self._msg_cache.append(
        (level, self._format_text(text, level, indent, timestamp))
    )

open()

Initialize log file (remove).

Source code in tm2py\logger.py
538
539
540
541
def open(self):
    """Initialize log file (remove)."""
    if os.path.exists(self.file_path):
        os.remove(self.file_path)

write_cache()

Write all cached messages.

Source code in tm2py\logger.py
556
557
558
559
560
561
def write_cache(self):
    """Write all cached messages."""
    with open(self.file_path, "w", encoding="utf8") as file:
        for level, text in self._msg_cache:
            file.write(f"{LEVELS_INT_TO_STR[level]:6} {text}\n")
    self.clear()

LogDisplay

Bases: LogFormatter

Format and print log text to console / Notebook.

Properties
  • level: the log level as an int
Source code in tm2py\logger.py
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
class LogDisplay(LogFormatter):
    """Format and print log text to console / Notebook.

    Properties:
        - level: the log level as an int
    """

    def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
        """Format and display text on screen (print).

        Args:
            text (str): text to log
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts
            timestamp (str): formatted datetime as a string or None
        """
        # guard clause: skip messages filtered out by the current level
        if level < self.level:
            return
        print(self._format_text(text, level, indent, timestamp))

log(text, level, indent, timestamp)

Format and display text on screen (print).

Parameters:

Name Type Description Default
text str

text to log

required
level int

logging level

required
indent bool

if true indent text based on the number of open contexts

required
timestamp str

formatted datetime as a string or None

required
Source code in tm2py\logger.py
508
509
510
511
512
513
514
515
516
517
518
def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
    """Format and display text on screen (print).

    Args:
        text (str): text to log
        level (int): logging level
        indent (bool): if true indent text based on the number of open contexts
        timestamp (str): formatted datetime as a string or None
    """
    if level >= self.level:
        print(self._format_text(text, level, indent, timestamp))

LogFile

Bases: LogFormatter

Format and write log text to file.

Properties
  • level: the log level as an int
  • file_path: the absolute file path to write to
Source code in tm2py\logger.py
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
class LogFile(LogFormatter):
    """Format and write log text to file.

    Properties:
        - level: the log level as an int
        - file_path: the absolute file path to write to
    """

    def __init__(self, level: int, file_path: str):
        """Constructor for LogFile object.

        Args:
            level (int): the log level as an int.
            file_path (str): the absolute file path to write to.
        """
        super().__init__(level)
        self.file_path = file_path
        self.log_file = None  # open file handle; None while closed

    def open(self):
        """Open the log file for writing (truncates any existing file)."""
        # handle is kept open across log() calls and released in close()
        self.log_file = open(self.file_path, "w", encoding="utf8")

    def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
        """Log text to file and display depending upon log level and config.

        Note that log will not write to file until opened with a context.

        Args:
            text (str): text to log
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts
            timestamp (str): formatted datetime as a string or None for timestamp
        """
        if level >= self.level and self.log_file is not None:
            text = self._format_text(text, level, indent, timestamp)
            self.log_file.write(f"{text}\n")
            # flush so the file is current even if the run crashes later
            self.log_file.flush()

    def close(self):
        """Close the open log file.

        Safe no-op if the file was never opened or is already closed
        (previously raised AttributeError on a None handle).
        """
        if self.log_file is not None:
            self.log_file.close()
            self.log_file = None

__init__(level, file_path)

Constructor for LogFile object.

Parameters:

Name Type Description Default
level int

the log level as an int.

required
file_path str

the absolute file path to write to.

required
Source code in tm2py\logger.py
433
434
435
436
437
438
439
440
441
442
def __init__(self, level: int, file_path: str):
    """Constructor for LogFile object.

    Args:
        level (int): the log level as an int.
        file_path (str): the absolute file path to write to.
    """
    super().__init__(level)
    self.file_path = file_path
    self.log_file = None

close()

Close the open log file.

Source code in tm2py\logger.py
464
465
466
467
def close(self):
    """Close the open log file."""
    self.log_file.close()
    self.log_file = None

log(text, level, indent, timestamp)

Log text to file and display depending upon log level and config.

Note that log will not write to file until opened with a context.

Parameters:

Name Type Description Default
text str

text to log

required
level int

logging level

required
indent bool

if true indent text based on the number of open contexts

required
timestamp str

formatted datetime as a string or None for timestamp

required
Source code in tm2py\logger.py
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
    """Log text to file and display depending upon log level and config.

    Note that log will not write to file until opened with a context.

    Args:
        text (str): text to log
        level (int): logging level
        indent (bool): if true indent text based on the number of open contexts
        timestamp (str): formatted datetime as a string or None for timestamp
    """
    if level >= self.level and self.log_file is not None:
        text = self._format_text(text, level, indent, timestamp)
        self.log_file.write(f"{text}\n")
        self.log_file.flush()

open()

Open the log file for writing.

Source code in tm2py\logger.py
444
445
446
def open(self):
    """Open the log file for writing."""
    self.log_file = open(self.file_path, "w", encoding="utf8")

LogFileLevelOverride

Bases: LogFile

Format and write log text to file.

Properties
  • level: the log level as an int
  • file_path: the absolute file path to write to
  • iter_component_level: TODO
  • controller: TODO
Source code in tm2py\logger.py
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
class LogFileLevelOverride(LogFile):
    """LogFile whose filter level can be overridden per iteration/component.

    Properties:
        - level: the current log level as an int (with override applied)
        - file_path: the absolute file path to write to
        - iter_component_level: mapping queried with the controller's
          ``iter_component`` to obtain a per-component level override
        - controller: run controller exposing the current ``iter_component``
    """

    def __init__(self, level, file_path, iter_component_level, controller):
        """Constructor for LogFileLevelOverride object.

        Args:
            level: default log level (as an int) used when no override applies
            file_path: the absolute file path to write to
            iter_component_level: mapping of iter_component key to log level
                override; assumed to support dict-style ``.get`` — see level
            controller: controller providing the current ``iter_component``
        """
        super().__init__(level, file_path)
        self.iter_component_level = iter_component_level
        self.controller = controller

    @property
    def level(self):
        """Current log level with iter_component_level config override."""
        # fall back to the default _level when the current iter_component
        # has no override entry
        return self.iter_component_level.get(
            self.controller.iter_component, self._level
        )

level property

Current log level with iter_component_level config override.

__init__(level, file_path, iter_component_level, controller)

Constructor for LogFileLevelOverride object.

Parameters:

Name Type Description Default
level _type_

TODO

required
file_path _type_

TODO

required
iter_component_level _type_

TODO

required
controller _type_

TODO

required
Source code in tm2py\logger.py
480
481
482
483
484
485
486
487
488
489
490
491
def __init__(self, level, file_path, iter_component_level, controller):
    """Constructor for LogFileLevelOverride object.

    Args:
        level (_type_): TODO
        file_path (_type_): TODO
        iter_component_level (_type_): TODO
        controller (_type_): TODO
    """
    super().__init__(level, file_path)
    self.iter_component_level = iter_component_level
    self.controller = controller

LogFormatter

Base class for recording text to log.

Properties
Source code in tm2py\logger.py
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
class LogFormatter:
    """Base class for recording text to log.

    Properties:
        indent: current indentation level for the LogFormatter
        level: log filter level (as an int)
    """

    def __init__(self, level: int):
        """Constructor for LogFormatter.

        Args:
            level (int): log filter level (as an int)
        """
        self._level = level
        self.indent = 0

    @property
    def level(self):
        """The current filter level for the LogFormatter."""
        return self._level

    def increase_indent(self, level: int):
        """Increase current indent if the log level is filtered in."""
        if level < self.level:
            return
        self.indent += 1

    def decrease_indent(self, level: int):
        """Decrease current indent if the log level is filtered in."""
        if level < self.level:
            return
        self.indent -= 1

    @abstractmethod
    def log(
        self,
        text: str,
        level: int,
        indent: bool,
        timestamp: Union[str, None],
    ):
        """Format and log message text.

        Args:
            text (str): text to log
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts
            timestamp (str): formatted datetime as a string or None
        """

    def _format_text(
        self,
        text: str,
        level: int,
        indent: bool,
        timestamp: Union[str, None],
    ):
        """Format text for logging.

        Args:
            text (str): text to format
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts and
                timestamp width
            timestamp (str): formatted datetime as a string or None for timestamp
        """
        if timestamp is None:
            # pad to the timestamp column width so indented text lines up
            timestamp = "                        " if indent else ""
        # distinct name avoids shadowing the boolean `indent` parameter
        indent_str = "  " * max(self.indent, 0) if indent else ""
        level_str = "{0:>6}".format(LEVELS_INT_TO_STR[level])
        return f"{timestamp}{level_str}: {indent_str}{text}"

level property

The current filter level for the LogFormatter.

__init__(level)

Constructor for LogFormatter.

Parameters:

Name Type Description Default
level int

log filter level (as an int)

required
Source code in tm2py\logger.py
357
358
359
360
361
362
363
364
def __init__(self, level: int):
    """Constructor for LogFormatter.

    Args:
        level (int): log filter level (as an int)
    """
    self._level = level
    self.indent = 0

decrease_indent(level)

Decrease current indent if the log level is filtered in.

Source code in tm2py\logger.py
376
377
378
379
def decrease_indent(self, level: int):
    """Decrease current indent if the log level is filtered in."""
    if level >= self.level:
        self.indent -= 1

increase_indent(level)

Increase current indent if the log level is filtered in.

Source code in tm2py\logger.py
371
372
373
374
def increase_indent(self, level: int):
    """Increase current indent if the log level is filtered in."""
    if level >= self.level:
        self.indent += 1

log(text, level, indent, timestamp) abstractmethod

Format and log message text.

Parameters:

Name Type Description Default
text str

text to log

required
level int

logging level

required
indent bool

if true indent text based on the number of open contexts

required
timestamp str

formatted datetime as a string or None

required
Source code in tm2py\logger.py
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
@abstractmethod
def log(
    self,
    text: str,
    level: int,
    indent: bool,
    timestamp: Union[str, None],
):
    """Format and log message text.

    Args:
        text (str): text to log
        level (int): logging level
        indent (bool): if true indent text based on the number of open contexts
        timestamp (str): formatted datetime as a string or None
    """

LogStartEnd

Log the start and end time with optional message.

Used as a Component method decorator. If msg is not provided a default message is generated with the object class and method name.

Example:: @LogStartEnd(“Highway assignment and skims”, level=”STATUS”) def run(self): pass

Properties

text (str): message text to use in the start and end record. level (str): logging level as a string.

Source code in tm2py\logger.py
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
class LogStartEnd:
    """Log the start and end time with optional message.

    Used as a Component method decorator. If msg is not provided a default
    message is generated with the object class and method name.

    Example::
        @LogStartEnd("Highway assignment and skims", level="STATUS")
        def run(self):
            pass

    Properties:
        text (str): message text to use in the start and end record.
        level (str): logging level as a string.
    """

    def __init__(self, text: str = None, level: str = "INFO"):
        """Constructor for LogStartEnd object.

        Args:
            text (str, optional): message text to use in the start and end record.
                Defaults to None.
            level (str, optional): logging level as a string. Defaults to "INFO".
        """
        self.text = text
        self.level = level

    def __call__(self, func):
        """Ability to call logger.

        Args:
            func (_type_): _description_

        Returns:
            _type_: _description_
        """

        @functools.wraps(func)
        def wrapper(obj, *args, **kwargs):
            text = self.text or obj.__class__.__name__ + " " + func.__name__
            with obj.logger.log_start_end(text, self.level):
                value = func(obj, *args, **kwargs)
            return value

        return wrapper

__call__(func)

Ability to call logger.

Parameters:

Name Type Description Default
func _type_

description

required

Returns:

Name Type Description
_type_

description

Source code in tm2py\logger.py
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
def __call__(self, func):
    """Ability to call logger.

    Args:
        func (_type_): _description_

    Returns:
        _type_: _description_
    """

    @functools.wraps(func)
    def wrapper(obj, *args, **kwargs):
        text = self.text or obj.__class__.__name__ + " " + func.__name__
        with obj.logger.log_start_end(text, self.level):
            value = func(obj, *args, **kwargs)
        return value

    return wrapper

__init__(text=None, level='INFO')

Constructor for LogStartEnd object.

Parameters:

Name Type Description Default
text str

message text to use in the start and end record. Defaults to None.

None
level str

logging level as a string. Defaults to “INFO”.

'INFO'
Source code in tm2py\logger.py
587
588
589
590
591
592
593
594
595
596
def __init__(self, text: str = None, level: str = "INFO"):
    """Constructor for LogStartEnd object.

    Args:
        text (str, optional): message text to use in the start and end record.
            Defaults to None.
        level (str, optional): logging level as a string. Defaults to "INFO".
    """
    self.text = text
    self.level = level

Logger

Logging of message text for display, text file, and Emme logbook, as well as notify to slack.

The log message levels can be one of: TRACE, DEBUG, DETAIL, INFO, STATUS, WARN, ERROR, FATAL Which will filter all messages of that severity and higher. See module note on use of descriptive level names.

logger.log(“a message”) with logger.log_start_end(“Running a set of steps”): logger.log(“Message with timestamp”) logger.log(“A debug message”, level=”DEBUG”) # equivalently, use the .debug: logger.debug(“Another debug message”) if logger.debug_enabled: # only generate this report if logging DEBUG logger.log(“A debug report that takes time to produce”, level=”DEBUG”) logger.notify_slack(“A slack message”)

Methods can also be decorated with LogStartEnd (see class for more).

Note that the Logger should only be initialized once per model run. In places where the controller is not available, the last Logger initialized can be obtained from the class method get_logger::

1
logger = Logger.get_logger()
Internal properties
Source code in tm2py\logger.py
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
class Logger:
    """Logging of message text for display, text file, and Emme logbook, as well as notify to slack.

    The log message levels can be one of:
    TRACE, DEBUG, DETAIL, INFO, STATUS, WARN, ERROR, FATAL
    Which will filter all messages of that severity and higher.
    See module note on use of descriptive level names.

    logger.log("a message")
    with logger.log_start_end("Running a set of steps"):
        logger.log("Message with timestamp")
        logger.log("A debug message", level="DEBUG")
        # equivalently, use the .debug:
        logger.debug("Another debug message")
        if logger.debug_enabled:
            # only generate this report if logging DEBUG
            logger.log("A debug report that takes time to produce", level="DEBUG")
        logger.notify_slack("A slack message")

    Methods can also be decorated with LogStartEnd (see class for more).

    Note that the Logger should only be initialized once per model run.
    In places where the controller is not available, the last Logger
    initialized can be obtained from the class method get_logger::

        logger = Logger.get_logger()

    Internal properties:
        _log_cache: the LogCache object
        _log_formatters: list of objects that format text and record, either
            to file, display (print to screen) or cache for log on error
        _use_emme_logbook: whether Emme logbook is enabled
        _slack_notifier: SlackNotifier object for sending messages to slack
    """

    # used to cache last initialized Logger
    _instance = None

    def __new__(cls, controller: RunController):
        """Logger __new__ method override. TODO.

        Args:
            controller (RunController): TODO.
        """
        # pylint: disable=unused-argument
        cls._instance = super(Logger, cls).__new__(cls)
        return cls._instance

    def __init__(self, controller: RunController):
        """Constructor for Logger object.

        Args:
            controller (RunController): Associated RunController instance.
        """
        self.controller = controller
        self._indentation = 0
        log_config = controller.config.logging
        iter_component_level = log_config.iter_component_level or []
        iter_component_level = dict(
            ((i, c), LEVELS_STR_TO_INT[l]) for i, c, l in iter_component_level
        )
        display_logger = LogDisplay(LEVELS_STR_TO_INT[log_config.display_level])
        run_log_formatter = LogFile(
            LEVELS_STR_TO_INT[log_config.run_file_level],
            os.path.join(controller.run_dir, log_config.run_file_path),
        )
        standard_log_formatter = LogFileLevelOverride(
            LEVELS_STR_TO_INT[log_config.log_file_level],
            os.path.join(controller.run_dir, log_config.log_file_path),
            iter_component_level,
            controller,
        )
        self._log_cache = LogCache(
            os.path.join(controller.run_dir, log_config.log_on_error_file_path)
        )
        self._log_formatters = [
            display_logger,
            run_log_formatter,
            standard_log_formatter,
            self._log_cache,
        ]

        self._use_emme_logbook = self.controller.config.logging.use_emme_logbook

        self._slack_notifier = SlackNotifier(self)

        # open log formatters
        for log_formatter in self._log_formatters:
            if hasattr(log_formatter, "open"):
                log_formatter.open()

    def __del__(self):
        """
        Destructor for logger object
        """
        for log_formatter in self._log_formatters:
            if hasattr(log_formatter, "close"):
                log_formatter.close()

    @classmethod
    def get_logger(cls):
        """Return the last initialized logger object."""
        return cls._instance

    def notify_slack(self, text: str):
        """Send message to slack if enabled by config.

        Args:
            text (str): text to send to slack
        """
        if self.controller.config.logging.notify_slack:
            self._slack_notifier.post_message(text)

    def log(self, text: str, level: LogLevel = "INFO", indent: bool = True):
        """Log text to file and display depending upon log level and config.

        Args:
            text (str): text to log
            level (str): logging level
            indent (bool): if true indent text based on the number of open contexts
        """
        timestamp = datetime.now().strftime("%d-%b-%Y (%H:%M:%S) ")
        for log_formatter in self._log_formatters:
            log_formatter.log(text, LEVELS_STR_TO_INT[level], indent, timestamp)
        if self._use_emme_logbook and self.controller.has_emme:
            self.controller.emme_manager.logbook_write(text)

    def trace(self, text: str, indent: bool = False):
        """Log text with level=TRACE.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "TRACE", indent)

    def debug(self, text: str, indent: bool = False):
        """Log text with level=DEBUG.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "DEBUG", indent)

    def detail(self, text: str, indent: bool = False):
        """Log text with level=DETAIL.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "DETAIL", indent)

    def info(self, text: str, indent: bool = False):
        """Log text with level=INFO.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "INFO", indent)

    def status(self, text: str, indent: bool = False):
        """Log text with level=STATUS.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "STATUS", indent)

    def warn(self, text: str, indent: bool = False):
        """Log text with level=WARN.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "WARN", indent)

    def error(self, text: str, indent: bool = False):
        """Log text with level=ERROR.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "ERROR", indent)

    def fatal(self, text: str, indent: bool = False):
        """Log text with level=FATAL.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "FATAL", indent)

    def log_time(self, text: str, level=1, indent=False):
        """Log message with timestamp"""
        timestamp = datetime.now().strftime("%d-%b-%Y (%H:%M:%S)")
        if indent:
            indent = "  " * self._indentation
            self.log(f"{timestamp}: {indent}{text}", level)
        else:
            self.log(f"{timestamp}: {text}", level)

    def _log_start(self, text: str, level: LogLevel = "INFO"):
        """Log message with timestamp and 'Start'.

        Args:
            text (str): message text
            level (str): logging level
        """
        self.log(f"Start {text}", level, indent=True)
        for log_formatter in self._log_formatters:
            log_formatter.increase_indent(LEVELS_STR_TO_INT[level])

    def _log_end(self, text: str, level: LogLevel = "INFO"):
        """Log message with timestamp and 'End'.

        Args:
            text (str): message text
            level (str): logging level
        """
        for log_formatter in self._log_formatters:
            log_formatter.decrease_indent(LEVELS_STR_TO_INT[level])
        self.log(f"End {text}", level, indent=True)

    @_context
    def log_start_end(self, text: str, level: LogLevel = "STATUS"):
        """Use with 'with' statement to log the start and end time with message.

        If using the Emme logbook (config.logging.use_emme_logbook is True), will
        also create a logbook nest in the tree view using logbook_trace.

        Args:
            text (str): message text
            level (str): logging level
        """
        with self._skip_emme_logging():
            self._log_start(text, level)
        if self._use_emme_logbook:
            with self.controller.emme_manager.logbook_trace(text):
                yield
        else:
            yield
        with self._skip_emme_logging():
            self._log_end(text, level)

    def log_dict(self, mapping: dict, level: LogLevel = "DEBUG"):
        """Format dictionary to string and log as text."""
        self.log(pformat(mapping, indent=1, width=120), level)

    @_context
    def _skip_emme_logging(self):
        """Temporary disable Emme logging (if enabled) and restore on exit.

        Intended use is with the log_start_end context and LogStartEnd decorator
        to allow use of the Emme context without double logging of the
        messages in the Emme logbook.
        """
        self._use_emme_logbook, use_emme = False, self._use_emme_logbook
        yield
        self._use_emme_logbook = use_emme

    def clear_msg_cache(self):
        """Clear all log messages from cache."""
        self._log_cache.clear()

    @property
    def debug_enabled(self) -> bool:
        """Returns True if DEBUG is currently filtered for display or print to file.

        Can be used to enable / disable debug logging which may have a performance
        impact.
        """
        debug = LEVELS_STR_TO_INT["DEBUG"]
        for log_formatter in self._log_formatters:
            if log_formatter is not self._log_cache and log_formatter.level <= debug:
                return True
        return False

    @property
    def trace_enabled(self) -> bool:
        """Returns True if TRACE is currently filtered for display or print to file.

        Can be used to enable / disable trace logging which may have a performance
        impact.
        """
        trace = LEVELS_STR_TO_INT["TRACE"]
        for log_formatter in self._log_formatters:
            if log_formatter is not self._log_cache and log_formatter.level <= trace:
                return True
        return False

debug_enabled property

Returns True if DEBUG is currently filtered for display or print to file.

Can be used to enable / disable debug logging which may have a performance impact.

trace_enabled property

Returns True if TRACE is currently filtered for display or print to file.

Can be used to enable / disable trace logging which may have a performance impact.

__del__()

Destructor for logger object

Source code in tm2py\logger.py
142
143
144
145
146
147
148
def __del__(self):
    """
    Destructor for logger object
    """
    for log_formatter in self._log_formatters:
        if hasattr(log_formatter, "close"):
            log_formatter.close()

__init__(controller)

Constructor for Logger object.

Parameters:

Name Type Description Default
controller RunController

Associated RunController instance.

required
Source code in tm2py\logger.py
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
def __init__(self, controller: RunController):
    """Constructor for Logger object.

    Args:
        controller (RunController): Associated RunController instance.
    """
    self.controller = controller
    self._indentation = 0
    log_config = controller.config.logging
    iter_component_level = log_config.iter_component_level or []
    iter_component_level = dict(
        ((i, c), LEVELS_STR_TO_INT[l]) for i, c, l in iter_component_level
    )
    display_logger = LogDisplay(LEVELS_STR_TO_INT[log_config.display_level])
    run_log_formatter = LogFile(
        LEVELS_STR_TO_INT[log_config.run_file_level],
        os.path.join(controller.run_dir, log_config.run_file_path),
    )
    standard_log_formatter = LogFileLevelOverride(
        LEVELS_STR_TO_INT[log_config.log_file_level],
        os.path.join(controller.run_dir, log_config.log_file_path),
        iter_component_level,
        controller,
    )
    self._log_cache = LogCache(
        os.path.join(controller.run_dir, log_config.log_on_error_file_path)
    )
    self._log_formatters = [
        display_logger,
        run_log_formatter,
        standard_log_formatter,
        self._log_cache,
    ]

    self._use_emme_logbook = self.controller.config.logging.use_emme_logbook

    self._slack_notifier = SlackNotifier(self)

    # open log formatters
    for log_formatter in self._log_formatters:
        if hasattr(log_formatter, "open"):
            log_formatter.open()

__new__(controller)

Logger new method override. TODO.

Parameters:

Name Type Description Default
controller RunController

TODO.

required
Source code in tm2py\logger.py
89
90
91
92
93
94
95
96
97
def __new__(cls, controller: RunController):
    """Logger __new__ method override. TODO.

    Args:
        controller (RunController): TODO.
    """
    # pylint: disable=unused-argument
    cls._instance = super(Logger, cls).__new__(cls)
    return cls._instance

clear_msg_cache()

Clear all log messages from cache.

Source code in tm2py\logger.py
318
319
320
def clear_msg_cache(self):
    """Clear all log messages from cache."""
    self._log_cache.clear()

debug(text, indent=False)

Log text with level=DEBUG.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py\logger.py
187
188
189
190
191
192
193
194
def debug(self, text: str, indent: bool = False):
    """Log text with level=DEBUG.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "DEBUG", indent)

detail(text, indent=False)

Log text with level=DETAIL.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py\logger.py
196
197
198
199
200
201
202
203
def detail(self, text: str, indent: bool = False):
    """Log text with level=DETAIL.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "DETAIL", indent)

error(text, indent=False)

Log text with level=ERROR.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py\logger.py
232
233
234
235
236
237
238
239
def error(self, text: str, indent: bool = False):
    """Log text with level=ERROR.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "ERROR", indent)

fatal(text, indent=False)

Log text with level=FATAL.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py\logger.py
241
242
243
244
245
246
247
248
def fatal(self, text: str, indent: bool = False):
    """Log text with level=FATAL.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "FATAL", indent)

get_logger() classmethod

Return the last initialized logger object.

Source code in tm2py\logger.py
150
151
152
153
@classmethod
def get_logger(cls):
    """Return the last initialized logger object."""
    return cls._instance

info(text, indent=False)

Log text with level=INFO.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py\logger.py
205
206
207
208
209
210
211
212
def info(self, text: str, indent: bool = False):
    """Log text with level=INFO.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "INFO", indent)

log(text, level='INFO', indent=True)

Log text to file and display depending upon log level and config.

Parameters:

Name Type Description Default
text str

text to log

required
level str

logging level

'INFO'
indent bool

if true indent text based on the number of open contexts

True
Source code in tm2py\logger.py
164
165
166
167
168
169
170
171
172
173
174
175
176
def log(self, text: str, level: LogLevel = "INFO", indent: bool = True):
    """Log text to file and display depending upon log level and config.

    Args:
        text (str): text to log
        level (str): logging level
        indent (bool): if true indent text based on the number of open contexts
    """
    timestamp = datetime.now().strftime("%d-%b-%Y (%H:%M:%S) ")
    for log_formatter in self._log_formatters:
        log_formatter.log(text, LEVELS_STR_TO_INT[level], indent, timestamp)
    if self._use_emme_logbook and self.controller.has_emme:
        self.controller.emme_manager.logbook_write(text)

log_dict(mapping, level='DEBUG')

Format dictionary to string and log as text.

Source code in tm2py\logger.py
302
303
304
def log_dict(self, mapping: dict, level: LogLevel = "DEBUG"):
    """Format dictionary to string and log as text."""
    self.log(pformat(mapping, indent=1, width=120), level)

log_start_end(text, level='STATUS')

Use with ‘with’ statement to log the start and end time with message.

If using the Emme logbook (config.logging.use_emme_logbook is True), will also create a logbook nest in the tree view using logbook_trace.

Parameters:

Name Type Description Default
text str

message text

required
level str

logging level

'STATUS'
Source code in tm2py\logger.py
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
@_context
def log_start_end(self, text: str, level: LogLevel = "STATUS"):
    """Use with 'with' statement to log the start and end time with message.

    If using the Emme logbook (config.logging.use_emme_logbook is True), will
    also create a logbook nest in the tree view using logbook_trace.

    Args:
        text (str): message text
        level (str): logging level
    """
    with self._skip_emme_logging():
        self._log_start(text, level)
    if self._use_emme_logbook:
        with self.controller.emme_manager.logbook_trace(text):
            yield
    else:
        yield
    with self._skip_emme_logging():
        self._log_end(text, level)

log_time(text, level=1, indent=False)

Log message with timestamp

Source code in tm2py\logger.py
250
251
252
253
254
255
256
257
def log_time(self, text: str, level=1, indent=False):
    """Log message with timestamp"""
    timestamp = datetime.now().strftime("%d-%b-%Y (%H:%M:%S)")
    if indent:
        indent = "  " * self._indentation
        self.log(f"{timestamp}: {indent}{text}", level)
    else:
        self.log(f"{timestamp}: {text}", level)

notify_slack(text)

Send message to slack if enabled by config.

Parameters:

Name Type Description Default
text str

text to send to slack

required
Source code in tm2py\logger.py
155
156
157
158
159
160
161
162
def notify_slack(self, text: str):
    """Send message to slack if enabled by config.

    Args:
        text (str): text to send to slack
    """
    if self.controller.config.logging.notify_slack:
        self._slack_notifier.post_message(text)

status(text, indent=False)

Log text with level=STATUS.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py\logger.py
214
215
216
217
218
219
220
221
def status(self, text: str, indent: bool = False):
    """Log text with level=STATUS.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "STATUS", indent)

trace(text, indent=False)

Log text with level=TRACE.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py\logger.py
178
179
180
181
182
183
184
185
def trace(self, text: str, indent: bool = False):
    """Log text with level=TRACE.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "TRACE", indent)

warn(text, indent=False)

Log text with level=WARN.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py\logger.py
223
224
225
226
227
228
229
230
def warn(self, text: str, indent: bool = False):
    """Log text with level=WARN.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "WARN", indent)

SlackNotifier

Notify slack of model run status.

The slack channel can be input directly, or is configured via text file found at “M:\Software\Slack\TravelModel_SlackWebhook.txt” (if on MTC server) or “C:\Software\Slack\TravelModel_SlackWebhook.txt” (if local)

Properties
  • logger (Logger): object for logging of trace messages
  • slack_webhook_url (str): optional, url to use for sending the message to slack
Source code in tm2py\logger.py
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
class SlackNotifier:
    r"""Notify slack of model run status.

    The slack channel can be input directly, or is configured via text file found at
    "M:\Software\Slack\TravelModel_SlackWebhook.txt" (if on MTC server)
    rr"C:\Software\Slack\TravelModel_SlackWebhook.txt" (if local)

    Properties:
        - logger (Logger): object for logging of trace messages
        - slack_webhook_url (str): optional, url to use for sending the message to slack
    """

    def __init__(self, logger: Logger, slack_webhook_url: str = None):
        r"""Constructor for SlackNotifier object.

        Args:
            logger (Logger): logger instance.
            slack_webhook_url (str, optional): . Defaults to None, which is replaced by either:
                - r"M:\Software\Slack\TravelModel_SlackWebhook.txt" (if on MTC server)
                - r"C:\Software\Slack\TravelModel_SlackWebhook.txt" (otherwise)
        """
        self.logger = logger
        if not logger.controller.config.logging.notify_slack:
            self._slack_webhook_url = None
            return
        if slack_webhook_url is None:
            hostname = socket.getfqdn()
            if hostname.endswith(".mtc.ca.gov"):
                slack_webhook_url_file = (
                    r"M:\Software\Slack\TravelModel_SlackWebhook.txt"
                )
                self.logger.log(
                    f"SlackNotifier running on mtc host; using {slack_webhook_url_file}",
                    level="TRACE",
                )
            else:
                slack_webhook_url_file = (
                    r"C:\Software\Slack\TravelModel_SlackWebhook.txt"
                )
                self.logger.log(
                    f"SlackNotifier running on non-mtc host; using {slack_webhook_url_file}",
                    level="TRACE",
                )
            if os.path.isfile(slack_webhook_url_file):
                with open(slack_webhook_url_file, "r", encoding="utf8") as url_file:
                    self._slack_webhook_url = url_file.read()
            else:
                self._slack_webhook_url = None
        else:
            self._slack_webhook_url = slack_webhook_url
        self.logger.log(
            f"SlackNotifier using slack webhook url {self._slack_webhook_url}",
            level="TRACE",
        )

    def post_message(self, text):
        """Posts text to the slack channel via the webhook if slack_webhook_url is found.

        Args:
           text: text message to send to slack
        """
        if self._slack_webhook_url is None:
            return
        headers = {"Content-type": "application/json"}
        data = {"text": text}
        self.logger.log(f"Sending message to slack: {text}", level="TRACE")
        response = requests.post(self._slack_webhook_url, headers=headers, json=data)
        self.logger.log(f"Receiving response: {response}", level="TRACE")

__init__(logger, slack_webhook_url=None)

Constructor for SlackNotifier object.

Parameters:

Name Type Description Default
logger Logger

logger instance.

required
slack_webhook_url str

. Defaults to None, which is replaced by either: - r”M:\Software\Slack\TravelModel_SlackWebhook.txt” (if on MTC server) - r”C:\Software\Slack\TravelModel_SlackWebhook.txt” (otherwise)

None
Source code in tm2py\logger.py
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
def __init__(self, logger: Logger, slack_webhook_url: str = None):
    r"""Constructor for SlackNotifier object.

    Args:
        logger (Logger): logger instance.
        slack_webhook_url (str, optional): webhook URL to post to. Defaults to None,
            in which case the URL is read from a file found at either:
            - r"M:\Software\Slack\TravelModel_SlackWebhook.txt" (if on MTC server)
            - r"C:\Software\Slack\TravelModel_SlackWebhook.txt" (otherwise)
    """
    self.logger = logger
    # Slack notification can be switched off entirely in the run config.
    if not logger.controller.config.logging.notify_slack:
        self._slack_webhook_url = None
        return
    if slack_webhook_url is None:
        # Choose the webhook-URL file location based on the host we run on.
        hostname = socket.getfqdn()
        if hostname.endswith(".mtc.ca.gov"):
            slack_webhook_url_file = (
                r"M:\Software\Slack\TravelModel_SlackWebhook.txt"
            )
            self.logger.log(
                f"SlackNotifier running on mtc host; using {slack_webhook_url_file}",
                level="TRACE",
            )
        else:
            slack_webhook_url_file = (
                r"C:\Software\Slack\TravelModel_SlackWebhook.txt"
            )
            self.logger.log(
                f"SlackNotifier running on non-mtc host; using {slack_webhook_url_file}",
                level="TRACE",
            )
        if os.path.isfile(slack_webhook_url_file):
            with open(slack_webhook_url_file, "r", encoding="utf8") as url_file:
                # BUG FIX: strip the trailing newline/whitespace that a raw
                # read() of a text file usually includes -- an embedded
                # newline makes the webhook URL invalid for requests.post.
                self._slack_webhook_url = url_file.read().strip()
        else:
            self._slack_webhook_url = None
    else:
        self._slack_webhook_url = slack_webhook_url
    self.logger.log(
        f"SlackNotifier using slack webhook url {self._slack_webhook_url}",
        level="TRACE",
    )

post_message(text)

Posts text to the slack channel via the webhook if slack_webhook_url is found.

Parameters:

Name Type Description Default
text

text message to send to slack

required
Source code in tm2py\logger.py
673
674
675
676
677
678
679
680
681
682
683
684
685
def post_message(self, text):
    """Posts text to the slack channel via the webhook if slack_webhook_url is found.

    Does nothing when no webhook URL was resolved at construction time.

    Args:
       text: text message to send to slack
    """
    if self._slack_webhook_url is None:
        return
    headers = {"Content-type": "application/json"}
    data = {"text": text}
    self.logger.log(f"Sending message to slack: {text}", level="TRACE")
    # BUG FIX: requests has no default timeout, so a slow or unreachable
    # Slack endpoint would hang the model run indefinitely.
    response = requests.post(
        self._slack_webhook_url, headers=headers, json=data, timeout=30
    )
    self.logger.log(f"Receiving response: {response}", level="TRACE")

Tools module for common resources / shared code and “utilities” in the tm2py package.

SpatialGridIndex

Simple spatial grid hash for fast (enough) nearest neighbor / within distance searches of points.

Source code in tm2py\tools.py
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
class SpatialGridIndex:
    """
    Simple spatial grid hash for fast (enough) nearest neighbor / within distance searches of points.

    Points are hashed into square cells of a fixed size; searches visit only
    the cells that can contain candidates instead of scanning every point.
    """

    def __init__(self, size: float):
        """
        Args:
            size: the size of the grid to use for the index, relative to the point coordinates
        """
        self._size = float(size)
        # (grid_x, grid_y) cell -> list of (obj, x, y). Entries are created
        # only by insert(); all search paths read via .get() so that probing
        # an empty cell cannot grow the dict (a plain [] lookup on a
        # defaultdict inserts a default value as a side effect).
        self._grid_index = _defaultdict(list)

    def insert(self, obj: Any, x: float, y: float):
        """
        Add new obj with coordinates x and y.

        Args:
           obj: any python object, will be returned from search methods "nearest" and "within_distance"
           x: x-coordinate
           y: y-coordinate
        """
        grid_x, grid_y = round(x / self._size), round(y / self._size)
        self._grid_index[(grid_x, grid_y)].append((obj, x, y))

    def nearest(self, x: float, y: float):
        """Return the closest object in index to the specified coordinates.

        Args:
            x: x-coordinate
            y: y-coordinate

        Raises:
            ValueError: if the index contains no points.
        """
        if len(self._grid_index) == 0:
            # ValueError (a subclass of Exception) keeps callers that
            # catch Exception working.
            raise ValueError("SpatialGrid is empty.")

        def calc_dist(x1, y1, x2, y2):
            return sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)

        grid_x, grid_y = round(x / self._size), round(y / self._size)
        # Search outward ring by ring; once a ring yields candidates, search
        # one more ring (a closer point may sit in an adjacent cell) and stop.
        step = 0
        done = False
        found_items = []
        while not done:
            search_offsets = list(range(-1 * step, step + 1))
            search_offsets = _product(search_offsets, search_offsets)
            items = []
            for x_offset, y_offset in search_offsets:
                if abs(x_offset) != step and abs(y_offset) != step:
                    continue  # interior cells were checked in earlier rings
                # BUG FIX: use .get() -- indexing the defaultdict inserted an
                # empty list for every probed cell, leaking memory and (on an
                # index holding only empty cells) looping here forever.
                items.extend(
                    self._grid_index.get((grid_x + x_offset, grid_y + y_offset), [])
                )
            if found_items:
                done = True
            found_items.extend(items)
            step += 1
        min_dist = float("inf")  # clearer than the 1e400 overflow-to-inf hack
        closest = None
        for i, xi, yi in found_items:
            dist = calc_dist(x, y, xi, yi)
            if dist < min_dist:
                closest = i
                min_dist = dist
        return closest

    def within_distance(self, x: float, y: float, distance: float):
        """Return all objects in index within the distance of the specified coordinates.

        Args:
            x: x-coordinate
            y: y-coordinate
            distance: distance to search in point coordinate units
        """

        def point_in_circle(x1, y1, x2, y2, dist):
            return sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2) <= dist

        return self._get_items_on_grid(x, y, distance, point_in_circle)

    def within_square(self, x: float, y: float, distance: float):
        """Return all objects in index within a square box distance of the specified coordinates.

        Args:
            x: x-coordinate
            y: y-coordinate
            distance: distance to search in point coordinate units
        """

        def point_in_box(x1, y1, x2, y2, dist):
            return abs(x1 - x2) <= dist and abs(y1 - y2) <= dist

        return self._get_items_on_grid(x, y, distance, point_in_box)

    def _get_items_on_grid(self, x, y, distance, filter_func):
        # Collect candidates from every cell the search radius can touch,
        # then apply filter_func for the exact geometric test.
        grid_x, grid_y = round(x / self._size), round(y / self._size)
        num_search_grids = ceil(distance / self._size)
        search_offsets = list(range(-1 * num_search_grids, num_search_grids + 1))
        search_offsets = list(_product(search_offsets, search_offsets))
        items = []
        for x_offset, y_offset in search_offsets:
            # .get() keeps read-only searches from mutating the defaultdict.
            items.extend(
                self._grid_index.get((grid_x + x_offset, grid_y + y_offset), [])
            )
        filtered_items = [
            i for i, xi, yi in items if filter_func(x, y, xi, yi, distance)
        ]
        return filtered_items

__init__(size)

Parameters:

Name Type Description Default
size float

the size of the grid to use for the index, relative to the point coordinates

required
Source code in tm2py\tools.py
327
328
329
330
331
332
333
def __init__(self, size: float):
    """Create an empty index.

    Args:
        size: edge length of one grid cell, in point coordinate units
    """
    # Cell key (grid_x, grid_y) -> list of (obj, x, y) entries for that cell.
    self._grid_index = _defaultdict(list)
    self._size = float(size)

insert(obj, x, y)

Add new obj with coordinates x and y. Args: obj: any python object, will be returned from search methods “nearest” and “within_distance” x: x-coordinate y: y-coordinate

Source code in tm2py\tools.py
335
336
337
338
339
340
341
342
343
344
def insert(self, obj: Any, x: float, y: float):
    """Register obj at point (x, y) so it can be found by the search methods.

    Args:
       obj: any python object, will be returned from search methods "nearest" and "within_distance"
       x: x-coordinate
       y: y-coordinate
    """
    # Hash the point to its grid cell and append the record there.
    cell = (round(x / self._size), round(y / self._size))
    self._grid_index[cell].append((obj, x, y))

nearest(x, y)

Return the closest object in index to the specified coordinates Args: x: x-coordinate y: y-coordinate

Source code in tm2py\tools.py
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
def nearest(self, x: float, y: float):
    """Return the closest object in index to the specified coordinates.

    Args:
        x: x-coordinate
        y: y-coordinate

    Raises:
        ValueError: if the index contains no points.
    """
    if len(self._grid_index) == 0:
        # ValueError (a subclass of Exception) keeps callers that
        # catch Exception working.
        raise ValueError("SpatialGrid is empty.")

    def calc_dist(x1, y1, x2, y2):
        return sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)

    grid_x, grid_y = round(x / self._size), round(y / self._size)
    # Search outward ring by ring; once a ring yields candidates, search
    # one more ring (a closer point may sit in an adjacent cell) and stop.
    step = 0
    done = False
    found_items = []
    while not done:
        search_offsets = list(range(-1 * step, step + 1))
        search_offsets = _product(search_offsets, search_offsets)
        items = []
        for x_offset, y_offset in search_offsets:
            if abs(x_offset) != step and abs(y_offset) != step:
                continue  # interior cells were checked in earlier rings
            # BUG FIX: use .get() -- indexing a defaultdict inserts an empty
            # list for every probed cell, leaking memory and (on an index
            # holding only empty cells) looping here forever.
            items.extend(
                self._grid_index.get((grid_x + x_offset, grid_y + y_offset), [])
            )
        if found_items:
            done = True
        found_items.extend(items)
        step += 1
    min_dist = float("inf")  # clearer than the 1e400 overflow-to-inf hack
    closest = None
    for i, xi, yi in found_items:
        dist = calc_dist(x, y, xi, yi)
        if dist < min_dist:
            closest = i
            min_dist = dist
    return closest

within_distance(x, y, distance)

Return all objects in index within the distance of the specified coordinates Args: x: x-coordinate y: y-coordinate distance: distance to search in point coordinate units

Source code in tm2py\tools.py
383
384
385
386
387
388
389
390
391
392
393
394
def within_distance(self, x: float, y: float, distance: float):
    """Return all indexed objects within a circular radius of (x, y).

    Args:
        x: x-coordinate
        y: y-coordinate
        distance: distance to search in point coordinate units
    """

    # Euclidean-distance predicate applied to each candidate point.
    def point_in_circle(x1, y1, x2, y2, dist):
        dx, dy = x1 - x2, y1 - y2
        return sqrt(dx * dx + dy * dy) <= dist

    return self._get_items_on_grid(x, y, distance, point_in_circle)

within_square(x, y, distance)

Return all objects in index within a square box distance of the specified coordinates. Args: x: x-coordinate y: y-coordinate distance: distance to search in point coordinate units

Source code in tm2py\tools.py
396
397
398
399
400
401
402
403
404
405
406
407
def within_square(self, x: float, y: float, distance: float):
    """Return all indexed objects inside an axis-aligned square box around (x, y).

    Args:
        x: x-coordinate
        y: y-coordinate
        distance: half-width of the box, in point coordinate units
    """

    # Chebyshev-distance predicate: inside the box iff both axis offsets fit.
    def point_in_box(x1, y1, x2, y2, dist):
        return max(abs(x1 - x2), abs(y1 - y2)) <= dist

    return self._get_items_on_grid(x, y, distance, point_in_box)

download_unzip(url, out_base_dir, target_dir, zip_filename='test_data.zip')

Download and unzips a file from a URL. The zip file is removed after extraction.

Parameters:

Name Type Description Default
url str

Full URL to download from.

required
out_base_dir str

Where to unzip the file.

required
target_dir str

What to unzip the file as.

required
zip_filename str

Filename to store zip file as. Defaults to “test_data.zip”.

'test_data.zip'
Source code in tm2py\tools.py
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
def download_unzip(
    url: str, out_base_dir: str, target_dir: str, zip_filename: str = "test_data.zip"
) -> None:
    """Download and unzip a file from a URL. The zip file is removed after extraction.

    Args:
        url (str): Full URL to download from.
        out_base_dir (str): Where to unzip the file.
        target_dir (str): What to unzip the file as.
        zip_filename (str, optional): Filename to store zip file as. Defaults to "test_data.zip".
    """
    target_zip = os.path.join(out_base_dir, zip_filename)
    # exist_ok makes this race-free (no separate isdir check needed).
    os.makedirs(out_base_dir, exist_ok=True)
    # BUG FIX: removed stray `urllib.request.Request(url)` -- it built a
    # Request object and immediately discarded it, performing no download.
    _download(url, target_zip)
    _unzip(target_zip, target_dir)
    os.remove(target_zip)

emme_context()

Return True if Emme is installed.

Source code in tm2py\tools.py
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
def emme_context():
    """Return True if Emme is installed."""
    # NOTE: pkg_resources is deprecated upstream; kept here for behavioral parity.
    import pkg_resources

    installed = {pkg.key for pkg in pkg_resources.working_set}
    if "inro-emme" not in installed:
        print("Inro not found. Skipping inro setup.")
        mocked_inro_context()
        return False
    import inro

    # inro may itself be a MagicMock (e.g. after mocked_inro_context ran
    # earlier in this process) -- treat that as "not installed".
    if "MagicMock" in str(type(inro)):
        return False
    return True

interpolate_dfs(df, ref_points, target_point, ref_col_name='ends_with')

Interpolate for the model year assuming linear growth between the reference years.

Parameters:

Name Type Description Default
df DataFrame

dataframe to interpolate on, with ref points contained in column name per ref_col_name.

required
ref_points Collection[Union[float, int]]

reference years to interpolate between

required
target_point Union[float, int]

target year

required
ref_col_name str

column name to use for reference years. Defaults to “ends_with”.

'ends_with'
Source code in tm2py\tools.py
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
def interpolate_dfs(
    df: pd.DataFrame,
    ref_points: Collection[Union[float, int]],
    target_point: Union[float, int],
    ref_col_name: str = "ends_with",
) -> pd.DataFrame:
    """Interpolate between two reference-year snapshots assuming linear growth.

    Columns are paired by year suffix (e.g. "pop_2015" with "pop_2035") and
    the returned dataframe carries the suffix-free column names.

    Args:
        df (pd.DataFrame): dataframe to interpolate on, with ref points contained
            in column name per ref_col_name.
        ref_points (Collection[Union[float,int]]): two reference years to
            interpolate between
        target_point (Union[float,int]): target year
        ref_col_name (str, optional): column matching rule; only "ends_with"
            is supported. Defaults to "ends_with".
    """
    if ref_col_name not in ["ends_with"]:
        raise NotImplementedError(f"{ref_col_name} not implemented")
    if len(ref_points) != 2:
        raise NotImplementedError(f"{ref_points} reference points not implemented")

    _ref_points = sorted(map(int, ref_points))
    _start_point, _end_point = _ref_points
    _target_point = int(target_point)
    if not _start_point <= _target_point <= _end_point:
        raise ValueError(
            f"Target Point: {_target_point} not within range of \
            Reference Points: {_ref_points}"
        )

    # Pull out the column set belonging to each reference year.
    _start_cols = [c for c in df.columns if c.endswith(f"{_start_point}")]
    _end_cols = [c for c in df.columns if c.endswith(f"{_end_point}")]
    _start_ref_df = df[_start_cols].copy()
    _end_ref_df = df[_end_cols].copy()

    if len(_start_ref_df.columns) != len(_end_ref_df.columns):
        raise ValueError(
            f"{_start_point} and {_end_point} have different number of columns:\n\
           {_start_point} Columns: {_start_ref_df.columns}\n\
           {_end_point} Columns: {_end_ref_df.columns}\
        "
        )

    # Strip the year suffix so the two frames align column-wise.
    _start_ref_df.columns = [
        c.replace(f"_{_start_point}", "") for c in _start_ref_df.columns
    ]
    _end_ref_df.columns = [
        c.replace(f"_{_end_point}", "") for c in _end_ref_df.columns
    ]

    # Linear weight of the end year over the reference span.
    _scale_factor = float(target_point - _start_point) / (_end_point - _start_point)

    return _start_ref_df * (1 - _scale_factor) + _end_ref_df * _scale_factor

mocked_inro_context()

Mocking of modules which need to be mocked for tests.

Source code in tm2py\tools.py
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
def mocked_inro_context():
    """Mocking of modules which need to be mocked for tests.

    Installs MagicMock objects into sys.modules for the Emme (inro) modules
    and the tm2py.emme.manager entries, so code importing them can run
    without an Emme installation.
    """
    import sys
    from unittest.mock import MagicMock

    # inro.emme database/network modules
    # BUG FIX: "inro.emme.network.node" was assigned twice; duplicate removed.
    sys.modules["inro.emme.database.emmebank"] = MagicMock()
    sys.modules["inro.emme.database.emmebank.path"] = MagicMock(return_value=".")
    sys.modules["inro.emme.network.link"] = MagicMock()
    sys.modules["inro.emme.network.mode"] = MagicMock()
    sys.modules["inro.emme.network.node"] = MagicMock()
    sys.modules["inro.emme.network"] = MagicMock()
    sys.modules["inro.emme.database.scenario"] = MagicMock()
    sys.modules["inro.emme.database.matrix"] = MagicMock()
    sys.modules["inro.emme.desktop.app"] = MagicMock()
    # top-level inro packages
    sys.modules["inro"] = MagicMock()
    sys.modules["inro.modeller"] = MagicMock()
    # tm2py Emme manager entry points
    sys.modules["tm2py.emme.manager.EmmeManager.project"] = MagicMock()
    sys.modules["tm2py.emme.manager.EmmeManager.emmebank"] = MagicMock()
    sys.modules["tm2py.emme.manager"] = MagicMock()

parse_num_processors(value)

Parse input value string “MAX-X” to number of available processors.

Used with Emme procedures (traffic and transit assignments, matrix calculator, etc.) Does not raise any specific errors.

Parameters:

Name Type Description Default
value [str, int, float]

int, float or string; string value can be “X” or “MAX-X”

required
Source code in tm2py\tools.py
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
def parse_num_processors(value: "str | int | float"):
    """Parse input value string "MAX-X" to number of available processors.

    Used with Emme procedures (traffic and transit assignments, matrix
    calculator, etc.) Does not raise any specific errors.

    Args:
        value: int, float or string; string value can be "X", "MAX" or "MAX-X"

    Returns:
        Number of processors as an int; an unrecognized string is returned
        (upper-cased) unchanged -- legacy behavior callers may rely on.
    """
    # BUG FIX: the annotation was the list literal [str, int, float], which is
    # not a valid type expression; replaced with a string union annotation.
    max_processors = multiprocessing.cpu_count()
    if not isinstance(value, str):
        return int(value)
    value = value.upper()
    if value == "MAX":
        return max_processors
    if re.match("^[0-9]+$", value):
        return int(value)
    result = re.split(r"^MAX[\s]*-[\s]*", value)
    if len(result) == 2:
        # "MAX-X": subtract X from the available processors, floored at 1.
        return max(max_processors - int(result[1]), 1)
    return value

run_process(commands, name='')

Run system level commands as blocking process and log output and error messages.

Parameters:

Name Type Description Default
commands Collection[str]

list of one or more commands to execute

required
name str

optional name to use for the temp bat file

''
Source code in tm2py\tools.py
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
def run_process(commands: Collection[str], name: str = ""):
    """Run system level commands as a blocking process, logging output and errors.

    Args:
        commands: list of one or more commands to execute
        name: optional name to use for the temp bat file
    """
    # when merged with develop_logging branch can use get_logger
    # logger = Logger.get_logger
    logger = None
    with temp_file("w", prefix=name, suffix=".bat") as (bat_file, bat_file_path):
        bat_file.write("\n".join(commands))
        bat_file.close()
        if not logger:
            # No logger available: just run the batch file and block on it.
            _subprocess.check_call(bat_file_path, shell=True)
            return
        # temporary file to capture output error messages generated by Java
        # Note: temp file created in the current working directory
        with temp_file(mode="w+", suffix="_error.log") as (err_file, _):
            try:
                output = _subprocess.check_output(
                    bat_file_path, stderr=err_file, shell=True
                )
                logger.log(output.decode("utf-8"))
            except _subprocess.CalledProcessError as error:
                logger.log(error.output)
                raise
            finally:
                # Always forward anything written to stderr to the logger.
                err_file.seek(0)
                error_msg = err_file.read()
                if error_msg:
                    logger.log(error_msg)

temp_file(mode='w+', prefix='', suffix='')

Temp file wrapper to return open file handle and named path.

A named temporary file (using mkstemp) with specified prefix and suffix is created and opened with the specified mode. The file handle and path are returned. The file is closed and deleted on exit.

Parameters:

Name Type Description Default
mode str

mode to open file, [rw][+][b]

'w+'
prefix str

optional text to start temp file name

''
suffix str

optional text to end temp file name

''
Source code in tm2py\tools.py
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
@_context
def temp_file(mode: str = "w+", prefix: str = "", suffix: str = ""):
    """Context manager yielding an open temp file handle and its named path.

    A named temporary file (via mkstemp) with the given prefix and suffix is
    created and opened in the requested mode. On exit the file is closed
    (unless the caller already closed it) and deleted.

    Args:
        mode: mode to open file, [rw][+][b]
        prefix: optional text to start temp file name
        suffix: optional text to end temp file name
    """
    _fd, _path = tempfile.mkstemp(prefix=prefix, suffix=suffix)
    _handle = os.fdopen(_fd, mode=mode)
    try:
        yield _handle, _path
    finally:
        if not _handle.closed:
            _handle.close()
        os.remove(_path)

zonal_csv_to_matrices(csv_file, i_column='ORIG', j_column='DEST', value_columns=['VALUE'], default_value=0.0, fill_zones=False, max_zone=None, delimiter=',')

Read a CSV file with zonal data and into dataframes.

Input CSV file should have a header row specifying the I, J, and Value column names.

Parameters:

Name Type Description Default
csv_file str

Path to the input CSV file.

required
i_column str

Name of I (origin) zone column. Defaults to “ORIG”.

'ORIG'
j_column str

Name of J (destination) zone column. Defaults to “DEST”.

'DEST'
value_columns str

List of columns to turn into matrices. Defaults to [“VALUE”].

['VALUE']
default_value float

Value to fill empty cells with. Defaults to 0.0.

0.0
fill_zones bool

If true, will fill zones without values to max zone with default value. Defaults to False.

False
max_zone int

If fill_zones is True, used to determine matrix size. Defaults to max(I, J).

None
delimiter str

Input file delimiter. Defaults to “,”.

','

Returns:

Name Type Description
dict Mapping[str, DataFrame]

Dictionary of Pandas dataframes with matrix names as keys.

Source code in tm2py\tools.py
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
def zonal_csv_to_matrices(
    csv_file: str,
    i_column: str = "ORIG",
    j_column: str = "DEST",
    value_columns: Collection[str] = ["VALUE"],
    default_value: float = 0.0,
    fill_zones: bool = False,
    max_zone: int = None,
    delimiter: str = ",",
) -> Mapping[str, pd.DataFrame]:
    """Read a CSV file with zonal data and into dataframes.

    Input CSV file should have a header row specifying the I, J, and Value column names.

    Args:
        csv_file (str): Path to the input CSV file.
        i_column (str, optional): Name of I (origin) zone column. Defaults to "ORIG".
        j_column (str, optional): Name of J (destination) zone column. Defaults to "DEST".
        value_columns (Collection[str], optional): List of columns to turn into matrices.
            Defaults to ["VALUE"].
        default_value (float, optional): Value to fill empty cells with. Defaults to 0.0.
        fill_zones (bool, optional): If true, will fill zones without values to max zone with
            default value. Defaults to False.
        max_zone (int, optional): If fill_zones is True, used to determine matrix size.
            Defaults to max(I, J).
        delimiter (str, optional): Input file delimiter. Defaults to ",".

    Returns:
        dict: Dictionary keyed by matrix name; values are pandas Series indexed
            by (I, J) (despite the Mapping[str, pd.DataFrame] annotation).
    """
    # NOTE(review): value_columns has a mutable list default, shared across
    # calls -- harmless only while no caller mutates it; confirm intent.
    # TODO Create a test
    _df = pd.read_csv(csv_file, delimiter=delimiter)
    # Index by origin/destination so each value column reads as an O-D matrix.
    _df_idx = _df.set_index([i_column, j_column])

    # Each entry is a pd.Series keyed by the (i, j) MultiIndex.
    _dfs_dict = {v: _df_idx[v] for v in value_columns}
    if not fill_zones:
        return _dfs_dict

    if max_zone is None:
        # Largest zone id appearing in either the I or the J column.
        max_zone = _df[[i_column, j_column]].max().max()

    _zone_list = list(range(1, max_zone + 1))
    for v, _df in _dfs_dict.items():
        # NOTE(review): this branch looks broken: _df is a Series here, so
        # _df[v] is a label lookup (likely KeyError), and reindex() returns a
        # new object that is discarded rather than stored back. Left as-is;
        # verify intended behavior before relying on fill_zones=True.
        _df[v].reindex(index=_zone_list, columns=_zone_list, fill_value=default_value)
    return _dfs_dict

Download and unzip examples for tm2py, used in tests.

get_example(example_name=_DEFAULT_EXAMPLE_NAME, example_subdir=_DEFAULT_EXAMPLE_SUBDIR, root_dir=_ROOT_DIR, retrieval_url=_DEFAULT_EXAMPLE_URL)

Returns example directory; downloads if necessary from retrieval URL.

Parameters:

Name Type Description Default
example_name str

Used to retrieve sub-folder or create it if doesn’t exist. Defaults to _DEFAULT_EXAMPLE_NAME.

_DEFAULT_EXAMPLE_NAME
example_subdir str

Where to find examples within root dir. Defaults to _DEFAULT_EXAMPLE_SUBDIR.

_DEFAULT_EXAMPLE_SUBDIR
root_dir str

Root dir of project. Defaults to _ROOT_DIR.

_ROOT_DIR
retrieval_url str

URL to retrieve example data zip from. Defaults to _DEFAULT_EXAMPLE_URL.

_DEFAULT_EXAMPLE_URL

Raises:

Type Description
FileNotFoundError

If can’t find the files after trying to download it.

Returns:

Name Type Description
str str

Path to example data.

Source code in tm2py\examples.py
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
def get_example(
    example_name: str = _DEFAULT_EXAMPLE_NAME,
    example_subdir: str = _DEFAULT_EXAMPLE_SUBDIR,
    root_dir: str = _ROOT_DIR,
    retrieval_url: str = _DEFAULT_EXAMPLE_URL,
) -> str:
    """Return the example directory, downloading it first if it is missing.

    Args:
        example_name (str, optional): sub-folder to retrieve, or create if it
            doesn't exist. Defaults to _DEFAULT_EXAMPLE_NAME.
        example_subdir (str, optional): where examples live within root_dir.
            Defaults to _DEFAULT_EXAMPLE_SUBDIR.
        root_dir (str, optional): root dir of project. Defaults to _ROOT_DIR.
        retrieval_url (str, optional): URL to retrieve example data zip from.
            Defaults to _DEFAULT_EXAMPLE_URL.

    Raises:
        FileNotFoundError: if the example is still missing after the download.

    Returns:
        str: Path to example data.
    """
    _example_dir = os.path.join(root_dir, example_subdir)
    _this_example_dir = os.path.join(_example_dir, example_name)
    if not os.path.isdir(_this_example_dir):
        # Not cached locally -- fetch and unzip, then confirm it exists.
        download_unzip(retrieval_url, _example_dir, _this_example_dir)
        if not os.path.isdir(_this_example_dir):
            raise FileNotFoundError(f"example {_this_example_dir} not found")
    return _this_example_dir