
API Documentation

Controller

tm2py.controller

RunController - model operation controller.

Main interface to start a TM2PY model run. Provide one or more configuration files in .toml format (by convention a scenario.toml and a model.toml).

Typical usage example:

    from tm2py.controller import RunController
    controller = RunController(["scenario.toml", "model.toml"])
    controller.run()

Or from the command-line:

    python <path>/tm2py/tm2py/controller.py -s scenario.toml -m model.toml
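
RunController can also be pointed at a specific run directory and restricted to a subset of components. A minimal sketch, assuming the component names passed to run_components exist in the model configuration (the names and paths below are illustrative):

    from tm2py.controller import RunController

    controller = RunController(
        ["scenario.toml", "model.toml"],
        run_dir="path/to/run_dir",      # optional: defaults to the first config file's directory
        run_components=["highway"],     # illustrative subset: defaults to all components
    )
    controller.run()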

RunController

Main operational interface for model runs.

Provide one or more config files in TOML (*.toml) format, and a run directory. If the run directory is not provided the root directory of the first config_file is used.

Properties:

    config: root Configuration object
    logger: logger object
    top_sheet: placeholder for top sheet functionality (not implemented yet)
    trace: placeholder for trace functionality (not implemented yet)
    run_dir: root run directory for the model run
    iteration: current running (or last started) iteration
    component: current running (or last started) Component object
    emme_manager: EmmeManager object for centralized Emme-related (highway and
        transit assignments and skims) utilities
    completed_components: list of components which have completed, as tuples of
        (iteration, name, Component object)

Internal properties:

    _emme_manager: EmmeManager object, cached on first access
    _iteration: current iteration
    _component: current running / last run Component
    _component_name: name of the current / last run component
    _queued_components: queue of (iteration, name, Component) tuples
Source code in tm2py/controller.py
class RunController:
    """Main operational interface for model runs.

    Provide one or more config files in TOML (*.toml) format, and a run directory.
    If the run directory is not provided the root directory of the first config_file is used.

    Properties:
        config: root Configuration object
        logger: logger object
        top_sheet: placeholder for top sheet functionality (not implemented yet)
        trace: placeholder for trace functionality (not implemented yet)
        run_dir: root run directory for the model run
        iteration: current running (or last started) iteration
        component: current running (or last started) Component object
        emme_manager: EmmeManager object for centralized Emme-related (highway and
            transit assignments and skims) utilities.
        complete_components: list of components which have completed, tuple of
            (iteration, name, Component object)

    Internal properties:
        _emme_manager: EmmeManager object, cached on first access
        _iteration: current iteration
        _component: current running / last run Component
        _component_name: name of the current / last run component
        _queued_components: list of iteration, name, Component
    """

    def __init__(
        self,
        config_file: Union[Collection[Union[str, Path]], str, Path] = None,
        run_dir: Union[Path, str] = None,
        run_components: Collection[str] = component_cls_map.keys(),
    ):
        """Constructor for RunController class.

        Args:
            config_file: Single or list of config file locations as strings or Path objects.
                Defaults to None.
            run_dir: Model run directory as a Path object or string. If not provided, defaults
                to the directory of the first config_file.
            run_components: List of component names to run. Defaults to all components.
        """
        if run_dir is None:
            run_dir = Path(os.path.abspath(os.path.dirname(config_file[0])))

        self._run_dir = Path(run_dir)

        self.config = Configuration.load_toml(config_file)
        self.has_emme: bool = emme_context()
        # NOTE: Logger opens log file on __enter__ (in run), not ready for logging yet
        # Logger uses self.config.logging
        self.logger = Logger(self)
        self.top_sheet = None
        self.trace = None
        self.completed_components = []

        # mapping from defined names referenced in config to Component objects
        self._component_map = {
            k: v(self) for k, v in component_cls_map.items() if k in run_components
        }
        self._validated_components = set()
        self._emme_manager = None
        self._num_processors = None
        self._iteration = None
        self._component = None
        self._component_name = None
        self._queued_components = deque()

        self._queue_components(run_components=run_components)

    def __repr__(self):
        """Legible representation."""
        _str = f"""RunController
            Run Directory: {self.run_dir}
            Iteration: {self.iteration} of {self.run_iterations}
            Component: {self.component_name}
            Completed: {self.completed_components}
            Queued: {self._queued_components}"""
        return _str

    @property
    def run_dir(self) -> Path:
        """The root run directory of the model run."""
        return self._run_dir

    @property
    def run_iterations(self) -> List[int]:
        """List of iterations for this model run."""
        return range(
            max(1, self.config.run.start_iteration), self.config.run.end_iteration + 1
        )

    @property
    def time_period_names(self) -> List[str]:
        """Return input time_period name or names and return list of time_period names.

        Implemented here for easy access for all components.

        Returns: list of uppercased string names of time periods
        """
        return [time.name.upper() for time in self.config.time_periods]

    @property
    def num_processors(self) -> int:
        """Number of processors available for parallel processing."""
        if self._num_processors is None:
            self._num_processors = self._calculate_num_processors(
                multiprocessing.cpu_count()
            )

        return self._num_processors

    @property
    def iteration(self) -> int:
        """Current iteration of model run."""
        return self._iteration

    @property
    def component_name(self) -> str:
        """Name of current component of model run."""
        return self._component_name

    @property
    def iter_component(self) -> Tuple[int, str]:
        """Tuple of the current iteration and component name."""
        return self._iteration, self._component_name

    def component(self) -> Component:
        """Current component of model."""
        return self._component

    @property
    def emme_manager(self) -> EmmeManager:
        """Cached Emme Manager object."""
        if self._emme_manager is None:
            if self.has_emme:
                self._init_emme_manager()
            else:
                self.logger.log("Emme not found, skipping Emme-related components")
                # TODO: All of the Emme-related components need to be handled "in place" rather
                # than skippping using a Mock
                from unittest.mock import MagicMock

                self._emme_manager = MagicMock()
        return self._emme_manager

    def _init_emme_manager(self):
        """Initialize Emme manager, start Emme desktop App, and initialize Modeller."""
        self._emme_manager = EmmeManager()
        project = self._emme_manager.project(
            os.path.join(self.run_dir, self.config.emme.project_path)
        )
        # Initialize Modeller to use Emme assignment tools and other APIs
        self._emme_manager.modeller(project)

    def get_abs_path(self, rel_path: Union[Path, str]) -> Path:
        """Get the absolute path from the root run directory given a relative path."""
        if not isinstance(rel_path, Path):
            rel_path = Path(rel_path)
        return Path(os.path.join(self.run_dir, rel_path))

    def run(self):
        """Main interface to run model.

        Iterates through the self._queued_components and runs them.
        """
        self._iteration = None
        while self._queued_components:
            self.run_next()

    def run_next(self):
        """Run next component in the queue."""
        if not self._queued_components:
            raise ValueError("No components in queue")
        iteration, name, component = self._queued_components.popleft()
        if self._iteration != iteration:
            self.logger.log(f"Start iteration {iteration}")
        self._iteration = iteration
        self._component = component
        component.run()
        self.completed_components.append((iteration, name, component))

    def _queue_components(self, run_components: Collection[str] = None):
        """Add components per iteration to queue according to input Config.

        Args:
            run_components: if provided, only run these components
        """
        try:
            assert not self._queued_components
        except AssertionError:
            "Components already queued, returning without re-queuing."
            return

        print("RUN COMPOMENTS", run_components)
        _initial_components = self.config.run.initial_components
        _global_iter_components = self.config.run.global_iteration_components
        _final_components = self.config.run.final_components

        if run_components is not None:
            _initial_components = [
                c for c in _initial_components if c in run_components
            ]
            _global_iter_components = [
                c for c in _global_iter_components if c in run_components
            ]
            _final_components = [c for c in _final_components if c in run_components]

        if self.config.run.start_iteration == 0:
            for _c_name in _initial_components:
                self._add_component_to_queue(0, _c_name)

        # Queue components which are run for each iteration

        _iteration_x_components = itertools.product(
            self.run_iterations, _global_iter_components
        )

        for _iteration, _c_name in _iteration_x_components:
            self._add_component_to_queue(_iteration, _c_name)

        # Queue components which are run after final iteration
        _finalizer_iteration = self.config.run.end_iteration + 1

        for c_name in _final_components:
            self._add_component_to_queue(_finalizer_iteration, _c_name)

        # If start_component specified, remove things before its first occurance
        if self.config.run.start_component:

            _queued_c_names = [c.name for c in self._queued_components]
            if self.config.run.start_component not in _queued_c_names:
                raise ValueError(
                    f"Start component {self.config.run.start_component} not found in queued \
                    components {_queued_c_names}"
                )
            _start_c_index = _queued_c_names.index(self.config.run.start_component)
            self._queued_components = self._queued_components[_start_c_index:]

    def _add_component_to_queue(self, iteration: int, component_name: str):
        """Add component to queue (self._queued_components), first validating its inputs.

        Args:
            iteration (int): iteration to add component to.
            component_name (Component): Component to add to queue.
        """
        _component = self._component_map[component_name]
        if component_name not in self._validated_components:
            _component.validate_inputs()
            self._validated_components.add(component_name)
        self._queued_components.append((iteration, component_name, _component))

    def _calculate_num_processors(self, cpu_processors: int):
        """Convert input value (parse if string) to number of processors.

    int or string as 'MAX-X'

        Args:
            cpu_processors (int): number of processors on current CPU
        Returns:
            An int of the number of processors to use

        Raises:
            Exception: Input value exceeds number of available processors
            Exception: Input value less than 1 processors
        """
        _config_value = self.config.emme.num_processors
        num_processors = 0
        if isinstance(_config_value, str):
            if _config_value.upper() == "MAX":
                num_processors = cpu_processors
            elif re.match("^[0-9]+$", _config_value):
                num_processors = int(_config_value)
            else:
                _processor_range = re.split(r"^MAX[/s]*-[/s]*", _config_value.upper())
                num_processors = max(cpu_processors - int(_processor_range[1]), 1)
        else:
            num_processors = int(_config_value)

        num_processors = min(cpu_processors, num_processors)
        num_processors = max(1, num_processors)

        return num_processors
__init__(config_file=None, run_dir=None, run_components=component_cls_map.keys())

Constructor for RunController class.

Parameters:

    config_file (Union[Collection[Union[str, Path]], str, Path], default None):
        Single or list of config file locations as strings or Path objects.
    run_dir (Union[Path, str], default None):
        Model run directory as a Path object or string. If not provided, defaults
        to the directory of the first config_file.
    run_components (Collection[str], default component_cls_map.keys()):
        List of component names to run. Defaults to all components.
Source code in tm2py/controller.py
def __init__(
    self,
    config_file: Union[Collection[Union[str, Path]], str, Path] = None,
    run_dir: Union[Path, str] = None,
    run_components: Collection[str] = component_cls_map.keys(),
):
    """Constructor for RunController class.

    Args:
        config_file: Single or list of config file locations as strings or Path objects.
            Defaults to None.
        run_dir: Model run directory as a Path object or string. If not provided, defaults
            to the directory of the first config_file.
        run_components: List of component names to run. Defaults to all components.
    """
    if run_dir is None:
        run_dir = Path(os.path.abspath(os.path.dirname(config_file[0])))

    self._run_dir = Path(run_dir)

    self.config = Configuration.load_toml(config_file)
    self.has_emme: bool = emme_context()
    # NOTE: Logger opens log file on __enter__ (in run), not ready for logging yet
    # Logger uses self.config.logging
    self.logger = Logger(self)
    self.top_sheet = None
    self.trace = None
    self.completed_components = []

    # mapping from defined names referenced in config to Component objects
    self._component_map = {
        k: v(self) for k, v in component_cls_map.items() if k in run_components
    }
    self._validated_components = set()
    self._emme_manager = None
    self._num_processors = None
    self._iteration = None
    self._component = None
    self._component_name = None
    self._queued_components = deque()

    self._queue_components(run_components=run_components)
__repr__()

Legible representation.

Source code in tm2py/controller.py
def __repr__(self):
    """Legible representation."""
    _str = f"""RunController
        Run Directory: {self.run_dir}
        Iteration: {self.iteration} of {self.run_iterations}
        Component: {self.component_name}
        Completed: {self.completed_components}
        Queued: {self._queued_components}"""
    return _str
component()

Current component of model.

Source code in tm2py/controller.py
def component(self) -> Component:
    """Current component of model."""
    return self._component
component_name() property

Name of current component of model run.

Source code in tm2py/controller.py
@property
def component_name(self) -> str:
    """Name of current component of model run."""
    return self._component_name
emme_manager() property

Cached Emme Manager object.

Source code in tm2py/controller.py
@property
def emme_manager(self) -> EmmeManager:
    """Cached Emme Manager object."""
    if self._emme_manager is None:
        if self.has_emme:
            self._init_emme_manager()
        else:
            self.logger.log("Emme not found, skipping Emme-related components")
            # TODO: All of the Emme-related components need to be handled "in place" rather
            # than skippping using a Mock
            from unittest.mock import MagicMock

            self._emme_manager = MagicMock()
    return self._emme_manager
get_abs_path(rel_path)

Get the absolute path from the root run directory given a relative path.

Source code in tm2py/controller.py
def get_abs_path(self, rel_path: Union[Path, str]) -> Path:
    """Get the absolute path from the root run directory given a relative path."""
    if not isinstance(rel_path, Path):
        rel_path = Path(rel_path)
    return Path(os.path.join(self.run_dir, rel_path))
iter_component() property

Tuple of the current iteration and component name.

Source code in tm2py/controller.py
@property
def iter_component(self) -> Tuple[int, str]:
    """Tuple of the current iteration and component name."""
    return self._iteration, self._component_name
iteration() property

Current iteration of model run.

Source code in tm2py/controller.py
@property
def iteration(self) -> int:
    """Current iteration of model run."""
    return self._iteration
num_processors() property

Number of processors available for parallel processing.

Source code in tm2py/controller.py
@property
def num_processors(self) -> int:
    """Number of processors available for parallel processing."""
    if self._num_processors is None:
        self._num_processors = self._calculate_num_processors(
            multiprocessing.cpu_count()
        )

    return self._num_processors
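
The value is resolved from config.emme.num_processors, which may be an integer or a string of the form "MAX" or "MAX-N" (see _calculate_num_processors in the class source above). A minimal standalone sketch of that resolution logic; parse_num_processors is an illustrative name, not part of tm2py:

    import multiprocessing
    import re


    def parse_num_processors(config_value, cpu_count=None):
        # Illustrative re-implementation of the "MAX" / "MAX-N" / integer rules above.
        cpu_count = cpu_count or multiprocessing.cpu_count()
        if isinstance(config_value, str):
            if config_value.upper() == "MAX":
                n = cpu_count
            elif re.match(r"^[0-9]+$", config_value):
                n = int(config_value)
            else:
                # "MAX-N": leave N processors free for other tasks
                n = cpu_count - int(config_value.upper().split("-")[1])
        else:
            n = int(config_value)
        # clamp to the valid range [1, cpu_count]
        return max(1, min(cpu_count, n))


    # On an 8-core machine: "MAX" -> 8, "MAX-2" -> 6, "12" -> 8, 0 -> 1
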
run()

Main interface to run model.

Iterates through the self._queued_components and runs them.

Source code in tm2py/controller.py
def run(self):
    """Main interface to run model.

    Iterates through the self._queued_components and runs them.
    """
    self._iteration = None
    while self._queued_components:
        self.run_next()
run_dir() property

The root run directory of the model run.

Source code in tm2py/controller.py
@property
def run_dir(self) -> Path:
    """The root run directory of the model run."""
    return self._run_dir
run_iterations() property

List of iterations for this model run.

Source code in tm2py/controller.py
@property
def run_iterations(self) -> List[int]:
    """List of iterations for this model run."""
    return range(
        max(1, self.config.run.start_iteration), self.config.run.end_iteration + 1
    )
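
For example, with run.start_iteration = 0 and run.end_iteration = 3 this yields iterations 1 through 3; iteration 0 is reserved for the initial components queued by _queue_components:

    list(range(max(1, 0), 3 + 1))   # -> [1, 2, 3]
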
run_next()

Run next component in the queue.

Source code in tm2py/controller.py
def run_next(self):
    """Run next component in the queue."""
    if not self._queued_components:
        raise ValueError("No components in queue")
    iteration, name, component = self._queued_components.popleft()
    if self._iteration != iteration:
        self.logger.log(f"Start iteration {iteration}")
    self._iteration = iteration
    self._component = component
    component.run()
    self.completed_components.append((iteration, name, component))
time_period_names() property

Return the list of time_period names (uppercased).

Implemented here for easy access for all components.

Source code in tm2py/controller.py
@property
def time_period_names(self) -> List[str]:
    """Return input time_period name or names and return list of time_period names.

    Implemented here for easy access for all components.

    Returns: list of uppercased string names of time periods
    """
    return [time.name.upper() for time in self.config.time_periods]

Configuration

tm2py.config

Config implementation and schema.

ActiveModeShortestPathSkimConfig

Bases: ConfigItem

Active mode skim entry.

Source code in tm2py/config.py
@dataclass(frozen=True)
class ActiveModeShortestPathSkimConfig(ConfigItem):
    """Active mode skim entry."""

    mode: str
    roots: str
    leaves: str
    output: str
    max_dist_miles: float = None

ActiveModesConfig

Bases: ConfigItem

Active Mode skim parameters.

Source code in tm2py/config.py
@dataclass(frozen=True)
class ActiveModesConfig(ConfigItem):
    """Active Mode skim parameters."""

    emme_scenario_id: int
    shortest_path_skims: Tuple[ActiveModeShortestPathSkimConfig, ...]

AirPassengerConfig

Bases: ConfigItem

Air passenger model parameters.

Properties:

    highway_demand_file: output OMX file
    input_demand_folder: location to find the input demand csvs
    input_demand_filename_tmpl: filename template for input demand. Should have
        {year}, {direction} and {airport} variables and end in '.csv'
    reference_start_year: base start year for input demand tables, used to
        calculate the linear interpolation, as well as in the file name template
        {year}_{direction}{airport}.csv
    reference_end_year: end year for input demand tables, used to calculate the
        linear interpolation, as well as in the file name template
        {year}_{direction}{airport}.csv
    airport_names: list of one or more airport names / codes as used in the
        input file names
    demand_aggregation: specification of aggregation of by-access mode demand
        to highway class demand

Source code in tm2py/config.py
@dataclass(frozen=True)
class AirPassengerConfig(ConfigItem):
    """Air passenger model parameters.

    Properties

    highway_demand_file: output OMX file
    input_demand_folder: location to find the input demand csvs
    input_demand_filename_tmpl: filename template for input demand. Should have
        {year}, {direction} and {airport} variables and end in '.csv'
    reference_start_year: base start year for input demand tables
        used to calculate the linear interpolation, as well as
        in the file name template {year}_{direction}{airport}.csv
    reference_end_year: end year for input demand tables
        used to calculate the linear interpolation, as well as
        in the file name template {year}_{direction}{airport}.csv
    airport_names: list of one or more airport names / codes as used in
        the input file names
    demand_aggregation: specification of aggregation of by-access mode
        demand to highway class demand
    """

    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str
    input_demand_folder: pathlib.Path
    input_demand_filename_tmpl: str
    reference_start_year: str
    reference_end_year: str
    airport_names: List[str]
    demand_aggregation: List[AirPassengerDemandAggregationConfig]

    @validator("input_demand_filename_tmpl")
    def valid_input_demand_filename_tmpl(cls, value):
        """Validate skim matrix template has correct {}."""

        assert (
            "{year}" in value
        ), "-> 'output_skim_matrixname_tmpl must have {year}, found {value}."
        assert (
            "{direction}" in value
        ), "-> 'output_skim_matrixname_tmpl must have {direction}, found {value}."
        assert (
            "{airport}" in value
        ), "-> 'output_skim_matrixname_tmpl must have {airport}, found {value}."
        return value
valid_input_demand_filename_tmpl(value)

Validate skim matrix template has correct {}.

Source code in tm2py/config.py
@validator("input_demand_filename_tmpl")
def valid_input_demand_filename_tmpl(cls, value):
    """Validate skim matrix template has correct {}."""

    assert (
        "{year}" in value
    ), "-> 'output_skim_matrixname_tmpl must have {year}, found {value}."
    assert (
        "{direction}" in value
    ), "-> 'output_skim_matrixname_tmpl must have {direction}, found {value}."
    assert (
        "{airport}" in value
    ), "-> 'output_skim_matrixname_tmpl must have {airport}, found {value}."
    return value
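
For instance, the file name form mentioned in the docstring above contains all three required placeholders and ends in '.csv', so it passes this validator (the argument values below are illustrative):

    tmpl = "{year}_{direction}{airport}.csv"
    tmpl.format(year=2015, direction="to", airport="SFO")  # -> '2015_toSFO.csv'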

AirPassengerDemandAggregationConfig

Bases: ConfigItem

Air passenger demand aggregation input parameters.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class AirPassengerDemandAggregationConfig(ConfigItem):
    """Air passenger demand aggregation input parameters.

    Properties:
        name: (src_group_name) name used for the class group in the input columns
            for the trip tables,
        mode: (result_class_name) name used in the output OMX matrix names, note
            that this should match the expected naming convention in the
            HighwayClassDemandConfig name(s)
        access_modes: list of names used for the access modes in the input
            columns for the trip tables
    """

    name: str
    mode: str
    access_modes: Tuple[str, ...]

ChoiceClassConfig

Bases: ConfigItem

Choice class parameters.

Properties:

    property_to_skim_toll: Maps a property in the utility equation to a list of skim
        properties. If more than one skim property is listed, they are summed together
        (e.g. cost is the sum of bridge toll and value toll). This defaults to a value
        in the code.
    property_to_skim_notoll: Maps a property in the utility equation to a list of skim
        properties for the no-toll choice. If more than one skim property is listed,
        they are summed together (e.g. cost is the sum of bridge toll and value toll).
        This defaults to a value in the code.
    property_factors: Scales the property for this class, e.g. a shared ride cost
        could be factored assuming the cost is shared among individuals.

The end value in the utility equation for class c and property p is:

    utility[p].coeff *
    classes[c].property_factor[p] *
    sum(skim(classes[c].skim_mode, skim_p) for skim_p in property_to_skim[p])

Source code in tm2py/config.py
@dataclass(frozen=True)
class ChoiceClassConfig(ConfigItem):
    """Choice class parameters.

    Properties:
        property_to_skim_toll: Maps a property in the utility equation with a list of skim
            properties. If more than one skim property is listed, they will be summed together
            (e.g. cost if the sum of bridge toll and value toll). This defaults to a value in the
            code.
        property_to_skim_notoll: Maps a property in the utility equation with a list of skim
            properties for no toll choice.If more than one skim property is listed, they will
            be summed together  (e.g. cost if the sum of bridge toll and value toll). This
            defaults to a value in the code.
        property_factors: This will scale the property for this class. e.g. a shared ride cost
            could be applied a factor assuming that the cost is shared among individuals.

    The end value in the utility equation for class c and property p is:

       utility[p].coeff *
       classes[c].property_factor[p] *
       sum(skim(classes[c].skim_mode,skim_p) for skim_p in property_to_skim[p])
    """

    name: str
    skim_mode: Optional[str] = Field(default="da")
    veh_group_name: Optional[str] = Field(default="")
    property_factors: Optional[List[CoefficientConfig]] = Field(default=None)

CoefficientConfig

Bases: ConfigItem

Coefficient and properties to be used in utility or regression.

Source code in tm2py/config.py
@dataclass(frozen=True)
class CoefficientConfig(ConfigItem):
    """Coefficient and properties to be used in utility or regression."""

    property: str
    coeff: Optional[float] = Field(default=None)

ConfigItem

Bases: ABC

Base class to add partial dict-like interface to tm2py model configuration.

Allows use of .items(), ["X"], .get("X"), and .to_dict() on configuration objects.

Not to be constructed directly. To be used as a mixin for dataclasses representing the config schema. Do not use "get", "to_dict", or "items" as key names.

Source code in tm2py/config.py
class ConfigItem(ABC):
    """Base class to add partial dict-like interface to tm2py model configuration.

    Allow use of .items() ["X"] and .get("X") .to_dict() from configuration.

    Not to be constructed directly. To be used a mixin for dataclasses
    representing config schema.
    Do not use "get" "to_dict", or "items" for key names.
    """

    def __getitem__(self, key):
        """Get item for config. D[key] -> D[key] if key in D, else raise KeyError."""
        return getattr(self, key)

    def items(self):
        """The sub-config objects in config."""
        return self.__dict__.items()

    def get(self, key, default=None):
        """Return the value for key if key is in the dictionary, else default."""
        return self.__dict__.get(key, default)
__getitem__(key)

Get item for config. D[key] -> D[key] if key in D, else raise KeyError.

Source code in tm2py/config.py
def __getitem__(self, key):
    """Get item for config. D[key] -> D[key] if key in D, else raise KeyError."""
    return getattr(self, key)
get(key, default=None)

Return the value for key if key is in the dictionary, else default.

Source code in tm2py/config.py
def get(self, key, default=None):
    """Return the value for key if key is in the dictionary, else default."""
    return self.__dict__.get(key, default)
items()

The sub-config objects in config.

Source code in tm2py/config.py
def items(self):
    """The sub-config objects in config."""
    return self.__dict__.items()
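
A small sketch of the dict-like access this mixin adds; ExampleConfig is a hypothetical config item used only for illustration:

    from dataclasses import dataclass

    from tm2py.config import ConfigItem


    @dataclass(frozen=True)
    class ExampleConfig(ConfigItem):
        # hypothetical config item, for illustration only
        name: str
        max_iterations: int = 3


    cfg = ExampleConfig(name="demo")
    cfg["name"]               # "demo", via __getitem__
    cfg.get("missing", 0)     # 0, falls back to the default
    dict(cfg.items())         # {'name': 'demo', 'max_iterations': 3}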

Configuration

Bases: ConfigItem

Root of the model configuration.

Source code in tm2py/config.py
@dataclass(frozen=True)
class Configuration(ConfigItem):
    """Configuration: root of the model configuration."""

    scenario: ScenarioConfig
    run: RunConfig
    time_periods: Tuple[TimePeriodConfig, ...]
    household: HouseholdConfig
    air_passenger: AirPassengerConfig
    internal_external: InternalExternalConfig
    truck: TruckConfig
    active_modes: ActiveModesConfig
    highway: HighwayConfig
    transit: TransitConfig
    emme: EmmeConfig
    logging: Optional[LoggingConfig] = Field(default_factory=LoggingConfig)

    @classmethod
    def load_toml(
        cls,
        toml_path: Union[List[Union[str, pathlib.Path]], str, pathlib.Path],
    ) -> "Configuration":
        """Load configuration from .toml files(s).

        Normally the config is split into a scenario_config.toml file and a
        model_config.toml file.

        Args:
            toml_path: a valid system path string or Path object to a TOML format config file or
                list of paths of path objects to a set of TOML files.

        Returns:
            A Configuration object
        """
        if not isinstance(toml_path, List):
            toml_path = [toml_path]
        toml_path = list(map(pathlib.Path, toml_path))

        data = _load_toml(toml_path[0])
        for path_item in toml_path[1:]:
            _merge_dicts(data, _load_toml(path_item))
        return cls(**data)

    @validator("highway")
    def maz_skim_period_exists(value, values):
        """Validate highway.maz_to_maz.skim_period refers to a valid period."""
        if "time_periods" in values:
            time_period_names = set(time.name for time in values["time_periods"])
            assert (
                value.maz_to_maz.skim_period in time_period_names
            ), "maz_to_maz -> skim_period -> name not found in time_periods list"
        return value
load_toml(toml_path) classmethod

Load configuration from .toml file(s).

Normally the config is split into a scenario_config.toml file and a model_config.toml file.

Parameters:

    toml_path (Union[List[Union[str, pathlib.Path]], str, pathlib.Path], required):
        A valid system path string or Path object to a TOML format config file, or a
        list of paths / Path objects to a set of TOML files.

Returns:

    Configuration: A Configuration object.

Source code in tm2py/config.py
@classmethod
def load_toml(
    cls,
    toml_path: Union[List[Union[str, pathlib.Path]], str, pathlib.Path],
) -> "Configuration":
    """Load configuration from .toml files(s).

    Normally the config is split into a scenario_config.toml file and a
    model_config.toml file.

    Args:
        toml_path: a valid system path string or Path object to a TOML format config file or
            list of paths of path objects to a set of TOML files.

    Returns:
        A Configuration object
    """
    if not isinstance(toml_path, List):
        toml_path = [toml_path]
    toml_path = list(map(pathlib.Path, toml_path))

    data = _load_toml(toml_path[0])
    for path_item in toml_path[1:]:
        _merge_dicts(data, _load_toml(path_item))
    return cls(**data)
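
A minimal usage sketch (the file names are illustrative):

    from tm2py.config import Configuration

    config = Configuration.load_toml(["scenario.toml", "model.toml"])
    print(config.run.start_iteration, config.run.end_iteration)
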
maz_skim_period_exists(value, values)

Validate highway.maz_to_maz.skim_period refers to a valid period.

Source code in tm2py/config.py
@validator("highway")
def maz_skim_period_exists(value, values):
    """Validate highway.maz_to_maz.skim_period refers to a valid period."""
    if "time_periods" in values:
        time_period_names = set(time.name for time in values["time_periods"])
        assert (
            value.maz_to_maz.skim_period in time_period_names
        ), "maz_to_maz -> skim_period -> name not found in time_periods list"
    return value

DemandCountyGroupConfig

Bases: ConfigItem

Grouping of counties for assignment and demand files.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class DemandCountyGroupConfig(ConfigItem):
    """Grouping of counties for assignment and demand files.

    Properties:
        number: id number for this group, must be unique
        counties: list of one or more county names
    """

    number: int = Field()
    counties: Tuple[COUNTY_NAMES, ...] = Field()

EmmeConfig

Bases: ConfigItem

Emme-specific parameters.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class EmmeConfig(ConfigItem):
    """Emme-specific parameters.

    Properties:
        all_day_scenario_id: scenario ID to use for all day
            (initial imported) scenario with all time period data
        project_path: relative path from run_dir to Emme desktop project (.emp)
        highway_database_path: relative path to highway Emmebank
        active_database_paths: list of relative paths to active mode Emmebanks
        transit_database_path: relative path to transit Emmebank
        num_processors: the number of processors to use in Emme procedures,
            either as an integer, or value MAX, MAX-N. Typically recommend
            using MAX-1 (on desktop systems) or MAX-2 (on servers with many
            logical processors) to leave capacity for background / other tasks.
    """

    all_day_scenario_id: int
    project_path: pathlib.Path
    highway_database_path: pathlib.Path
    active_database_paths: Tuple[pathlib.Path, ...]
    transit_database_path: pathlib.Path
    num_processors: str = Field(regex=r"^MAX$|^MAX-\d+$|^\d+$")

HighwayCapClassConfig

Bases: ConfigItem

Highway link capacity and speed ('capclass') index entry.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class HighwayCapClassConfig(ConfigItem):
    """Highway link capacity and speed ('capclass') index entry.

    Properties:
        capclass: cross index for link @capclass lookup
        capacity: value for link capacity, PCE / hour
        free_flow_speed: value for link free flow speed, miles / hour
        critical_speed: value for critical speed (Ja) used in Akcelik
            type functions
    """

    capclass: int = Field(ge=0)
    capacity: float = Field(ge=0)
    free_flow_speed: float = Field(ge=0)
    critical_speed: float = Field(ge=0)

HighwayClassConfig

Bases: ConfigItem

Highway assignment class definition.

Note that excluded_links, skims and toll attribute names include vehicle groups ("{vehicle}") which reference the list of highway.toll.dst_vehicle_group_names (see HighwayTollsConfig). The default example model config uses: "da", "sr2", "sr3", "vsm", "sml", "med", "lrg".

Example single class config:

    name = "da"
    description = "drive alone"
    mode_code = "d"
    [[highway.classes.demand]]
        source = "household"
        name = "SOV_GP_{period}"
    [[highway.classes.demand]]
        source = "air_passenger"
        name = "da"
    [[highway.classes.demand]]
        source = "internal_external"
        name = "da"
    excluded_links = ["is_toll_da", "is_sr2"],
    value_of_time = 18.93,  # $ / hr
    operating_cost_per_mile = 17.23,  # cents / mile
    toll = ["@bridgetoll_da"]
    skims = ["time", "dist", "freeflowtime", "bridgetoll_da"],

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class HighwayClassConfig(ConfigItem):
    """Highway assignment class definition.

    Note that excluded_links, skims and toll attribute names include
    vehicle groups ("{vehicle}") which reference the list of
    highway.toll.dst_vehicle_group_names (see HighwayTollsConfig).
    The default example model config uses:
    "da", "sr2", "sr3", "vsm", sml", "med", "lrg"

    Example single class config:
        name = "da"
        description= "drive alone"
        mode_code= "d"
        [[highway.classes.demand]]
            source = "household"
            name = "SOV_GP_{period}"
        [[highway.classes.demand]]
            source = "air_passenger"
            name = "da"
        [[highway.classes.demand]]
            source = "internal_external"
            name = "da"
        excluded_links = ["is_toll_da", "is_sr2"],
        value_of_time = 18.93,  # $ / hr
        operating_cost_per_mile = 17.23,  # cents / mile
        toll = ["@bridgetoll_da"]
        skims = ["time", "dist", "freeflowtime", "bridgetoll_da"],

    Properties:
        name: short (up to 10 character) unique reference name for the class.
            used in attribute and matrix names
        description: longer text used in attribute and matrix descriptions
        mode_code: single character mode, used to generate link.modes to
            identify subnetwork, generated from "excluded_links" keywords.
            Should be unique in list of :es, unless multiple classes
            have identical excluded_links specification. Cannot be the
            same as used for highway.maz_to_maz.mode_code.
        value_of_time: value of time for this class in $ / hr
        operating_cost_per_mile: vehicle operating cost in cents / mile
        demand: list of OMX file and matrix keyname references,
            see HighwayClassDemandConfig
        excluded_links: list of keywords to identify links to exclude from
            this class' available subnetwork (generate link.modes)
            Options are:
                - "is_sr": is reserved for shared ride (@useclass in 2,3)
                - "is_sr2": is reserved for shared ride 2+ (@useclass == 2)
                - "is_sr3": is reserved for shared ride 3+ (@useclass == 3)
                - "is_auto_only": is reserved for autos (non-truck) (@useclass != 1)
                - "is_toll_{vehicle}": has a value (non-bridge) toll for the {vehicle} toll group
        toll: list of additional toll cost link attribute (values stored in cents),
            summed, one of "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}"
        toll_factor: optional, factor to apply to toll values in cost calculation
        pce: optional, passenger car equivalent to convert assigned demand in
            PCE units to vehicles for total assigned vehicle calculations
        skims: list of skim matrices to generate
            Options are:
                "time": pure travel time in minutes
                "dist": distance in miles
                "hovdist": distance on HOV facilities (is_sr2 or is_sr3)
                "tolldist": distance on toll facilities
                    (@tollbooth > highway.tolls.tollbooth_start_index)
                "freeflowtime": free flow travel time in minutes
                "bridgetoll_{vehicle}": bridge tolls, {vehicle} refers to toll group
                "valuetoll_{vehicle}": other, non-bridge tolls, {vehicle} refers to toll group
    """

    name: str = Field(min_length=1, max_length=10)
    veh_group_name: str = Field(min_length=1, max_length=10)
    description: Optional[str] = Field(default="")
    mode_code: str = Field(min_length=1, max_length=1)
    value_of_time: float = Field(gt=0)
    operating_cost_per_mile: float = Field(ge=0)
    pce: Optional[float] = Field(default=1.0, gt=0)
    # Note that excluded_links, skims, and tolls validated under HighwayConfig to include
    # highway.toll.dst_vehicle_group_names names
    excluded_links: Tuple[str, ...] = Field()
    skims: Tuple[str, ...] = Field()
    toll: Tuple[str, ...] = Field()
    toll_factor: Optional[float] = Field(default=None, gt=0)
    demand: Tuple[HighwayClassDemandConfig, ...] = Field()

HighwayClassDemandConfig

Bases: ConfigItem

Highway class input source for demand.

Used to specify where to find related demand file for this highway class. Multiple

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class HighwayClassDemandConfig(ConfigItem):
    """Highway class input source for demand.

    Used to specify where to find related demand file for this
    highway class. Multiple

    Properties:
        source: reference name of the component section for the
                source "highway_demand_file" location, one of:
                "household", "air_passenger", "internal_external", "truck"
        name: name of matrix in the OMX file, can include "{period}"
                placeholder
        factor: optional, multiplicative factor to generate PCEs from
                trucks or convert person-trips to vehicle-trips for HOVs
    """

    name: str = Field()
    source: str = Literal["household", "air_passenger", "internal_external", "truck"]
    factor: float = Field(default=1.0, gt=0)

HighwayConfig

Bases: ConfigItem

Highway assignment and skims parameters.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class HighwayConfig(ConfigItem):
    """Highway assignment and skims parameters.

    Properties:
        generic_highway_mode_code: single character unique mode ID for entire
            highway network (no excluded_links)
        relative_gap: target relative gap stopping criteria
        max_iterations: maximum iterations stopping criteria
        area_type_buffer_dist_miles: used to in calculation to categorize link @areatype
            The area type is determined based on the average density of nearby
            (within this buffer distance) MAZs, using (pop+jobs*2.5)/acres
        output_skim_path: relative path template from run dir for OMX output skims
        output_skim_filename_tmpl: template for OMX filename for a time period. Must include
            {time_period} in the string and end in '.omx'.
        output_skim_matrixname_tmpl: template for matrix names within OMX output skims.
            Should include {time_period}, {mode}, and {property}
        tolls: input toll specification, see HighwayTollsConfig
        maz_to_maz: maz-to-maz shortest path assignment and skim specification,
            see HighwayMazToMazConfig
        classes: highway assignment multi-class setup and skim specification,
            see HighwayClassConfig
        capclass_lookup: index cross-reference table from the link @capclass value
            to the free-flow speed, capacity, and critical speed values
    """

    generic_highway_mode_code: str = Field(min_length=1, max_length=1)
    relative_gap: float = Field(ge=0)
    max_iterations: int = Field(ge=0)
    area_type_buffer_dist_miles: float = Field(gt=0)
    output_skim_path: pathlib.Path = Field()
    output_skim_filename_tmpl: str = Field()
    output_skim_matrixname_tmpl: str = Field()
    tolls: HighwayTollsConfig = Field()
    maz_to_maz: HighwayMazToMazConfig = Field()
    classes: Tuple[HighwayClassConfig, ...] = Field()
    capclass_lookup: Tuple[HighwayCapClassConfig, ...] = Field()

    @validator("output_skim_filename_tmpl")
    def valid_skim_template(value):
        """Validate skim template has correct {} and extension."""
        assert (
            "{time_period" in value
        ), f"-> output_skim_filename_tmpl must have {{time_period}}', found {value}."
        assert (
            value[-4:].lower() == ".omx"
        ), f"-> 'output_skim_filename_tmpl must end in '.omx', found {value[-4:].lower() }"
        return value

    @validator("output_skim_matrixname_tmpl")
    def valid_skim_matrix_name_template(value):
        """Validate skim matrix template has correct {}."""
        assert (
            "{time_period" in value
        ), "-> 'output_skim_matrixname_tmpl must have {time_period}, found {value}."
        assert (
            "{property" in value
        ), "-> 'output_skim_matrixname_tmpl must have {property}, found {value}."
        assert (
            "{mode" in value
        ), "-> 'output_skim_matrixname_tmpl must have {mode}, found {value}."
        return value

    @validator("capclass_lookup")
    def unique_capclass_numbers(value):
        """Validate list of capclass_lookup has unique .capclass values."""
        capclass_ids = [i.capclass for i in value]
        error_msg = "-> capclass value must be unique in list"
        assert len(capclass_ids) == len(set(capclass_ids)), error_msg
        return value

    @validator("classes", pre=True)
    def unique_class_names(value):
        """Validate list of classes has unique .name values."""
        class_names = [highway_class["name"] for highway_class in value]
        error_msg = "-> name value must be unique in list"
        assert len(class_names) == len(set(class_names)), error_msg
        return value

    @validator("classes")
    def validate_class_mode_excluded_links(value, values):
        """Validate list of classes has unique .mode_code or .excluded_links match."""
        # validate if any mode IDs are used twice, that they have the same excluded links sets
        mode_excluded_links = {values["generic_highway_mode_code"]: set([])}
        for i, highway_class in enumerate(value):
            # maz_to_maz.mode_code must be unique
            if "maz_to_maz" in values:
                assert (
                    highway_class["mode_code"] != values["maz_to_maz"]["mode_code"]
                ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code"
            # make sure that if any mode IDs are used twice, they have the same excluded links sets
            if highway_class.mode_code in mode_excluded_links:
                ex_links1 = highway_class["excluded_links"]
                ex_links2 = mode_excluded_links[highway_class["mode_code"]]
                error_msg = (
                    f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') "
                    f"with different excluded links: {ex_links1} and {ex_links2}"
                )
                assert ex_links1 == ex_links2, error_msg
            mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links
        return value

    @validator("classes")
    def validate_class_keyword_lists(value, values):
        """Validate classes .skims, .toll, and .excluded_links values."""
        if "tolls" not in values:
            return value
        avail_skims = ["time", "dist", "hovdist", "tolldist", "freeflowtime"]
        available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"]
        avail_toll_attrs = []
        for name in values["tolls"].dst_vehicle_group_names:
            toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"]
            avail_skims.extend(toll_types)
            avail_toll_attrs.extend(["@" + name for name in toll_types])
            available_link_sets.append(f"is_toll_{name}")

        # validate class skim name list and toll attribute against toll setup
        def check_keywords(class_num, key, val, available):
            extra_keys = set(val) - set(available)
            error_msg = (
                f" -> {class_num} -> {key}: unrecognized {key} name(s): "
                f"{','.join(extra_keys)}.  Available names are: {', '.join(available)}"
            )
            assert not extra_keys, error_msg

        for i, highway_class in enumerate(value):
            check_keywords(i, "skim", highway_class["skims"], avail_skims)
            check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs)
            check_keywords(
                i,
                "excluded_links",
                highway_class["excluded_links"],
                available_link_sets,
            )
        return value
unique_capclass_numbers(value)

Validate list of capclass_lookup has unique .capclass values.

Source code in tm2py/config.py
@validator("capclass_lookup")
def unique_capclass_numbers(value):
    """Validate list of capclass_lookup has unique .capclass values."""
    capclass_ids = [i.capclass for i in value]
    error_msg = "-> capclass value must be unique in list"
    assert len(capclass_ids) == len(set(capclass_ids)), error_msg
    return value
unique_class_names(value)

Validate list of classes has unique .name values.

Source code in tm2py/config.py
@validator("classes", pre=True)
def unique_class_names(value):
    """Validate list of classes has unique .name values."""
    class_names = [highway_class["name"] for highway_class in value]
    error_msg = "-> name value must be unique in list"
    assert len(class_names) == len(set(class_names)), error_msg
    return value
valid_skim_matrix_name_template(value)

Validate skim matrix template has correct {}.

Source code in tm2py/config.py
@validator("output_skim_matrixname_tmpl")
def valid_skim_matrix_name_template(value):
    """Validate skim matrix template has correct {}."""
    assert (
        "{time_period" in value
    ), "-> 'output_skim_matrixname_tmpl must have {time_period}, found {value}."
    assert (
        "{property" in value
    ), "-> 'output_skim_matrixname_tmpl must have {property}, found {value}."
    assert (
        "{mode" in value
    ), "-> 'output_skim_matrixname_tmpl must have {mode}, found {value}."
    return value
valid_skim_template(value)

Validate skim template has correct {} and extension.

Source code in tm2py/config.py
@validator("output_skim_filename_tmpl")
def valid_skim_template(value):
    """Validate skim template has correct {} and extension."""
    assert (
        "{time_period" in value
    ), f"-> output_skim_filename_tmpl must have {{time_period}}', found {value}."
    assert (
        value[-4:].lower() == ".omx"
    ), f"-> 'output_skim_filename_tmpl must end in '.omx', found {value[-4:].lower() }"
    return value
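
For example, a value such as the following (illustrative, not necessarily the one used in the example configs) contains {time_period} and ends in '.omx', so it passes both checks:

    tmpl = "highway_skims_{time_period}.omx"   # illustrative template
    tmpl.format(time_period="AM")              # -> 'highway_skims_AM.omx'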
validate_class_keyword_lists(value, values)

Validate classes .skims, .toll, and .excluded_links values.

Source code in tm2py/config.py
@validator("classes")
def validate_class_keyword_lists(value, values):
    """Validate classes .skims, .toll, and .excluded_links values."""
    if "tolls" not in values:
        return value
    avail_skims = ["time", "dist", "hovdist", "tolldist", "freeflowtime"]
    available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"]
    avail_toll_attrs = []
    for name in values["tolls"].dst_vehicle_group_names:
        toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"]
        avail_skims.extend(toll_types)
        avail_toll_attrs.extend(["@" + name for name in toll_types])
        available_link_sets.append(f"is_toll_{name}")

    # validate class skim name list and toll attribute against toll setup
    def check_keywords(class_num, key, val, available):
        extra_keys = set(val) - set(available)
        error_msg = (
            f" -> {class_num} -> {key}: unrecognized {key} name(s): "
            f"{','.join(extra_keys)}.  Available names are: {', '.join(available)}"
        )
        assert not extra_keys, error_msg

    for i, highway_class in enumerate(value):
        check_keywords(i, "skim", highway_class["skims"], avail_skims)
        check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs)
        check_keywords(
            i,
            "excluded_links",
            highway_class["excluded_links"],
            available_link_sets,
        )
    return value

validate_class_mode_excluded_links(value, values)

Validate list of classes has unique .mode_code or .excluded_links match.

Source code in tm2py/config.py
@validator("classes")
def validate_class_mode_excluded_links(value, values):
    """Validate list of classes has unique .mode_code or .excluded_links match."""
    # validate if any mode IDs are used twice, that they have the same excluded links sets
    mode_excluded_links = {values["generic_highway_mode_code"]: set([])}
    for i, highway_class in enumerate(value):
        # maz_to_maz.mode_code must be unique
        if "maz_to_maz" in values:
            assert (
                highway_class["mode_code"] != values["maz_to_maz"]["mode_code"]
            ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code"
        # make sure that if any mode IDs are used twice, they have the same excluded links sets
        if highway_class.mode_code in mode_excluded_links:
            ex_links1 = highway_class["excluded_links"]
            ex_links2 = mode_excluded_links[highway_class["mode_code"]]
            error_msg = (
                f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') "
                f"with different excluded links: {ex_links1} and {ex_links2}"
            )
            assert ex_links1 == ex_links2, error_msg
        mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links
    return value

HighwayMazToMazConfig

Bases: ConfigItem

Highway MAZ to MAZ shortest path assignment and skim parameters.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class HighwayMazToMazConfig(ConfigItem):
    """Highway MAZ to MAZ shortest path assignment and skim parameters.

    Properties:
        mode_code: single character mode, used to generate link.modes to
            identify subnetwork, generated from "excluded_links" keywords,
            plus including MAZ connectors.
        value_of_time: value of time for this class in $ / hr
        operating_cost_per_mile: vehicle operating cost in cents / mile
        max_skim_cost: max shortest path distance to search for MAZ-to-MAZ
            skims, in generalized cost units (includes operating cost
            converted to minutes)
        excluded_links: list of keywords to identify links to exclude from
            MAZ-to-MAZ paths, see HighwayClassConfig.excluded_links
        demand_file: relative path to find the input demand files;
            can use placeholders for {period} and {number}, where
            {period} is the time_period.name (see TimePeriodConfig)
            and {number} is the demand_county_groups[].number
            (see DemandCountyGroupConfig),
            e.g.: auto_{period}_MAZ_AUTO_{number}_{period}.omx
        demand_county_groups: list of demand county groups
            (see DemandCountyGroupConfig)
        skim_period: period name to use for the shortest path skims, must
            match one of the names listed in the time_periods
        output_skim_file: relative path to resulting MAZ-to-MAZ skims
    """

    mode_code: str = Field(min_length=1, max_length=1)
    value_of_time: float = Field(gt=0)
    operating_cost_per_mile: float = Field(ge=0)
    max_skim_cost: float = Field(gt=0)
    excluded_links: Tuple[str, ...] = Field()
    demand_file: pathlib.Path = Field()
    demand_county_groups: Tuple[DemandCountyGroupConfig, ...] = Field()
    skim_period: str = Field()
    output_skim_file: pathlib.Path = Field()

    @validator("demand_county_groups")
    def unique_group_numbers(value):
        """Validate list of demand_county_groups has unique .number values."""
        group_ids = [group.number for group in value]
        assert len(group_ids) == len(set(group_ids)), "-> number value must be unique"
        return value
unique_group_numbers(value)

Validate list of demand_county_groups has unique .number values.

Source code in tm2py/config.py
825
826
827
828
829
830
@validator("demand_county_groups")
def unique_group_numbers(value):
    """Validate list of demand_county_groups has unique .number values."""
    group_ids = [group.number for group in value]
    assert len(group_ids) == len(set(group_ids)), "-> number value must be unique"
    return value
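
For orientation, a hypothetical HighwayMazToMazConfig TOML fragment might look like the following. The keys mirror the dataclass fields above, but all values, file names, and county names are illustrative only, and the demand_county_groups entry assumes a DemandCountyGroupConfig with number and counties fields (not shown in this section):

[highway.maz_to_maz]
mode_code = "x"
value_of_time = 18.93
operating_cost_per_mile = 17.23
max_skim_cost = 11.0
excluded_links = ["is_toll_da"]  # keywords must match the available link set names
demand_file = "demand/auto_{period}_MAZ_AUTO_{number}_{period}.omx"
skim_period = "MD"
output_skim_file = "skims/HWYSKIM_MAZMAZ_DA.csv"

[[highway.maz_to_maz.demand_county_groups]]
number = 1
counties = ["San Francisco", "San Mateo", "Santa Clara"]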

HighwayTollsConfig

Bases: ConfigItem

Highway assignment and skim input tolls and related parameters.

Properties
Source code in tm2py/config.py
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
@dataclass(frozen=True)
class HighwayTollsConfig(ConfigItem):
    """Highway assignment and skim input tolls and related parameters.

    Properties:
        file_path: source relative file path for the highway tolls index CSV
        tollbooth_start_index: tollbooth separates links with "bridge" tolls
            (index < this value) vs. "value" tolls. These toll attributes
            can then be referenced separately in the highway.classes[].tolls
            list
        src_vehicle_group_names: name used for the vehicle toll CSV column IDs,
            of the form "toll{period}_{vehicle}"
        dst_vehicle_group_names: list of names used in destination network
            for the corresponding vehicle group. Length of list must be the same
            as src_vehicle_group_names. Used for toll related attributes and
            resulting skim matrices. Cross-referenced in list of highway.classes[],
            valid keywords for:
                excluded_links: "is_toll_{vehicle}"
                tolls: "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}"
                skims: "bridgetoll_{vehicle}", "valuetoll_{vehicle}"
    """

    file_path: pathlib.Path = Field()
    tollbooth_start_index: int = Field(gt=1)
    src_vehicle_group_names: Tuple[str, ...] = Field()
    dst_vehicle_group_names: Tuple[str, ...] = Field()

    @validator("dst_vehicle_group_names", always=True)
    def dst_vehicle_group_names_length(value, values):
        """Validate dst_vehicle_group_names has same length as src_vehicle_group_names."""
        if "src_vehicle_group_names" in values:
            assert len(value) == len(
                values["src_vehicle_group_names"]
            ), "dst_vehicle_group_names must be same length as src_vehicle_group_names"
            assert all(
                [len(v) <= 4 for v in value]
            ), "dst_vehicle_group_names must be 4 characters or less"
        return value
dst_vehicle_group_names_length(value, values)

Validate dst_vehicle_group_names has same length as src_vehicle_group_names.

Source code in tm2py/config.py
749
750
751
752
753
754
755
756
757
758
759
@validator("dst_vehicle_group_names", always=True)
def dst_vehicle_group_names_length(value, values):
    """Validate dst_vehicle_group_names has same length as src_vehicle_group_names."""
    if "src_vehicle_group_names" in values:
        assert len(value) == len(
            values["src_vehicle_group_names"]
        ), "dst_vehicle_group_names must be same length as src_vehicle_group_names"
        assert all(
            [len(v) <= 4 for v in value]
        ), "dst_vehicle_group_names must be 4 characters or less"
    return value
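
As a sketch only (file path, index, and vehicle group names are illustrative, not the model's actual toll setup), the corresponding TOML might be:

[highway.tolls]
file_path = "inputs/hwy/tolls.csv"
tollbooth_start_index = 11
src_vehicle_group_names = ["da", "s2", "s3"]
dst_vehicle_group_names = ["da", "sr2", "sr3"]

With a setup like this, highway classes could then reference keywords such as excluded_links = ["is_toll_da"], tolls = ["@bridgetoll_da"], and skims = ["bridgetoll_da", "valuetoll_da"].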

HouseholdConfig

Bases: ConfigItem

Household (residents) model parameters.

Source code in tm2py/config.py
257
258
259
260
261
262
@dataclass(frozen=True)
class HouseholdConfig(ConfigItem):
    """Household (residents) model parameters."""

    highway_demand_file: pathlib.Path
    transit_demand_file: pathlib.Path

ImpedanceConfig

Bases: ConfigItem

Blended skims used for accessibility/friction calculations.

Properties: name: name to store it as, referred to in TripDistribution config; skim_mode: name of the mode to use for the blended skim; time_blend: blend of time periods to use, mapped to the factors (which should sum to 1)

Source code in tm2py/config.py
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
@dataclass(frozen=True)
class ImpedanceConfig(ConfigItem):
    """Blended skims used for accessibility/friction calculations.

    Properties:
        name: name to store it as, referred to in TripDistribution config
        skim_mode: name of the mode to use for the blended skim
        time_blend: blend of time periods to use; mapped to the factors (which should sum to 1)
    """

    name: str
    skim_mode: str
    time_blend: Dict[str, float]

    @validator("time_blend", allow_reuse=True)
    def sums_to_one(value):
        """Validate highway.maz_to_maz.skim_period refers to a valid period."""
        assert sum(value.values()) - 1 < 0.0001, "blend factors must sum to 1"
        return value
sums_to_one(value)

Validate that the time_blend factors sum to 1.

Source code in tm2py/config.py
518
519
520
521
522
@validator("time_blend", allow_reuse=True)
def sums_to_one(value):
    """Validate highway.maz_to_maz.skim_period refers to a valid period."""
    assert sum(value.values()) - 1 < 0.0001, "blend factors must sum to 1"
    return value
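
A minimal illustrative ImpedanceConfig entry, shown under truck.impedances since TruckConfig carries an impedances list; the name, mode, and blend factors are made up, and the factors sum to 1 as the validator requires:

[[truck.impedances]]
name = "com_skim"
skim_mode = "trk"
time_blend = { AM = 0.3333, MD = 0.3333, PM = 0.3334 }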

InternalExternalConfig

Bases: ConfigItem

Internal <-> External model parameters.

Source code in tm2py/config.py
438
439
440
441
442
443
444
445
446
447
@dataclass(frozen=True)
class InternalExternalConfig(ConfigItem):
    """Internal <-> External model parameters."""

    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str
    modes: List[str]
    demand: DemandGrowth
    time_of_day: TimeOfDayConfig
    toll_choice: TollChoiceConfig

LoggingConfig

Bases: ConfigItem

Logging parameters. TODO.

Properties
Source code in tm2py/config.py
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
@dataclass(frozen=True)
class LoggingConfig(ConfigItem):
    """Logging parameters. TODO.

    Properties:
        display_level: filter level for messages to show in console, default
            is STATUS
        run_file_path: relative path to high-level log file for the model run,
            default is tm2py_run_[%Y%m%d_%H%M].log
        run_file_level: filter level for messages recorded in the run log,
            default is INFO
        log_file_path: relative path to general log file with more detail
            than the run_file, default is tm2py_detail_[%Y%m%d_%H%M].log
        log_file_level: optional, filter level for messages recorded in the
            standard log, default is DETAIL
        log_on_error_file_path: relative path to use for fallback log message cache
            on error, default is tm2py_error_[%Y%m%d_%H%M].log
        notify_slack: if true notify_slack messages will be sent, default is False
        use_emme_logbook: if True log messages recorded in the standard log file will
            also be recorded in the Emme logbook, default is True
        iter_component_level: tuple of tuples of iteration, component name, log level.
            Used to override log levels (log_file_level) for debugging and recording
            more detail in the log_file_path.
            Example: [ [2, "highway", "TRACE"] ] to record all messages
            during the highway component run at iteration 2.
    """

    display_level: Optional[LogLevel] = Field(default="STATUS")
    run_file_path: Optional[str] = Field(
        default="tm2py_run_{}.log".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M")
        )
    )
    run_file_level: Optional[LogLevel] = Field(default="INFO")
    log_file_path: Optional[str] = Field(
        default="tm2py_debug_{}.log".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M")
        )
    )
    log_file_level: Optional[LogLevel] = Field(default="DEBUG")
    log_on_error_file_path: Optional[str] = Field(
        default="tm2py_error_{}.log".format(
            datetime.datetime.now().strftime("%Y%m%d_%H%M")
        )
    )

    notify_slack: Optional[bool] = Field(default=False)
    use_emme_logbook: Optional[bool] = Field(default=True)
    iter_component_level: Optional[
        Tuple[Tuple[int, ComponentNames, LogLevel], ...]
    ] = Field(default=None)
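
A hypothetical [logging] block using these fields; the values shown are simply the documented defaults plus an iter_component_level override taken from the docstring example:

[logging]
display_level = "STATUS"
run_file_level = "INFO"
log_file_level = "DEBUG"
notify_slack = false
use_emme_logbook = true
iter_component_level = [[2, "highway", "TRACE"]]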

MatrixFactorConfig

Bases: ConfigItem

Mapping of zone or list of zones to factor value.

Source code in tm2py/config.py
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
@dataclass(frozen=True)
class MatrixFactorConfig(ConfigItem):
    """Mapping of zone or list of zones to factor value."""

    zone_index: Optional[Union[int, List[int]]]
    factor: Optional[float] = Field(default=None)
    i_factor: Optional[float] = Field(default=None)
    j_factor: Optional[float] = Field(default=None)
    as_growth_rate: Optional[bool] = Field(default=False)

    @validator("zone_index", allow_reuse=True)
    def valid_zone_index(value):
        """Validate zone index and turn to list if isn't one."""
        if isinstance(value, str):
            value = int(value)
        if isinstance(value, int):
            value = [value]
        assert all([x >= 0 for x in value]), "Zone_index must be greater or equal to 0"
        return value

    @validator("factor", allow_reuse=True)
    def valid_factor(value, values):
        assert (
            "i_factor" not in values.keys()
        ), "Found both `factor` and\
            `i_factor` in MatrixFactorConfig. Should be one or the other."

        assert (
            "j_factor" not in values.keys()
        ), "Found both `factor` and\
            `j_factor` in MatrixFactorConfig. Should be one or the other."
        return value
valid_zone_index(value)

Validate zone index and convert it to a list if it isn't one.

Source code in tm2py/config.py
341
342
343
344
345
346
347
348
349
@validator("zone_index", allow_reuse=True)
def valid_zone_index(value):
    """Validate zone index and turn to list if isn't one."""
    if isinstance(value, str):
        value = int(value)
    if isinstance(value, int):
        value = [value]
    assert all([x >= 0 for x in value]), "Zone_index must be greater or equal to 0"
    return value
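
Because MatrixFactorConfig entries are referenced from other config sections rather than having a fixed location of their own, the parent table below is purely hypothetical; the two entries show the supported forms (a single factor, or separate i/j factors):

[[some_component.special_zone_factors]]  # hypothetical parent table
zone_index = [4688, 4689]
factor = 1.02

[[some_component.special_zone_factors]]  # hypothetical parent table
zone_index = 4690
i_factor = 1.05
j_factor = 0.95
as_growth_rate = false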

RunConfig

Bases: ConfigItem

Model run parameters.

Note that the components will be executed in the order listed.

Properties
Source code in tm2py/config.py
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
@dataclass(frozen=True)
class RunConfig(ConfigItem):
    """Model run parameters.

    Note that the components will be executed in the order listed.

    Properties:
        start_iteration: start iteration number, 0 to include initial_components
        end_iteration: final iteration number
        start_component: name of component to start with, will skip components
            list prior to this component
        initial_components: list of components to run as initial (0) iteration, in order
        global_iteration_components: list of components to run at every subsequent
            iteration (max(1, start_iteration) to end_iteration), in order.
        final_components: list of components to run after final iteration, in order
    """

    initial_components: Tuple[ComponentNames, ...]
    global_iteration_components: Tuple[ComponentNames, ...]
    final_components: Tuple[ComponentNames, ...]
    start_iteration: int = Field(ge=0)
    end_iteration: int = Field(gt=0)
    start_component: Optional[Union[ComponentNames, EmptyString]] = Field(default="")

    @validator("end_iteration", allow_reuse=True)
    def end_iteration_gt_start(cls, value, values):
        """Validate end_iteration greater than start_iteration."""
        if values.get("start_iteration"):
            assert (
                value > values["start_iteration"]
            ), f"'end_iteration' ({value}) must be greater than 'start_iteration'\
                ({values['start_iteration']})"
        return value

    @validator("start_component", allow_reuse=True)
    def start_component_used(cls, value, values):
        """Validate start_component is listed in *_components."""
        if not values.get("start_component") or not value:
            return value

        if "start_iteration" in values:
            if values.get("start_iteration") == 0:
                assert value in values.get(
                    "initial_components", [value]
                ), f"'start_component' ({value}) must be one of the components listed in\
                    initial_components if 'start_iteration = 0'"
            else:
                assert value in values.get(
                    "global_iteration_components", [values]
                ), f"'start_component' ({value}) must be one of the components listed in\
                    global_iteration_components if 'start_iteration > 0'"
        return value
end_iteration_gt_start(value, values)

Validate end_iteration greater than start_iteration.

Source code in tm2py/config.py
100
101
102
103
104
105
106
107
108
@validator("end_iteration", allow_reuse=True)
def end_iteration_gt_start(cls, value, values):
    """Validate end_iteration greater than start_iteration."""
    if values.get("start_iteration"):
        assert (
            value > values["start_iteration"]
        ), f"'end_iteration' ({value}) must be greater than 'start_iteration'\
            ({values['start_iteration']})"
    return value
start_component_used(value, values)

Validate start_component is listed in *_components.

Source code in tm2py/config.py
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
@validator("start_component", allow_reuse=True)
def start_component_used(cls, value, values):
    """Validate start_component is listed in *_components."""
    if not values.get("start_component") or not value:
        return value

    if "start_iteration" in values:
        if values.get("start_iteration") == 0:
            assert value in values.get(
                "initial_components", [value]
            ), f"'start_component' ({value}) must be one of the components listed in\
                initial_components if 'start_iteration = 0'"
        else:
            assert value in values.get(
                "global_iteration_components", [values]
            ), f"'start_component' ({value}) must be one of the components listed in\
                global_iteration_components if 'start_iteration > 0'"
    return value
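
An illustrative [run] block; the component names here are placeholders and must be valid ComponentNames values ("highway" appears elsewhere in this documentation, the others are assumptions):

[run]
start_iteration = 0
end_iteration = 3
start_component = ""
initial_components = ["air_passenger", "highway"]
global_iteration_components = ["household", "highway"]
final_components = []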

ScenarioConfig

Bases: ConfigItem

Scenario related parameters.

Properties
Source code in tm2py/config.py
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
@dataclass(frozen=True)
class ScenarioConfig(ConfigItem):
    """Scenario related parameters.

    Properties:
        verify: optional, default False; if specified as True, components will run
            an additional post-process verify step to validate results / outputs
            (not implemented yet)
        maz_landuse_file: relative path to maz_landuse_file used by multiple
            components
        name: scenario name string
        year: model year, must be at least 2005
    """

    maz_landuse_file: pathlib.Path
    name: str
    year: int = Field(ge=2005)
    verify: Optional[bool] = Field(default=False)
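
For example (values illustrative):

[scenario]
name = "example_scenario"
year = 2015
maz_landuse_file = "inputs/landuse/maz_data.csv"
verify = false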

TimeOfDayClassConfig

Bases: ConfigItem

Configuration for a class of the time of day model.

Source code in tm2py/config.py
242
243
244
245
246
247
@dataclass(frozen=True)
class TimeOfDayClassConfig(ConfigItem):
    """Configuraiton for a class of time of day model."""

    name: str
    time_period_split: List[TimeSplitConfig]

TimeOfDayConfig

Bases: ConfigItem

Configuration for time of day model.

Source code in tm2py/config.py
250
251
252
253
254
@dataclass(frozen=True)
class TimeOfDayConfig(ConfigItem):
    """Configuration for time of day model."""

    classes: List[TimeOfDayClassConfig]

TimePeriodConfig

Bases: ConfigItem

Time period entry.

Properties
Source code in tm2py/config.py
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
@dataclass(frozen=True)
class TimePeriodConfig(ConfigItem):
    """Time time period entry.

    Properties:
        name: name of the time period, up to four characters
        length_hours: length of the time period in hours
        highway_capacity_factor: factor used to multiply the per-hour
            capacities in the highway network
        emme_scenario_id: scenario ID to use for Emme per-period
            assignment (highway and transit) scenarios
    """

    name: str = Field(max_length=4)
    length_hours: float = Field(gt=0)
    highway_capacity_factor: float = Field(gt=0)
    emme_scenario_id: int = Field(ge=1)
    description: Optional[str] = Field(default="")
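
An illustrative entry, assuming a top-level time_periods list in the TOML config; the name and factors are placeholders, not the model's calibrated values:

[[time_periods]]
name = "am"
length_hours = 4
highway_capacity_factor = 3.65
emme_scenario_id = 2
description = "morning peak"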

TimeSplitConfig

Bases: ConfigItem

Split matrix i and j.

i.e. for time of day splits.

Source code in tm2py/config.py
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
@dataclass(frozen=True)
class TimeSplitConfig(ConfigItem):
    """Split matrix i and j.

    i.e. for time of day splits.
    """

    time_period: str
    production: Optional[NonNegativeFloat] = None
    attraction: Optional[NonNegativeFloat] = None
    od: Optional[NonNegativeFloat] = None

    @validator("production", "attraction", "od")
    def less_than_equal_one(cls, v):
        if v:
            assert v <= 1.0, "Value should be less than or equal to 1"
            return v

    def __post_init__(self):
        if self.od and any([self.production, self.attraction]):
            raise ValueError(
                f"TimeSplitConfig: Must either specifify an od or any of\
            production/attraction - not both.\n{self}"
            )

        if not all([self.production, self.attraction]) and any(
            [self.production, self.attraction]
        ):
            raise ValueError(
                f"TimeSplitConfig: Must have both production AND attraction\
            if one of them is specified."
            )
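
To illustrate the two allowed forms, here is a hypothetical fragment under a truck time-of-day class (the parent table path and values are assumptions): an entry may give either a single od factor, or paired production and attraction factors, but not both:

[[truck.time_of_day.classes]]
name = "vsm"

[[truck.time_of_day.classes.time_period_split]]
time_period = "am"
production = 0.2
attraction = 0.2

[[truck.time_of_day.classes.time_period_split]]
time_period = "md"
od = 0.4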

TollChoiceConfig

Bases: ConfigItem

Toll choice parameters.

Properties
Source code in tm2py/config.py
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
@dataclass(frozen=True)
class TollChoiceConfig(ConfigItem):
    """Toll choice parameters.

    Properties:
        property_to_skim_toll: Maps a property in the utility equation with a list of skim
            properties. If more than one skim property is listed, they will be summed together
            (e.g. cost if the sum of bridge toll and value toll). This defaults to a value in the
            code.
        property_to_skim_notoll: Maps a property in the utility equation with a list of skim
            properties for no toll choice.If more than one skim property is listed, they will
            be summed together  (e.g. cost if the sum of bridge toll and value toll). This
            defaults to a value in the code.
    """

    classes: List[ChoiceClassConfig]
    value_of_time: float
    operating_cost_per_mile: float
    property_to_skim_toll: Optional[Dict[str, List[str]]] = Field(default_factory=dict)
    property_to_skim_notoll: Optional[Dict[str, List[str]]] = Field(
        default_factory=dict
    )
    utility: Optional[List[CoefficientConfig]] = Field(default_factory=dict)
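
A sketch of the skim-mapping portion, using truck.toll_choice as an example location; the skim names assume a vehicle group called "lrg" and the required classes entries are omitted here:

[truck.toll_choice]
value_of_time = 30.0
operating_cost_per_mile = 25.0

[truck.toll_choice.property_to_skim_toll]
time = ["time"]
cost = ["bridgetoll_lrg", "valuetoll_lrg"]

[truck.toll_choice.property_to_skim_notoll]
time = ["time"]
cost = ["bridgetoll_lrg"]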

TransitConfig

Bases: ConfigItem

Transit assignment parameters.

Source code in tm2py/config.py
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
@dataclass(frozen=True)
class TransitConfig(ConfigItem):
    """Transit assignment parameters."""

    modes: Tuple[TransitModeConfig, ...]
    vehicles: Tuple[TransitVehicleConfig, ...]

    apply_msa_demand: bool
    value_of_time: float
    effective_headway_source: str
    initial_wait_perception_factor: float
    transfer_wait_perception_factor: float
    walk_perception_factor: float
    initial_boarding_penalty: float
    transfer_boarding_penalty: float
    max_transfers: int
    output_skim_path: pathlib.Path
    fares_path: pathlib.Path
    fare_matrix_path: pathlib.Path
    fare_max_transfer_distance_miles: float
    use_fares: bool
    override_connector_times: bool
    input_connector_access_times_path: Optional[pathlib.Path] = Field(default=None)
    input_connector_egress_times_path: Optional[pathlib.Path] = Field(default=None)
    output_stop_usage_path: Optional[pathlib.Path] = Field(default=None)

TransitModeConfig

Bases: ConfigItem

Transit mode definition (see also mode in the Emme API).

Source code in tm2py/config.py
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
@dataclass(frozen=True)
class TransitModeConfig(ConfigItem):
    """Transit mode definition (see also mode in the Emme API)."""

    type: Literal["WALK", "ACCESS", "EGRESS", "LOCAL", "PREMIUM"]
    assign_type: Literal["TRANSIT", "AUX_TRANSIT"]
    mode_id: str = Field(min_length=1, max_length=1)
    name: str = Field(max_length=10)
    in_vehicle_perception_factor: Optional[float] = Field(default=None, ge=0)
    speed_miles_per_hour: Optional[float] = Field(default=None, gt=0)

    @validator("in_vehicle_perception_factor", always=True)
    def in_vehicle_perception_factor_valid(value, values):
        """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("speed_miles_per_hour", always=True)
    def speed_miles_per_hour_valid(value, values):
        """Validate speed_miles_per_hour exists if assign_type is AUX_TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT":
            assert value is not None, "must be specified when assign_type==AUX_TRANSIT"
        return value
in_vehicle_perception_factor_valid(value, values)

Validate in_vehicle_perception_factor exists if assign_type is TRANSIT.

Source code in tm2py/config.py
981
982
983
984
985
986
@validator("in_vehicle_perception_factor", always=True)
def in_vehicle_perception_factor_valid(value, values):
    """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value
speed_miles_per_hour_valid(value, values)

Validate speed_miles_per_hour exists if assign_type is AUX_TRANSIT.

Source code in tm2py/config.py
988
989
990
991
992
993
@validator("speed_miles_per_hour", always=True)
def speed_miles_per_hour_valid(value, values):
    """Validate speed_miles_per_hour exists if assign_type is AUX_TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT":
        assert value is not None, "must be specified when assign_type==AUX_TRANSIT"
    return value
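
Two illustrative mode entries, one of each assign_type; the validators above require in_vehicle_perception_factor for TRANSIT modes and speed_miles_per_hour for AUX_TRANSIT modes. Mode IDs and names are placeholders:

[[transit.modes]]
mode_id = "b"
name = "local_bus"
type = "LOCAL"
assign_type = "TRANSIT"
in_vehicle_perception_factor = 1.0

[[transit.modes]]
mode_id = "w"
name = "walk"
type = "WALK"
assign_type = "AUX_TRANSIT"
speed_miles_per_hour = 3.0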

TransitVehicleConfig

Bases: ConfigItem

Transit vehicle definition (see also transit vehicle in the Emme API).

Source code in tm2py/config.py
 996
 997
 998
 999
1000
1001
1002
1003
1004
1005
@dataclass(frozen=True)
class TransitVehicleConfig(ConfigItem):
    """Transit vehicle definition (see also transit vehicle in the Emme API)."""

    vehicle_id: int
    mode: str
    name: str
    auto_equivalent: Optional[float] = Field(default=0, ge=0)
    seated_capacity: Optional[int] = Field(default=None, ge=0)
    total_capacity: Optional[int] = Field(default=None, ge=0)

TripDistributionClassConfig

Bases: ConfigItem

Trip Distribution parameters.

Properties
Source code in tm2py/config.py
481
482
483
484
485
486
487
488
489
490
491
492
493
@dataclass(frozen=True)
class TripDistributionClassConfig(ConfigItem):
    """Trip Distribution parameters.

    Properties:
        name: name of class to apply to
        impedance_name: refers to an impedance (skim) matrix to use - often a blended skim.
        use_k_factors: boolean on if to use k-factors
    """

    name: str
    impedance: str
    use_k_factors: bool

TripDistributionConfig

Bases: ConfigItem

Trip Distribution parameters.

Source code in tm2py/config.py
525
526
527
528
529
530
531
532
533
@dataclass(frozen=True)
class TripDistributionConfig(ConfigItem):
    """Trip Distribution parameters."""

    classes: List[TripDistributionClassConfig]
    max_balance_iterations: int
    max_balance_relative_error: float
    friction_factors_file: pathlib.Path
    k_factors_file: Optional[pathlib.Path] = None

TripGenerationClassConfig

Bases: ConfigItem

Trip Generation parameters.

Source code in tm2py/config.py
463
464
465
466
467
468
469
470
471
@dataclass(frozen=True)
class TripGenerationClassConfig(ConfigItem):
    """Trip Generation parameters."""

    name: str
    purpose: Optional[str] = Field(default=None)
    production_formula: Optional[TripGenerationFormulaConfig] = Field(default=None)
    attraction_formula: Optional[TripGenerationFormulaConfig] = Field(default=None)
    balance_to: Optional[str] = Field(default="production")

TripGenerationConfig

Bases: ConfigItem

Trip Generation parameters.

Source code in tm2py/config.py
474
475
476
477
478
@dataclass(frozen=True)
class TripGenerationConfig(ConfigItem):
    """Trip Generation parameters."""

    classes: List[TripGenerationClassConfig]

TripGenerationFormulaConfig

Bases: ConfigItem

TripProductionConfig.

Trip productions or attractions for a zone are the constant plus the sum of the rates * values in land use file for that zone.

Source code in tm2py/config.py
450
451
452
453
454
455
456
457
458
459
460
@dataclass(frozen=True)
class TripGenerationFormulaConfig(ConfigItem):
    """TripProductionConfig.

    Trip productions or attractions for a zone are the constant plus the sum of the rates * values
    in land use file for that zone.
    """

    land_use_rates: List[CoefficientConfig]
    constant: Optional[float] = Field(default=0.0)
    multiplier: Optional[float] = Field(default=1.0)
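
As an illustrative calculation (the land-use column names and rates are hypothetical, not the model's actual coefficients): with constant = 0, multiplier = 1, and land_use_rates of 0.6 for RETEMPN and 0.25 for TOTHH, a zone with 100 retail jobs and 200 households would generate 0.6 * 100 + 0.25 * 200 = 110 trips.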

TruckClassConfig

Bases: ConfigItem

Truck class parameters.

Source code in tm2py/config.py
496
497
498
499
500
501
@dataclass(frozen=True)
class TruckClassConfig(ConfigItem):
    """Truck class parameters."""

    name: str
    description: Optional[str] = ""

TruckConfig

Bases: ConfigItem

Truck model parameters.

Source code in tm2py/config.py
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
@dataclass(frozen=True)
class TruckConfig(ConfigItem):
    """Truck model parameters."""

    classes: List[TruckClassConfig]
    impedances: List[ImpedanceConfig]
    trip_gen: TripGenerationConfig
    trip_dist: TripDistributionConfig
    time_of_day: TimeOfDayConfig
    toll_choice: TollChoiceConfig
    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str

    """
    @validator("classes")
    def class_consistency(cls, v, values):
        # TODO Can't get to work right now
        _class_names = [c.name for c in v]
        _gen_classes = [c.name for c in values["trip_gen"]]
        _dist_classes = [c.name for c in values["trip_dist"]]
        _time_classes = [c.name for c in values["time_split"]]
        _toll_classes = [c.name for c in values["toll_choice"]]

        assert (
            _class_names == _gen_classes
        ), "truck.classes ({_class_names}) doesn't equal\
            class names in truck.trip_gen ({_gen_classes})."
        assert (
            _class_names == _dist_classes
        ), "truck.classes ({_class_names}) doesn't  equal\
            class names in truck.trip_dist ({_dist_classes})."
        assert (
            _class_names == _time_classes
        ), "truck.classes ({_class_names}) doesn't  equal\
            class names in truck.time_split ({_time_classes})."
        assert (
            _class_names == _toll_classes
        ), "truck.classes ({_class_names}) doesn't equal\
            class names in truck.toll_choice ({_toll_classes})."

        return v
    """
outfile_trip_table_tmp: str class-attribute


Configurations for each component are listed with those components.

Components

Base Component

tm2py.components.component

Root component ABC.

Component

Bases: ABC

Template for Component class with several built-in methods.

A component is a piece of the model that can be run independently (of other components) given the required input data and configuration. It communicates information to other components via disk I/O (including the emmebank).

Note: if the component needs data that is not written to disk, it would be considered a subcomponent.

Abstract Methods – Each component class must have the following methods: __init__: constructor, which associates the RunController with the instantiated object; run: run the component without any arguments; validate_inputs: validate the inputs to the component; report_progress: report progress to the user; verify: verify the component’s output; write_top_sheet: write outputs to topsheet; test_component: test the component

Template Class methods - component classes inherit
Template Class Properties - component classes inherit

:: class MyComponent(Component):

 1
 2
 3
 4
 5
 6
 7
 8
 9
10
11
12
13
def __init__(self, controller):
    super().__init__(controller)
    self._parameter = None

def run(self):
    self._step1()
    self._step2()

def _step1(self):
    pass

def _step2(self):
    pass
Source code in tm2py/components/component.py
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
class Component(ABC):
    """Template for Component class with several built-in methods.

    A component is a piece of the model that can be run independently (of other components) given
    the required input data and configuration.  It communicates information to other components via
    disk I/O (including the emmebank).

    Note: if the component needs data that is not written to disk, it would be considered a
    subcomponent.

    Abstract Methods – Each component class must have the following methods:
        __init__: constructor, which associates the RunController with the instantiated object
        run: run the component without any arguments
        validate_inputs: validate the inputs to the component
        report_progress: report progress to the user
        verify: verify the component's output
        write_top_sheet: write outputs to topsheet
        test_component: test the component

    Template Class methods - component classes inherit:
        get_abs_path: convenience method to get absolute path of the run directory
        get_emme_scenario: ....

    Template Class Properties - component classes inherit:
        controller: RunController object
        config: Config object
        time_period_names: convenience property
        top_sheet: topsheet object
        logger: logger object
        trace: trace object

    Example:
    ::
        class MyComponent(Component):

        def __init__(self, controller):
            super().__init__(controller)
            self._parameter = None

        def run(self):
            self._step1()
            self._step2()

        def _step1(self):
            pass

        def _step2(self):
            pass
    """

    def __init__(self, controller: RunController):
        """Model component template/abstract base class.

        Args:
            controller (RunController): Reference to the run controller object.
        """
        self._controller = controller
        self._trace = None

    @property
    def controller(self):
        """Parent controller."""
        return self._controller

    def get_abs_path(self, path: Union[Path, str]) -> str:
        """Convenince method to get absolute path from run directory."""
        return self.controller.get_abs_path(path).__str__()

    def get_emme_scenario(self, emmebank_path: str, time_period: str) -> EmmeScenario:
        """Get the Emme scenario object from the Emmebank at emmebank_path for the time_period ID.

        Args:
            emmebank_path: valid Emmebank path, absolute or relative to root run directory
            time_period: valid time_period ID

        Returns
            Emme Scenario object (see Emme API Reference)
        """
        if not os.path.isabs(emmebank_path):
            emmebank_path = self.get_abs_path(emmebank_path)
        emmebank = self.controller.emme_manager.emmebank(emmebank_path)
        scenario_id = {
            tp.name: tp.emme_scenario_id for tp in self.controller.config.time_periods
        }[time_period.lower()]
        return emmebank.scenario(scenario_id)

    @property
    def time_period_names(self) -> List[str]:
        """Return input time_period name or names and return list of time_period names.

        Implemented here for easy access for all components.

        Returns: list of uppercased string names of time periods
        """
        return self.controller.time_period_names

    @property
    def top_sheet(self):
        """Reference to top sheet."""
        return self.controller.top_sheet

    @property
    def logger(self):
        """Reference to logger."""
        return self.controller.logger

    @property
    def trace(self):
        """Reference to trace."""
        return self._trace

    @abstractmethod
    def validate_inputs(self):
        """Validate inputs are correct at model initiation, raise on error."""

    @abstractmethod
    def run(self):
        """Run model component."""

    # @abstractmethod
    def report_progress(self):
        """Write progress to log file."""

    # @abstractmethod
    def verify(self):
        """Verify component outputs / results."""

    # @abstractmethod
    def write_top_sheet(self):
        """Write key outputs to the model top sheet."""
__init__(controller)

Model component template/abstract base class.

Parameters:

controller (RunController): Reference to the run controller object. Required.
Source code in tm2py/components/component.py
79
80
81
82
83
84
85
86
def __init__(self, controller: RunController):
    """Model component template/abstract base class.

    Args:
        controller (RunController): Reference to the run controller object.
    """
    self._controller = controller
    self._trace = None
controller() property

Parent controller.

Source code in tm2py/components/component.py
88
89
90
91
@property
def controller(self):
    """Parent controller."""
    return self._controller
get_abs_path(path)

Convenience method to get absolute path from run directory.

Source code in tm2py/components/component.py
93
94
95
def get_abs_path(self, path: Union[Path, str]) -> str:
    """Convenince method to get absolute path from run directory."""
    return self.controller.get_abs_path(path).__str__()
get_emme_scenario(emmebank_path, time_period)

Get the Emme scenario object from the Emmebank at emmebank_path for the time_period ID.

Parameters:

emmebank_path (str): valid Emmebank path, absolute or relative to root run directory. Required.
time_period (str): valid time_period ID. Required.

Returns Emme Scenario object (see Emme API Reference)

Source code in tm2py/components/component.py
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
def get_emme_scenario(self, emmebank_path: str, time_period: str) -> EmmeScenario:
    """Get the Emme scenario object from the Emmebank at emmebank_path for the time_period ID.

    Args:
        emmebank_path: valid Emmebank path, absolute or relative to root run directory
        time_period: valid time_period ID

    Returns
        Emme Scenario object (see Emme API Reference)
    """
    if not os.path.isabs(emmebank_path):
        emmebank_path = self.get_abs_path(emmebank_path)
    emmebank = self.controller.emme_manager.emmebank(emmebank_path)
    scenario_id = {
        tp.name: tp.emme_scenario_id for tp in self.controller.config.time_periods
    }[time_period.lower()]
    return emmebank.scenario(scenario_id)
logger() property

Reference to logger.

Source code in tm2py/components/component.py
130
131
132
133
@property
def logger(self):
    """Reference to logger."""
    return self.controller.logger
report_progress()

Write progress to log file.

Source code in tm2py/components/component.py
149
150
def report_progress(self):
    """Write progress to log file."""
run() abstractmethod

Run model component.

Source code in tm2py/components/component.py
144
145
146
@abstractmethod
def run(self):
    """Run model component."""
time_period_names() property

Return the list of time_period names.

Implemented here for easy access for all components.

Source code in tm2py/components/component.py
115
116
117
118
119
120
121
122
123
@property
def time_period_names(self) -> List[str]:
    """Return input time_period name or names and return list of time_period names.

    Implemented here for easy access for all components.

    Returns: list of uppercased string names of time periods
    """
    return self.controller.time_period_names
top_sheet() property

Reference to top sheet.

Source code in tm2py/components/component.py
125
126
127
128
@property
def top_sheet(self):
    """Reference to top sheet."""
    return self.controller.top_sheet
trace() property

Reference to trace.

Source code in tm2py/components/component.py
135
136
137
138
@property
def trace(self):
    """Reference to trace."""
    return self._trace
validate_inputs() abstractmethod

Validate inputs are correct at model initiation, raise on error.

Source code in tm2py/components/component.py
140
141
142
@abstractmethod
def validate_inputs(self):
    """Validate inputs are correct at model initiation, raise on error."""
verify()

Verify component outputs / results.

Source code in tm2py/components/component.py
153
154
def verify(self):
    """Verify component outputs / results."""
write_top_sheet()

Write key outputs to the model top sheet.

Source code in tm2py/components/component.py
157
158
def write_top_sheet(self):
    """Write key outputs to the model top sheet."""

FileFormatError

Bases: Exception

Exception raised when a file is not in the expected format.

Source code in tm2py/components/component.py
16
17
18
19
20
21
22
23
24
25
26
class FileFormatError(Exception):
    """Exception raised when a file is not in the expected format."""

    def __init__(self, f, *args):
        """Exception for invalid file formats."""
        super().__init__(args)
        self.f = f

    def __str__(self):
        """String representation for FileFormatError."""
        return f"The {self.f} is not a valid format."
__init__(f, *args)

Exception for invalid file formats.

Source code in tm2py/components/component.py
19
20
21
22
def __init__(self, f, *args):
    """Exception for invalid file formats."""
    super().__init__(args)
    self.f = f
__str__()

String representation for FileFormatError.

Source code in tm2py/components/component.py
24
25
26
def __str__(self):
    """String representation for FileFormatError."""
    return f"The {self.f} is not a valid format."

Subcomponent

Bases: Component

Template for sub-component class.

A sub-component is a more loosely defined component that allows for input into the run() method. It is used to break up larger processes into smaller chunks which can be: (1) re-used across components (e.g. toll choice), (2) updated/subbed in to a parent component's run method based on the expected API, (3) easier to test, understand and debug, and (4) more consistent with the algorithms we understand from transportation planning 101.

Source code in tm2py/components/component.py
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
class Subcomponent(Component):
    """Template for sub-component class.

    A sub-component is a more loosely defined component that allows for input into the run()
    method.  It is used to break up larger processes into smaller chunks which can be:
    (1) re-used across components (e.g. toll choice)
    (2) updated/subbed in to a parent component(s) run method based on the expected API
    (3) easier to test, understand and debug.
    (4) more consistent with the algorithms we understand from transportation planning 101
    """

    def __init__(self, controller: RunController, component: Component):
        """Constructor for model sub-component abstract base class.

        Only calls the super class constructor.

        Args:
            controller (RunController): Reference to the run controller object.
            component (Component): Reference to the parent component object.
        """
        super().__init__(controller)
        self.component = component

    @abstractmethod
    def run(self, *args, **kwargs):
        """Run sub-component, allowing for multiple inputs.

        Allowing for inputs to the run() method is what differentiates a sub-component from
        a component.
        """
__init__(controller, component)

Constructor for model sub-component abstract base class.

Only calls the super class constructor.

Parameters:

controller (RunController): Reference to the run controller object. Required.
component (Component): Reference to the parent component object. Required.
Source code in tm2py/components/component.py
172
173
174
175
176
177
178
179
180
181
182
def __init__(self, controller: RunController, component: Component):
    """Constructor for model sub-component abstract base class.

    Only calls the super class constructor.

    Args:
        controller (RunController): Reference to the run controller object.
        component (Component): Reference to the parent component object.
    """
    super().__init__(controller)
    self.component = component
run(*args, **kwargs) abstractmethod

Run sub-component, allowing for multiple inputs.

Allowing for inputs to the run() method is what differentiates a sub-component from a component.

Source code in tm2py/components/component.py
184
185
186
187
188
189
190
@abstractmethod
def run(self, *args, **kwargs):
    """Run sub-component, allowing for multiple inputs.

    Allowing for inputs to the run() method is what differentiates a sub-component from
    a component.
    """

Demand Components

tm2py.components.demand.prepare_demand

Demand loading from OMX to Emme database.

PrepareDemand

Bases: Component, ABC

Abstract base class to import and average demand.

Source code in tm2py/components/demand/prepare_demand.py
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
class PrepareDemand(Component, ABC):
    """Abstract base class to import and average demand."""

    def __init__(self, controller: RunController):
        """Constructor for PrepareDemand class.

        Args:
            controller (RunController): Run controller for the current run.
        """
        super().__init__(controller)
        self._emmebank = None

    def _read(self, path, name, num_zones, factor=None):
        with OMXManager(path, "r") as omx_file:
            demand = omx_file.read(name)
        if factor is not None:
            demand = factor * demand
        demand = self._redim_demand(demand, num_zones)
        return demand

    @staticmethod
    def _redim_demand(demand, num_zones):
        _shape = demand.shape
        if _shape < (num_zones, num_zones):
            demand = np.pad(
                demand, ((0, num_zones - _shape[0]), (0, num_zones - _shape[1]))
            )
        elif _shape > (num_zones, num_zones):
            raise ValueError(
                f"Provided demand matrix is larger ({_shape}) than the \
                specified number of zones: {num_zones}"
            )

        return demand

    # Disable too many arguments recommendation
    # pylint: disable=R0913
    def _save_demand(self, name, demand, scenario, description="", apply_msa=False):
        matrix = self._emmebank.matrix(f'mf"{name}"')
        msa_iteration = self.controller.iteration
        if not apply_msa or msa_iteration <= 1:
            if not matrix:
                ident = self._emmebank.available_matrix_identifier("FULL")
                matrix = self._emmebank.create_matrix(ident)
                matrix.name = name
                matrix.description = description
        else:
            if not matrix:
                raise Exception(f"error averaging demand: matrix {name} does not exist")
            prev_demand = matrix.get_numpy_data(scenario.id)
            demand = prev_demand + (1.0 / msa_iteration) * (demand - prev_demand)

        matrix.set_numpy_data(demand, scenario.id)

    def _create_zero_matrix(self):
        zero_matrix = self._emmebank.matrix('ms"zero"')
        if zero_matrix is None:
            ident = self._emmebank.available_matrix_identifier("SCALAR")
            zero_matrix = self._emmebank.create_matrix(ident)
            zero_matrix.name = "zero"
            zero_matrix.description = "zero demand matrix"
        zero_matrix.data = 0
__init__(controller)

Constructor for PrepareDemand class.

Parameters:

controller (RunController): Run controller for the current run. Required.
Source code in tm2py/components/demand/prepare_demand.py
20
21
22
23
24
25
26
27
def __init__(self, controller: RunController):
    """Constructor for PrepareDemand class.

    Args:
        controller (RunController): Run controller for the current run.
    """
    super().__init__(controller)
    self._emmebank = None

PrepareHighwayDemand

Bases: PrepareDemand

Import and average highway demand.

Demand is imported from OMX files based on reference file paths and OMX matrix names in highway assignment config (highway.classes). The demand is averaged using MSA with the current demand matrices (in the Emmebank) if the controller.iteration > 1.

Parameters:

controller (RunController): parent RunController object. Required.
Source code in tm2py/components/demand/prepare_demand.py
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
class PrepareHighwayDemand(PrepareDemand):
    """Import and average highway demand.

    Demand is imported from OMX files based on reference file paths and OMX
    matrix names in highway assignment config (highway.classes).
    The demand is averaged using MSA with the current demand matrices
    (in the Emmebank) if the controller.iteration > 1.

    Args:
        controller: parent RunController object
    """

    def __init__(self, controller: RunController):
        """Constructor for PrepareHighwayDemand.

        Args:
            controller (RunController): Reference to run controller object.
        """
        super().__init__(controller)
        self._emmebank_path = None

    def validate_inputs(self):
        # TODO
        pass

    # @LogStartEnd("prepare highway demand")
    def run(self):
        """Open combined demand OMX files from demand models and prepare for assignment."""
        self._emmebank_path = self.get_abs_path(self.config.emme.highway_database_path)

        self._emmebank = self.controller.emme_manager.emmebank(self._emmebank_path)
        self._create_zero_matrix()
        for time in self.time_period_names():
            for klass in self.config.highway.classes:
                self._prepare_demand(klass.name, klass.description, klass.demand, time)

    def _prepare_demand(
        self,
        name: str,
        description: str,
        demand_config: List[Dict[str, Union[str, float]]],
        time_period: str,
    ):
        """Load demand from OMX files and save to Emme matrix for highway assignment.

        Average with previous demand (MSA) if the current iteration > 1

        Args:
            name (str): the name of the highway assignment class
            description (str): the description for the highway assignment class
            demand_config (dict): the list of file cross-reference(s) for the demand to be loaded
                {"source": <name of demand model component>,
                 "name": <OMX key name>,
                 "factor": <factor to apply to demand in this file>}
            time_period (str): the time time_period ID (name)
        """
        scenario = self.get_emme_scenario(self._emmebank_path, time_period)
        num_zones = len(scenario.zone_numbers)
        demand = self._read_demand(demand_config[0], time_period, num_zones)
        for file_config in demand_config[1:]:
            demand = demand + self._read_demand(file_config, time_period, num_zones)
        demand_name = f"{time_period}_{name}"
        description = f"{time_period} {description} demand"
        self._save_demand(demand_name, demand, scenario, description, apply_msa=True)

    def _read_demand(self, file_config, time_period, num_zones):
        # Load demand from cross-referenced source file,
        # the named demand model component under the key highway_demand_file
        source = file_config["source"]
        name = file_config["name"].format(period=time_period.upper())
        factor = file_config.get("factor")
        path = self.get_abs_path(self.config[source].highway_demand_file)
        return self._read(path.format(period=time_period), name, num_zones, factor)
__init__(controller)

Constructor for PrepareHighwayDemand.

Parameters:

controller (RunController): Reference to run controller object. Required.
Source code in tm2py/components/demand/prepare_demand.py
 93
 94
 95
 96
 97
 98
 99
100
def __init__(self, controller: RunController):
    """Constructor for PrepareHighwayDemand.

    Args:
        controller (RunController): Reference to run controller object.
    """
    super().__init__(controller)
    self._emmebank_path = None
run()

Open combined demand OMX files from demand models and prepare for assignment.

Source code in tm2py/components/demand/prepare_demand.py
107
108
109
110
111
112
113
114
115
def run(self):
    """Open combined demand OMX files from demand models and prepare for assignment."""
    self._emmebank_path = self.get_abs_path(self.config.emme.highway_database_path)

    self._emmebank = self.controller.emme_manager.emmebank(self._emmebank_path)
    self._create_zero_matrix()
    for time in self.time_period_names():
        for klass in self.config.highway.classes:
            self._prepare_demand(klass.name, klass.description, klass.demand, time)
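
The demand cross-references described above come from the highway class configuration. A hypothetical TOML fragment (the OMX key, source name, and factor are illustrative) would look like:

[[highway.classes]]
name = "da"
description = "drive alone"
# ... other HighwayClassConfig fields ...

[[highway.classes.demand]]
source = "household"
name = "SOV_GP_{period}"
factor = 1.0

Here source = "household" points at the household component's highway_demand_file (see HouseholdConfig), and {period} is replaced with the time period name when the file and matrix are read.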

Household Demand

Personal travel demand generated by household members.

tm2py.components.demand.household

Placeholder docstring for CT-RAMP related components for household residents’ model.

HouseholdModel

Bases: Component

Run household resident model.

Source code in tm2py/components/demand/household.py
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
class HouseholdModel(Component):
    """Run household resident model."""

    def validate_inputs(self):
        """Validates inputs for component."""
        pass

    @LogStartEnd()
    def run(self):
        """Run the the household resident travel demand model.

        Steps:
            1. Starts household manager.
            2. Starts matrix manager.
            3. Starts resident travel model (CTRAMP).
            4. Cleans up CTRAMP java.
        """
        self._start_household_manager()
        self._start_matrix_manager()
        self._run_resident_model()
        self._stop_java()

    @staticmethod
    def _start_household_manager():
        commands = [
            "CALL CTRAMP\\runtime\\CTRampEnv.bat",
            "set PATH=%CD%\\CTRAMP\runtime;C:\\Windows\\System32;%JAVA_PATH%\bin;"
            "%TPP_PATH%;%PYTHON_PATH%;%PYTHON_PATH%\\condabin;%PYTHON_PATH%\\envs",
            'CALL CTRAMP\\runtime\\runHhMgr.cmd "%JAVA_PATH%" "%HOST_IP_ADDRESS%"',
        ]
        run_process(commands, name="start_household_manager")

    @staticmethod
    def _start_matrix_manager():
        commands = [
            "CALL CTRAMP\\runtime\\CTRampEnv.bat",
            "set PATH=%CD%\\CTRAMP\runtime;C:\\Windows\\System32;%JAVA_PATH%\bin;"
            "%TPP_PATH%;%PYTHON_PATH%;%PYTHON_PATH%\\condabin;%PYTHON_PATH%\\envs",
            'CALL CTRAMP\runtime\runMtxMgr.cmd %HOST_IP_ADDRESS% "%JAVA_PATH%"',
        ]
        run_process(commands, name="start_matrix_manager")

    def _run_resident_model(self):
        sample_rate_iteration = {1: 0.3, 2: 0.5, 3: 1, 4: 0.02, 5: 0.02}
        iteration = self.controller.iteration
        sample_rate = sample_rate_iteration[iteration]
        _shutil.copyfile("CTRAMP\\runtime\\mtctm2.properties", "mtctm2.properties")
        commands = [
            "CALL CTRAMP\\runtime\\CTRampEnv.bat",
            "set PATH=%CD%\\CTRAMP\runtime;C:\\Windows\\System32;%JAVA_PATH%\bin;"
            "%TPP_PATH%;%PYTHON_PATH%;%PYTHON_PATH%\\condabin;%PYTHON_PATH%\\envs",
            f'CALL CTRAMP\runtime\runMTCTM2ABM.cmd {sample_rate} {iteration} "%JAVA_PATH%"',
        ]
        run_process(commands, name="run_resident_model")

    @staticmethod
    def _stop_java():
        run_process(['taskkill /im "java.exe" /F'])
run()

Run the household resident travel demand model.

Steps
  1. Starts household manager.
  2. Starts matrix manager.
  3. Starts resident travel model (CTRAMP).
  4. Cleans up CTRAMP java.
Source code in tm2py/components/demand/household.py, lines 17-30
@LogStartEnd()
def run(self):
    """Run the the household resident travel demand model.

    Steps:
        1. Starts household manager.
        2. Starts matrix manager.
        3. Starts resident travel model (CTRAMP).
        4. Cleans up CTRAMP java.
    """
    self._start_household_manager()
    self._start_matrix_manager()
    self._run_resident_model()
    self._stop_java()
validate_inputs()

Validates inputs for component.

Source code in tm2py/components/demand/household.py, lines 13-15
def validate_inputs(self):
    """Validates inputs for component."""
    pass

tm2py.config.HouseholdConfig

Bases: ConfigItem

Household (residents) model parameters.

Source code in tm2py/config.py, lines 257-262
@dataclass(frozen=True)
class HouseholdConfig(ConfigItem):
    """Household (residents) model parameters."""

    highway_demand_file: pathlib.Path
    transit_demand_file: pathlib.Path
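
A minimal sketch of this config item, constructed directly in Python for illustration only; the file paths are made up, and in a real run they come from the TOML configuration rather than direct construction.

import pathlib
from tm2py.config import HouseholdConfig

# Hypothetical demand file paths produced by the resident (CT-RAMP) model.
household_config = HouseholdConfig(
    highway_demand_file=pathlib.Path("demand_matrices/highway/household/household_demand_{period}.omx"),
    transit_demand_file=pathlib.Path("demand_matrices/transit/household/household_demand_{period}.omx"),
)
print(household_config.highway_demand_file)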

Air Passenger Demand

tm2py.components.demand.air_passenger

Module containing the AirPassenger class which builds the airport trip matrices.

AirPassenger

Bases: Component

Builds the airport trip matrices.

Input: nonres/{year}_{tofrom}{airport}.csv
Output: five time-of-day-specific OMX files with matrices DA, SR2, SR3

Notes: These are independent of level-of-service.

Note that the reference names, years, file paths and other key details are controlled via the config, air_passenger section. See the AirPassengerConfig doc for details on specifying these inputs.

The following details are based on the default config values.

Creates air passenger vehicle trip tables for the Bay Area’s three major airports, namely SFO, OAK, and SJC. Geoff Gosling, a consultant, created vehicle trip tables segmented by time of day, travel mode, and access/egress direction (i.e. to the airport or from the airport) for years 2007 and 2035. The tables are based on a 2006 Air Passenger survey, which was conducted at SFO and OAK (but not SJC).

The travel modes are as follows:

  (a) escort (drive alone, shared ride 2, and shared ride 3+)
  (b) park (da, sr2, & sr3+)
  (c) rental car (da, sr2, & sr3+)
  (d) taxi (da, sr2, & sr3+)
  (e) limo (da, sr2, & sr3+)
  (f) shared ride van (all assumed to be sr3)
  (g) hotel shuttle (all assumed to be sr3)
  (h) charter bus (all assumed to be sr3)

The shared ride van, hotel shuttle, and charter bus modes are assumed to have no deadhead travel. The return escort trip is included, as are the deadhead limo and taxi trips.

The script reads in CSV files adapted from Mr. Gosling's Excel files and creates a highway-assignment-ready OMX matrix file for each time-of-day interval.

Assumes that no air passengers use HOT lanes (probably not exactly true in certain future year scenarios, but the assumption is made here as a simplification). Simple linear interpolations are used to estimate vehicle demand in years other than 2007 and 2035, including 2015, 2020, 2025, 2030, and 2040.
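
For example, the demand for a 2015 scenario is a straight linear blend of the 2007 and 2035 reference tables; a minimal worked sketch for a single origin-destination cell, with made-up demand values:

# Linear interpolation between the two reference years for one OD cell.
start_year, end_year, scenario_year = 2007, 2035, 2015
demand_2007, demand_2035 = 100.0, 170.0
weight = (scenario_year - start_year) / (end_year - start_year)    # 8 / 28
demand_2015 = (1.0 - weight) * demand_2007 + weight * demand_2035  # 120.0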

Transit travel to the airports is not included in these vehicle trip tables.

Input:

Year-, access/egress-, and airport-specific database file with 90 columns of data for each TAZ. There are 18 columns for each time-of-day interval, as follows:

  (1) Escort, drive alone
  (2) Escort, shared ride 2
  (3) Escort, shared ride 3+
  (4) Park, drive alone
  (5) Park, shared ride 2
  (6) Park, shared ride 3+
  (7) Rental car, drive alone
  (8) Rental car, shared ride 2
  (9) Rental car, shared ride 3+
  (10) Taxi, drive alone
  (11) Taxi, shared ride 2
  (12) Taxi, shared ride 3+
  (13) Limo, drive alone
  (14) Limo, shared ride 2
  (15) Limo, shared ride 3+
  (16) Shared ride van, shared ride 3+
  (17) Hotel shuttle, shared ride 3+
  (18) Charter bus, shared ride 3+

Output:

Five time-of-day-specific tables, each containing origin/destination vehicle matrices for the following modes:

  (1) drive alone (DA)
  (2) shared ride 2 (SR2)
  (3) shared ride 3+ (SR3)

Internal properties:
  _start_year
  _end_year
  _mode_groups
  _out_names

Source code in tm2py/components/demand/air_passenger.py, lines 23-286
class AirPassenger(Component):
    """Builds the airport trip matrices.

    input: nonres/{year}_{tofrom}{airport}.csv
    output: five time-of-day-specific OMX files with matrices DA, SR2, SR3

    Notes:
    These are independent of level-of-service.

    Note that the reference names, years, file paths and other key details
    are controlled via the config, air_passenger section. See the
    AirPassengerConfig doc for details on specifying these inputs.

    The following details are based on the default config values.

    Creates air passenger vehicle trip tables for the Bay Area's three major
    airports, namely SFO, OAK, and SJC.  Geoff Gosling, a consultant, created
    vehicle trip tables segmented by time of day, travel mode, and access/egress
    direction (i.e. to the airport or from the airport) for years 2007 and 2035.
    The tables are based on a 2006 Air Passenger survey, which was conducted
    at SFO and OAK (but not SJC).

    The travel modes are as follows:
        (a) escort (drive alone, shared ride 2, and shared ride 3+)
        (b) park (da, sr2, & sr3+)
        (c) rental car (da, sr2, & sr3+)
        (d) taxi ((da, sr2, & sr3+)
        (e) limo (da, sr2, & sr3+)
        (f) shared ride van (all assumed to be sr3);
        (g) hotel shuttle (all assumed to be sr3); and,
        (h) charter bus (all assumed to be sr3).

    The shared ride van, hotel shuttle, and charter bus modes are assumed to
    have no deadhead travel. The return escort trip is included, as are the
    deadhead limo and taxi trips.

    The scripts reads in csv files adapted from Mr. Gosling's Excel files,
    and creates a highway-assignment ready OMX matrix file for each time-of-day
    interval.

    Assumes that no air passengers use HOT lanes (probably not exactly true
    in certain future year scenarios, but the assumption is made here as a
    simplification).  Simple linear interpolations are used to estimate vehicle
    demand in years other than 2007 and 2035, including 2015, 2020, 2025, 2030,
    and 2040.

    Transit travel to the airports is not included in these vehicle trip tables.

    Input:
        Year-, access/egress-, and airport-specific database file with 90 columns
        of data for each TAZ.  There are 18 columns for each time-of-day interval
        as follows:
                (1)   Escort, drive alone
                (2)   Escort, shared ride 2
                (3)   Escort, shared ride 3+
                (4)   Park, drive alone
                (5)   Park, shared ride 2
                (6)   Park, shared ride 3+
                (7)   Rental car, drive alone
                (8)   Rental car, shared ride 2
                (9)   Rental car, shared ride 3+
                (10)  Taxi, drive alone
                (11)  Taxi, shared ride 2
                (12)  Taxi, shared ride 3+
                (13)  Limo, drive alone
                (14)  Limo, shared ride 2
                (15)  Limo, shared ride 3+
                (16)  Shared ride van, shared ride 3+
                (17)  Hotel shuttle, shared ride 3+
                (18)  Charter bus, shared ride 3+

     Output:
     Five time-of-day-specific tables, each containing origin/destination vehicle
     matrices for the following modes:
               (1) drive alone (DA)
               (2) shared ride 2 (SR2)
               (3) shared ride 3+ (SR3)

    Internal properties:
        _start_year
        _end_year
        _mode_groups:
        _out_names:
    """

    def __init__(self, controller: RunController):
        """Build the airport trip matrices.

        Args:
            controller: parent Controller object
        """
        super().__init__(controller)

        self.config = self.controller.config.air_passenger

        self.start_year = self.config.reference_start_year
        self.end_year = self.config.reference_end_year
        self.scenario_year = self.controller.config.scenario.year

        self.airports = self.controller.config.air_passenger.airport_names

        self._demand_classes = None
        self._access_mode_groups = None
        self._class_modes = None

    @property
    def classes(self):
        return [c.name for c in self.config.demand_aggregation]

    @property
    def demand_classes(self):
        if not self._demand_classes:
            self._demand_classes = {c.name: c for c in self.config.demand_aggregation}
        return self._demand_classes

    @property
    def access_mode_groups(self):
        if not self._access_mode_groups:
            self._access_mode_groups = {
                c_name: c.access_modes for c_name, c in self.demand_classes.items()
            }
        return self._access_mode_groups

    @property
    def class_modes(self):
        if self._class_modes is None:
            self._class_modes = {
                c_name: c.mode for c_name, c in self.demand_classes.items()
            }
        return self._class_modes

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO
        pass

    @LogStartEnd()
    def run(self):
        """Run the Air Passenger Demand model to generate the demand matrices.

        Steps:
            1. Load the demand data from the CSV files.
            2. Aggregate the demand data into the assignable classes.
            3. Create the demand matrices be interpolating the demand data.
            4. Write the demand matrices to OMX files.
        """

        input_demand = self._load_air_pax_demand()
        aggr_demand = self._aggregate_demand(input_demand)

        demand = interpolate_dfs(
            aggr_demand,
            [self.start_year, self.end_year],
            self.scenario_year,
        )
        self._export_result(demand)

    def _load_air_pax_demand(self) -> pd.DataFrame:
        """Loads demand from the CSV files into single pandas dataframe.

        Uses the following configs to determine the input file names and paths:
        - self.config.air_passenger.input_demand_folder
        - self.config.air_passenger.airport_names
        - self.config.air_passenger.reference_start_year
        - self.config.air_passenger.reference_end_year

        Using the pattern: f"{year}_{direction}{airport}.csv"

        Returns: pandas dataframe with the following columns:
            (1) airport
            (2) time_of_day
            (3) access_mode
            (4) demand
        """

        _start_demand_df = self._get_air_demand_for_year(self.start_year)
        _end_demand_df = self._get_air_demand_for_year(self.end_year)

        _air_pax_demand_df = pd.merge(
            _start_demand_df,
            _end_demand_df,
            how="outer",
            suffixes=(f"_{self.start_year}", f"_{self.end_year}"),
            on=["ORIG", "DEST"],
        )

        _grouped_air_pax_demand_df = _air_pax_demand_df.groupby(["ORIG", "DEST"]).sum()
        return _grouped_air_pax_demand_df

    def _input_demand_filename(self, airport, year, direction):
        _file_name = self.config.input_demand_filename_tmpl.format(
            airport=airport, year=year, direction=direction
        )

        return os.path.join(
            self.get_abs_path(self.config.input_demand_folder), _file_name
        )

    def _get_air_demand_for_year(self, year) -> pd.DataFrame:
        """Creates a dataframe of concatenated data from CSVs for all airport x direction combos.

        Args:
            year (str): year of demand

        Returns:
            pd.DataFrame: concatenation of all CSVs that were read in as a dataframe
        """
        _airport_direction = itertools.product(
            self.airports,
            ["to", "from"],
        )
        demand_df = None
        for airport, direction in _airport_direction:
            _df = pd.read_csv(self._input_demand_filename(airport, year, direction))
            if demand_df is not None:
                demand_df = pd.concat([demand_df, _df])
            else:
                demand_df = _df

        return demand_df

    def _aggregate_demand(self, input_demand: pd.DataFrame) -> pd.DataFrame:
        """Aggregate demand accross access modes to assignable classes for each year.

        Args:
            input_demand: pandas dataframe with the columns for each combo of:
                {_period}_{_access}_{_group}_{_year}
        """
        aggr_demand = pd.DataFrame()

        _year_tp_group_accessmode = itertools.product(
            [self.start_year, self.end_year],
            self.time_period_names,
            self.access_mode_groups.items(),
        )

        # TODO This should be done entirely in pandas using group-by
        for _year, _period, (_class, _access_modes) in _year_tp_group_accessmode:
            data = input_demand[
                [f"{_period}_{_access}_{_class}_{_year}" for _access in _access_modes]
            ]
            aggr_demand[f"{_period}_{_class}_{_year}"] = data.sum(axis=1)

        return aggr_demand

    def _export_result(self, demand_df: pd.DataFrame):
        """Export resulting model year demand to OMX files by period."""
        path_tmplt = self.get_abs_path(self.config.output_trip_table_directory)
        os.makedirs(os.path.dirname(path_tmplt), exist_ok=True)

        for _period in self.time_period_names:
            _file_path = os.path.join(
                path_tmplt, self.config.outfile_trip_table_tmp.format(period=_period)
            )
            df_to_omx(
                demand_df,
                {
                    _mode: f"{_period}_{_class}"
                    for _class, _mode in self.class_modes.items()
                },
                _file_path,
                orig_column="ORIG",
                dest_column="DEST",
            )
__init__(controller)

Build the airport trip matrices.

Parameters:

  controller (RunController): parent Controller object. Required.
Source code in tm2py/components/demand/air_passenger.py, lines 108-126
def __init__(self, controller: RunController):
    """Build the airport trip matrices.

    Args:
        controller: parent Controller object
    """
    super().__init__(controller)

    self.config = self.controller.config.air_passenger

    self.start_year = self.config.reference_start_year
    self.end_year = self.config.reference_end_year
    self.scenario_year = self.controller.config.scenario.year

    self.airports = self.controller.config.air_passenger.airport_names

    self._demand_classes = None
    self._access_mode_groups = None
    self._class_modes = None
run()

Run the Air Passenger Demand model to generate the demand matrices.

Steps
  1. Load the demand data from the CSV files.
  2. Aggregate the demand data into the assignable classes.
  3. Create the demand matrices by interpolating the demand data.
  4. Write the demand matrices to OMX files.
Source code in tm2py/components/demand/air_passenger.py, lines 159-178
@LogStartEnd()
def run(self):
    """Run the Air Passenger Demand model to generate the demand matrices.

    Steps:
        1. Load the demand data from the CSV files.
        2. Aggregate the demand data into the assignable classes.
        3. Create the demand matrices be interpolating the demand data.
        4. Write the demand matrices to OMX files.
    """

    input_demand = self._load_air_pax_demand()
    aggr_demand = self._aggregate_demand(input_demand)

    demand = interpolate_dfs(
        aggr_demand,
        [self.start_year, self.end_year],
        self.scenario_year,
    )
    self._export_result(demand)
validate_inputs()

Validate the inputs.

Source code in tm2py/components/demand/air_passenger.py, lines 154-157
def validate_inputs(self):
    """Validate the inputs."""
    # TODO
    pass

tm2py.config.AirPassengerDemandAggregationConfig

Bases: ConfigItem

Air passenger demand aggregation input parameters.

Properties
Source code in tm2py/config.py, lines 265-281
@dataclass(frozen=True)
class AirPassengerDemandAggregationConfig(ConfigItem):
    """Air passenger demand aggregation input parameters.

    Properties:
        name: (src_group_name) name used for the class group in the input columns
            for the trip tables,
        mode: (result_class_name) name used in the output OMX matrix names, note
            that this should match the expected naming convention in the
            HighwayClassDemandConfig name(s)
        access_modes: list of names used for the access modes in the input
            columns for the trip tables
    """

    name: str
    mode: str
    access_modes: Tuple[str, ...]
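
A minimal sketch of one demand_aggregation entry, constructed directly in Python for illustration only; the name, mode, and access-mode values below are assumptions, not the shipped configuration.

from tm2py.config import AirPassengerDemandAggregationConfig

# Hypothetical entry: aggregate several access-mode columns into one assignable class.
da_group = AirPassengerDemandAggregationConfig(
    name="da",                                 # group name used in the input trip-table columns
    mode="da",                                 # class name used in the output OMX matrix names
    access_modes=("PARK", "RENTAL", "TAXI"),   # illustrative access-mode column names
)
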

Commercial Demand

tm2py.components.demand.commercial

Commercial vehicle / truck model module.

CommercialVehicleModel

Bases: Component

Commercial Vehicle demand model.

Generates truck demand matrices from:
  • land use
  • highway network impedances
  • parameters

Segmented into four truck types:
  (1) very small trucks (two-axle, four-tire)
  (2) small trucks (two-axle, six-tire)
  (3) medium trucks (three-axle)
  (4) large or combination (four or more axle) trucks

Input:
  (1) MAZ csv data file with the employment and household counts
  (2) Skims
  (3) K-Factors
  (4)

Notes: (1) Based on the BAYCAST truck model, no significant updates. (2) Combined Chuck’s calibration adjustments into the NAICS-based model coefficients.
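
The trip generation step (a sub-component not shown on this page) applies land-use coefficients of this kind; a minimal sketch with purely illustrative coefficients and MAZ values, not the calibrated NAICS-based ones:

import pandas as pd

# Purely illustrative land-use data and coefficients for one truck class.
maz_data = pd.DataFrame({"households": [120, 300], "retail_emp": [40, 15], "other_emp": [200, 80]})
coefficients = {"vsmtrk": {"households": 0.05, "retail_emp": 0.5, "other_emp": 0.1}}

tripends = pd.DataFrame(
    {
        f"{trk_class}_productions": sum(maz_data[col] * coef for col, coef in class_coefs.items())
        for trk_class, class_coefs in coefficients.items()
    }
)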

Source code in tm2py/components/demand/commercial.py, lines 58-184
class CommercialVehicleModel(Component):
    """Commercial Vehicle demand model.

    Generates truck demand matrices from:
        - land use
        - highway network impedances
        - parameters

    Segmented into four truck types:
        (1) very small trucks (two-axle, four-tire),
        (2) small trucks (two-axle, six-tire),
        (3) medium trucks (three-axle),
        (4) large or combination (four or more axle) trucks.

    Input:  (1) MAZ csv data file with the employment and household counts.
            (2) Skims
            (3) K-Factors
            (4)
    Output:

    Notes:
    (1) Based on the BAYCAST truck model, no significant updates.
    (2) Combined Chuck's calibration adjustments into the NAICS-based model coefficients.
    """

    def __init__(self, controller: RunController):
        """Constructor for the CommercialVehicleTripGeneration component.

        Args:
            controller (RunController): Run controller for model run.
        """
        super().__init__(controller)

        self.config = self.controller.config.truck
        self.sub_components = {
            "trip generation": CommercialVehicleTripGeneration(controller, self),
            "trip distribution": CommercialVehicleTripDistribution(controller, self),
            "time of day": CommercialVehicleTimeOfDay(controller, self),
            "toll choice": CommercialVehicleTollChoice(controller, self),
        }

        self.trk_impedances = {imp.name: imp for imp in self.config.impedances}

        # Emme matrix management (lazily evaluated)
        self._matrix_cache = None

        # Interim Results
        self.total_tripends_df = None
        self.daily_demand_dict = None
        self.trkclass_tp_demand_dict = None
        self.trkclass_tp_toll_demand_dict = None

    @property
    def purposes(self):
        return list(
            set([trk_class.purpose for trk_class in self.config.trip_gen.classes])
        )

    @property
    def classes(self):
        return [trk_class.name for trk_class in self.config.classes]

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO

    @LogStartEnd()
    def run(self):
        """Run commercial vehicle model."""
        self.total_tripends_df = self.sub_components["trip generation"].run()
        self.daily_demand_dict = self.sub_components["trip distribution"].run(
            self.total_tripends_df
        )
        self.trkclass_tp_demand_dict = self.sub_components["time of day"].run(
            self.daily_demand_dict
        )
        self.trkclass_tp_toll_demand_dict = self.sub_components["toll choice"].run(
            self.trkclass_tp_demand_dict
        )
        self._export_results_as_omx(self.trkclass_tp_toll_demand_dict)

    @property
    def emmebank(self):
        """Reference to highway assignment Emmebank.

        TODO
            This should really be in the controller?
            Or part of network.skims?
        """
        return self.controller.emme_manager.emmebank(
            self.get_abs_path(self.controller.config.emme.highway_database_path)
        )

    @property
    def emme_scenario(self):
        """Return emme scenario from emmebank.

        Use first valid scenario for reference Zone IDs.

        TODO
            This should really be in the controller?
            Or part of network.skims?
        """
        _ref_scenario_id = self.controller.config.time_periods[0].emme_scenario_id
        return self.emmebank.scenario(_ref_scenario_id)

    @property
    def matrix_cache(self):
        """Access to MatrixCache to Emmebank for given emme_scenario."""
        if self._matrix_cache is None:
            self._matrix_cache = MatrixCache(self.emme_scenario)
        return self._matrix_cache

    @LogStartEnd(level="DEBUG")
    def _export_results_as_omx(self, class_demand):
        """Export assignable class demands to OMX files by time-of-day."""
        outdir = self.get_abs_path(self.config.output_trip_table_directory)
        os.makedirs(os.path.dirname(outdir), exist_ok=True)
        for period, matrices in class_demand.items():
            with OMXManager(
                os.path.join(
                    outdir, self.config.outfile_trip_table_tmp.format(period=period)
                ),
                "w",
            ) as output_file:
                for name, data in matrices.items():
                    output_file.write_array(data, name)
__init__(controller)

Constructor for the CommercialVehicleTripGeneration component.

Parameters:

  controller (RunController): Run controller for model run. Required.
Source code in tm2py/components/demand/commercial.py, lines 83-108
def __init__(self, controller: RunController):
    """Constructor for the CommercialVehicleTripGeneration component.

    Args:
        controller (RunController): Run controller for model run.
    """
    super().__init__(controller)

    self.config = self.controller.config.truck
    self.sub_components = {
        "trip generation": CommercialVehicleTripGeneration(controller, self),
        "trip distribution": CommercialVehicleTripDistribution(controller, self),
        "time of day": CommercialVehicleTimeOfDay(controller, self),
        "toll choice": CommercialVehicleTollChoice(controller, self),
    }

    self.trk_impedances = {imp.name: imp for imp in self.config.impedances}

    # Emme matrix management (lazily evaluated)
    self._matrix_cache = None

    # Interim Results
    self.total_tripends_df = None
    self.daily_demand_dict = None
    self.trkclass_tp_demand_dict = None
    self.trkclass_tp_toll_demand_dict = None
emme_scenario() property

Return emme scenario from emmebank.

Use first valid scenario for reference Zone IDs.

TODO This should really be in the controller? Or part of network.skims?

Source code in tm2py/components/demand/commercial.py, lines 151-162
@property
def emme_scenario(self):
    """Return emme scenario from emmebank.

    Use first valid scenario for reference Zone IDs.

    TODO
        This should really be in the controller?
        Or part of network.skims?
    """
    _ref_scenario_id = self.controller.config.time_periods[0].emme_scenario_id
    return self.emmebank.scenario(_ref_scenario_id)
emmebank() property

Reference to highway assignment Emmebank.

TODO This should really be in the controller? Or part of network.skims?

Source code in tm2py/components/demand/commercial.py, lines 139-149
@property
def emmebank(self):
    """Reference to highway assignment Emmebank.

    TODO
        This should really be in the controller?
        Or part of network.skims?
    """
    return self.controller.emme_manager.emmebank(
        self.get_abs_path(self.controller.config.emme.highway_database_path)
    )
matrix_cache() property

Access to MatrixCache to Emmebank for given emme_scenario.

Source code in tm2py/components/demand/commercial.py, lines 164-169
@property
def matrix_cache(self):
    """Access to MatrixCache to Emmebank for given emme_scenario."""
    if self._matrix_cache is None:
        self._matrix_cache = MatrixCache(self.emme_scenario)
    return self._matrix_cache
run()

Run commercial vehicle model.

Source code in tm2py/components/demand/commercial.py, lines 124-137
@LogStartEnd()
def run(self):
    """Run commercial vehicle model."""
    self.total_tripends_df = self.sub_components["trip generation"].run()
    self.daily_demand_dict = self.sub_components["trip distribution"].run(
        self.total_tripends_df
    )
    self.trkclass_tp_demand_dict = self.sub_components["time of day"].run(
        self.daily_demand_dict
    )
    self.trkclass_tp_toll_demand_dict = self.sub_components["toll choice"].run(
        self.trkclass_tp_demand_dict
    )
    self._export_results_as_omx(self.trkclass_tp_toll_demand_dict)
validate_inputs()

Validate the inputs.

Source code in tm2py/components/demand/commercial.py, lines 120-121
def validate_inputs(self):
    """Validate the inputs."""

CommercialVehicleTimeOfDay

Bases: Subcomponent

Commercial vehicle (truck) Time of Day Split for 4 sizes of truck.

Input: Trip origin and destination matrices by 4 truck sizes.
Output: 20 trip origin and destination matrices (4 truck sizes by 5 time periods).

Note: The diurnal factors are taken from the BAYCAST-90 model, with adjustments made during calibration to the very small truck values to better match counts.
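
A minimal sketch of that split for one truck class, with made-up diurnal factors (the real values come from the truck config's time_of_day section):

import numpy as np

# Illustrative daily OD matrix and diurnal factors; the factors sum to 1.0.
daily_demand = np.array([[10.0, 4.0], [6.0, 2.0]])
period_factors = {"ea": 0.05, "am": 0.20, "md": 0.35, "pm": 0.25, "ev": 0.15}
period_demand = {
    period: np.around(factor * daily_demand, decimals=2)
    for period, factor in period_factors.items()
}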

Source code in tm2py/components/demand/commercial.py, lines 756-839
class CommercialVehicleTimeOfDay(Subcomponent):
    """Commercial vehicle (truck) Time of Day Split for 4 sizes of truck.

    Input:  Trips origin and destination matrices by 4 truck sizes
    Ouput:  20 trips origin and destination matrices by 4 truck sizes by 5 times periods

    Note:
        The diurnal factors are taken from the BAYCAST-90 model with adjustments made
    during calibration to the very small truck values to better match counts.
    """

    def __init__(self, controller: RunController, component: Component):
        """Constructor for the CommercialVehicleTimeOfDay component.

        Args:
            controller (RunController): Run controller for model run.
            component (Component): Parent component of sub-component
        """
        super().__init__(controller, component)

        self.config = self.component.config.time_of_day

        self.split_factor = "od"
        self._class_configs = None
        self._class_period_splits = None

    @property
    def time_periods(self):
        return self.controller.config.time_periods

    @property
    def classes(self):
        return [trk_class.name for trk_class in self.config.classes]

    @property
    def class_configs(self):
        if not self._class_configs:
            self._class_configs = {c.name: c for c in self.config.classes}
        return self._class_configs

    @property
    def class_period_splits(self):
        """Returns split fraction dictonary mapped to [time period class][time period]."""
        if not self._class_period_splits:
            self._class_period_splits = {
                c_name: {c.time_period: c for c in config.time_period_split}
                for c_name, config in self.class_configs.items()
            }

        return self._class_period_splits

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO
        pass

    @LogStartEnd()
    def run(
        self, daily_demand: Dict[str, NumpyArray]
    ) -> Dict[str, Dict[str, NumpyArray]]:
        """Splits the daily demand by time of day based on factors in the config.

        Uses self.config.truck.classes.{class_name}.time_of_day_split to split the daily demand.

        #TODO use TimePeriodSplit
        Args:
            daily_demand: dictionary of truck type name to numpy array of
                truck type daily demand

        Returns:
             Nested dictionary of truck class: time period name => numpy array of demand
        """
        trkclass_tp_demand_dict = defaultdict(dict)

        _class_timeperiod = itertools.product(self.classes, self.time_period_names)

        for _t_class, _tp in _class_timeperiod:
            trkclass_tp_demand_dict[_t_class][_tp] = np.around(
                self.class_period_splits[_t_class][_tp.lower()][self.split_factor]
                * daily_demand[_t_class],
                decimals=2,
            )

        return trkclass_tp_demand_dict
__init__(controller, component)

Constructor for the CommercialVehicleTimeOfDay component.

Parameters:

  controller (RunController): Run controller for model run. Required.
  component (Component): Parent component of sub-component. Required.
Source code in tm2py/components/demand/commercial.py, lines 767-780
def __init__(self, controller: RunController, component: Component):
    """Constructor for the CommercialVehicleTimeOfDay component.

    Args:
        controller (RunController): Run controller for model run.
        component (Component): Parent component of sub-component
    """
    super().__init__(controller, component)

    self.config = self.component.config.time_of_day

    self.split_factor = "od"
    self._class_configs = None
    self._class_period_splits = None
class_period_splits() property

Returns split fraction dictionary mapped to [time period class][time period].

Source code in tm2py/components/demand/commercial.py, lines 796-805
@property
def class_period_splits(self):
    """Returns split fraction dictonary mapped to [time period class][time period]."""
    if not self._class_period_splits:
        self._class_period_splits = {
            c_name: {c.time_period: c for c in config.time_period_split}
            for c_name, config in self.class_configs.items()
        }

    return self._class_period_splits
run(daily_demand)

Splits the daily demand by time of day based on factors in the config.

Uses self.config.truck.classes.{class_name}.time_of_day_split to split the daily demand.

TODO use TimePeriodSplit

Parameters:

  daily_demand (Dict[str, NumpyArray]): dictionary of truck type name to numpy array of truck type daily demand. Required.

Returns:

  Dict[str, Dict[str, NumpyArray]]: Nested dictionary of truck class: time period name => numpy array of demand.

Source code in tm2py/components/demand/commercial.py, lines 812-839
@LogStartEnd()
def run(
    self, daily_demand: Dict[str, NumpyArray]
) -> Dict[str, Dict[str, NumpyArray]]:
    """Splits the daily demand by time of day based on factors in the config.

    Uses self.config.truck.classes.{class_name}.time_of_day_split to split the daily demand.

    #TODO use TimePeriodSplit
    Args:
        daily_demand: dictionary of truck type name to numpy array of
            truck type daily demand

    Returns:
         Nested dictionary of truck class: time period name => numpy array of demand
    """
    trkclass_tp_demand_dict = defaultdict(dict)

    _class_timeperiod = itertools.product(self.classes, self.time_period_names)

    for _t_class, _tp in _class_timeperiod:
        trkclass_tp_demand_dict[_t_class][_tp] = np.around(
            self.class_period_splits[_t_class][_tp.lower()][self.split_factor]
            * daily_demand[_t_class],
            decimals=2,
        )

    return trkclass_tp_demand_dict
validate_inputs()

Validate the inputs.

Source code in tm2py/components/demand/commercial.py, lines 807-810
def validate_inputs(self):
    """Validate the inputs."""
    # TODO
    pass

CommercialVehicleTollChoice

Bases: Subcomponent

Commercial vehicle (truck) toll choice.

A binomial choice model for very small, small, medium, and large trucks. A separate value toll paying versus no value toll paying path choice model is applied to each of the twenty time period and vehicle type combinations.

Input:
  (1) Trip tables by time of day and truck class
  (2) Skims providing the time and cost for value toll and non-value toll paths for each; the matrix names in the OMX files are:
        "{period}_{cls_name}_time"
        "{period}_{cls_name}_dist"
        "{period}_{cls_name}_bridgetoll{grp_name}"
        "{period}_{cls_name}toll_time"
        "{period}_{cls_name}toll_dist"
        "{period}_{cls_name}toll_bridgetoll{grp_name}"
        "{period}_{cls_name}toll_valuetoll{grp_name}"
      where period is the assignment period, cls_name is the truck assignment class name (as very small, small and medium trucks are assigned as the same class) and grp_name is the truck type name (as the tolls are calculated separately for very small, small and medium).

Results: a total of forty demand matrices, by time of day, truck type and toll/non-toll.

Notes:
  (1) TOLLCLASS is a code; 1 through 10 are reserved for bridges and 11 and up is reserved for value toll facilities.
  (2) All costs should be coded in year 2000 cents.
  (3) The 2-axle fee is used for very small trucks.
  (4) The 2-axle fee is used for small trucks.
  (5) The 3-axle fee is used for medium trucks.
  (6) The average of the 5-axle and 6-axle fee is used for large trucks (about the midpoint of the fee schedule).
  (7) The in-vehicle time coefficient is from the work trip mode choice model.
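
As described above, the split is a binomial choice between the toll and non-toll path for each OD pair. A minimal sketch of such a split using a simple logit share, with made-up coefficients and level-of-service values; this is an illustration of the choice structure, not the calibrated tm2py model.

import numpy as np

# Made-up utility coefficients and level-of-service values for illustration only.
time_coefficient = -0.025                 # utility per minute of in-vehicle time
nontoll_time_min, toll_time_min = 22.0, 15.0
toll_as_equivalent_minutes = 9.0          # value toll converted to minutes of time

utility_nontoll = time_coefficient * nontoll_time_min
utility_toll = time_coefficient * (toll_time_min + toll_as_equivalent_minutes)
toll_share = np.exp(utility_toll) / (np.exp(utility_toll) + np.exp(utility_nontoll))

period_class_trips = 100.0
toll_trips = toll_share * period_class_trips
nontoll_trips = (1.0 - toll_share) * period_class_trips
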
Source code in tm2py/components/demand/commercial.py, lines 842-928
class CommercialVehicleTollChoice(Subcomponent):
    """Commercial vehicle (truck) toll choice.

    A binomial choice model for very small, small, medium, and large trucks.
    A separate value toll paying versus no value toll paying path choice
    model is applied to each of the twenty time period and vehicle type combinations.

    Input:  (1) Trip tables by time of day and truck class
            (2) Skims providing the time and cost for value toll and non-value toll paths
            for each; the matrix names in the OMX files are:
                "{period}_{cls_name}_time"
                "{period}_{cls_name}_dist"
                "{period}_{cls_name}_bridgetoll{grp_name}"
                "{period}_{cls_name}toll_time"
                "{period}_{cls_name}toll_dist"
                "{period}_{cls_name}toll_bridgetoll{grp_name}"
                "{period}_{cls_name}toll_valuetoll{grp_name}"
            Where period is the assignment period, cls_name is the truck assignment
            class name (as very small, small and medium truck are assigned as the
            same class) and grp_name is the truck type name (as the tolls are
            calculated separately for very small, small and medium).

    Results: a total of forty demand matrices, by time of day, truck type and toll/non-toll.

    Notes:  (1)  TOLLCLASS is a code, 1 through 10 are reserved for bridges; 11 and up is
                 reserved for value toll facilities.
            (2)  All costs should be coded in year 2000 cents
            (3)  The 2-axle fee is used for very small trucks
            (4)  The 2-axle fee is used for small trucks
            (5)  The 3-axle fee is used for medium trucks
            (6)  The average of the 5-axle and 6-axle fee is used for large trucks
                 (about the midpoint of the fee schedule).
            (7)  The in-vehicle time coefficient is from the work trip mode choice model.
    """

    def __init__(self, controller, component):
        """Constructor for Commercial Vehicle Toll Choice.

        Also calls Subclass __init__().

        Args:
            controller: model run controller
            component: parent component
        """
        super().__init__(controller, component)

        self.config = self.component.config.toll_choice

        self.sub_components = {
            "toll choice calculator": TollChoiceCalculator(
                controller,
                self,
                self.config,
            ),
        }

        # shortcut
        self._toll_choice = self.sub_components["toll choice calculator"]
        self._toll_choice.toll_skim_suffix = "trk"

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO
        pass

    @LogStartEnd()
    def run(self, trkclass_tp_demand_dict):
        """Split per-period truck demands into nontoll and toll classes.

        Uses OMX skims output from highway assignment: traffic_skims_{period}.omx"""

        _tclass_time_combos = itertools.product(
            self.time_period_names, self.config.classes
        )

        class_demands = defaultdict(dict)
        for _time_period, _tclass in _tclass_time_combos:

            _split_demand = self._toll_choice.run(
                trkclass_tp_demand_dict[_tclass.name][_time_period],
                _tclass.name,
                _time_period,
            )

            class_demands[_time_period][_tclass.name] = _split_demand["non toll"]
            class_demands[_time_period][f"{_tclass.name}toll"] = _split_demand["toll"]
        return class_demands
__init__(controller, component)

Constructor for Commercial Vehicle Toll Choice.

Also calls Subclass __init__().

Parameters:

  controller: model run controller. Required.
  component: parent component. Required.
Source code in tm2py/components/demand/commercial.py, lines 877-900
def __init__(self, controller, component):
    """Constructor for Commercial Vehicle Toll Choice.

    Also calls Subclass __init__().

    Args:
        controller: model run controller
        component: parent component
    """
    super().__init__(controller, component)

    self.config = self.component.config.toll_choice

    self.sub_components = {
        "toll choice calculator": TollChoiceCalculator(
            controller,
            self,
            self.config,
        ),
    }

    # shortcut
    self._toll_choice = self.sub_components["toll choice calculator"]
    self._toll_choice.toll_skim_suffix = "trk"
run(trkclass_tp_demand_dict)

Split per-period truck demands into nontoll and toll classes.

Uses OMX skims output from highway assignment: traffic_skims_{period}.omx
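
A minimal sketch of pulling one of those per-period skims with the openmatrix package, assuming the run directory contains traffic_skims_am.omx with a matrix named "am_trk_time" (both names are illustrative assumptions):

import numpy as np
import openmatrix as omx  # OMX trip-table and skim file format

# Read the truck time skim for one period; file and matrix names are assumptions.
with omx.open_file("traffic_skims_am.omx", "r") as skim_file:
    truck_time = np.array(skim_file["am_trk_time"])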

Source code in tm2py/components/demand/commercial.py, lines 907-928
@LogStartEnd()
def run(self, trkclass_tp_demand_dict):
    """Split per-period truck demands into nontoll and toll classes.

    Uses OMX skims output from highway assignment: traffic_skims_{period}.omx"""

    _tclass_time_combos = itertools.product(
        self.time_period_names, self.config.classes
    )

    class_demands = defaultdict(dict)
    for _time_period, _tclass in _tclass_time_combos:

        _split_demand = self._toll_choice.run(
            trkclass_tp_demand_dict[_tclass.name][_time_period],
            _tclass.name,
            _time_period,
        )

        class_demands[_time_period][_tclass.name] = _split_demand["non toll"]
        class_demands[_time_period][f"{_tclass.name}toll"] = _split_demand["toll"]
    return class_demands
validate_inputs()

Validate the inputs.

Source code in tm2py/components/demand/commercial.py, lines 902-905
def validate_inputs(self):
    """Validate the inputs."""
    # TODO
    pass

CommercialVehicleTripDistribution

Bases: Subcomponent

Commercial vehicle (truck) Trip Distribution for 4 sizes of truck.

The four truck types are:
  (1) very small trucks (two-axle, four-tire)
  (2) small trucks (two-axle, six-tire)
  (3) medium trucks (three-axle)
  (4) large or combination (four or more axle) trucks

Input:
  (1) Trips by 4 truck sizes
  (2) highway skims for truck, time, distance, bridgetoll and value toll
  (3) friction factors lookup table
  (4) k-factors matrix

Output: Trip origin and destination matrices by 4 truck sizes

A simple gravity model is used to distribute the truck trips, with separate friction factors used for each class of truck.

A blended travel time is used as the impedance measure, specifically the weighted average of the AM travel time (one-third weight) and the midday travel time (two-thirds weight).
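
A minimal sketch of that blended impedance, using made-up AM and midday truck time matrices:

import numpy as np

# Blend: one-third weight on the AM travel time, two-thirds on the midday travel time.
am_time = np.array([[12.0, 30.0], [28.0, 10.0]])
md_time = np.array([[9.0, 24.0], [22.0, 8.0]])
blended_time = (1.0 / 3.0) * am_time + (2.0 / 3.0) * md_time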

Input

Level-of-service matrices for the AM peak period (6 am to 10 am) and midday period (10 am to 3 pm) which contain truck-class specific estimates of congested travel time (in minutes)

A matrix of k-factors, as calibrated by Chuck Purvis. Note the very small truck model does not use k-factors; the small, medium, and large trucks use the same k-factors.

A table of friction factors in text format with the following fields, space separated:
  - impedance measure (blended travel time);
  - friction factors for very small trucks;
  - friction factors for small trucks;
  - friction factors for medium trucks; and,
  - friction factors for large trucks.
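
A minimal sketch of how such a lookup can be applied, interpolating a friction factor for each OD pair's blended time; all numbers below are made up.

import numpy as np

# Friction-factor lookup table: impedance bands and factors for one truck class.
ff_time = np.array([5.0, 10.0, 20.0, 40.0])        # blended travel time bands (minutes)
ff_vsmtrk = np.array([250.0, 120.0, 40.0, 5.0])    # illustrative very small truck factors
blended_time = np.array([[6.0, 18.0], [25.0, 9.0]])
friction = np.interp(blended_time, ff_time, ff_vsmtrk)  # same shape as blended_time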

Notes on distribution steps:
  - load nonres/truck_kfactors_taz.csv
  - load nonres/truckFF.dat
  - apply friction factors and k-factors to produce a balancing matrix
  - apply the gravity models using friction factors from nonres/truckFF.dat (note the very small trucks do not use the K-factors)
  - can use Emme matrix balancing for this; important note: reference matrices by name and ensure names are unique
  - trips are rounded to 0.01, which causes some instability in results

Notes: (1) Based on the BAYCAST truck model, no significant updates. (2) Combined Chuck’s calibration adjustments into the NAICS-based model coefficients.
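
The gravity step itself is run with the Emme matrix balancing tool; as a stand-in, the same doubly constrained balancing can be sketched with a small iterative proportional fitting loop, using made-up inputs:

import numpy as np

def balance(seed, productions, attractions, iterations=50):
    """Scale a seed (friction) matrix so row sums match productions and column sums match attractions."""
    trips = seed.astype(float).copy()
    for _ in range(iterations):
        trips *= (productions / trips.sum(axis=1))[:, np.newaxis]  # match origin totals
        trips *= attractions / trips.sum(axis=0)                   # match destination totals
    return trips

friction = np.array([[5.0, 2.0], [3.0, 8.0]])
productions = np.array([100.0, 50.0])
attractions = np.array([80.0, 70.0])
balanced = balance(friction, productions, attractions)
daily_demand = 0.5 * balanced + 0.5 * balanced.T  # 50/50 origin/destination factoring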

Source code in tm2py/components/demand/commercial.py, lines 414-753
class CommercialVehicleTripDistribution(Subcomponent):
    """Commercial vehicle (truck) Trip Distribution for 4 sizes of truck.

    The four truck types are:
        (1) very small trucks (two-axle, four-tire),
        (2) small trucks (two-axle, six-tire),
        (3) medium trucks (three-axle),
        (4) large or combination (four or more axle) trucks.

    Input:  (1) Trips by 4 truck sizes
            (2) highway skims for truck, time, distance, bridgetoll and value toll
            (3) friction factors lookup table
            (4) k-factors matrix
    Ouput:  Trips origin and destination matrices by 4 truck sizes

    A simple gravity model is used to distribute the truck trips, with
    separate friction factors used for each class of truck.

    A blended travel time is used as the impedance measure, specifically the weighted average
    of the AM travel time (one-third weight) and the midday travel time (two-thirds weight).

    Input:
        Level-of-service matrices for the AM peak period (6 am to 10 am) and midday
        period (10 am to 3 pm) which contain truck-class specific estimates of
        congested travel time (in minutes)

        A matrix of k-factors, as calibrated by Chuck Purvis.  Note the very small truck model
        does not use k-factors; the small, medium, and large trucks use the same k-factors.

        A table of friction factors in text format with the following fields, space separated:
        - impedance measure (blended travel time);
        - friction factors for very small trucks;
        - friction factors for small trucks;
        - friction factors for medium trucks; and,
        - friction factors for large trucks.

    Notes on distribution steps:
        load nonres/truck_kfactors_taz.csv
        load nonres/truckFF.dat
        Apply friction factors and kfactors to produce balancing matrix
        apply the gravity models using friction factors from nonres/truckFF.dat
        (note the very small trucks do not use the K-factors)
        Can use Emme matrix balancing for this - important note: reference
        matrices by name and ensure names are unique
        Trips rounded to 0.01, causes some instability in results

    Results: four total daily trips by truck type

    Notes:
    (1) Based on the BAYCAST truck model, no significant updates.
    (2) Combined Chuck's calibration adjustments into the NAICS-based model coefficients.

    """

    def __init__(self, controller: RunController, component: Component):
        """Constructor for the CommercialVehicleTripDistribution component.

        Args:
            controller (RunController): Run controller for model run.
            component (Component): Parent component of sub-component
        """
        super().__init__(controller, component)

        self.config = self.component.config.trip_dist
        self._k_factors = None
        self._blended_skims = {}
        self._friction_factors = None
        self._friction_factor_matrices = {}

        self._class_config = None

    @property
    def class_config(self):
        if not self._class_config:
            self._class_config = {c.name: c for c in self.config.classes}

        return self._class_config

    @property
    def k_factors(self):
        """Zone-to-zone values of truck K factors.

        Returns:
             NumpyArray: Zone-to-zone values of truck K factors.
        """
        if self._k_factors is None:
            self._k_factors = self._load_k_factors()
        return self._k_factors

    def _load_k_factors(self):
        """Loads k-factors from self.config.truck.k_factors_file csv file.

        Returns:
            NumpyArray: Zone-to-zone values of truck K factors.

        """
        """return zonal_csv_to_matrices(
            self.get_abs_path(self.config.k_factors_file),
            i_column="I_taz_tm2_v2_2",
            j_column="J_taz_tm2_v2_2",
            value_columns="truck_k",
            fill_zones=True,
            default_value=0,
            max_zone=max(self.component.emme_scenario.zone_numbers),
        )["truck_k"].values"""
        data = pd.read_csv(self.get_abs_path(self.config.k_factors_file))
        zones = np.unique(data["I_taz_tm2_v2_2"])
        num_data_zones = len(zones)
        row_index = np.searchsorted(zones, data["I_taz_tm2_v2_2"])
        col_index = np.searchsorted(zones, data["J_taz_tm2_v2_2"])
        k_factors = np.zeros((num_data_zones, num_data_zones))
        k_factors[row_index, col_index] = data["truck_k"]
        num_zones = len(self.component.emme_scenario.zone_numbers)
        padding = ((0, num_zones - num_data_zones), (0, num_zones - num_data_zones))
        k_factors = np.pad(k_factors, padding)

        return k_factors

    def blended_skims(self, mode: str):
        """Get blended skim. Creates it if doesn't already exist.

        Args:
            mode (str): Mode for skim

        Returns:
            _type_: _description_
        """
        if mode not in self._blended_skims:
            self._blended_skims[mode] = get_blended_skim(
                self.controller,
                mode=mode,
                blend=self.component.trk_impedances[mode]["time_blend"],
            )
        return self._blended_skims[mode]

    def friction_factor_matrices(
        self, trk_class: str, k_factors: Union[None, NumpyArray] = None
    ) -> NumpyArray:
        """Zone to zone NumpyArray of impedances for a given truck class.

        Args:
            trk_class (str): Truck class abbreviated name
            k_factors (Union[None,NumpyArray]): If not None, gives an zone-by-zone array of
                k-factors--additive impedances to be added on top of friciton factors.
                Defaults to None.

        Returns:
            NumpyArray: Zone-by-zone matrix of friction factors
        """
        if not self._friction_factor_matrices.get(trk_class):
            self._friction_factor_matrices[
                trk_class
            ] = self._calculate_friction_factor_matrix(
                trk_class,
                self.class_config[trk_class].impedance,
                self.k_factors,
                self.class_config[trk_class].use_k_factors,
            )

        return self._friction_factor_matrices[trk_class]

    @LogStartEnd(level="DEBUG")
    def _calculate_friction_factor_matrix(
        self,
        segment_name,
        blended_skim_name: str,
        k_factors: Union[None, NumpyArray] = None,
        use_k_factors: bool = False,
    ):
        """Calculates friction matrix by interpolating time; optionally multiplying by k_factors.

        Args:
            segment_name: Name of the segment to calculate the friction factors for (i.e. vstruck)
            blended_skim_name (str): Name of blended skim
            k_factors (Union[None,NumpyArray): Optional k-factors matrix

        Returns:
            friction_matrix NumpyArray: friction matrix for a truck class
        """
        _friction_matrix = np.interp(
            self.blended_skims(blended_skim_name),
            self.friction_factors["time"].tolist(),
            self.friction_factors[segment_name],
        )

        if use_k_factors:
            if k_factors is not None:
                _friction_matrix = _friction_matrix * k_factors

        return _friction_matrix

    @property
    def friction_factors(self):
        """Table of friction factors for each time band by truck class.

        Returns:
            pd.DataFrame: DataFrame of friction factors read from disk.
        """
        if self._friction_factors is None:
            self._friction_factors = self._read_ffactors()
        return self._friction_factors

    def _read_ffactors(self) -> pd.DataFrame:
        """Load friction factors lookup tables from csv file to dataframe.

        Reads from file: config.truck.friction_factors_file with following assumed column order:
            time: Time
            vsmtrk: Very Small Truck FF
            smltrk: Small Truck FF
            medtrk: Medium Truck FF
            lrgtrk: Large Truck FF
        """
        _file_path = self.get_abs_path(self.config.friction_factors_file)
        return pd.read_csv(_file_path)

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO
        pass

    @LogStartEnd()
    def run(self, tripends_df) -> Dict[str, NumpyArray]:
        """Run commercial vehicle trip distribution."""
        daily_demand_dict = {
            tc: self._distribute_ods(tripends_df, tc) for tc in self.component.classes
        }

        return daily_demand_dict

    @LogStartEnd(level="DEBUG")
    def _distribute_ods(
        self,
        tripends_df: pd.DataFrame,
        trk_class: str,
        orig_factor: float = 0.5,
        dest_factor: float = 0.5,
    ) -> NumpyArray:
        """Distribute a trip ends for a given a truck class.

        Args:
            tripends_df: dataframe with trip ends as "{trk_class}_prod" and{trk_class}_attr".
            trk_class: name of truck class to distribute.
            orig_factor (float, optional): Amount to factor towards origins. Defaults to 0.5.
            dest_factor (float, optional): Amount to factor towards destinations. Defaults to 0.5.

        Returns:
            NumpyArray: Distributed trip ends for given truck class
        """
        if orig_factor + dest_factor != 1.0:
            raise ValueError(
                "orig_factor ({orig_factor}) and dest_factor ({dest_factor}) must\
                sum to 1.0"
            )

        _prod_attr_matrix = self._matrix_balancing(
            tripends_df[f"{trk_class}_productions"].to_numpy(),
            tripends_df[f"{trk_class}_attractions"].to_numpy(),
            trk_class,
        )
        daily_demand = (
            orig_factor * _prod_attr_matrix
            + dest_factor * _prod_attr_matrix.transpose()
        )

        self.logger.log(
            f"{trk_class}, prod sum: {_prod_attr_matrix.sum()}, "
            f"daily sum: {daily_demand.sum()}",
            level="DEBUG",
        )

        return daily_demand

    def _matrix_balancing(
        self,
        orig_totals: NumpyArray,
        dest_totals: NumpyArray,
        trk_class: str,
    ) -> NumpyArray:
        """Distribute origins and destinations based on friction factors for a givein truck class.

        Args:
            orig_totals: Total demand for origins as a numpy array
            dest_totals: Total demand for destinations as a numpy array
            trk_class (str): Truck class name



        """
        matrix_balancing = self.controller.emme_manager.tool(
            "inro.emme.matrix_calculation.matrix_balancing"
        )
        matrix_round = self.controller.emme_manager.tool(
            "inro.emme.matrix_calculation.matrix_controlled_rounding"
        )

        # Transfer numpy to emmebank
        _ff_emme_mx_name = self.component.matrix_cache.set_data(
            f"{trk_class}_friction",
            self.friction_factor_matrices(trk_class),
            matrix_type="FULL",
        ).name

        _orig_tots_emme_mx_name = self.component.matrix_cache.set_data(
            f"{trk_class}_prod", orig_totals, matrix_type="ORIGIN"
        ).name

        _dest_tots_emme_mx_name = self.component.matrix_cache.set_data(
            f"{trk_class}_attr", dest_totals, matrix_type="DESTINATION"
        ).name

        # Create a destination matrix for output to live in Emmebank
        _result_emme_mx_name = self.component.matrix_cache.get_or_init_matrix(
            f"{trk_class}_daily_demand"
        ).name

        spec = {
            "od_values_to_balance": _ff_emme_mx_name,
            "origin_totals": _orig_tots_emme_mx_name,
            "destination_totals": _dest_tots_emme_mx_name,
            "allowable_difference": 0.01,
            "max_relative_error": self.config.max_balance_relative_error,
            "max_iterations": self.config.max_balance_iterations,
            "results": {"od_balanced_values": _result_emme_mx_name},
            "performance_settings": {
                "allowed_memory": None,
                "number_of_processors": self.controller.num_processors,
            },
            "type": "MATRIX_BALANCING",
        }
        matrix_balancing(spec, scenario=self.component.emme_scenario)

        matrix_round(
            _result_emme_mx_name,
            _result_emme_mx_name,
            min_demand=0.01,
            values_to_round="ALL_NON_ZERO",
            scenario=self.component.emme_scenario,
        )

        return self.component.matrix_cache.get_data(_result_emme_mx_name)
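
The orig_factor/dest_factor split in _distribute_ods (0.5/0.5 by default) averages the balanced production/attraction matrix with its transpose to produce a symmetric daily origin-destination table. A minimal numpy sketch with hypothetical values:

    import numpy as np

    # Hypothetical 3-zone balanced P/A matrix (rows = productions, cols = attractions)
    prod_attr = np.array(
        [[0.0, 10.0, 5.0],
         [2.0, 0.0, 8.0],
         [4.0, 6.0, 0.0]]
    )

    orig_factor, dest_factor = 0.5, 0.5  # defaults used by _distribute_ods

    # Weight the matrix and its transpose, then add; with 0.5/0.5 this is a plain average
    daily_demand = orig_factor * prod_attr + dest_factor * prod_attr.T

    # Because the two factors must sum to 1.0, total trips are preserved
    assert np.isclose(daily_demand.sum(), prod_attr.sum())
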
__init__(controller, component)

Constructor for the CommercialVehicleTripDistribution component.

Parameters:

    controller (RunController): Run controller for model run. Required.
    component (Component): Parent component of sub-component. Required.
Source code in tm2py/components/demand/commercial.py, lines 468-483
def __init__(self, controller: RunController, component: Component):
    """Constructor for the CommercialVehicleTripDistribution component.

    Args:
        controller (RunController): Run controller for model run.
        component (Component): Parent component of sub-component
    """
    super().__init__(controller, component)

    self.config = self.component.config.trip_dist
    self._k_factors = None
    self._blended_skims = {}
    self._friction_factors = None
    self._friction_factor_matrices = {}

    self._class_config = None
blended_skims(mode)

Get blended skim. Creates it if it doesn’t already exist.

Parameters:

    mode (str): Mode for skim. Required.

Returns:

    The blended skim for the given mode.

Source code in tm2py/components/demand/commercial.py, lines 532-547
def blended_skims(self, mode: str):
    """Get blended skim. Creates it if doesn't already exist.

    Args:
        mode (str): Mode for skim

    Returns:
        The blended skim for the given mode.
    """
    if mode not in self._blended_skims:
        self._blended_skims[mode] = get_blended_skim(
            self.controller,
            mode=mode,
            blend=self.component.trk_impedances[mode]["time_blend"],
        )
    return self._blended_skims[mode]
friction_factor_matrices(trk_class, k_factors=None)

Zone to zone NumpyArray of impedances for a given truck class.

Parameters:

    trk_class (str): Truck class abbreviated name. Required.
    k_factors (Union[None, NumpyArray]): If not None, a zone-by-zone array of k-factors
        (additive impedances added on top of friction factors). Defaults to None.

Returns:

    NumpyArray: Zone-by-zone matrix of friction factors.

Source code in tm2py/components/demand/commercial.py, lines 549-573
def friction_factor_matrices(
    self, trk_class: str, k_factors: Union[None, NumpyArray] = None
) -> NumpyArray:
    """Zone to zone NumpyArray of impedances for a given truck class.

    Args:
        trk_class (str): Truck class abbreviated name
        k_factors (Union[None,NumpyArray]): If not None, gives a zone-by-zone array of
            k-factors (additive impedances to be added on top of friction factors).
            Defaults to None.

    Returns:
        NumpyArray: Zone-by-zone matrix of friction factors
    """
    if not self._friction_factor_matrices.get(trk_class):
        self._friction_factor_matrices[
            trk_class
        ] = self._calculate_friction_factor_matrix(
            trk_class,
            self.class_config[trk_class].impedance,
            self.k_factors,
            self.class_config[trk_class].use_k_factors,
        )

    return self._friction_factor_matrices[trk_class]
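
_calculate_friction_factor_matrix itself is not shown on this page. One plausible construction, given the time-band friction factor table and a blended time skim, is a simple interpolation of the tabulated factors at each zone-pair time, plus the optional additive k-factors; the sketch below is only an illustration under those assumptions, not necessarily the component's exact method, and all values are hypothetical.

    import numpy as np
    import pandas as pd

    # Hypothetical friction factor table: factor by time band for one truck class
    ff_table = pd.DataFrame({"time": [5, 10, 15, 20], "smltrk": [80.0, 40.0, 15.0, 5.0]})

    # Hypothetical blended zone-to-zone impedance (e.g. minutes) for the same class
    blended_time = np.array([[4.0, 9.0], [12.0, 18.0]])

    # Interpolate the tabulated factors at each zone-pair impedance
    friction = np.interp(blended_time, ff_table["time"], ff_table["smltrk"])

    # Optional additive k-factors, mirroring the k_factors argument above
    k_factors = np.zeros_like(friction)
    friction = friction + k_factors
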
friction_factors() property

Table of friction factors for each time band by truck class.

Returns:

    pd.DataFrame: DataFrame of friction factors read from disk.

Source code in tm2py/components/demand/commercial.py, lines 605-614
@property
def friction_factors(self):
    """Table of friction factors for each time band by truck class.

    Returns:
        pd.DataFrame: DataFrame of friction factors read from disk.
    """
    if self._friction_factors is None:
        self._friction_factors = self._read_ffactors()
    return self._friction_factors
k_factors() property

Zone-to-zone values of truck K factors.

Returns:

    NumpyArray: Zone-to-zone values of truck K factors.

Source code in tm2py/components/demand/commercial.py, lines 492-501
@property
def k_factors(self):
    """Zone-to-zone values of truck K factors.

    Returns:
         NumpyArray: Zone-to-zone values of truck K factors.
    """
    if self._k_factors is None:
        self._k_factors = self._load_k_factors()
    return self._k_factors
run(tripends_df)

Run commercial vehicle trip distribution.

Source code in tm2py/components/demand/commercial.py, lines 634-641
@LogStartEnd()
def run(self, tripends_df) -> Dict[str, NumpyArray]:
    """Run commercial vehicle trip distribution."""
    daily_demand_dict = {
        tc: self._distribute_ods(tripends_df, tc) for tc in self.component.classes
    }

    return daily_demand_dict
validate_inputs()

Validate the inputs.

Source code in tm2py/components/demand/commercial.py, lines 629-632
def validate_inputs(self):
    """Validate the inputs."""
    # TODO
    pass

CommercialVehicleTripGeneration

Bases: Subcomponent

Commercial vehicle (truck) Trip Generation for 4 sizes of truck.

The four truck types are

(1) very small trucks (two-axle, four-tire), (2) small trucks (two-axle, six-tire), (3) medium trucks (three-axle), (4) large or combination (four or more axle) trucks.

Trip generation

Use linear regression models to generate trip ends, balancing attractions to productions. Based on BAYCAST truck model.

The truck trip generation models for small trucks (two-axle, six tire), medium trucks (three-axle), and large or combination (four or more axle) trucks are taken directly from the study: “I-880 Intermodal Corridor Study: Truck Travel in the San Francisco Bay Area”, prepared by Barton Aschman in December 1992. The coefficients are on page 223 of this report.

The very small truck generation model is based on the Phoenix four-tire truck model documented in the TMIP Quick Response Freight Manual.

Note that certain production models previously used SIC-based employment categories. To both maintain consistency with the BAYCAST truck model and update the model to use NAICS-based employment categories, new regression models were estimated relating the NAICS-based employment data with the SIC-based-predicted trips. The goal here is not to create a new truck model, but to mimic the old model with the available data. Please see the excel spreadsheet TruckModel.xlsx for details. The NAICS-based model results replicate the SIC-based model results quite well.
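
Each class's production and attraction formulas apply per-land-use rates, a multiplier, and a constant to the aggregated TAZ land-use table, mirroring _generate_trip_ends in the source below. A minimal pandas sketch with hypothetical rates:

    import pandas as pd

    # Hypothetical aggregated land use for two TAZs
    landuse_df = pd.DataFrame({"RETEMPN": [100, 50], "TOTHH": [200, 400]})

    # Hypothetical regression terms for one truck class formula
    land_use_rates = pd.Series({"RETEMPN": 0.3, "TOTHH": 0.1})
    multiplier, constant = 1.0, 0.0

    # Rate-weight each land-use column, scale and shift, then sum across columns
    rate_trips_df = landuse_df.mul(land_use_rates)
    productions = (rate_trips_df * multiplier + constant).sum(axis=1).round()
    print(productions.tolist())  # [50.0, 55.0]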

Source code in tm2py/components/demand/commercial.py, lines 187-411
class CommercialVehicleTripGeneration(Subcomponent):
    """Commercial vehicle (truck) Trip Generation for 4 sizes of truck.

    The four truck types are:
        (1) very small trucks (two-axle, four-tire),
        (2) small trucks (two-axle, six-tire),
        (3) medium trucks (three-axle),
        (4) large or combination (four or more axle) trucks.

    Input:  (1) MAZ csv data file with the employment and household counts.
    Output: Trips by 4 truck sizes

    Trip generation
    ---------------
    Use linear regression models to generate trip ends,
    balancing attractions to productions. Based on BAYCAST truck model.

    The truck trip generation models for small trucks (two-axle, six tire),
    medium trucks (three-axle), and large or combination (four or more axle)
    trucks are taken directly from the study: "I-880 Intermodal Corridor Study:
    Truck Travel in the San Francisco Bay Area", prepared by Barton Aschman in
    December 1992.  The coefficients are on page 223 of this report.

    The very small truck generation model is based on the Phoenix four-tire
    truck model documented in the TMIP Quick Response Freight Manual.

    Note that certain production models previously used SIC-based employment
    categories.  To both maintain consistency with the BAYCAST truck model and
    update the model to use NAICS-based employment categories, new regression
    models were estimated relating the NAICS-based employment data with the
    SIC-based-predicted trips.  The goal here is not to create a new truck
    model, but to mimic the old model with the available data.  Please see
    the excel spreadsheet TruckModel.xlsx for details.  The NAICS-based model
    results replicate the SIC-based model results quite well.
    """

    def __init__(self, controller: RunController, component: Component):
        """Constructor for the CommercialVehicleTripGeneration component.

        Args:
            controller (RunController): Run controller for model run.
            component (Component): Parent component of sub-component
        """
        super().__init__(controller, component)
        self.config = self.component.config.trip_gen

    def validate_inputs(self):
        """Validate the inputs."""
        # TODO
        pass

    @LogStartEnd()
    def run(self):
        """Run commercial vehicle trip distribution."""
        _landuse_df = self._aggregate_landuse()
        _unbalanced_tripends_df = self._generate_trip_ends(_landuse_df)
        _balanced_tripends_df = self._balance_pa(_unbalanced_tripends_df)
        total_tripends_df = self._aggregate_by_class(_balanced_tripends_df)
        return total_tripends_df

    @LogStartEnd(level="DEBUG")
    def _aggregate_landuse(self) -> pd.DataFrame:
        """Aggregates landuse data from input CSV by MAZ to TAZ and employment groups.

        TOTEMP, total employment (same regardless of classification system)
        RETEMPN, retail trade employment per the NAICS classification system
        FPSEMPN, financial and professional services employment per NAICS
        HEREMPN, health, educational, and recreational employment per NAICS
        OTHEMPN, other employment per the NAICS classification system
        AGREMPN, agricultural employment per the NAICS classification system
        MWTEMPN, manufacturing, warehousing, and transportation employment per NAICS
        TOTHH, total households
        """
        maz_data_file = self.get_abs_path(
            self.controller.config.scenario.maz_landuse_file
        )
        maz_input_data = pd.read_csv(maz_data_file)
        zones = self.component.emme_scenario.zone_numbers
        maz_input_data = maz_input_data[maz_input_data["TAZ_ORIGINAL"].isin(zones)]
        taz_input_data = maz_input_data.groupby(["TAZ_ORIGINAL"]).sum()
        taz_input_data = taz_input_data.sort_values(by="TAZ_ORIGINAL")
        # combine categories
        taz_landuse = pd.DataFrame()
        for total_column, sub_categories in _land_use_aggregation.items():
            taz_landuse[total_column] = taz_input_data[sub_categories].sum(axis=1)
        taz_landuse.reset_index(inplace=True)
        return taz_landuse

    @LogStartEnd(level="DEBUG")
    def _generate_trip_ends(self, landuse_df: pd.DataFrame) -> pd.DataFrame:
        """Generate productions and attractions by class based on landuse and truck trip rates.

        Args:
            landuse_df (pd.DataFrame): DataFrame with aggregated landuse data.
                Expected columns for landuse are: AGREMPN, RETEMPN, FPSEMPN, HEREMPN,
                MWTEMPN, OTHEMPN, TOTEMP, TOTHH

        Returns:
            pd.DataFrame: DataFrame with unbalanced production and attraction trip ends.
        """
        tripends_df = pd.DataFrame()

        _class_pa = itertools.product(
            self.config.classes,
            ["production_formula", "attraction_formula"],
        )

        # TODO Do this with multi-indexing rather than relying on column naming

        for _c, _pa in _class_pa:

            _trip_type = _c.purpose
            _trk_class = _c.name

            if _pa.endswith("_formula"):
                _pa_short = _pa.split("_")[0]

            # linked trips (non-garage-based) - attractions (equal productions)
            if (_trip_type == "linked") & (_pa_short == "attraction"):
                tripends_df[f"{_trip_type}_{_trk_class}_{_pa_short}s"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_productions"
                ]
            else:
                _constant = _c[_pa].constant
                _multiplier = _c[_pa].multiplier

                land_use_rates = pd.DataFrame(_c[_pa].land_use_rates).T
                land_use_rates = land_use_rates.rename(
                    columns=land_use_rates.loc["property"]
                ).drop("property", axis=0)

                _rate_trips_df = landuse_df.mul(land_use_rates.iloc[0])
                _trips_df = _rate_trips_df * _multiplier + _constant

                tripends_df[f"{_trip_type}_{_trk_class}_{_pa_short}s"] = _trips_df.sum(
                    axis=1
                ).round()

        return tripends_df

    @LogStartEnd(level="DEBUG")
    def _balance_pa(self, tripends_df: pd.DataFrame) -> pd.DataFrame:
        """Balance production and attractions.

        Args:
            tripends_df (pd.DataFrame): DataFrame with unbalanced production and attraction
                trip ends.

        Returns:
            pd.DataFrame: DataFrame with balanced production and attraction trip ends.
        """

        for _c in self.config.classes:
            _trip_type = _c.purpose
            _trk_class = _c.name
            _balance_to = _c.balance_to

            _tots = {
                "attractions": tripends_df[
                    f"{_trip_type}_{_trk_class}_attractions"
                ].sum(),
                "productions": tripends_df[
                    f"{_trip_type}_{_trk_class}_productions"
                ].sum(),
            }

            # if productions OR attractions are zero, fill one with other
            if not _tots["attractions"]:
                tripends_df[f"{_trip_type}_{_trk_class}_attractions"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_productions"
                ]

            elif not _tots["productions"]:
                tripends_df[f"{_trip_type}_{_trk_class}_productions"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_attractions"
                ]

            # otherwise balance based on sums
            elif _balance_to == "productions":
                tripends_df[f"{_trip_type}_{_trk_class}_attractions"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_attractions"
                ] * (_tots["productions"] / _tots["attractions"])

            elif _balance_to == "attractions":
                tripends_df[f"{_trip_type}_{_trk_class}_productions"] = tripends_df[
                    f"{_trip_type}_{_trk_class}_productions"
                ] * (_tots["attractions"] / _tots["productions"])
            else:
                raise ValueError(f"{_balance_to} is not a valid balance_to value")
        return tripends_df

    @LogStartEnd(level="DEBUG")
    def _aggregate_by_class(self, tripends_df: pd.DataFrame) -> pd.DataFrame:
        """Sum tripends by class across trip purpose.

        Args:
            tripends_df (pd.DataFrame): DataFrame with balanced production and attraction
                trip ends.
        Returns:
            pd.DataFrame: DataFrame with aggregated tripends by truck class. Returned columns are:
                vsmtrk_prod, vsmtrk_attr,
                smltrk_prod, smltrk_attr,
                medtrk_prod, medtrk_attr,
                lrgtrk_prod, lrgtrk_attr
        """
        agg_tripends_df = pd.DataFrame()

        _class_pa = itertools.product(
            self.component.classes,
            ["productions", "attractions"],
        )

        for _trk_class, _pa in _class_pa:
            _sum_cols = [
                c for c in tripends_df.columns if c.endswith(f"_{_trk_class}_{_pa}")
            ]
            agg_tripends_df[f"{_trk_class}_{_pa}"] = pd.Series(
                tripends_df[_sum_cols].sum(axis=1)
            )

        agg_tripends_df = agg_tripends_df.round(decimals=7)

        self.logger.log(agg_tripends_df.describe().to_string(), level="DEBUG")

        return agg_tripends_df
__init__(controller, component)

Constructor for the CommercialVehicleTripGeneration component.

Parameters:

    controller (RunController): Run controller for model run. Required.
    component (Component): Parent component of sub-component. Required.
Source code in tm2py/components/demand/commercial.py, lines 223-231
def __init__(self, controller: RunController, component: Component):
    """Constructor for the CommercialVehicleTripGeneration component.

    Args:
        controller (RunController): Run controller for model run.
        component (Component): Parent component of sub-component
    """
    super().__init__(controller, component)
    self.config = self.component.config.trip_gen
run()

Run commercial vehicle trip generation.

Source code in tm2py/components/demand/commercial.py, lines 238-245
@LogStartEnd()
def run(self):
    """Run commercial vehicle trip distribution."""
    _landuse_df = self._aggregate_landuse()
    _unbalanced_tripends_df = self._generate_trip_ends(_landuse_df)
    _balanced_tripends_df = self._balance_pa(_unbalanced_tripends_df)
    total_tripends_df = self._aggregate_by_class(_balanced_tripends_df)
    return total_tripends_df
validate_inputs()

Validate the inputs.

Source code in tm2py/components/demand/commercial.py, lines 233-236
def validate_inputs(self):
    """Validate the inputs."""
    # TODO
    pass

tm2py.config.TruckConfig

Bases: ConfigItem

Truck model parameters.

Source code in tm2py/config.py, lines 536-577
@dataclass(frozen=True)
class TruckConfig(ConfigItem):
    """Truck model parameters."""

    classes: List[TruckClassConfig]
    impedances: List[ImpedanceConfig]
    trip_gen: TripGenerationConfig
    trip_dist: TripDistributionConfig
    time_of_day: TimeOfDayConfig
    toll_choice: TollChoiceConfig
    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str

    """
    @validator("classes")
    def class_consistency(cls, v, values):
        # TODO Can't get to work right now
        _class_names = [c.name for c in v]
        _gen_classes = [c.name for c in values["trip_gen"]]
        _dist_classes = [c.name for c in values["trip_dist"]]
        _time_classes = [c.name for c in values["time_split"]]
        _toll_classes = [c.name for c in values["toll_choice"]]

        assert (
            _class_names == _gen_classes
        ), f"truck.classes ({_class_names}) doesn't equal\
            class names in truck.trip_gen ({_gen_classes})."
        assert (
            _class_names == _dist_classes
        ), f"truck.classes ({_class_names}) doesn't equal\
            class names in truck.trip_dist ({_dist_classes})."
        assert (
            _class_names == _time_classes
        ), f"truck.classes ({_class_names}) doesn't equal\
            class names in truck.time_split ({_time_classes})."
        assert (
            _class_names == _toll_classes
        ), f"truck.classes ({_class_names}) doesn't equal\
            class names in truck.toll_choice ({_toll_classes})."

        return v
    """

outfile_trip_table_tmp: str class-attribute

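
The validator commented out in TruckConfig above is intended to assert that truck.classes uses the same class names as each sub-config. A standalone sketch of that consistency check, with hypothetical class names and independent of the pydantic validator machinery:

    # Hypothetical class names as they might appear in a truck config
    class_names = ["vsmtrk", "smltrk", "medtrk", "lrgtrk"]
    sub_config_names = {
        "trip_gen": ["vsmtrk", "smltrk", "medtrk", "lrgtrk"],
        "trip_dist": ["vsmtrk", "smltrk", "medtrk", "lrgtrk"],
    }

    for sub_config, names in sub_config_names.items():
        assert class_names == names, (
            f"truck.classes ({class_names}) doesn't equal "
            f"class names in truck.{sub_config} ({names})."
        )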

Inter-regional Demand

tm2py.components.demand.internal_external

Module containing Internal <-> External trip model.

ExternalDemand

Bases: Subcomponent

Forecast of daily internal<->external demand based on growth from a base year.

Create a daily matrix that includes internal/external, external/internal, and external/external passenger vehicle travel (based on Census 2000 journey-to-work flows). These trip tables are based on total traffic counts, which include trucks, but trucks are not explicitly segmented from passenger vehicles. This short-coming is a hold-over from BAYCAST and will be addressed in the next model update.

The row and column totals are taken from count station data provided by Caltrans. The BAYCAST 2006 IX matrix is used as the base matrix and scaled to match forecast year growth assumptions. The script generates estimates for the model forecast year; the growth rates were discussed with neighboring MPOs as part of the SB 375 target setting process.

Input: (1) Station-specific assumed growth rates for each forecast year (the lack of external/external movements through the region allows simple factoring of cells without re-balancing); (2) An input base matrix derived from the Census journey-to-work data.

Output: (1) Four-table, forecast-year specific trip tables containing internal/external, external/internal, and external/external vehicle (xxx or person xxx) travel.

Governed by class DemandGrowth Config:

    highway_demand_file:
    input_demand_file:
    input_demand_matrixname_tmpl:
    modes:
    reference_year:
    annual_growth_rate:
    special_gateway_adjust:
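
The forecast combines one-time adjustments for specific gateways with gateway-specific annual growth rates applied over the horizon between the reference year and the scenario year. A minimal numpy sketch of the arithmetic, assuming the annual rate is compounded over that horizon and using hypothetical values (the component itself builds the factor matrix with create_matrix_factors):

    import numpy as np

    base_demand = np.array([[10.0, 20.0], [30.0, 40.0]])  # hypothetical base-year daily table
    num_years = 2015 - 2005  # scenario year minus reference_year (hypothetical)

    special_gateway_factor = 1.1  # hypothetical one-time adjustment for one gateway
    annual_growth_rate = 1.02     # hypothetical per-year growth for the same gateway

    adjustment = np.ones(base_demand.shape)
    adjustment *= special_gateway_factor           # applied once
    adjustment *= annual_growth_rate ** num_years  # compounded over the horizon

    forecast_demand = base_demand * adjustment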

Source code in tm2py/components/demand/internal_external.py, lines 92-202
class ExternalDemand(Subcomponent):
    """Forecast of daily internal<->external demand based on growth from a base year.

    Create a daily matrix that includes internal/external, external/internal,
    and external/external passenger vehicle travel (based on Census 2000 journey-to-work flows).
    These trip tables are based on total traffic counts, which include trucks, but trucks are
    not explicitly segmented from passenger vehicles.  This short-coming is a hold-over from
    BAYCAST and will be addressed in the next model update.

    The row and column totals are taken from count station data provided by Caltrans.  The
    BAYCAST 2006 IX matrix is used as the base matrix and scaled to match forecast year growth
    assumptions. The script generates estimates for the model forecast year; the growth rates
    were discussed with neighboring MPOs as part of the SB 375 target setting process.

     Input:  (1)  Station-specific assumed growth rates for each forecast year (the lack of
                  external/external movements through the region allows simple factoring of
                  cells without re-balancing);
             (2)  An input base matrix derived from the Census journey-to-work data.

     Output: (1) Four-table, forecast-year specific trip tables containing internal/external,
                 external/internal, and external/external vehicle (xxx or person xxx) travel.


    Governed by class DemandGrowth Config:
    ```
        highway_demand_file:
        input_demand_file:
        input_demand_matrixname_tmpl:
        modes:
        reference_year:
        annual_growth_rate:
        special_gateway_adjust:
    ```
    """

    def __init__(self, controller, component):

        super().__init__(controller, component)
        self.config = self.component.config.demand
        # Loaded lazily
        self._base_demand = None

    @property
    def year(self):
        return self.controller.config.scenario.year

    @property
    def modes(self):
        return self.component.classes

    @property
    def input_demand_file(self):
        return self.get_abs_path(self.config.input_demand_file)

    @property
    def base_demand(self):
        if self._base_demand is None:
            self._load_base_demand()
        return self._base_demand

    def validate_inputs(self):
        # TODO
        pass

    def _load_base_demand(self):
        """Load reference matrices from .omx to self._base_demand

        input file template: self.config.internal_external.input_demand_matrixname_tmpl
        modes: self.config.internal_external.modes
        """
        _mx_name_tmpl = self.config.input_demand_matrixname_tmpl
        _matrices = {m: _mx_name_tmpl.format(mode=m.upper()) for m in self.modes}

        self._base_demand = omx_to_dict(self.input_demand_file, matrices=_matrices)

    def run(self, base_demand: Dict[str, NumpyArray] = None) -> Dict[str, NumpyArray]:
        """Calculate adjusted demand based on scenario year and growth rates.

        Steps:
        - 1.1 apply special factors to certain gateways based on ID
        - 1.2 apply gateway-specific annual growth rates to results of step 1
           to generate year specific forecast

        Args:
            base_demand: dictionary of input daily demand matrices (numpy arrays);
                defaults to the reference-year matrices loaded from file.

        Returns:
             Dictionary of Numpy matrices of daily PA by class mode
        """
        # Build adjustment matrix to be applied to all input matrices
        # special gateway adjustments based on zone index
        if base_demand is None:
            base_demand = self.base_demand
        _num_years = self.year - self.config.reference_year
        _adj_matrix = np.ones(base_demand["da"].shape)

        _adj_matrix = create_matrix_factors(
            default_matrix=_adj_matrix,
            matrix_factors=self.config.special_gateway_adjust,
        )

        _adj_matrix = create_matrix_factors(
            default_matrix=_adj_matrix,
            matrix_factors=self.config.annual_growth_rate,
            periods=_num_years,
        )

        daily_prod_attract = dict(
            (_mode, _demand * _adj_matrix) for _mode, _demand in base_demand.items()
        )
        return daily_prod_attract
run(base_demand=None)

Calculate adjusted demand based on scenario year and growth rates.

Steps:
- 1.1 apply special factors to certain gateways based on ID
- 1.2 apply gateway-specific annual growth rates to results of step 1 to generate year specific forecast

Parameters:

    base_demand: dictionary of input daily demand matrices (numpy arrays); defaults to the reference-year matrices loaded from file.

Returns:

    Dict[str, NumpyArray]: Dictionary of Numpy matrices of daily PA by class mode.

Source code in tm2py/components/demand/internal_external.py, lines 167-202
def run(self, base_demand: Dict[str, NumpyArray] = None) -> Dict[str, NumpyArray]:
    """Calculate adjusted demand based on scenario year and growth rates.

    Steps:
    - 1.1 apply special factors to certain gateways based on ID
    - 1.2 apply gateway-specific annual growth rates to results of step 1
       to generate year specific forecast

    Args:
        base_demand: dictionary of input daily demand matrices (numpy arrays);
            defaults to the reference-year matrices loaded from file.

    Returns:
         Dictionary of Numpy matrices of daily PA by class mode
    """
    # Build adjustment matrix to be applied to all input matrices
    # special gateway adjustments based on zone index
    if base_demand is None:
        base_demand = self.base_demand
    _num_years = self.year - self.config.reference_year
    _adj_matrix = np.ones(base_demand["da"].shape)

    _adj_matrix = create_matrix_factors(
        default_matrix=_adj_matrix,
        matrix_factors=self.config.special_gateway_adjust,
    )

    _adj_matrix = create_matrix_factors(
        default_matrix=_adj_matrix,
        matrix_factors=self.config.annual_growth_rate,
        periods=_num_years,
    )

    daily_prod_attract = dict(
        (_mode, _demand * _adj_matrix) for _mode, _demand in base_demand.items()
    )
    return daily_prod_attract

ExternalTollChoice

Bases: Subcomponent

Toll choice

Apply a binomial choice model for drive alone, shared ride 2, and shared ride 3 internal/external personal vehicle travel.

Input:  (1) Time-period-specific origin/destination matrices of drive alone, shared ride 2,
            and shared ride 3+ internal/external trip tables.
        (2) Skims providing the time and cost for value toll and non-value toll paths for each:
            traffic_skims_{period}.omx, where {period} is the time period ID,
            {class} is the class name da, sr2, sr3, with the following matrix names:
              Non-value-toll paying time: {period}_{class}_time,
              Non-value-toll distance: {period}_{class}_dist,
              Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
              Value-toll paying time is: {period}_{class}toll_time,
              Value-toll paying distance is: {period}_{class}toll_dist,
              Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
              Value-toll value toll is: {period}_{class}toll_valuetoll_{class}

Output: Five, six-table trip matrices, one for each time period. Two tables for each vehicle class representing value-toll paying path trips and non-value-toll paying path trips

Governed by TollClassConfig
classes:
value_of_time:
operating_cost_per_mile:
property_to_skim_toll:
property_to_skim_notoll:
utility:
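
The binomial choice itself is a binary logit split: each O/D cell gets a probability of taking the value-toll path from the toll and non-toll utilities, and the class demand is split accordingly. A minimal numpy sketch with hypothetical utilities (the actual calculation is delegated to TollChoiceCalculator):

    import numpy as np

    demand = np.array([[100.0, 50.0], [80.0, 20.0]])        # hypothetical trips for one class/period
    utility_toll = np.array([[-1.0, -0.5], [-2.0, -0.2]])    # hypothetical toll-path utilities
    utility_notoll = np.array([[-1.2, -1.0], [-1.5, -0.4]])  # hypothetical non-toll utilities

    # Binary logit probability of choosing the toll path
    prob_toll = 1.0 / (1.0 + np.exp(utility_notoll - utility_toll))

    split = {
        "toll": demand * prob_toll,
        "non toll": demand * (1.0 - prob_toll),
    }
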
Source code in tm2py/components/demand/internal_external.py, lines 205-310
class ExternalTollChoice(Subcomponent):
    """Toll choice
    -----------
    Apply a binomial choice model for drive alone, shared ride 2, and shared ride 3
    internal/external personal vehicle travel.

    Input:  (1) Time-period-specific origin/destination matrices of drive alone, shared ride 2,
                and shared ride 3+ internal/external trip tables.
            (2) Skims providing the time and cost for value toll and non-value toll paths for each

                traffic_skims_{period}.omx, where {period} is the time period ID,
                {class} is the class name da, sr2, sr3, with the following matrix names
                  Non-value-toll paying time: {period}_{class}_time,
                  Non-value-toll distance: {period}_{class}_dist,
                  Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
                  Value-toll paying time is: {period}_{class}toll_time,
                  Value-toll paying distance is: {period}_{class}toll_dist,
                  Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
                  Value-toll value toll is: {period}_{class}toll_valuetoll_{class},

     Output: Five, six-table trip matrices, one for each time period.  Two tables for each vehicle
             class representing value-toll paying path trips and non-value-toll paying path trips

    Governed by TollClassConfig:

        ```
        classes:
        value_of_time:
        operating_cost_per_mile:
        property_to_skim_toll:
        property_to_skim_notoll:
        utility:
        ```
    """

    def __init__(self, controller, component):
        super().__init__(controller, component)

        self.config = self.component.config.toll_choice

        self.sub_components = {
            "toll choice calculator": TollChoiceCalculator(
                controller, component, self.config
            ),
        }

        # shortcut
        self._toll_choice = self.sub_components["toll choice calculator"]
        self._toll_choice.toll_skim_suffix = "trk"

    def validate_inputs(self):
        # TODO
        pass

    @LogStartEnd()
    def run(
        self, period_demand: Dict[str, Dict[str, NumpyArray]]
    ) -> Dict[str, Dict[str, NumpyArray]]:
        """Binary toll / non-toll choice model by class.

        input: result of _ix_time_of_day
        skims:
            traffic_skims_{period}.omx, where {period} is the time period ID,
            {class} is the class name da, sr2, sr3, with the following matrix names
              Non-value-toll paying time: {period}_{class}_time,
              Non-value-toll distance: {period}_{class}_dist,
              Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
              Value-toll paying time is: {period}_{class}toll_time,
              Value-toll paying distance is: {period}_{class}toll_dist,
              Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
              Value-toll value toll is: {period}_{class}toll_valuetoll_{class},

        STEPS:
        3.1: For each time of day, for each da, sr2, sr3, calculate
             - utility of toll and nontoll
             - probability of toll / nontoll
             - split demand into toll and nontoll matrices

        """

        _time_class_combos = itertools.product(
            self.time_period_names, self.component.classes
        )

        class_demands = defaultdict(dict)
        for _time_period, _class in _time_class_combos:

            if _time_period in period_demand.keys():
                pass
            elif _time_period.lower() in period_demand.keys():
                _time_period = _time_period.lower()
            elif _time_period.upper() in period_demand.keys():
                _time_period = _time_period.upper()
            else:
                raise ValueError(
                    f"Period {_time_period} not an available time period.\
                    Available periods are:  {period_demand.keys()}"
                )

            _split_demand = self._toll_choice.run(
                period_demand[_time_period][_class], _class, _time_period
            )

            class_demands[_time_period][_class] = _split_demand["non toll"]
            class_demands[_time_period][f"{_class}toll"] = _split_demand["toll"]
        return class_demands
run(period_demand)

Binary toll / non-toll choice model by class.

input: result of _ix_time_of_day

skims:
    traffic_skims_{period}.omx, where {period} is the time period ID,
    {class} is the class name da, sr2, sr3, with the following matrix names:
      Non-value-toll paying time: {period}_{class}_time,
      Non-value-toll distance: {period}_{class}_dist,
      Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
      Value-toll paying time is: {period}_{class}toll_time,
      Value-toll paying distance is: {period}_{class}toll_dist,
      Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
      Value-toll value toll is: {period}_{class}toll_valuetoll_{class}

STEPS:
3.1: For each time of day, for each da, sr2, sr3, calculate
     - utility of toll and nontoll
     - probability of toll / nontoll
     - split demand into toll and nontoll matrices

Source code in tm2py/components/demand/internal_external.py, lines 259-310
@LogStartEnd()
def run(
    self, period_demand: Dict[str, Dict[str, NumpyArray]]
) -> Dict[str, Dict[str, NumpyArray]]:
    """Binary toll / non-toll choice model by class.

    input: result of _ix_time_of_day
    skims:
        traffic_skims_{period}.omx, where {period} is the time period ID,
        {class} is the class name da, sr2, sr3, with the following matrix names
          Non-value-toll paying time: {period}_{class}_time,
          Non-value-toll distance: {period}_{class}_dist,
          Non-value-toll bridge toll is: {period}_{class}_bridgetoll_{class},
          Value-toll paying time is: {period}_{class}toll_time,
          Value-toll paying distance is: {period}_{class}toll_dist,
          Value-toll bridge toll is: {period}_{class}toll_bridgetoll_{class},
          Value-toll value toll is: {period}_{class}toll_valuetoll_{class},

    STEPS:
    3.1: For each time of day, for each da, sr2, sr3, calculate
         - utility of toll and nontoll
         - probability of toll / nontoll
         - split demand into toll and nontoll matrices

    """

    _time_class_combos = itertools.product(
        self.time_period_names, self.component.classes
    )

    class_demands = defaultdict(dict)
    for _time_period, _class in _time_class_combos:

        if _time_period in period_demand.keys():
            pass
        elif _time_period.lower() in period_demand.keys():
            _time_period = _time_period.lower()
        elif _time_period.upper() in period_demand.keys():
            _time_period = _time_period.upper()
        else:
            raise ValueError(
                f"Period {_time_period} not an available time period.\
                Available periods are:  {period_demand.keys()}"
            )

        _split_demand = self._toll_choice.run(
            period_demand[_time_period][_class], _class, _time_period
        )

        class_demands[_time_period][_class] = _split_demand["non toll"]
        class_demands[_time_period][f"{_class}toll"] = _split_demand["toll"]
    return class_demands

InternalExternal

Bases: Component

Develop Internal <-> External trip tables from land use and impedances.

  1. Grow demand from base year using static rates ::ExternalDemand
  2. Split by time of day using static factors ::TimePeriodSplit
  3. Apply basic toll binomial choice model: ::ExternalTollChoice
Governed by InternalExternalConfig
Source code in tm2py/components/demand/internal_external.py, lines 27-89
class InternalExternal(Component):
    """Develop Internal <-> External trip tables from land use and impedances.

    1. Grow demand from base year using static rates ::ExternalDemand
    2. Split by time of day using static factors ::TimePeriodSplit
    3. Apply basic toll binomial choice model: ::ExternalTollChoice

    Governed by InternalExternalConfig:
        highway_demand_file:
        input_demand_file:
        input_demand_matrixname_tmpl:
        modes:
        reference_year:
        annual_growth_rate: List[MatrixFactorConfig]
        time_of_day: TimeOfDayConfig
        toll_choice: TollChoiceConfig
        special_gateway_adjust: Optional[List[MatrixFactorConfig]]
    """

    def __init__(self, controller: RunController):
        super().__init__(controller)
        self.config = self.controller.config.internal_external

        self.sub_components = {
            "demand forecast": ExternalDemand(controller, self),
            "time of day": TimePeriodSplit(
                controller, self, self.config.time_of_day.classes[0].time_period_split
            ),
            "toll choice": ExternalTollChoice(controller, self),
        }

    @property
    def classes(self):
        return self.config.modes

    def validate_inputs(self):
        """Validate inputs to component."""
        ## TODO
        pass

    @LogStartEnd()
    def run(self):
        """Run internal/external travel demand component."""

        daily_demand = self.sub_components["demand forecast"].run()
        period_demand = self.sub_components["time of day"].run(daily_demand)
        class_demands = self.sub_components["toll choice"].run(period_demand)
        self._export_results(class_demands)

    @LogStartEnd()
    def _export_results(self, demand: Dict[str, Dict[str, NumpyArray]]):
        """Export assignable class demands to OMX files by time-of-day."""
        outdir = self.get_abs_path(self.config.output_trip_table_directory)
        os.makedirs(outdir, exist_ok=True)
        for period, matrices in demand.items():
            with OMXManager(
                os.path.join(
                    outdir, self.config.outfile_trip_table_tmp.format(period=period)
                ),
                "w",
            ) as output_file:
                for name, data in matrices.items():
                    output_file.write_array(data, name)
run()

Run internal/external travel demand component.

Source code in tm2py/components/demand/internal_external.py, lines 67-74
@LogStartEnd()
def run(self):
    """Run internal/external travel demand component."""

    daily_demand = self.sub_components["demand forecast"].run()
    period_demand = self.sub_components["time of day"].run(daily_demand)
    class_demands = self.sub_components["toll choice"].run(period_demand)
    self._export_results(class_demands)
validate_inputs()

Validate inputs to component.

Source code in tm2py/components/demand/internal_external.py, lines 62-65
def validate_inputs(self):
    """Validate inputs to component."""
    ## TODO
    pass

tm2py.config.InternalExternalConfig

Bases: ConfigItem

Internal <-> External model parameters.

Source code in tm2py/config.py, lines 438-447
@dataclass(frozen=True)
class InternalExternalConfig(ConfigItem):
    """Internal <-> External model parameters."""

    output_trip_table_directory: pathlib.Path
    outfile_trip_table_tmp: str
    modes: List[str]
    demand: DemandGrowth
    time_of_day: TimeOfDayConfig
    toll_choice: TollChoiceConfig

Visitor Demand

tm2py.components.demand.visitor

Visitor module.

Highway Network Components

tm2py.components.network.highway.highway_network

Module for highway network preparation steps.

Creates required attributes and populates input values needed for highway assignments. The toll values, VDFs, per-class cost (tolls+operating costs), modes and skim link attributes are calculated.

The following keys and tables are used from the config:

  highway.tolls.file_path: relative path to input toll file
  highway.tolls.src_vehicle_group_names: names used in tolls file for toll class values
  highway.tolls.dst_vehicle_group_names: corresponding names used in network attributes toll classes
  highway.tolls.tollbooth_start_index: index to split point bridge tolls (< this value) from distance value tolls (>= this value)
  highway.classes: the list of assignment classes, see the notes under highway_assign for detailed explanation
  highway.capclass_lookup: the lookup table mapping the link @capclass setting to capacity (@capacity), free_flow_speed (@free_flow_speed) and critical_speed (used to calculate @ja for akcelik type functions)
  highway.generic_highway_mode_code: unique (with other mode_codes) single character used to label entire auto network in Emme
  highway.maz_to_maz.mode_code: unique (with other mode_codes) single character used to label MAZ local auto network including connectors

The following link attributes are created (overwritten) and are subsequently used in the highway assignments.
  • “@flow_XX”: link PCE flows per class, where XX is the class name in the config
  • “@maz_flow”: Assigned MAZ-to-MAZ flow

The following attributes are calculated
  • vdf: volume delay function to use
  • “@capacity”: total link capacity
  • “@ja”: akcelik delay parameter
  • “@hov_length”: length with HOV lanes
  • “@toll_length”: length with tolls
  • “@bridgetoll_YY”: the bridge toll for class subgroup YY
  • “@valuetoll_YY”: the “value”, non-bridge toll for class subgroup YY
  • “@cost_YY”: total cost for class YY
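
Two of these calculations are simple per-link arithmetic, visible in _set_vdf_attributes in the source below: @capacity scales the per-lane-hour capacity for the link's @capclass by the time-period capacity factor and the number of lanes, and @ja for Akcelik-type volume delay functions is 16 times the squared difference between travel time at the critical speed and at free-flow speed. A small sketch with hypothetical link values:

    # Hypothetical link values
    cap_lanehour = 1800.0         # capacity per lane per hour from the capclass lookup
    period_capacity_factor = 2.5  # highway_capacity_factor for the time period
    lanes = 3.0                   # @lanes

    capacity = cap_lanehour * period_capacity_factor * lanes  # -> @capacity

    length = 1.2                  # link length (miles)
    critical_speed = 40.0         # critical speed from the capclass lookup
    free_flow_speed = 65.0        # @free_flow_speed

    t_c = length / critical_speed   # time at critical speed
    t_o = length / free_flow_speed  # time at free flow
    ja = 16 * (t_c - t_o) ** 2      # -> @ja, Akcelik delay parameter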

PrepareNetwork

Bases: Component

Highway network preparation.

Source code in tm2py/components/network/highway/highway_network.py, lines 60-354
class PrepareNetwork(Component):
    """Highway network preparation."""

    def __init__(self, controller: "RunController"):
        """Constructor for PPrepareNetwork.

        Args:
            controller (RunController): Reference to run controller object.
        """
        super().__init__(controller)
        self.config = self.controller.config.highway

    @LogStartEnd("Prepare network attributes and modes")
    def run(self):
        """Run network preparation step."""
        for time in self.time_period_names:
            with self.controller.emme_manager.logbook_trace(
                f"prepare for highway assignment {time}"
            ):
                scenario = self.get_emme_scenario(
                    self.controller.config.emme.highway_database_path, time
                )
                self._create_class_attributes(scenario, time)
                network = scenario.get_network()
                self._set_tolls(network, time)
                self._set_vdf_attributes(network, time)
                self._set_link_modes(network)
                self._calc_link_skim_lengths(network)
                self._calc_link_class_costs(network)
                scenario.publish_network(network)

    def validate_inputs(self):
        """Validate inputs files are correct, raise if an error is found."""
        toll_file_path = self.get_abs_path(self.config.tolls.file_path)
        if not os.path.exists(toll_file_path):
            self.logger.log(
                f"Tolls file (config.highway.tolls.file_path) does not exist: {toll_file_path}",
                level="ERROR",
            )
            raise FileNotFoundError(f"Tolls file does not exist: {toll_file_path}")
        src_veh_groups = self.config.tolls.src_vehicle_group_names
        columns = ["fac_index"]
        for time in self.controller.config.time_periods:
            for vehicle in src_veh_groups:
                columns.append(f"toll{time.name.lower()}_{vehicle}")
        with open(toll_file_path, "r", encoding="UTF8") as toll_file:
            header = set(h.strip() for h in next(toll_file).split(","))
            missing = []
            for column in columns:
                if column not in header:
                    missing.append(column)
                    self.logger.log(
                        f"Tolls file missing column: {column}", level="ERROR"
                    )
        if missing:
            raise FileFormatError(
                f"Tolls file missing {len(missing)} columns: {', '.join(missing)}"
            )

    def _create_class_attributes(self, scenario: EmmeScenario, time_period: str):
        """Create required network attributes including per-class cost and flow attributes."""
        create_attribute = self.controller.emme_manager.tool(
            "inro.emme.data.extra_attribute.create_extra_attribute"
        )
        attributes = {
            "LINK": [
                ("@capacity", "total link capacity"),
                ("@ja", "akcelik delay parameter"),
                ("@maz_flow", "Assigned MAZ-to-MAZ flow"),
                ("@hov_length", "length with HOV lanes"),
                ("@toll_length", "length with tolls"),
            ]
        }
        # toll field attributes by bridge and value and toll definition
        dst_veh_groups = self.config.tolls.dst_vehicle_group_names
        for dst_veh in dst_veh_groups:
            for toll_type in "bridge", "value":
                attributes["LINK"].append(
                    (
                        f"@{toll_type}toll_{dst_veh}",
                        f"{toll_type} toll value for {dst_veh}",
                    )
                )
        # results for link cost and assigned flow
        for assign_class in self.config.classes:
            attributes["LINK"].append(
                (
                    f"@cost_{assign_class.name.lower()}",
                    f'{time_period} {assign_class["description"]} total costs'[:40],
                )
            )
            attributes["LINK"].append(
                (
                    f"@flow_{assign_class.name.lower()}",
                    f'{time_period} {assign_class["description"]} link volume'[:40],
                )
            )
        for domain, attrs in attributes.items():
            for name, desc in attrs:
                create_attribute(domain, name, desc, overwrite=True, scenario=scenario)

    def _set_tolls(self, network: EmmeNetwork, time_period: str):
        """Set the tolls in the network from the toll reference file."""
        toll_index = self._get_toll_indices()
        src_veh_groups = self.config.tolls.src_vehicle_group_names
        dst_veh_groups = self.config.tolls.dst_vehicle_group_names
        tollbooth_start_index = self.config.tolls.tollbooth_start_index
        for link in network.links():
            if link["@tollbooth"]:
                index = (
                    link["@tollbooth"] * 1000
                    + link["@tollseg"] * 10
                    + link["@useclass"]
                )
                data_row = toll_index.get(index)
                if data_row is None:
                    self.logger.warn(
                        f"set tolls failed index lookup {index}, link {link.id}",
                        indent=True,
                    )
                    continue  # tolls will remain at zero
                # if index is below tollbooth start index then this is a bridge
                # (point toll), available for all traffic assignment classes
                if link["@tollbooth"] < tollbooth_start_index:
                    for src_veh, dst_veh in zip(src_veh_groups, dst_veh_groups):
                        link[f"@bridgetoll_{dst_veh}"] = (
                            data_row[f"toll{time_period.lower()}_{src_veh}"] * 100
                        )
                else:  # else, this is a tollway with a per-mile charge
                    for src_veh, dst_veh in zip(src_veh_groups, dst_veh_groups):
                        link[f"@valuetoll_{dst_veh}"] = (
                            data_row[f"toll{time_period.lower()}_{src_veh}"]
                            * link.length
                            * 100
                        )

    def _get_toll_indices(self) -> Dict[int, Dict[str, str]]:
        """Get the mapping of toll lookup table from the toll reference file."""
        toll_file_path = self.get_abs_path(self.config.tolls.file_path)
        self.logger.debug(f"toll_file_path {toll_file_path}", indent=True)
        tolls = {}
        with open(toll_file_path, "r", encoding="UTF8") as toll_file:
            header = [h.strip() for h in next(toll_file).split(",")]
            for line in toll_file:
                data = dict(zip(header, line.split(",")))
                tolls[int(data["fac_index"])] = data
        return tolls

    def _set_vdf_attributes(self, network: EmmeNetwork, time_period: str):
        """Set capacity, VDF and critical speed on links."""
        capacity_map = {}
        critical_speed_map = {}
        for row in self.config.capclass_lookup:
            if row.get("capacity") is not None:
                capacity_map[row["capclass"]] = row.get("capacity")
            if row.get("critical_speed") is not None:
                critical_speed_map[row["capclass"]] = row.get("critical_speed")
        tp_mapping = {
            tp.name.upper(): tp.highway_capacity_factor
            for tp in self.controller.config.time_periods
        }
        period_capacity_factor = tp_mapping[time_period]
        akcelik_vdfs = [3, 4, 5, 7, 8, 10, 11, 12, 13, 14]
        for link in network.links():
            cap_lanehour = capacity_map[link["@capclass"]]
            link["@capacity"] = cap_lanehour * period_capacity_factor * link["@lanes"]
            link.volume_delay_func = int(link["@ft"])
            # re-mapping links with type 99 to type 7 "local road of minor importance"
            if link.volume_delay_func == 99:
                link.volume_delay_func = 7
            # num_lanes not used directly, but set for reference
            link.num_lanes = max(min(9.9, link["@lanes"]), 1.0)
            if link.volume_delay_func in akcelik_vdfs and link["@free_flow_speed"] > 0:
                dist = link.length
                critical_speed = critical_speed_map[link["@capclass"]]
                t_c = dist / critical_speed
                t_o = dist / link["@free_flow_speed"]
                link["@ja"] = 16 * (t_c - t_o) ** 2

    def _set_link_modes(self, network: EmmeNetwork):
        """Set the link modes based on the per-class 'excluded_links' set."""
        # first reset link modes (script run more than once)
        # "generic_highway_mode_code" must already be created (in import to Emme script)
        auto_mode = {network.mode(self.config.generic_highway_mode_code)}
        used_modes = {
            network.mode(assign_class.mode_code) for assign_class in self.config.classes
        }
        used_modes.add(network.mode(self.config.maz_to_maz.mode_code))
        for link in network.links():
            link.modes -= used_modes
            if link["@drive_link"]:
                link.modes |= auto_mode
        for mode in used_modes:
            if mode is not None:
                network.delete_mode(mode)

        # Create special access/egress mode for MAZ connectors
        maz_access_mode = network.create_mode(
            "AUX_AUTO", self.config.maz_to_maz.mode_code
        )
        maz_access_mode.description = "MAZ access"
        # create modes from class spec
        # (duplicate mode codes allowed provided the excluded_links is the same)
        mode_excluded_links = {}
        for assign_class in self.config.classes:
            if assign_class.mode_code in mode_excluded_links:
                if (
                    assign_class.excluded_links
                    != mode_excluded_links[assign_class.mode_code]
                ):
                    ex_links1 = mode_excluded_links[assign_class.mode_code]
                    ex_links2 = assign_class.excluded_links
                    raise Exception(
                        f"config error: highway.classes, duplicated mode codes "
                        f"('{assign_class.mode_code}') with different excluded "
                        f"links: {ex_links1} and {ex_links2}"
                    )
                continue
            mode = network.create_mode("AUX_AUTO", assign_class.mode_code)
            mode.description = assign_class.name
            mode_excluded_links[mode.id] = assign_class.excluded_links

        dst_veh_groups = self.config.tolls.dst_vehicle_group_names
        for link in network.links():
            modes = set(m.id for m in link.modes)
            if link.i_node["@maz_id"] + link.j_node["@maz_id"] > 0:
                modes.add(maz_access_mode.id)
                link.modes = modes
                continue
            if not link["@drive_link"]:
                continue
            exclude_links_map = {
                "is_sr": link["@useclass"] in [2, 3],
                "is_sr2": link["@useclass"] == 2,
                "is_sr3": link["@useclass"] == 3,
                "is_auto_only": link["@useclass"] in [2, 3, 4],
            }
            for dst_veh in dst_veh_groups:
                exclude_links_map[f"is_toll_{dst_veh}"] = (
                    link[f"@valuetoll_{dst_veh}"] > 0
                )
            self._apply_exclusions(
                self.config.maz_to_maz.excluded_links,
                maz_access_mode.id,
                modes,
                exclude_links_map,
            )
            for assign_class in self.config.classes:
                self._apply_exclusions(
                    assign_class.excluded_links,
                    assign_class.mode_code,
                    modes,
                    exclude_links_map,
                )
            link.modes = modes

    @staticmethod
    def _apply_exclusions(
        excluded_links_criteria: List[str],
        mode_code: str,
        modes_set: Set[str],
        link_values: Dict[str, bool],
    ):
        """Apply the exclusion criteria to set the link modes."""
        for criteria in excluded_links_criteria:
            if link_values[criteria]:
                return
        modes_set.add(mode_code)

    def _calc_link_skim_lengths(self, network: EmmeNetwork):
        """Calculate the length attributes used in the highway skims."""
        tollbooth_start_index = self.config.tolls.tollbooth_start_index
        for link in network.links():
            # distance in hov lanes / facilities
            if 2 <= link["@useclass"] <= 3:
                link["@hov_length"] = link.length
            else:
                link["@hov_length"] = 0
            # distance on non-bridge toll facilities
            if link["@tollbooth"] > tollbooth_start_index:
                link["@toll_length"] = link.length
            else:
                link["@toll_length"] = 0

    def _calc_link_class_costs(self, network: EmmeNetwork):
        """Calculate the per-class link cost from the tolls and operating costs."""
        for assign_class in self.config.classes:
            cost_attr = f"@cost_{assign_class.name.lower()}"
            op_cost = assign_class["operating_cost_per_mile"]
            toll_factor = assign_class.get("toll_factor")
            if toll_factor is None:
                toll_factor = 1.0
            for link in network.links():
                toll_value = sum(link[toll_attr] for toll_attr in assign_class["toll"])
                link[cost_attr] = link.length * op_cost + toll_value * toll_factor
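To make the interaction between a class' excluded_links list and _apply_exclusions above concrete, here is a small self-contained sketch; the link flags, exclusion keywords, and mode codes are invented for illustration only.

# Illustrative only: a link carrying a "da" value toll, evaluated against a
# class whose excluded_links = ["is_toll_da", "is_sr2"]. Same rule as
# _apply_exclusions above: the class mode is added only if no criterion matches.
link_values = {"is_sr": False, "is_sr2": False, "is_sr3": False,
               "is_auto_only": False, "is_toll_da": True}
excluded_links = ["is_toll_da", "is_sr2"]
modes = {"c"}  # modes already on the link (hypothetical generic highway mode)
if not any(link_values[criteria] for criteria in excluded_links):
    modes.add("d")  # the class' mode_code
print(modes)  # {'c'} -- "d" is withheld because the link has a "da" value toll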
__init__(controller)

Constructor for PrepareNetwork.

Parameters:

    controller (RunController): Reference to run controller object. [required]
Source code in tm2py/components/network/highway/highway_network.py, lines 63-70
def __init__(self, controller: "RunController"):
    """Constructor for PPrepareNetwork.

    Args:
        controller (RunController): Reference to run controller object.
    """
    super().__init__(controller)
    self.config = self.controller.config.highway
run()

Run network preparation step.

Source code in tm2py/components/network/highway/highway_network.py, lines 72-89
@LogStartEnd("Prepare network attributes and modes")
def run(self):
    """Run network preparation step."""
    for time in self.time_period_names:
        with self.controller.emme_manager.logbook_trace(
            f"prepare for highway assignment {time}"
        ):
            scenario = self.get_emme_scenario(
                self.controller.config.emme.highway_database_path, time
            )
            self._create_class_attributes(scenario, time)
            network = scenario.get_network()
            self._set_tolls(network, time)
            self._set_vdf_attributes(network, time)
            self._set_link_modes(network)
            self._calc_link_skim_lengths(network)
            self._calc_link_class_costs(network)
            scenario.publish_network(network)
validate_inputs()

Validate input files are correct; raise if an error is found.

Source code in tm2py/components/network/highway/highway_network.py, lines 91-117
def validate_inputs(self):
    """Validate inputs files are correct, raise if an error is found."""
    toll_file_path = self.get_abs_path(self.config.tolls.file_path)
    if not os.path.exists(toll_file_path):
        self.logger.log(
            f"Tolls file (config.highway.tolls.file_path) does not exist: {toll_file_path}",
            level="ERROR",
        )
        raise FileNotFoundError(f"Tolls file does not exist: {toll_file_path}")
    src_veh_groups = self.config.tolls.src_vehicle_group_names
    columns = ["fac_index"]
    for time in self.controller.config.time_periods:
        for vehicle in src_veh_groups:
            columns.append(f"toll{time.name.lower()}_{vehicle}")
    with open(toll_file_path, "r", encoding="UTF8") as toll_file:
        header = set(h.strip() for h in next(toll_file).split(","))
        missing = []
        for column in columns:
            if column not in header:
                missing.append(column)
                self.logger.log(
                    f"Tolls file missing column: {column}", level="ERROR"
                )
    if missing:
        raise FileFormatError(
            f"Tolls file missing {len(missing)} columns: {', '.join(missing)}"
        )
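As a concrete illustration of the toll lookup validated above, here is a minimal self-contained sketch of how _get_toll_indices indexes the tolls CSV and how a per-mile value toll becomes link cents; the column names, period, vehicle group, and values are assumptions, not actual model inputs.

# Illustrative only: index a toy tolls CSV the same way as _get_toll_indices,
# using assumed "toll{period}_{src_vehicle_group}" column names.
csv_text = """fac_index,tollam_da,tollpm_da
1,0.50,0.75
2,7.00,7.00
"""
rows = csv_text.strip().splitlines()
header = [h.strip() for h in rows[0].split(",")]
tolls = {}
for row in rows[1:]:
    data = dict(zip(header, row.split(",")))
    tolls[int(data["fac_index"])] = data
print(tolls[2]["tollam_da"])  # "7.00" (values are read as strings)

# A per-mile "value" toll is stored on the link in cents, following the
# formula in _set_tolls above: toll * link length (miles) * 100.
length_miles = 2.5  # assumed link length
print(float(tolls[1]["tollam_da"]) * length_miles * 100)  # 125.0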

tm2py.components.network.highway.highway_assign

Highway assignment and skim component.

Performs equilibrium traffic assignment and generates resulting skims. The assignment is configured using the "highway" table in the source config. See the config documentation for details. The traffic assignment runs according to the list of assignment classes under highway.classes.

Other relevant parameters from the config are:

- emme.num_processors: number of processors as an integer, or "MAX" or "MAX-N"
- time_periods[].emme_scenario_id: Emme scenario number to use for each period
- time_periods[].highway_capacity_factor

The Emme network must have the following attributes available:

Link:
- attributes:
    - "length" in feet
    - "vdf", volume delay function (volume delay functions must also be set up)
    - "@useclass", vehicle-class restrictions classification, auto-only, HOV only
    - "@free_flow_time", the free flow time (in minutes)
    - "@tollXX_YY", the toll for period XX and class subgroup (see truck class) named YY, used together with @tollbooth to generate @bridgetoll_YY and @valuetoll_YY
    - "@maz_flow", the background traffic MAZ-to-MAZ SP assigned flow from highway_maz, if controller.iteration > 0
- modes: must be set on links and match the specified mode codes in the traffic config

Network results:
- attributes:
    - @flow_XX: link PCE flows per class, where XX is the class name in the config
    - timau: auto travel time
    - volau: total assigned flow in PCE

Notes:
- Output matrices are in miles, minutes, and cents (2010 dollars) and are stored as real values
- Intrazonal distance/time is one half the distance/time to the nearest neighbor
- Intrazonal bridge and value tolls are assumed to be zero
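The attribute and matrix names referenced above follow a simple composition rule; the short sketch below shows how they are assembled for an assumed time period "am" and class "da" (illustrative names, not package defaults).

# Illustrative naming sketch: per-class link attributes and skim matrix names.
time_period, class_name = "am", "da"
flow_attr = f"@flow_{class_name}"                 # class PCE flow on links
cost_attr = f"@cost_{class_name}"                 # per-class generalized cost input
time_skim = f"mf{time_period}_{class_name}_time"  # OD travel time (minutes)
dist_skim = f"mf{time_period}_{class_name}_dist"  # OD distance (miles)
print(flow_attr, cost_attr, time_skim, dist_skim)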

AssignmentClass

Highway assignment class, represents data from config and conversion to Emme specs.

Source code in tm2py/components/network/highway/highway_assign.py, lines 360-517
class AssignmentClass:
    """Highway assignment class, represents data from config and conversion to Emme specs."""

    def __init__(self, class_config, time_period, iteration):
        """Constructor of Highway Assignment class.

        Args:
            class_config: highway class config entry (one of config.highway.classes)
            time_period: time period name
            iteration: current global iteration number
        """
        self.class_config = class_config
        self.time_period = time_period
        self.iteration = iteration
        self.name = class_config["name"].lower()
        self.skims = class_config.get("skims", [])

    @property
    def emme_highway_class_spec(self) -> EmmeHighwayClassSpec:
        """Construct and return Emme traffic assignment class specification.

        Converted from input config (highway.classes), see Emme Help for
        SOLA traffic assignment for specification details.
        Adds time_period as part of demand and skim matrix names.

        Returns:
            A nested dictionary corresponding to the expected Emme traffic
            class specification used in the SOLA assignment.
        """
        if self.iteration == 0:
            demand_matrix = 'ms"zero"'
        else:
            demand_matrix = f'mf"{self.time_period}_{self.name}"'
        class_spec = {
            "mode": self.class_config.mode_code,
            "demand": demand_matrix,
            "generalized_cost": {
                "link_costs": f"@cost_{self.name.lower()}",  # cost in $0.01
                # $/hr -> min/$0.01
                "perception_factor": 0.6 / self.class_config.value_of_time,
            },
            "results": {
                "link_volumes": f"@flow_{self.name.lower()}",
                "od_travel_times": {
                    "shortest_paths": f"mf{self.time_period}_{self.name}_time"
                },
            },
            "path_analyses": self.emme_class_analysis,
        }
        return class_spec

    @property
    def emme_class_analysis(self) -> List[EmmeHighwayAnalysisSpec]:
        """Construct and return a list of path analyses specs which generate the required skims.

        Returns:
            A list of nested dictionaries corresponding to the Emme path analysis
            (per-class) specification used in the SOLA assignment.
        """
        class_analysis = []
        if "time" in self.skims:
            class_analysis.append(
                self.emme_analysis_spec(
                    f"@cost_{self.name}".lower(),
                    f"mf{self.time_period}_{self.name}_cost",
                )
            )
        for skim_type in self.skims:
            if skim_type == "time":
                continue
            if "_" in skim_type:
                skim_type, group = skim_type.split("_")
            else:
                group = ""
            matrix_name = f"mf{self.time_period}_{self.name}_{skim_type}{group}"
            class_analysis.append(
                self.emme_analysis_spec(
                    self.skim_analysis_link_attribute(skim_type, group),
                    matrix_name,
                )
            )
        return class_analysis

    @property
    def skim_matrices(self) -> List[str]:
        """Returns: List of skim matrix names for this class."""
        skim_matrices = []
        if "time" in self.skims:
            skim_matrices.extend(
                [
                    f"{self.time_period}_{self.name}_time",
                    f"{self.time_period}_{self.name}_cost",
                ]
            )
        for skim_type in self.skims:
            if skim_type == "time":
                continue
            if "_" in skim_type:
                skim_type, group = skim_type.split("_")
            else:
                group = ""
            skim_matrices.append(f"{self.time_period}_{self.name}_{skim_type}{group}")
        return skim_matrices

    @staticmethod
    def emme_analysis_spec(link_attr: str, matrix_name: str) -> EmmeHighwayAnalysisSpec:
        """Returns Emme highway class path analysis spec.

        See Emme Help for SOLA assignment for full specification details.
        Args:
            link_attr: input link attribute for which to sum values along the paths
            matrix_name: full matrix name to store the result of the path analysis

        Returns:
            The nested dictionary specification which will generate the skim
            of link attribute values.
        """
        analysis_spec = {
            "link_component": link_attr,
            "turn_component": None,
            "operator": "+",
            "selection_threshold": {"lower": None, "upper": None},
            "path_to_od_composition": {
                "considered_paths": "ALL",
                "multiply_path_proportions_by": {
                    "analyzed_demand": False,
                    "path_value": True,
                },
            },
            "results": {
                "od_values": matrix_name,
                "selected_link_volumes": None,
                "selected_turn_volumes": None,
            },
        }
        return analysis_spec

    @staticmethod
    def skim_analysis_link_attribute(skim: str, group: str) -> str:
        """Return the link attribute name for the specified skim type and group.

        Args:
            skim: name of skim requested, one of dist, hovdist, tolldist, freeflowtime,
                bridgetoll, or valuetoll
            group: subgroup name for the bridgetoll or valuetoll, corresponds to one of
                the names from config.highway.tolls.dst_vehicle_group_names
        Returns:
            A string of the link attribute name used in the analysis.
        """
        lookup = {
            "dist": "length",  # NOTE: length must be in miles
            "hovdist": "@hov_length",
            "tolldist": "@toll_length",
            "freeflowtime": "@free_flow_time",
            "bridgetoll": f"@bridgetoll_{group}",
            "valuetoll": f"@valuetoll_{group}",
        }
        return lookup[skim]
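A short usage sketch of the two static helpers defined above; the vehicle group, period, and matrix name are assumed values, and the snippet presumes tm2py is importable.

# Which link attribute feeds the "bridgetoll" skim for an assumed "da" group,
# and the path-analysis spec that sums it into an OD matrix.
from tm2py.components.network.highway.highway_assign import AssignmentClass

attr = AssignmentClass.skim_analysis_link_attribute("bridgetoll", "da")
print(attr)  # "@bridgetoll_da"

spec = AssignmentClass.emme_analysis_spec(attr, "mfam_da_bridgetollda")
print(spec["link_component"], spec["results"]["od_values"])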
__init__(class_config, time_period, iteration)

Constructor of Highway Assignment class.

Parameters:

    class_config: highway class config entry (one of config.highway.classes) [required]
    time_period: time period name [required]
    iteration: current global iteration number [required]
Source code in tm2py/components/network/highway/highway_assign.py, lines 363-375
def __init__(self, class_config, time_period, iteration):
    """Constructor of Highway Assignment class.

    Args:
        class_config: highway class config entry (one of config.highway.classes)
        time_period: time period name
        iteration: current global iteration number
    """
    self.class_config = class_config
    self.time_period = time_period
    self.iteration = iteration
    self.name = class_config["name"].lower()
    self.skims = class_config.get("skims", [])
emme_analysis_spec(link_attr, matrix_name) staticmethod

Returns Emme highway class path analysis spec.

See Emme Help for SOLA assignment for full specification details.

Parameters:

    link_attr (str): input link attribute for which to sum values along the paths [required]
    matrix_name (str): full matrix name to store the result of the path analysis [required]

Returns:

    EmmeHighwayAnalysisSpec: The nested dictionary specification which will generate the skim of link attribute values.

Source code in tm2py/components/network/highway/highway_assign.py, lines 464-495
@staticmethod
def emme_analysis_spec(link_attr: str, matrix_name: str) -> EmmeHighwayAnalysisSpec:
    """Returns Emme highway class path analysis spec.

    See Emme Help for SOLA assignment for full specification details.
    Args:
        link_attr: input link attribute for which to sum values along the paths
        matrix_name: full matrix name to store the result of the path analysis

    Returns:
        The nested dictionary specification which will generate the skim
        of link attribute values.
    """
    analysis_spec = {
        "link_component": link_attr,
        "turn_component": None,
        "operator": "+",
        "selection_threshold": {"lower": None, "upper": None},
        "path_to_od_composition": {
            "considered_paths": "ALL",
            "multiply_path_proportions_by": {
                "analyzed_demand": False,
                "path_value": True,
            },
        },
        "results": {
            "od_values": matrix_name,
            "selected_link_volumes": None,
            "selected_turn_volumes": None,
        },
    }
    return analysis_spec
emme_class_analysis() property

Construct and return a list of path analyses specs which generate the required skims.

Returns:

    List[EmmeHighwayAnalysisSpec]: A list of nested dictionaries corresponding to the Emme path analysis (per-class) specification used in the SOLA assignment.

Source code in tm2py/components/network/highway/highway_assign.py, lines 411-441
@property
def emme_class_analysis(self) -> List[EmmeHighwayAnalysisSpec]:
    """Construct and return a list of path analyses specs which generate the required skims.

    Returns:
        A list of nested dictionaries corresponding to the Emme path analysis
        (per-class) specification used in the SOLA assignment.
    """
    class_analysis = []
    if "time" in self.skims:
        class_analysis.append(
            self.emme_analysis_spec(
                f"@cost_{self.name}".lower(),
                f"mf{self.time_period}_{self.name}_cost",
            )
        )
    for skim_type in self.skims:
        if skim_type == "time":
            continue
        if "_" in skim_type:
            skim_type, group = skim_type.split("_")
        else:
            group = ""
        matrix_name = f"mf{self.time_period}_{self.name}_{skim_type}{group}"
        class_analysis.append(
            self.emme_analysis_spec(
                self.skim_analysis_link_attribute(skim_type, group),
                matrix_name,
            )
        )
    return class_analysis
emme_highway_class_spec() property

Construct and return Emme traffic assignment class specification.

Converted from input config (highway.classes), see Emme Help for SOLA traffic assignment for specification details. Adds time_period as part of demand and skim matrix names.

Returns:

    EmmeHighwayClassSpec: A nested dictionary corresponding to the expected Emme traffic class specification used in the SOLA assignment.

Source code in tm2py/components/network/highway/highway_assign.py, lines 377-409
@property
def emme_highway_class_spec(self) -> EmmeHighwayClassSpec:
    """Construct and return Emme traffic assignment class specification.

    Converted from input config (highway.classes), see Emme Help for
    SOLA traffic assignment for specification details.
    Adds time_period as part of demand and skim matrix names.

    Returns:
        A nested dictionary corresponding to the expected Emme traffic
        class specification used in the SOLA assignment.
    """
    if self.iteration == 0:
        demand_matrix = 'ms"zero"'
    else:
        demand_matrix = f'mf"{self.time_period}_{self.name}"'
    class_spec = {
        "mode": self.class_config.mode_code,
        "demand": demand_matrix,
        "generalized_cost": {
            "link_costs": f"@cost_{self.name.lower()}",  # cost in $0.01
            # $/hr -> min/$0.01
            "perception_factor": 0.6 / self.class_config.value_of_time,
        },
        "results": {
            "link_volumes": f"@flow_{self.name.lower()}",
            "od_travel_times": {
                "shortest_paths": f"mf{self.time_period}_{self.name}_time"
            },
        },
        "path_analyses": self.emme_class_analysis,
    }
    return class_spec
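For orientation, the generalized-cost perception factor above converts a value of time in dollars per hour into minutes per $0.01 of link cost; a small worked sketch using the value of time from the example class config (not a package default):

# perception_factor = 0.6 / value_of_time: time is in minutes and link costs
# in cents, so 1 cent of cost is worth 60 / (100 * value_of_time) minutes.
value_of_time = 18.93                    # $ per hour (illustrative value)
perception_factor = 0.6 / value_of_time
print(round(perception_factor, 4))       # 0.0317 minutes per cent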

skim_analysis_link_attribute(skim, group) staticmethod

Return the link attribute name for the specified skim type and group.

Parameters:

    skim (str): name of skim requested, one of dist, hovdist, tolldist, freeflowtime, bridgetoll, or valuetoll [required]
    group (str): subgroup name for the bridgetoll or valuetoll, corresponds to one of the names from config.highway.tolls.dst_vehicle_group_names [required]

Returns:

    str: A string of the link attribute name used in the analysis.

Source code in tm2py/components/network/highway/highway_assign.py, lines 497-517
@staticmethod
def skim_analysis_link_attribute(skim: str, group: str) -> str:
    """Return the link attribute name for the specified skim type and group.

    Args:
        skim: name of skim requested, one of dist, hovdist, tolldist, freeflowtime,
            bridgetoll, or valuetoll
        group: subgroup name for the bridgetoll or valuetoll, corresponds to one of
            the names from config.highway.tolls.dst_vehicle_group_names
    Returns:
        A string of the link attribute name used in the analysis.
    """
    lookup = {
        "dist": "length",  # NOTE: length must be in miles
        "hovdist": "@hov_length",
        "tolldist": "@toll_length",
        "freeflowtime": "@free_flow_time",
        "bridgetoll": f"@bridgetoll_{group}",
        "valuetoll": f"@valuetoll_{group}",
    }
    return lookup[skim]
skim_matrices() property

Returns: List of skim matrix names for this class.

Source code in tm2py/components/network/highway/highway_assign.py, lines 443-462
@property
def skim_matrices(self) -> List[str]:
    """Returns: List of skim matrix names for this class."""
    skim_matrices = []
    if "time" in self.skims:
        skim_matrices.extend(
            [
                f"{self.time_period}_{self.name}_time",
                f"{self.time_period}_{self.name}_cost",
            ]
        )
    for skim_type in self.skims:
        if skim_type == "time":
            continue
        if "_" in skim_type:
            skim_type, group = skim_type.split("_")
        else:
            group = ""
        skim_matrices.append(f"{self.time_period}_{self.name}_{skim_type}{group}")
    return skim_matrices

HighwayAssignment

Bases: Component

Highway assignment and skims.

Parameters:

    controller (RunController): parent RunController object [required]
Source code in tm2py/components/network/highway/highway_assign.py, lines 85-357
class HighwayAssignment(Component):
    """Highway assignment and skims.

    Args:
        controller: parent RunController object
    """

    def __init__(self, controller: RunController):
        """Constructor for HighwayAssignment components.

        Args:
            controller (RunController): Reference to current run controller.
        """
        super().__init__(controller)

        self.config = self.controller.config.highway

        self._matrix_cache = None
        self._skim_matrices = []
        self._class_config = None

    @property
    def classes(self):
        # self.hwy_classes
        return [c.name for c in self.config.classes]

    @property
    def class_config(self):
        # self.hwy_class_configs
        if not self._class_config:
            self._class_config = {c.name: c for c in self.config.classes}

        return self._class_config

    def validate_inputs(self):
        """Validate inputs files are correct, raise if an error is found."""
        # TODO
        pass

    @LogStartEnd("Highway assignment and skims", level="STATUS")
    def run(self):
        """Run highway assignment."""
        demand = PrepareHighwayDemand(self.controller)
        if self.controller.iteration >= 1:
            demand.run()
        for time in self.time_period_names:
            scenario = self.get_emme_scenario(
                self.controller.config.emme.highway_database_path, time
            )
            with self._setup(scenario, time):
                iteration = self.controller.iteration
                assign_classes = [
                    AssignmentClass(c, time, iteration) for c in self.config.classes
                ]
                if iteration > 0:
                    self._copy_maz_flow(scenario)
                else:
                    self._reset_background_traffic(scenario)
                self._create_skim_matrices(scenario, assign_classes)
                assign_spec = self._get_assignment_spec(assign_classes)
                # self.logger.log_dict(assign_spec, level="DEBUG")
                with self.logger.log_start_end(
                    "Run SOLA assignment with path analyses", level="INFO"
                ):
                    assign = self.controller.emme_manager.tool(
                        "inro.emme.traffic_assignment.sola_traffic_assignment"
                    )
                    assign(assign_spec, scenario, chart_log_interval=1)

                # Subtract non-time costs from gen cost to get the raw travel time
                for emme_class_spec in assign_spec["classes"]:
                    self._calc_time_skim(emme_class_spec)
                # Set intra-zonal for time and dist to be 1/2 nearest neighbour
                for class_config in self.config.classes:
                    self._set_intrazonal_values(
                        time,
                        class_config["name"],
                        class_config["skims"],
                    )
                self._export_skims(scenario, time)
                if self.logger.debug_enabled:
                    self._log_debug_report(scenario, time)

    @_context
    def _setup(self, scenario: EmmeScenario, time_period: str):
        """Setup and teardown for Emme Matrix cache and list of skim matrices.

        Args:
            scenario: Emme scenario object
            time_period: time period name
        """
        self._matrix_cache = MatrixCache(scenario)
        self._skim_matrices = []
        msg = f"Highway assignment for period {time_period}"
        with self.logger.log_start_end(msg, level="STATUS"):
            try:
                yield
            finally:
                self._matrix_cache.clear()
                self._matrix_cache = None
                self._skim_matrices = []

    def _copy_maz_flow(self, scenario: EmmeScenario):
        """Copy maz_flow from MAZ demand assignment to ul1 for background traffic.

        Args:
            scenario: Emme scenario object
        """
        self.logger.log(
            "Copy @maz_flow to ul1 for background traffic", indent=True, level="DETAIL"
        )
        net_calc = NetworkCalculator(scenario)
        net_calc("ul1", "@maz_flow")

    def _reset_background_traffic(self, scenario: EmmeScenario):
        """Set ul1 for background traffic to 0 (no maz-maz flow).

        Args:
            scenario: Emme scenario object
        """
        self.logger.log(
            "Set ul1 to 0 for background traffic", indent=True, level="DETAIL"
        )
        net_calc = NetworkCalculator(scenario)
        net_calc("ul1", "0")

    def _create_skim_matrices(
        self, scenario: EmmeScenario, assign_classes: List[AssignmentClass]
    ):
        """Create matrices to store skim results in Emme database.

        Also add the matrices to list of self._skim_matrices.

        Args:
            scenario: Emme scenario object
            assign_classes: list of AssignmentClass objects
        """
        create_matrix = self.controller.emme_manager.tool(
            "inro.emme.data.matrix.create_matrix"
        )

        with self.logger.log_start_end("Creating skim matrices", level="DETAIL"):
            for klass in assign_classes:
                for matrix_name in klass.skim_matrices:
                    matrix = scenario.emmebank.matrix(f'mf"{matrix_name}"')
                    if not matrix:
                        matrix = create_matrix(
                            "mf", matrix_name, scenario=scenario, overwrite=True
                        )
                        self.logger.debug(
                            f"Create matrix name: {matrix_name}, id: {matrix.id}"
                        )
                    self._skim_matrices.append(matrix)

    def _get_assignment_spec(
        self, assign_classes: List[AssignmentClass]
    ) -> EmmeTrafficAssignmentSpec:
        """Generate template Emme SOLA assignment specification.

        Args:
            assign_classes: list of AssignmentClass objects

        Returns
            Emme specification for SOLA traffic assignment

        """
        relative_gap = self.config.relative_gap
        max_iterations = self.config.max_iterations
        # NOTE: mazmazvol as background traffic in link.data1 ("ul1")
        base_spec = {
            "type": "SOLA_TRAFFIC_ASSIGNMENT",
            "background_traffic": {
                "link_component": "ul1",
                "turn_component": None,
                "add_transit_vehicles": False,
            },
            "classes": [klass.emme_highway_class_spec for klass in assign_classes],
            "stopping_criteria": {
                "max_iterations": max_iterations,
                "best_relative_gap": 0.0,
                "relative_gap": relative_gap,
                "normalized_gap": 0.0,
            },
            "performance_settings": {
                "number_of_processors": self.controller.num_processors
            },
        }
        return base_spec

    def _calc_time_skim(self, emme_class_spec: EmmeHighwayClassSpec):
        """Calculate the real time skim =gen_cost-per_fac*link_costs.

        Args:
            emme_class_spec: dictionary of the per-class spec sub-section from the
                Emme SOLA assignment spec, classes list
        """
        od_travel_times = emme_class_spec["results"]["od_travel_times"][
            "shortest_paths"
        ]
        if od_travel_times is not None:
            # Total link costs is always the first analysis
            cost = emme_class_spec["path_analyses"][0]["results"]["od_values"]
            factor = emme_class_spec["generalized_cost"]["perception_factor"]
            gencost_data = self._matrix_cache.get_data(od_travel_times)
            cost_data = self._matrix_cache.get_data(cost)
            time_data = gencost_data - (factor * cost_data)
            self._matrix_cache.set_data(od_travel_times, time_data)

    def _set_intrazonal_values(
        self, time_period: str, class_name: str, skims: List[str]
    ):
        """Set the intrazonal values to 1/2 nearest neighbour for time and distance skims.

        Args:
            time_period: time period name (from config)
            class_name: highway class name (from config)
            skims: list of requested skims (from config)
        """
        for skim_name in skims:
            if skim_name in ["time", "distance", "freeflowtime", "hovdist", "tolldist"]:
                matrix_name = f"mf{time_period}_{class_name}_{skim_name}"
                self.logger.debug(f"Setting intrazonals to 0.5*min for {matrix_name}")
                data = self._matrix_cache.get_data(matrix_name)
                # NOTE: sets values for external zones as well
                np.fill_diagonal(data, np.inf)
                data[np.diag_indices_from(data)] = 0.5 * np.nanmin(data, 1)
                self._matrix_cache.set_data(matrix_name, data)

    def _export_skims(self, scenario: EmmeScenario, time_period: str):
        """Export skims to OMX files by period.

        Args:
            scenario: Emme scenario object
            time_period: time period name
        """
        # NOTE: skims in separate file by period
        self.logger.debug(
            "_export_skims: self.config.output_skim_path:{}".format(
                self.config.output_skim_path
            )
        )
        omx_file_path = self.get_abs_path(
            self.config.output_skim_path
            / self.config.output_skim_filename_tmpl.format(time_period=time_period)
        )
        self.logger.debug(
            f"export {len(self._skim_matrices)} skim matrices to {omx_file_path}"
        )
        os.makedirs(os.path.dirname(omx_file_path), exist_ok=True)
        with OMXManager(
            omx_file_path, "w", scenario, matrix_cache=self._matrix_cache
        ) as omx_file:
            omx_file.write_matrices(self._skim_matrices)

    def _log_debug_report(self, scenario: EmmeScenario, time_period: str):
        num_zones = len(scenario.zone_numbers)
        num_cells = num_zones * num_zones
        self.logger.debug(f"Highway skim summary for period {time_period}")
        self.logger.debug(
            f"Number of zones: {num_zones}. Number of O-D pairs: {num_cells}. "
            "Values outside -9999999, 9999999 are masked in summaries."
        )
        self.logger.debug(
            "name                            min       max      mean           sum"
        )
        for matrix in self._skim_matrices:
            values = self._matrix_cache.get_data(matrix)
            data = np.ma.masked_outside(values, -9999999, 9999999)
            stats = (
                f"{matrix.name:25} {data.min():9.4g} {data.max():9.4g} "
                f"{data.mean():9.4g} {data.sum(): 13.7g}"
            )
            self.logger.debug(stats)
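The intrazonal rule applied in _set_intrazonal_values above is easiest to see on a toy matrix; the values below are invented purely for illustration.

# Toy 3-zone skim: each intrazonal value becomes half the row minimum over the
# other zones (1/2 the time/distance to the nearest neighbour), as above.
import numpy as np

data = np.array([[0.0, 4.0, 10.0],
                 [4.0, 0.0, 6.0],
                 [10.0, 6.0, 0.0]])
np.fill_diagonal(data, np.inf)                      # ignore the old diagonal
data[np.diag_indices_from(data)] = 0.5 * np.nanmin(data, 1)
print(np.diag(data))  # [2. 2. 3.]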
__init__(controller)

Constructor for HighwayAssignment components.

Parameters:

    controller (RunController): Reference to current run controller. [required]
Source code in tm2py/components/network/highway/highway_assign.py, lines 92-104
def __init__(self, controller: RunController):
    """Constructor for HighwayAssignment components.

    Args:
        controller (RunController): Reference to current run controller.
    """
    super().__init__(controller)

    self.config = self.controller.config.highway

    self._matrix_cache = None
    self._skim_matrices = []
    self._class_config = None
run()

Run highway assignment.

Source code in tm2py/components/network/highway/highway_assign.py, lines 124-166
@LogStartEnd("Highway assignment and skims", level="STATUS")
def run(self):
    """Run highway assignment."""
    demand = PrepareHighwayDemand(self.controller)
    if self.controller.iteration >= 1:
        demand.run()
    for time in self.time_period_names:
        scenario = self.get_emme_scenario(
            self.controller.config.emme.highway_database_path, time
        )
        with self._setup(scenario, time):
            iteration = self.controller.iteration
            assign_classes = [
                AssignmentClass(c, time, iteration) for c in self.config.classes
            ]
            if iteration > 0:
                self._copy_maz_flow(scenario)
            else:
                self._reset_background_traffic(scenario)
            self._create_skim_matrices(scenario, assign_classes)
            assign_spec = self._get_assignment_spec(assign_classes)
            # self.logger.log_dict(assign_spec, level="DEBUG")
            with self.logger.log_start_end(
                "Run SOLA assignment with path analyses", level="INFO"
            ):
                assign = self.controller.emme_manager.tool(
                    "inro.emme.traffic_assignment.sola_traffic_assignment"
                )
                assign(assign_spec, scenario, chart_log_interval=1)

            # Subtract non-time costs from gen cost to get the raw travel time
            for emme_class_spec in assign_spec["classes"]:
                self._calc_time_skim(emme_class_spec)
            # Set intra-zonal for time and dist to be 1/2 nearest neighbour
            for class_config in self.config.classes:
                self._set_intrazonal_values(
                    time,
                    class_config["name"],
                    class_config["skims"],
                )
            self._export_skims(scenario, time)
            if self.logger.debug_enabled:
                self._log_debug_report(scenario, time)
validate_inputs()

Validate input files are correct; raise if an error is found.

Source code in tm2py/components/network/highway/highway_assign.py, lines 119-122
def validate_inputs(self):
    """Validate inputs files are correct, raise if an error is found."""
    # TODO
    pass

tm2py.config.HighwayConfig

Bases: ConfigItem

Highway assignment and skims parameters.

Properties
Source code in tm2py/config.py, lines 833-967
@dataclass(frozen=True)
class HighwayConfig(ConfigItem):
    """Highway assignment and skims parameters.

    Properties:
        generic_highway_mode_code: single character unique mode ID for entire
            highway network (no excluded_links)
        relative_gap: target relative gap stopping criteria
        max_iterations: maximum iterations stopping criteria
        area_type_buffer_dist_miles: used in the calculation to categorize link @areatype.
            The area type is determined based on the average density of nearby
            (within this buffer distance) MAZs, using (pop+jobs*2.5)/acres
        output_skim_path: relative path template from run dir for OMX output skims
        output_skim_filename_tmpl: template for OMX filename for a time period. Must include
            {time_period} in the string and end in '.omx'.
        output_skim_matrixname_tmpl: template for matrix names within OMX output skims.
            Should include {time_period}, {mode}, and {property}
        tolls: input toll specification, see HighwayTollsConfig
        maz_to_maz: maz-to-maz shortest path assignment and skim specification,
            see HighwayMazToMazConfig
        classes: highway assignment multi-class setup and skim specification,
            see HighwayClassConfig
        capclass_lookup: index cross-reference table from the link @capclass value
            to the free-flow speed, capacity, and critical speed values
    """

    generic_highway_mode_code: str = Field(min_length=1, max_length=1)
    relative_gap: float = Field(ge=0)
    max_iterations: int = Field(ge=0)
    area_type_buffer_dist_miles: float = Field(gt=0)
    output_skim_path: pathlib.Path = Field()
    output_skim_filename_tmpl: str = Field()
    output_skim_matrixname_tmpl: str = Field()
    tolls: HighwayTollsConfig = Field()
    maz_to_maz: HighwayMazToMazConfig = Field()
    classes: Tuple[HighwayClassConfig, ...] = Field()
    capclass_lookup: Tuple[HighwayCapClassConfig, ...] = Field()

    @validator("output_skim_filename_tmpl")
    def valid_skim_template(value):
        """Validate skim template has correct {} and extension."""
        assert (
            "{time_period" in value
        ), f"-> output_skim_filename_tmpl must have {{time_period}}', found {value}."
        assert (
            value[-4:].lower() == ".omx"
        ), f"-> 'output_skim_filename_tmpl must end in '.omx', found {value[-4:].lower() }"
        return value

    @validator("output_skim_matrixname_tmpl")
    def valid_skim_matrix_name_template(value):
        """Validate skim matrix template has correct {}."""
        assert (
            "{time_period" in value
        ), "-> 'output_skim_matrixname_tmpl must have {time_period}, found {value}."
        assert (
            "{property" in value
        ), "-> 'output_skim_matrixname_tmpl must have {property}, found {value}."
        assert (
            "{mode" in value
        ), "-> 'output_skim_matrixname_tmpl must have {mode}, found {value}."
        return value

    @validator("capclass_lookup")
    def unique_capclass_numbers(value):
        """Validate list of capclass_lookup has unique .capclass values."""
        capclass_ids = [i.capclass for i in value]
        error_msg = "-> capclass value must be unique in list"
        assert len(capclass_ids) == len(set(capclass_ids)), error_msg
        return value

    @validator("classes", pre=True)
    def unique_class_names(value):
        """Validate list of classes has unique .name values."""
        class_names = [highway_class["name"] for highway_class in value]
        error_msg = "-> name value must be unique in list"
        assert len(class_names) == len(set(class_names)), error_msg
        return value

    @validator("classes")
    def validate_class_mode_excluded_links(value, values):
        """Validate list of classes has unique .mode_code or .excluded_links match."""
        # validate if any mode IDs are used twice, that they have the same excluded links sets
        mode_excluded_links = {values["generic_highway_mode_code"]: set([])}
        for i, highway_class in enumerate(value):
            # maz_to_maz.mode_code must be unique
            if "maz_to_maz" in values:
                assert (
                    highway_class["mode_code"] != values["maz_to_maz"]["mode_code"]
                ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code"
            # make sure that if any mode IDs are used twice, they have the same excluded links sets
            if highway_class.mode_code in mode_excluded_links:
                ex_links1 = highway_class["excluded_links"]
                ex_links2 = mode_excluded_links[highway_class["mode_code"]]
                error_msg = (
                    f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') "
                    f"with different excluded links: {ex_links1} and {ex_links2}"
                )
                assert ex_links1 == ex_links2, error_msg
            mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links
        return value

    @validator("classes")
    def validate_class_keyword_lists(value, values):
        """Validate classes .skims, .toll, and .excluded_links values."""
        if "tolls" not in values:
            return value
        avail_skims = ["time", "dist", "hovdist", "tolldist", "freeflowtime"]
        available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"]
        avail_toll_attrs = []
        for name in values["tolls"].dst_vehicle_group_names:
            toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"]
            avail_skims.extend(toll_types)
            avail_toll_attrs.extend(["@" + name for name in toll_types])
            available_link_sets.append(f"is_toll_{name}")

        # validate class skim name list and toll attribute against toll setup
        def check_keywords(class_num, key, val, available):
            extra_keys = set(val) - set(available)
            error_msg = (
                f" -> {class_num} -> {key}: unrecognized {key} name(s): "
                f"{','.join(extra_keys)}.  Available names are: {', '.join(available)}"
            )
            assert not extra_keys, error_msg

        for i, highway_class in enumerate(value):
            check_keywords(i, "skim", highway_class["skims"], avail_skims)
            check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs)
            check_keywords(
                i,
                "excluded_links",
                highway_class["excluded_links"],
                available_link_sets,
            )
        return value
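The two output skim template fields are easiest to understand by example; the values below would satisfy the validators shown above, but they are illustrative choices rather than package defaults.

# Illustrative template values that pass valid_skim_template and
# valid_skim_matrix_name_template (assumptions, not tm2py defaults).
output_skim_filename_tmpl = "highway_skims_{time_period}.omx"
output_skim_matrixname_tmpl = "{time_period}_{mode}_{property}"

assert "{time_period" in output_skim_filename_tmpl
assert output_skim_filename_tmpl[-4:].lower() == ".omx"
for token in ("{time_period", "{mode", "{property"):
    assert token in output_skim_matrixname_tmpl
print(output_skim_filename_tmpl.format(time_period="am"))  # highway_skims_am.omx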

unique_capclass_numbers(value)

Validate list of capclass_lookup has unique .capclass values.

Source code in tm2py/config.py, lines 896-902
@validator("capclass_lookup")
def unique_capclass_numbers(value):
    """Validate list of capclass_lookup has unique .capclass values."""
    capclass_ids = [i.capclass for i in value]
    error_msg = "-> capclass value must be unique in list"
    assert len(capclass_ids) == len(set(capclass_ids)), error_msg
    return value

unique_class_names(value)

Validate list of classes has unique .name values.

Source code in tm2py/config.py, lines 904-910
@validator("classes", pre=True)
def unique_class_names(value):
    """Validate list of classes has unique .name values."""
    class_names = [highway_class["name"] for highway_class in value]
    error_msg = "-> name value must be unique in list"
    assert len(class_names) == len(set(class_names)), error_msg
    return value

valid_skim_matrix_name_template(value)

Validate skim matrix template has correct {}.

Source code in tm2py/config.py, lines 882-894
@validator("output_skim_matrixname_tmpl")
def valid_skim_matrix_name_template(value):
    """Validate skim matrix template has correct {}."""
    assert (
        "{time_period" in value
    ), "-> 'output_skim_matrixname_tmpl must have {time_period}, found {value}."
    assert (
        "{property" in value
    ), "-> 'output_skim_matrixname_tmpl must have {property}, found {value}."
    assert (
        "{mode" in value
    ), "-> 'output_skim_matrixname_tmpl must have {mode}, found {value}."
    return value

valid_skim_template(value)

Validate skim template has correct {} and extension.

Source code in tm2py/config.py, lines 871-880
@validator("output_skim_filename_tmpl")
def valid_skim_template(value):
    """Validate skim template has correct {} and extension."""
    assert (
        "{time_period" in value
    ), f"-> output_skim_filename_tmpl must have {{time_period}}', found {value}."
    assert (
        value[-4:].lower() == ".omx"
    ), f"-> 'output_skim_filename_tmpl must end in '.omx', found {value[-4:].lower() }"
    return value

validate_class_keyword_lists(value, values)

Validate classes .skims, .toll, and .excluded_links values.

Source code in tm2py/config.py, lines 935-967
@validator("classes")
def validate_class_keyword_lists(value, values):
    """Validate classes .skims, .toll, and .excluded_links values."""
    if "tolls" not in values:
        return value
    avail_skims = ["time", "dist", "hovdist", "tolldist", "freeflowtime"]
    available_link_sets = ["is_sr", "is_sr2", "is_sr3", "is_auto_only"]
    avail_toll_attrs = []
    for name in values["tolls"].dst_vehicle_group_names:
        toll_types = [f"bridgetoll_{name}", f"valuetoll_{name}"]
        avail_skims.extend(toll_types)
        avail_toll_attrs.extend(["@" + name for name in toll_types])
        available_link_sets.append(f"is_toll_{name}")

    # validate class skim name list and toll attribute against toll setup
    def check_keywords(class_num, key, val, available):
        extra_keys = set(val) - set(available)
        error_msg = (
            f" -> {class_num} -> {key}: unrecognized {key} name(s): "
            f"{','.join(extra_keys)}.  Available names are: {', '.join(available)}"
        )
        assert not extra_keys, error_msg

    for i, highway_class in enumerate(value):
        check_keywords(i, "skim", highway_class["skims"], avail_skims)
        check_keywords(i, "toll", highway_class["toll"], avail_toll_attrs)
        check_keywords(
            i,
            "excluded_links",
            highway_class["excluded_links"],
            available_link_sets,
        )
    return value

validate_class_mode_excluded_links(value, values)

Validate list of classes has unique .mode_code or .excluded_links match.

Source code in tm2py/config.py, lines 912-933
@validator("classes")
def validate_class_mode_excluded_links(value, values):
    """Validate list of classes has unique .mode_code or .excluded_links match."""
    # validate if any mode IDs are used twice, that they have the same excluded links sets
    mode_excluded_links = {values["generic_highway_mode_code"]: set([])}
    for i, highway_class in enumerate(value):
        # maz_to_maz.mode_code must be unique
        if "maz_to_maz" in values:
            assert (
                highway_class["mode_code"] != values["maz_to_maz"]["mode_code"]
            ), f"-> {i} -> mode_code: cannot be the same as the highway.maz_to_maz.mode_code"
        # make sure that if any mode IDs are used twice, they have the same excluded links sets
        if highway_class.mode_code in mode_excluded_links:
            ex_links1 = highway_class["excluded_links"]
            ex_links2 = mode_excluded_links[highway_class["mode_code"]]
            error_msg = (
                f"-> {i}: duplicated mode codes ('{highway_class['mode_code']}') "
                f"with different excluded links: {ex_links1} and {ex_links2}"
            )
            assert ex_links1 == ex_links2, error_msg
        mode_excluded_links[highway_class.mode_code] = highway_class.excluded_links
    return value

tm2py.config.HighwayClassConfig

Bases: ConfigItem

Highway assignment class definition.

Note that excluded_links, skims and toll attribute names include vehicle groups ("{vehicle}") which reference the list of highway.toll.dst_vehicle_group_names (see HighwayTollsConfig). The default example model config uses: "da", "sr2", "sr3", "vsm", "sml", "med", "lrg".

Example single class config

name = "da"
description= "drive alone"
mode_code= "d"
[[highway.classes.demand]]
    source = "household"
    name = "SOV_GP_{period}"
[[highway.classes.demand]]
    source = "air_passenger"
    name = "da"
[[highway.classes.demand]]
    source = "internal_external"
    name = "da"
excluded_links = ["is_toll_da", "is_sr2"],
value_of_time = 18.93,  # $ / hr
operating_cost_per_mile = 17.23,  # cents / mile
toll = ["@bridgetoll_da"]
skims = ["time", "dist", "freeflowtime", "bridgetoll_da"],

Properties
Source code in tm2py/config.py, lines 639-719
@dataclass(frozen=True)
class HighwayClassConfig(ConfigItem):
    """Highway assignment class definition.

    Note that excluded_links, skims and toll attribute names include
    vehicle groups ("{vehicle}") which reference the list of
    highway.toll.dst_vehicle_group_names (see HighwayTollsConfig).
    The default example model config uses:
    "da", "sr2", "sr3", "vsm", sml", "med", "lrg"

    Example single class config:
        name = "da"
        description= "drive alone"
        mode_code= "d"
        [[highway.classes.demand]]
            source = "household"
            name = "SOV_GP_{period}"
        [[highway.classes.demand]]
            source = "air_passenger"
            name = "da"
        [[highway.classes.demand]]
            source = "internal_external"
            name = "da"
        excluded_links = ["is_toll_da", "is_sr2"],
        value_of_time = 18.93,  # $ / hr
        operating_cost_per_mile = 17.23,  # cents / mile
        toll = ["@bridgetoll_da"]
        skims = ["time", "dist", "freeflowtime", "bridgetoll_da"],

    Properties:
        name: short (up to 10 character) unique reference name for the class.
            used in attribute and matrix names
        description: longer text used in attribute and matrix descriptions
        mode_code: single character mode, used to generate link.modes to
            identify subnetwork, generated from "excluded_links" keywords.
            Should be unique in the list of classes, unless multiple classes
            have identical excluded_links specification. Cannot be the
            same as used for highway.maz_to_maz.mode_code.
        value_of_time: value of time for this class in $ / hr
        operating_cost_per_mile: vehicle operating cost in cents / mile
        demand: list of OMX file and matrix keyname references,
            see HighwayClassDemandConfig
        excluded_links: list of keywords to identify links to exclude from
            this class' available subnetwork (generate link.modes)
            Options are:
                - "is_sr": is reserved for shared ride (@useclass in 2,3)
                - "is_sr2": is reserved for shared ride 2+ (@useclass == 2)
                - "is_sr3": is reserved for shared ride 3+ (@useclass == 3)
                - "is_auto_only": is reserved for autos (non-truck) (@useclass != 1)
                - "is_toll_{vehicle}": has a value (non-bridge) toll for the {vehicle} toll group
        toll: list of additional toll cost link attribute (values stored in cents),
            summed, one of "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}"
        toll_factor: optional, factor to apply to toll values in cost calculation
        pce: optional, passenger car equivalent to convert assigned demand in
            PCE units to vehicles for total assigned vehicle calculations
        skims: list of skim matrices to generate
            Options are:
                "time": pure travel time in minutes
                "dist": distance in miles
                "hovdist": distance on HOV facilities (is_sr2 or is_sr3)
                "tolldist": distance on toll facilities
                    (@tollbooth > highway.tolls.tollbooth_start_index)
                "freeflowtime": free flow travel time in minutes
                "bridgetoll_{vehicle}": bridge tolls, {vehicle} refers to toll group
                "valuetoll_{vehicle}": other, non-bridge tolls, {vehicle} refers to toll group
    """

    name: str = Field(min_length=1, max_length=10)
    veh_group_name: str = Field(min_length=1, max_length=10)
    description: Optional[str] = Field(default="")
    mode_code: str = Field(min_length=1, max_length=1)
    value_of_time: float = Field(gt=0)
    operating_cost_per_mile: float = Field(ge=0)
    pce: Optional[float] = Field(default=1.0, gt=0)
    # Note that excluded_links, skims, and toll are validated under HighwayConfig
    # against the highway.tolls.dst_vehicle_group_names entries
    excluded_links: Tuple[str, ...] = Field()
    skims: Tuple[str, ...] = Field()
    toll: Tuple[str, ...] = Field()
    toll_factor: Optional[float] = Field(default=None, gt=0)
    demand: Tuple[HighwayClassDemandConfig, ...] = Field()
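
To make the keyword cross-referencing above concrete, the following is a minimal illustrative sketch (not tm2py code; the group names are the defaults quoted above) of how the "{vehicle}" placeholders expand into the concrete skim keywords a class may list:

# Illustrative sketch only: expand "{vehicle}" keyword templates using the
# configured highway.tolls.dst_vehicle_group_names (see HighwayTollsConfig below).
dst_vehicle_group_names = ["da", "sr2", "sr3", "vsm", "sml", "med", "lrg"]
skim_keywords = ["time", "dist", "freeflowtime", "bridgetoll_{vehicle}", "valuetoll_{vehicle}"]

valid_skims = []
for keyword in skim_keywords:
    if "{vehicle}" in keyword:
        valid_skims.extend(keyword.format(vehicle=veh) for veh in dst_vehicle_group_names)
    else:
        valid_skims.append(keyword)
print(valid_skims)  # ['time', 'dist', 'freeflowtime', 'bridgetoll_da', ..., 'valuetoll_lrg']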

tm2py.config.HighwayTollsConfig

Bases: ConfigItem

Highway assignment and skim input tolls and related parameters.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class HighwayTollsConfig(ConfigItem):
    """Highway assignment and skim input tolls and related parameters.

    Properties:
        file_path: source relative file path for the highway tolls index CSV
        tollbooth_start_index: the tollbooth index that separates links with
            "bridge" tolls (index < this value) from links with "value" tolls.
            These toll attributes can then be referenced separately in the
            highway.classes[].tolls list
        src_vehicle_group_names: name used for the vehicle toll CSV column IDs,
            of the form "toll{period}_{vehicle}"
        dst_vehicle_group_names: list of names used in destination network
            for the corresponding vehicle group. Length of list must be the same
            as src_vehicle_group_names. Used for toll related attributes and
            resulting skim matrices. Cross-referenced in list of highway.classes[],
            valid keywords for:
                excluded_links: "is_toll_{vehicle}"
                tolls: "@bridgetoll_{vehicle}", "@valuetoll_{vehicle}"
                skims: "bridgetoll_{vehicle}", "valuetoll_{vehicle}"
    """

    file_path: pathlib.Path = Field()
    tollbooth_start_index: int = Field(gt=1)
    src_vehicle_group_names: Tuple[str, ...] = Field()
    dst_vehicle_group_names: Tuple[str, ...] = Field()

    @validator("dst_vehicle_group_names", always=True)
    def dst_vehicle_group_names_length(value, values):
        """Validate dst_vehicle_group_names has same length as src_vehicle_group_names."""
        if "src_vehicle_group_names" in values:
            assert len(value) == len(
                values["src_vehicle_group_names"]
            ), "dst_vehicle_group_names must be same length as src_vehicle_group_names"
            assert all(
                [len(v) <= 4 for v in value]
            ), "dst_vehicle_group_names must be 4 characters or less"
        return value

dst_vehicle_group_names_length(value, values)

Validate dst_vehicle_group_names has same length as src_vehicle_group_names.

Source code in tm2py/config.py
@validator("dst_vehicle_group_names", always=True)
def dst_vehicle_group_names_length(value, values):
    """Validate dst_vehicle_group_names has same length as src_vehicle_group_names."""
    if "src_vehicle_group_names" in values:
        assert len(value) == len(
            values["src_vehicle_group_names"]
        ), "dst_vehicle_group_names must be same length as src_vehicle_group_names"
        assert all(
            [len(v) <= 4 for v in value]
        ), "dst_vehicle_group_names must be 4 characters or less"
    return value
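
As a rough, hypothetical sketch of the src/dst cross-reference described above (all names below are invented for illustration), the toll CSV columns follow "toll{period}_{src}" while the network attributes and skims use the shorter dst names:

# Hypothetical names for illustration only; not actual tm2py configuration values.
src_vehicle_group_names = ("drive_alone", "shared_2", "large_truck")
dst_vehicle_group_names = ("da", "sr2", "lrg")  # same length as src, each 4 characters or less

period = "am"
for src, dst in zip(src_vehicle_group_names, dst_vehicle_group_names):
    csv_column = f"toll{period}_{src}"      # column expected in the tolls CSV
    bridge_attr = f"@bridgetoll_{dst}"      # link attribute / skim name for bridge tolls
    value_attr = f"@valuetoll_{dst}"        # link attribute / skim name for value tolls
    print(csv_column, "->", bridge_attr, value_attr)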

tm2py.config.DemandCountyGroupConfig

Bases: ConfigItem

Grouping of counties for assignment and demand files.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class DemandCountyGroupConfig(ConfigItem):
    """Grouping of counties for assignment and demand files.

    Properties:
        number: id number for this group, must be unique
        counties: list of one or more county names
    """

    number: int = Field()
    counties: Tuple[COUNTY_NAMES, ...] = Field()
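
For illustration only (the county names are assumptions and must match the values allowed by COUNTY_NAMES), two groups might look like:

# Illustrative groups; numbers must be unique across all groups.
group_1 = dict(number=1, counties=("San Francisco", "San Mateo"))
group_2 = dict(number=2, counties=("Santa Clara",))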

tm2py.components.network.highway.highway_maz

Assigns and skims MAZ-to-MAZ demand along shortest generalized cost path.

MAZ to MAZ demand is read from separate OMX matrices as defined under the config table highway.maz_to_maz.demand_county_groups.

The demand is expected to be short distance (e.g. <0.5 miles), or within the same TAZ. The demand is grouped into bins of origin -> all destinations, by distance (straight-line) to furthest destination. This limits the size of the shortest path calculated to the minimum required. The bin edges have been predefined after testing as (in miles): [0.0, 0.9, 1.2, 1.8, 2.5, 5.0, 10.0, max_dist]

Input:

Emme network with:

  • Link attributes: a time attribute, either timau (the resulting VDF congested time) or @free_flow_time
  • Node attributes: @maz_id, x, y, and #node_county

Demand matrices under highway.maz_to_maz.demand_file, which can use {period} and {number} placeholders, e.g. auto_{period}_MAZ_AUTO_{number}_{period}.omx

Output:

The resulting MAZ-MAZ flows are saved in the link attribute @maz_flow, which is used as background traffic in the equilibrium highway assignment.
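
The binning logic can be pictured with the simplified sketch below (illustrative only; see _group_demand in the source that follows for the actual implementation): each origin's demand records are placed in the first bin whose edge exceeds the straight-line distance, in miles, to that origin's furthest destination.

from math import sqrt

bin_edges = [0.0, 0.9, 1.2, 1.8, 2.5, 5.0, 10.0]  # miles; max_dist is appended at runtime

# illustrative demand records for one origin: (delta_x_feet, delta_y_feet, trips)
records = [(1200.0, 800.0, 3.5), (5280.0, 0.0, 1.0)]
max_dist_miles = max(sqrt(dx ** 2 + dy ** 2) for dx, dy, _ in records) / 5280.0

for edge in bin_edges[1:]:
    if max_dist_miles < edge:
        print(f"origin grouped into the bin with search radius {edge} miles")
        break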

AssignMAZSPDemand

Bases: Component

MAZ-to-MAZ shortest-path highway assignment.

Calculates shortest path between MAZs with demand in the Emme network and assigns flow.

Source code in tm2py/components/network/highway/highway_maz.py
class AssignMAZSPDemand(Component):
    """MAZ-to-MAZ shortest-path highway assignment.

    Calculates shortest path between MAZs with demand in the Emme network
    and assigns flow.
    """

    # skip Too many instance attributes recommendation, it is OK as is
    # pylint: disable=R0902

    def __init__(self, controller: RunController):
        """MAZ-to-MAZ shortest-path highway assignment.

        Args:
            controller: parent Controller object
        """

        super().__init__(controller)
        self.config = self.controller.config.highway.maz_to_maz
        self._debug = False

        # bins: performance parameter: crow-fly distance bins
        #       to limit shortest path calculation by origin to furthest destination
        #       semi-exposed for performance testing
        self._bin_edges = _default_bin_edges

        # Lazily-loaded Emme Properties
        self._emmebank = None
        self._eb_dir = None

        # Internal attributes to track data through the sequence of steps
        self._scenario = None
        self._mazs = None
        self._demand = _defaultdict(lambda: [])
        self._max_dist = 0
        self._network = None
        self._root_index = None
        self._leaf_index = None

    @property
    def emmebank(self):
        if self._emmebank is None:
            self._emmebank = self.controller.emme_manager.emmebank(
                self.get_abs_path(self.controller.config.emme.highway_database_path)
            )
        return self._emmebank

    @property
    def eb_dir(self):
        if self._eb_dir is None:
            self._eb_dir = os.path.dirname(self.emmebank.path)
        return self._eb_dir

    def validate_inputs(self):
        """Validate inputs files are correct, raise if an error is found."""
        # TODO
        pass

    @LogStartEnd()
    def run(self):
        """Run MAZ-to-MAZ shortest path assignment."""

        county_groups = {}
        for group in self.config.demand_county_groups:
            county_groups[group.number] = group.counties
        for time in self.time_period_names:
            self._scenario = self.get_emme_scenario(self.emmebank.path, time)
            with self._setup(time):
                self._prepare_network()
                for i, names in county_groups.items():
                    maz_ids = self._get_county_mazs(names)
                    if len(maz_ids) == 0:
                        self.logger.log(
                            f"warning: no mazs for counties {', '.join(names)}"
                        )
                        continue
                    self._process_demand(time, i, maz_ids)
                demand_bins = self._group_demand()
                for i, demand_group in enumerate(demand_bins):
                    self._find_roots_and_leaves(demand_group["demand"])
                    self._set_link_cost_maz()
                    self._run_shortest_path(time, i, demand_group["dist"])
                    self._assign_flow(time, i, demand_group["demand"])

    @_context
    def _setup(self, time: str):
        """Context setup / teardown, initializes internal attributes.

        Args:
            time: name of the time period
        """
        self._mazs = None
        self._demand = _defaultdict(lambda: [])
        self._max_dist = 0
        self._network = None
        self._root_index = None
        self._leaf_index = None
        attributes = [
            ("LINK", "@link_cost", "total cost MAZ-MAZ"),
            ("LINK", "@link_cost_maz", "cost MAZ-MAZ, unused MAZs blocked"),
            ("NODE", "@maz_root", "Flag for MAZs which are roots"),
            ("NODE", "@maz_leaf", "Flag for MAZs which are leaves"),
        ]
        for domain, name, desc in attributes:
            self.logger.log(f"Create temp {domain} attr: {name}, {desc}", level="TRACE")
        with self.controller.emme_manager.temp_attributes_and_restore(
            self._scenario, attributes
        ):
            try:
                with self.logger.log_start_end(
                    f"MAZ assign for period {time} scenario {self._scenario}"
                ):
                    yield
            finally:
                if not self._debug:
                    self._mazs = None
                    self._demand = None
                    self._network = None
                    self._root_index = None
                    self._leaf_index = None
                    # delete sp path files
                    for bin_no in range(len(self._bin_edges)):
                        file_path = os.path.join(
                            self._eb_dir, f"sp_{time}_{bin_no}.ebp"
                        )
                        if os.path.exists(file_path):
                            os.remove(file_path)

    def _prepare_network(self):
        """Calculate link cost (travel time + bridge tolls + operating cost) and load network.

        Reads Emme network from disk for later node lookups. Optimized to only load
        attribute values of interest, additional attributes must be added in
        order to be read from disk.
        """
        if self._scenario.has_traffic_results:
            time_attr = "(@free_flow_time.max.timau)"
        else:
            time_attr = "@free_flow_time"
        self.logger.log(f"Calculating link costs using time {time_attr}", level="DEBUG")
        vot = self.config.value_of_time
        op_cost = self.config.operating_cost_per_mile
        net_calc = NetworkCalculator(self._scenario)
        report = net_calc(
            "@link_cost", f"{time_attr} + 0.6 / {vot} * (length * {op_cost})"
        )
        self.logger.log("Link cost calculation report", level="TRACE")
        self.logger.log_dict(report, level="TRACE")
        self._network = self.controller.emme_manager.get_network(
            self._scenario, {"NODE": ["@maz_id", "x", "y", "#node_county"], "LINK": []}
        )
        self._network.create_attribute("LINK", "temp_flow")

    def _get_county_mazs(self, counties: List[str]) -> List[EmmeNode]:
        """Get all MAZ nodes which are located in one of these counties.

        Uses the node attribute #node_county to identify the node location.
        Name must be an exact match. Caches a mapping of the county names
        to nodes so nodes are processed only once.

        Args:
            counties: list of county names

        Returns:
            List of MAZ nodes (Emme Node) which are in these counties.
        """
        self.logger.log(
            f"Processing county MAZs for {', '.join(counties)}", level="DETAIL"
        )
        network = self._network
        # NOTE: every maz must have a valid #node_county
        if self._mazs is None:
            self._mazs = _defaultdict(lambda: [])
            for node in network.nodes():
                if node["@maz_id"]:
                    self._mazs[node["#node_county"]].append(node)
        mazs = []
        for county in counties:
            mazs.extend(self._mazs[county])
        self.logger.log(f"Num MAZs {len(mazs)}", level="DEBUG")
        return sorted(mazs, key=lambda n: n["@maz_id"])

    def _process_demand(self, time: str, index: int, maz_ids: List[EmmeNode]):
        """Loads the demand from file and groups by origin node.

        Sets the demand to self._demand for later processing, grouping the demand in
        a dictionary by origin node (Emme Node object) to list of dictionaries
        {"orig": orig_node, "dest": dest_node, "dem": demand, "dist": dist}

        Args:
            time: time period name
            index: group index of the demand file, used to find the file by name
            maz_ids: indexed list of MAZ ID nodes for the county group
                (active counties for this demand file)
        """
        self.logger.log(
            f"Process demand for time period {time} index {index}", level="DETAIL"
        )
        data = self._read_demand_array(time, index)
        origins, destinations = data.nonzero()
        self.logger.log(
            f"non-zero origins {len(origins)} destinations {len(destinations)}",
            level="DEBUG",
        )
        total_demand = 0
        for orig, dest in zip(origins, destinations):
            # skip intra-maz demand
            if orig == dest:
                continue
            orig_node = maz_ids[orig]
            dest_node = maz_ids[dest]
            dist = _sqrt(
                (dest_node.x - orig_node.x) ** 2 + (dest_node.y - orig_node.y) ** 2
            )
            if dist > self._max_dist:
                self._max_dist = dist
            demand = data[orig][dest]
            total_demand += demand
            self._demand[orig_node].append(
                {
                    "orig": orig_node,
                    "dest": dest_node,
                    "dem": demand,
                    "dist": dist,
                }
            )
        self.logger.log(f"Max distance found {self._max_dist}", level="DEBUG")
        self.logger.log(f"Total inter-zonal demand {total_demand}", level="DEBUG")

    def _read_demand_array(self, time: str, index: int) -> NumpyArray:
        """Load the demand from file with the specified time and index name.

        Args:
            time: time period name
            index: group index of the demand file, used to find the file by name
        """
        file_path_tmplt = self.get_abs_path(self.config.demand_file)
        omx_file_path = self.get_abs_path(
            file_path_tmplt.format(period=time, number=index)
        )
        self.logger.log(f"Reading demand from {omx_file_path}", level="DEBUG")
        with OMXManager(omx_file_path, "r") as omx_file:
            demand_array = omx_file.read("M0")
        return demand_array

    def _group_demand(
        self,
    ) -> List[Dict[str, Union[float, List[Dict[str, Union[float, EmmeNode]]]]]]:
        """Process the demand loaded from files \
            and create groups based on the origin to the furthest destination with demand.

        Returns:
            List of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}

        """
        self.logger.log("Grouping demand in distance buckets", level="DETAIL")
        # group demand from same origin into distance bins by furthest
        # distance destination to limit shortest path search radius
        bin_edges = self._bin_edges[:]
        if bin_edges[-1] < self._max_dist / 5280.0:
            bin_edges.append(self._max_dist / 5280.0)

        demand_groups = [
            {"dist": edge, "demand": []} for i, edge in enumerate(bin_edges[1:])
        ]
        for data in self._demand.values():
            max_dist = max(entry["dist"] for entry in data) / 5280.0
            for group in demand_groups:
                if max_dist < group["dist"]:
                    group["demand"].extend(data)
                    break
        for group in demand_groups:
            self.logger.log(
                f"bin dist {group['dist']}, size {len(group['demand'])}", level="DEBUG"
            )
        # Filter out groups without any demand
        demand_groups = [group for group in demand_groups if group["demand"]]
        return demand_groups

    def _find_roots_and_leaves(self, demand: List[Dict[str, Union[float, EmmeNode]]]):
        """Label available MAZ root nodes and leaf nodes for the path calculation.

        The MAZ nodes which are found as origins in the demand are "activated"
        by setting @maz_root to non-zero, and similarly the leaves have @maz_leaf
        set to non-zero.

        Args:
            demand: list of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}
        """
        network = self._network
        attrs_to_init = [("NODE", ["@maz_root", "@maz_leaf"]), ("LINK", ["maz_cost"])]
        for domain, attrs in attrs_to_init:
            for name in attrs:
                if name in network.attributes(domain):
                    network.delete_attribute(domain, name)
                network.create_attribute(domain, name)
        root_maz_ids = {}
        leaf_maz_ids = {}
        for data in demand:
            o_node, d_node = data["orig"], data["dest"]
            root_maz_ids[o_node.number] = o_node["@maz_root"] = o_node["@maz_id"]
            leaf_maz_ids[d_node.number] = d_node["@maz_leaf"] = d_node["@maz_id"]
        self._root_index = {p: i for i, p in enumerate(sorted(root_maz_ids.keys()))}
        self._leaf_index = {q: i for i, q in enumerate(sorted(leaf_maz_ids.keys()))}
        self.controller.emme_manager.copy_attr_values(
            "NODE", self._network, self._scenario, ["@maz_root", "@maz_leaf"]
        )

    def _set_link_cost_maz(self):
        """Set link cost used in the shortest path forbidden using unavailable connectors.

        Copy the pre-calculated cost @link_cost to @link_cost_maz,
        setting value to 1e20 on connectors to unused zone leaves / from
        unused roots.
        """
        # forbid egress from MAZ nodes which are not demand roots /
        #        access to MAZ nodes which are not demand leaves
        net_calc = NetworkCalculator(self._scenario)
        net_calc.add_calc("@link_cost_maz", "@link_cost")
        net_calc.add_calc("@link_cost_maz", "1e20", "@maz_root=0 and !@maz_id=0")
        net_calc.add_calc("@link_cost_maz", "1e20", "@maz_leafj=0 and !@maz_idj=0")
        net_calc.run()

    @LogStartEnd(level="DETAIL")
    def _run_shortest_path(self, time: str, bin_no: int, max_radius: float):
        """Run the shortest path tool to generate paths between the marked nodes.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment
            max_radius: max unit coordinate distance to limit search tree
        """
        shortest_paths_tool = self.controller.emme_manager.tool(
            "inro.emme.network_calculation.shortest_path"
        )
        max_radius = max_radius * 5280 + 100  # add some buffer for rounding error
        ext = "ebp" if _USE_BINARY else "txt"
        file_name = f"sp_{time}_{bin_no}.{ext}"

        spec = {
            "type": "SHORTEST_PATH",
            "modes": [self.config.mode_code],
            "root_nodes": "@maz_root",
            "leaf_nodes": "@maz_leaf",
            "link_cost": "@link_cost_maz",
            "path_constraints": {
                "max_radius": max_radius,
                "uturn_allowed": False,
                "through_leaves": False,
                "through_centroids": False,
                "exclude_forbidden_turns": False,
            },
            "results": {
                "skim_output": {
                    "file": "",
                    "format": "TEXT",
                    "return_numpy": False,
                    "analyses": [],
                },
                "path_output": {
                    "format": "BINARY" if _USE_BINARY else "TEXT",
                    "file": os.path.join(self._eb_dir, file_name),
                },
            },
            "performance_settings": {
                "number_of_processors": self.controller.num_processors,
                "direction": "FORWARD",
                "method": "STANDARD",
            },
        }
        shortest_paths_tool(spec, self._scenario)

    def _assign_flow(
        self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]]
    ):
        """Assign the demand along the paths generated from the shortest path tool.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment
            demand: list of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}
        """
        if _USE_BINARY:
            self._assign_flow_binary(time, bin_no, demand)
        else:
            self._assign_flow_text(time, bin_no, demand)

    def _assign_flow_text(
        self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]]
    ):
        """Assign the demand along the paths generated from the shortest path tool.

        The paths are read from a text format file, see Emme help for details.
        Demand is summed in self._network (in memory) using temp_flow attribute
        and written to scenario (Emmebank / disk) @maz_flow.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment
            demand: list of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}
        """
        paths = self._load_text_format_paths(time, bin_no)
        not_assigned, assigned = 0, 0
        for data in demand:
            orig, dest, dem = data["orig"].number, data["dest"].number, data["dem"]
            path = paths.get(orig, {}).get(dest)
            if path is None:
                not_assigned += dem
                continue
            i_node = orig
            for j_node in path:
                link = self._network.link(i_node, j_node)
                link["temp_flow"] += dem
                i_node = j_node
            assigned += dem
        self.logger.log(f"ASSIGN bin {bin_no}: total: {len(demand)}", level="DEBUG")
        self.logger.log(
            f"assigned: {assigned}, not assigned: {not_assigned}", level="DEBUG"
        )

    def _load_text_format_paths(
        self, time: str, bin_no: int
    ) -> Dict[int, Dict[int, List[int]]]:
        """Load all paths from text file and return as nested dictionary.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment

        Returns:
            All paths as a nested dictionary, path = paths[origin][destination],
            using the node IDs as integers.
        """
        paths = _defaultdict(lambda: {})
        with open(
            os.path.join(self._eb_dir, f"sp_{time}_{bin_no}.txt"),
            "r",
            encoding="utf8",
        ) as paths_file:
            for line in paths_file:
                nodes = [int(x) for x in line.split()]
                paths[nodes[0]][nodes[-1]] = nodes[1:]
        return paths

    def _assign_flow_binary(
        self, time: str, bin_no: int, demand: List[Dict[str, Union[float, EmmeNode]]]
    ):
        """Assign the demand along the paths generated from the shortest path tool.

        The paths are read from a binary format file, see Emme help for details.
        Demand is summed in self._network (in memory) using temp_flow attribute
        and written to scenario (Emmebank / disk) @maz_flow.

        Args:
            time: time period name
            bin_no: bin number (id) for this demand segment
            demand: list of dictionaries, containing the demand in the format
                {"orig": EmmeNode, "dest": EmmeNode, "dem": float (demand value)}
        """
        file_name = f"sp_{time}_{bin_no}.ebp"
        with open(os.path.join(self._eb_dir, file_name), "rb") as paths_file:
            # read set of path pointers by Orig-Dest sequence from file
            offset, leaves_nb, path_indicies = self._get_path_indices(paths_file)
            assigned = 0
            not_assigned = 0
            bytes_read = offset * 8
            # for all orig-dest pairs with demand, load path from file
            for data in demand:
                # get file position based on orig-dest index
                start, end = self._get_path_location(
                    data["orig"].number, data["dest"].number, leaves_nb, path_indicies
                )
                # no path found, disconnected zone
                if start == end:
                    not_assigned += data["dem"]
                    continue
                paths_file.seek(start * 4 + offset * 8)
                self._assign_path_flow(paths_file, start, end, data["dem"])
                assigned += data["dem"]
                bytes_read += (end - start) * 4
        self.controller.emme_manager.copy_attr_values(
            "LINK", self._network, self._scenario, ["temp_flow"], ["@maz_flow"]
        )
        self.logger.log(
            f"ASSIGN bin {bin_no}, total {len(demand)}, assign "
            f"{assigned}, not assign {not_assigned}, bytes {bytes_read}",
            level="DEBUG",
        )

    @staticmethod
    def _get_path_indices(paths_file: BinaryIO) -> [int, int, _array.array]:
        """Get the path header indices.

        See the Emme Shortest path tool doc for additional details on reading
        this file.

        Args:
            paths_file: binary file access to the generated paths file

        Returns:
            Two ints and an array of ints: offset, leaves_nb, path_indicies
            offset: starting index to read the paths
            leaves_nb: number of leaves in the shortest path file
            path_indicies: array of the start index for each root, leaf path in paths_file.
        """
        # read first 4 integers from file (Q=64-bit unsigned integers)
        header = _array.array("Q")
        header.fromfile(paths_file, 4)
        roots_nb, leaves_nb = header[2:4]
        # Load sequence of path indices (positions by orig-dest index),
        # pointing to list of path node IDs in file
        path_indicies = _array.array("Q")
        path_indicies.fromfile(paths_file, roots_nb * leaves_nb + 1)
        offset = roots_nb * leaves_nb + 1 + 4
        return offset, leaves_nb, path_indicies

    def _get_path_location(
        self,
        orig: EmmeNode,
        dest: EmmeNode,
        leaves_nb: int,
        path_indicies: _array.array,
    ) -> [int, int]:
        """Get the location in the paths_file to read.

        Args:
            orig: Emme Node object, origin MAZ to query the path
            dest: Emme Node object, destination MAZ to query the path
            leaves_nb: number of leaves
            path_indicies: array of the start index for each root, leaf path in paths_file.

        Returns:
            Two integers, start, end
            start: starting index to read Node ID bytes from paths_file
            end: ending index to read bytes from paths_file
        """
        p_index = self._root_index[orig]
        q_index = self._leaf_index[dest]
        index = p_index * leaves_nb + q_index
        start = path_indicies[index]
        end = path_indicies[index + 1]
        return start, end

    def _assign_path_flow(
        self, paths_file: BinaryIO, start: int, end: int, demand: float
    ):
        """Add demand to link temp_flow for the path.

        Args:
            paths_file: binary file access to read path from
            start: starting index to read Node ID bytes from paths_file
            end: ending index to read bytes from paths_file
            demand: flow demand to add on link
        """
        # load sequence of Node IDs which define the path (L=32-bit unsigned integers)
        path = _array.array("L")
        path.fromfile(paths_file, end - start)
        # process path to sequence of links and add flow
        path_iter = iter(path)
        i_node = next(path_iter)
        for j_node in path_iter:
            link = self._network.link(i_node, j_node)
            link["temp_flow"] += demand
            i_node = j_node
__init__(controller)

MAZ-to-MAZ shortest-path highway assignment.

Parameters:

    controller (RunController): parent Controller object. Required.
Source code in tm2py/components/network/highway/highway_maz.py
def __init__(self, controller: RunController):
    """MAZ-to-MAZ shortest-path highway assignment.

    Args:
        controller: parent Controller object
    """

    super().__init__(controller)
    self.config = self.controller.config.highway.maz_to_maz
    self._debug = False

    # bins: performance parameter: crow-fly distance bins
    #       to limit shortest path calculation by origin to furthest destination
    #       semi-exposed for performance testing
    self._bin_edges = _default_bin_edges

    # Lazily-loaded Emme Properties
    self._emmebank = None
    self._eb_dir = None

    # Internal attributes to track data through the sequence of steps
    self._scenario = None
    self._mazs = None
    self._demand = _defaultdict(lambda: [])
    self._max_dist = 0
    self._network = None
    self._root_index = None
    self._leaf_index = None
run()

Run MAZ-to-MAZ shortest path assignment.

Source code in tm2py/components/network/highway/highway_maz.py
@LogStartEnd()
def run(self):
    """Run MAZ-to-MAZ shortest path assignment."""

    county_groups = {}
    for group in self.config.demand_county_groups:
        county_groups[group.number] = group.counties
    for time in self.time_period_names:
        self._scenario = self.get_emme_scenario(self.emmebank.path, time)
        with self._setup(time):
            self._prepare_network()
            for i, names in county_groups.items():
                maz_ids = self._get_county_mazs(names)
                if len(maz_ids) == 0:
                    self.logger.log(
                        f"warning: no mazs for counties {', '.join(names)}"
                    )
                    continue
                self._process_demand(time, i, maz_ids)
            demand_bins = self._group_demand()
            for i, demand_group in enumerate(demand_bins):
                self._find_roots_and_leaves(demand_group["demand"])
                self._set_link_cost_maz()
                self._run_shortest_path(time, i, demand_group["dist"])
                self._assign_flow(time, i, demand_group["demand"])
validate_inputs()

Validate inputs files are correct, raise if an error is found.

Source code in tm2py/components/network/highway/highway_maz.py
def validate_inputs(self):
    """Validate inputs files are correct, raise if an error is found."""
    # TODO
    pass

SkimMAZCosts

Bases: Component

MAZ-to-MAZ shortest-path skim of time, distance and toll.

Source code in tm2py/components/network/highway/highway_maz.py
class SkimMAZCosts(Component):
    """MAZ-to-MAZ shortest-path skim of time, distance and toll."""

    def __init__(self, controller: RunController):
        """MAZ-to-MAZ shortest-path skim of time, distance and toll.

        Args:
            controller: parent RunController object
        """
        super().__init__(controller)
        self.config = self.controller.config.highway.maz_to_maz
        self.ref_period_name = self.config.skim_period
        # TODO add config requirement that this must be a valid time period
        self._scenario = None
        self._network = None

    @property
    def scenario(self):
        if self._scenario is None:
            self._scenario = self.get_emme_scenario(
                self.controller.config.emme.highway_database_path, self.ref_period_name
            )
        return self._scenario

    def validate_inputs(self):
        """Validate inputs files are correct, raise if an error is found."""
        # TODO
        pass

    @LogStartEnd()
    def run(self):
        """Run shortest path skims for all available MAZ-to-MAZ O-D pairs.

        Runs a shortest path builder for each county, using max_skim_cost
        to limit the search. The valid generalized cost (time + cost), distance and toll
        (drive alone) are written to CSV at the output_skim_file path:
        FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL

        The following config inputs are used directly in this component. Note also
        that the network mode_code is prepared in the highway_network component
        using the excluded_links.

        config.highway.maz_to_maz:
            skim_period: name of the period used for the skim, must match one of the
                defined config.time_periods
            demand_county_groups: used for the list of counties, creates a list out
                of all listed counties under [].counties
            output_skim_file: relative path to save the skims
            value_of_time: value of time used to convert tolls and auto operating cost
            operating_cost_per_mile: auto operating cost
            max_skim_cost: max cost value used to limit the shortest path search
            mode_code: single character mode used to identify the MAZ-to-MAZ subnetwork
        """

        # prepare output file and write header
        output = self.get_abs_path(self.config.output_skim_file)
        os.makedirs(os.path.dirname(output), exist_ok=True)
        with open(output, "w", encoding="utf8") as output_file:
            output_file.write("FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL\n")
        counties = []
        for group in self.config.demand_county_groups:
            counties.extend(group.counties)
        with self._setup():
            self._prepare_network()
            for county in counties:
                num_roots = self._mark_roots(county)
                if num_roots == 0:
                    continue
                sp_values = self._run_shortest_path()
                self._export_results(sp_values)

    @_context
    def _setup(self):
        """Creates the temp attributes used in the component."""
        attributes = [
            ("LINK", "@link_cost", "total cost MAZ-MAZ"),
            ("NODE", "@maz_root", "selected roots (origins)"),
        ]
        with self.controller.emme_manager.temp_attributes_and_restore(
            self.scenario, attributes
        ):
            try:
                yield
            finally:
                self._network = None  # clear network obj ref to free memory

    @LogStartEnd(level="DEBUG")
    def _prepare_network(self):
        """Calculates the link cost in @link_cost and loads the network to self._network."""
        net_calc = NetworkCalculator(self._scenario)
        if self._scenario.has_traffic_results:
            time_attr = "(@free_flow_time.max.timau)"
        else:
            time_attr = "@free_flow_time"
        self.logger.log(f"Time attribute {time_attr}", level="DEBUG")
        vot = self.config.value_of_time
        op_cost = self.config.operating_cost_per_mile
        net_calc("@link_cost", f"{time_attr} + 0.6 / {vot} * (length * {op_cost})")
        self._network = self.controller.emme_manager.get_network(
            self.scenario, {"NODE": ["@maz_id", "#node_county"]}
        )

    def _mark_roots(self, county: str) -> int:
        """Mark the available roots in the county."""
        count_roots = 0
        for node in self._network.nodes():
            if node["@maz_id"] > 0 and node["#node_county"] == county:
                node["@maz_root"] = node["@maz_id"]
                count_roots += 1
            else:
                node["@maz_root"] = 0
        values = self._network.get_attribute_values("NODE", ["@maz_root"])
        self.scenario.set_attribute_values("NODE", ["@maz_root"], values)
        return count_roots

    @LogStartEnd(level="DETAIL")
    def _run_shortest_path(self) -> Dict[str, NumpyArray]:
        """Run shortest paths tool and return dictionary of skim results name, numpy arrays.

        O-D pairs are limited by a max cost value from config.highway.maz_to_maz.max_skim_cost,
        from roots marked by @maz_root to all available leaves at @maz_id.

        Returns:
            A dictionary with keys "COST", "DISTANCE", and "BRIDGETOLL", and numpy
            arrays of SP values for available O-D pairs
        """
        shortest_paths_tool = self.controller.emme_manager.tool(
            "inro.emme.network_calculation.shortest_path"
        )
        max_cost = float(self.config.max_skim_cost)
        spec = {
            "type": "SHORTEST_PATH",
            "modes": [self.config.mode_code],
            "root_nodes": "@maz_root",
            "leaf_nodes": "@maz_id",
            "link_cost": "@link_cost",
            "path_constraints": {
                "max_cost": max_cost,
                "uturn_allowed": False,
                "through_leaves": False,
                "through_centroids": False,
                "exclude_forbidden_turns": False,
            },
            "results": {
                "skim_output": {
                    "return_numpy": True,
                    "analyses": [
                        {
                            "component": "SHORTEST_PATH_COST",
                            "operator": "+",
                            "name": "COST",
                            "description": "",
                        },
                        {
                            "component": "length",
                            "operator": "+",
                            "name": "DISTANCE",
                            "description": "",
                        },
                        {
                            "component": "@bridgetoll_da",
                            "operator": "+",
                            "name": "BRIDGETOLL",
                            "description": "",
                        },
                    ],
                    "format": "OMX",
                }
            },
            "performance_settings": {
                "number_of_processors": self.controller.num_processors,
                "direction": "FORWARD",
                "method": "STANDARD",
            },
        }
        sp_values = shortest_paths_tool(spec, self.scenario)
        return sp_values

    def _export_results(self, sp_values: Dict[str, NumpyArray]):
        """Write matrix skims to CSV.

        The matrices are filtered to omit rows for which the COST is
        <= 0 or >= 1e19 (Emme uses 1e20 to indicate inaccessible zone pairs).

        sp_values: dictionary of matrix costs, with the three keys
            "COST", "DISTANCE", and "BRIDGETOLL" and Numpy arrays of values
        """
        # get list of MAZ IDS
        roots = [
            node["@maz_root"] for node in self._network.nodes() if node["@maz_root"]
        ]
        leaves = [node["@maz_id"] for node in self._network.nodes() if node["@maz_id"]]
        # build dataframe with output data and to/from MAZ ids
        root_ids = np.repeat(roots, len(leaves))
        leaf_ids = leaves * len(roots)
        result_df = pd.DataFrame(
            {
                "FROM_ZONE": root_ids,
                "TO_ZONE": leaf_ids,
                "COST": sp_values["COST"].flatten(),
                "DISTANCE": sp_values["DISTANCE"].flatten(),
                "BRIDGETOLL": sp_values["BRIDGETOLL"].flatten(),
            }
        )
        # drop 0's / 1e20
        result_df = result_df.query("COST > 0 & COST < 1e19")
        # write remaining values to text file
        # FROM_ZONE,TO_ZONE,COST,DISTANCE,BRIDGETOLL
        output = self.get_abs_path(self.config.output_skim_file)
        with open(output, "a", newline="", encoding="utf8") as output_file:
            result_df.to_csv(output_file, header=False, index=False)
__init__(controller)

MAZ-to-MAZ shortest-path skim of time, distance and toll.

Parameters:

    controller (RunController): parent RunController object. Required.
Source code in tm2py/components/network/highway/highway_maz.py
def __init__(self, controller: RunController):
    """MAZ-to-MAZ shortest-path skim of time, distance and toll.

    Args:
        controller: parent RunController object
    """
    super().__init__(controller)
    self.config = self.controller.config.highway.maz_to_maz
    self.ref_period_name = self.config.skim_period
    # TODO add config requirement that this must be a valid time period
    self._scenario = None
    self._network = None
run()

Run shortest path skims for all available MAZ-to-MAZ O-D pairs.

Runs a shortest path builder for each county, using max_skim_cost to limit the search. The valid generalized cost (time + cost), distance and toll (drive alone) are written to CSV at the output_skim_file path: FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL

The following config inputs are used directly in this component. Note also that the network mode_code is prepared in the highway_network component using the excluded_links.

config.highway.maz_to_maz:

  • skim_period: name of the period used for the skim, must match one of the defined config.time_periods
  • demand_county_groups: used for the list of counties, creates a list out of all listed counties under [].counties
  • output_skim_file: relative path to save the skims
  • value_of_time: value of time used to convert tolls and auto operating cost
  • operating_cost_per_mile: auto operating cost
  • max_skim_cost: max cost value used to limit the shortest path search
  • mode_code: single character mode used to identify the MAZ-to-MAZ subnetwork

Source code in tm2py/components/network/highway/highway_maz.py
@LogStartEnd()
def run(self):
    """Run shortest path skims for all available MAZ-to-MAZ O-D pairs.

    Runs a shortest path builder for each county, using max_skim_cost
    to limit the search. The valid generalized cost (time + cost), distance and toll
    (drive alone) are written to CSV at the output_skim_file path:
    FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL

    The following config inputs are used directly in this component. Note also
    that the network mode_code is prepared in the highway_network component
    using the excluded_links.

    config.highway.maz_to_maz:
        skim_period: name of the period used for the skim, must match one of the
            defined config.time_periods
        demand_county_groups: used for the list of counties, creates a list out
            of all listed counties under [].counties
        output_skim_file: relative path to save the skims
        value_of_time: value of time used to convert tolls and auto operating cost
        operating_cost_per_mile: auto operating cost
        max_skim_cost: max cost value used to limit the shortest path search
        mode_code: single character mode used to identify the MAZ-to-MAZ subnetwork
    """

    # prepare output file and write header
    output = self.get_abs_path(self.config.output_skim_file)
    os.makedirs(os.path.dirname(output), exist_ok=True)
    with open(output, "w", encoding="utf8") as output_file:
        output_file.write("FROM_ZONE, TO_ZONE, COST, DISTANCE, BRIDGETOLL\n")
    counties = []
    for group in self.config.demand_county_groups:
        counties.extend(group.counties)
    with self._setup():
        self._prepare_network()
        for county in counties:
            num_roots = self._mark_roots(county)
            if num_roots == 0:
                continue
            sp_values = self._run_shortest_path()
            self._export_results(sp_values)
validate_inputs()

Validate inputs files are correct, raise if an error is found.

Source code in tm2py/components/network/highway/highway_maz.py
def validate_inputs(self):
    """Validate inputs files are correct, raise if an error is found."""
    # TODO
    pass

tm2py.config.HighwayMazToMazConfig

Bases: ConfigItem

Highway MAZ to MAZ shortest path assignment and skim parameters.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class HighwayMazToMazConfig(ConfigItem):
    """Highway MAZ to MAZ shortest path assignment and skim parameters.

    Properties:
        mode_code: single character mode, used to generate link.modes to
            identify subnetwork, generated from "excluded_links" keywords,
            plus including MAZ connectors.
        value_of_time: value of time for this class in $ / hr
        operating_cost_per_mile: vehicle operating cost in cents / mile
        max_skim_cost: max shortest path cost to search for MAZ-to-MAZ
            skims, in generalized cost units (includes operating cost
            converted to minutes)
        excluded_links: list of keywords to identify links to exclude from
            MAZ-to-MAZ paths, see HighwayClassConfig.excluded_links
        demand_file: relative path to find the input demand files;
            can use placeholders for {period} and {number}, where
            {period} is the time_period.name (see TimePeriodConfig)
            and {number} is the demand_county_groups[].number
            (see DemandCountyGroupConfig),
            e.g.: auto_{period}_MAZ_AUTO_{number}_{period}.omx
        demand_county_groups: list of demand county groups, see DemandCountyGroupConfig
        skim_period: period name to use for the shortest path skims, must
            match one of the names listed in the time_periods
        output_skim_file: relative path to resulting MAZ-to-MAZ skims
    """

    mode_code: str = Field(min_length=1, max_length=1)
    value_of_time: float = Field(gt=0)
    operating_cost_per_mile: float = Field(ge=0)
    max_skim_cost: float = Field(gt=0)
    excluded_links: Tuple[str, ...] = Field()
    demand_file: pathlib.Path = Field()
    demand_county_groups: Tuple[DemandCountyGroupConfig, ...] = Field()
    skim_period: str = Field()
    output_skim_file: pathlib.Path = Field()

    @validator("demand_county_groups")
    def unique_group_numbers(value):
        """Validate list of demand_county_groups has unique .number values."""
        group_ids = [group.number for group in value]
        assert len(group_ids) == len(set(group_ids)), "-> number value must be unique"
        return value
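
For illustration, the {period} and {number} placeholders in demand_file resolve with plain str.format, mirroring how the MAZ assignment component locates each demand file (the file name is the example from the docstring above; the period name is assumed):

demand_file = "auto_{period}_MAZ_AUTO_{number}_{period}.omx"  # example template from above
print(demand_file.format(period="AM", number=1))
# -> auto_AM_MAZ_AUTO_1_AM.omx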

unique_group_numbers(value)

Validate list of demand_county_groups has unique .number values.

Source code in tm2py/config.py
@validator("demand_county_groups")
def unique_group_numbers(value):
    """Validate list of demand_county_groups has unique .number values."""
    group_ids = [group.number for group in value]
    assert len(group_ids) == len(set(group_ids)), "-> number value must be unique"
    return value

Transit Network Components

tm2py.components.network.transit.transit_assign

Transit assignment module.

TransitAssignment

Bases: Component

Run transit assignment.

Source code in tm2py/components/network/transit/transit_assign.py
class TransitAssignment(Component):
    """Run transit assignment."""

tm2py.components.network.transit.transit_skim

Transit skims module.

TransitSkim

Bases: Component

Run transit skims.

Source code in tm2py/components/network/transit/transit_skim.py
class TransitSkim(Component):
    """Run transit skims."""

tm2py.config.TransitModeConfig

Bases: ConfigItem

Transit mode definition (see also mode in the Emme API).

Source code in tm2py/config.py
@dataclass(frozen=True)
class TransitModeConfig(ConfigItem):
    """Transit mode definition (see also mode in the Emme API)."""

    type: Literal["WALK", "ACCESS", "EGRESS", "LOCAL", "PREMIUM"]
    assign_type: Literal["TRANSIT", "AUX_TRANSIT"]
    mode_id: str = Field(min_length=1, max_length=1)
    name: str = Field(max_length=10)
    in_vehicle_perception_factor: Optional[float] = Field(default=None, ge=0)
    speed_miles_per_hour: Optional[float] = Field(default=None, gt=0)

    @validator("in_vehicle_perception_factor", always=True)
    def in_vehicle_perception_factor_valid(value, values):
        """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "TRANSIT":
            assert value is not None, "must be specified when assign_type==TRANSIT"
        return value

    @validator("speed_miles_per_hour", always=True)
    def speed_miles_per_hour_valid(value, values):
        """Validate speed_miles_per_hour exists if assign_type is AUX_TRANSIT."""
        if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT":
            assert value is not None, "must be specified when assign_type==AUX_TRANSIT"
        return value

in_vehicle_perception_factor_valid(value, values)

Validate in_vehicle_perception_factor exists if assign_type is TRANSIT.

Source code in tm2py/config.py
@validator("in_vehicle_perception_factor", always=True)
def in_vehicle_perception_factor_valid(value, values):
    """Validate in_vehicle_perception_factor exists if assign_type is TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "TRANSIT":
        assert value is not None, "must be specified when assign_type==TRANSIT"
    return value

speed_miles_per_hour_valid(value, values)

Validate speed_miles_per_hour exists if assign_type is AUX_TRANSIT.

Source code in tm2py/config.py
@validator("speed_miles_per_hour", always=True)
def speed_miles_per_hour_valid(value, values):
    """Validate speed_miles_per_hour exists if assign_type is AUX_TRANSIT."""
    if "assign_type" in values and values["assign_type"] == "AUX_TRANSIT":
        assert value is not None, "must be specified when assign_type==AUX_TRANSIT"
    return value
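
A hedged sketch of two mode definitions consistent with the validators above: a TRANSIT mode must provide in_vehicle_perception_factor, while an AUX_TRANSIT mode must provide speed_miles_per_hour (all values and names below are illustrative only):

# Illustrative values only; the field requirements follow the validators above.
local_bus = dict(
    type="LOCAL",
    assign_type="TRANSIT",
    mode_id="b",
    name="local_bus",
    in_vehicle_perception_factor=1.0,  # required because assign_type == "TRANSIT"
)
walk_access = dict(
    type="ACCESS",
    assign_type="AUX_TRANSIT",
    mode_id="a",
    name="walk_acc",
    speed_miles_per_hour=3.0,  # required because assign_type == "AUX_TRANSIT"
)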

tm2py.config.TransitConfig

Bases: ConfigItem

Transit assignment parameters.

Source code in tm2py/config.py
@dataclass(frozen=True)
class TransitConfig(ConfigItem):
    """Transit assignment parameters."""

    modes: Tuple[TransitModeConfig, ...]
    vehicles: Tuple[TransitVehicleConfig, ...]

    apply_msa_demand: bool
    value_of_time: float
    effective_headway_source: str
    initial_wait_perception_factor: float
    transfer_wait_perception_factor: float
    walk_perception_factor: float
    initial_boarding_penalty: float
    transfer_boarding_penalty: float
    max_transfers: int
    output_skim_path: pathlib.Path
    fares_path: pathlib.Path
    fare_matrix_path: pathlib.Path
    fare_max_transfer_distance_miles: float
    use_fares: bool
    override_connector_times: bool
    input_connector_access_times_path: Optional[pathlib.Path] = Field(default=None)
    input_connector_egress_times_path: Optional[pathlib.Path] = Field(default=None)
    output_stop_usage_path: Optional[pathlib.Path] = Field(default=None)

Active Network Components

To come.

Emme Wrappers

tm2py.emme

Emme components module.

tm2py.config.EmmeConfig

Bases: ConfigItem

Emme-specific parameters.

Properties
Source code in tm2py/config.py
@dataclass(frozen=True)
class EmmeConfig(ConfigItem):
    """Emme-specific parameters.

    Properties:
        all_day_scenario_id: scenario ID to use for all day
            (initial imported) scenario with all time period data
        project_path: relative path from run_dir to Emme desktop project (.emp)
        highway_database_path: relative path to highway Emmebank
        active_database_paths: list of relative paths to active mode Emmebanks
        transit_database_path: relative path to transit Emmebank
        num_processors: the number of processors to use in Emme procedures,
            either an integer, or the value MAX or MAX-N. MAX-1 (on desktop
            systems) or MAX-2 (on servers with many logical processors) is
            typically recommended to leave capacity for background / other tasks.
    """

    all_day_scenario_id: int
    project_path: pathlib.Path
    highway_database_path: pathlib.Path
    active_database_paths: Tuple[pathlib.Path, ...]
    transit_database_path: pathlib.Path
    num_processors: str = Field(regex=r"^MAX$|^MAX-\d+$|^\d+$")
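
The num_processors value accepts a plain integer, MAX, or MAX-N. A minimal sketch of how such a value could be resolved to a processor count (this helper is illustrative and not part of tm2py):

import multiprocessing


def resolve_num_processors(value: str) -> int:
    """Resolve "MAX", "MAX-N", or an integer string to a processor count (illustrative)."""
    max_processors = multiprocessing.cpu_count()
    if value == "MAX":
        return max_processors
    if value.startswith("MAX-"):
        return max(1, max_processors - int(value[len("MAX-"):]))
    return int(value)


print(resolve_num_processors("MAX-1"))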

Errata

tm2py.logger

Logging module.

Note the general definition of logging levels as used in tm2py:

  • highly detailed level information which would rarely be of interest except for detailed debugging by a developer
  • diagnostic information which would generally be useful to a developer debugging the model code; this may also be useful to a model operator in some cases
  • more detail than would normally be of interest, but might be useful to a model operator debugging a model run / data or understanding model results
  • top-level, model is running type messages; there should be relatively few of these, generally one per component, or one per time period if the procedure is long
  • problem causing operation to halt which is normal (or not unexpected) in scope, e.g. file does not exist; includes general Python exceptions

LogCache

Bases: LogFormatter

Caches all messages for later recording in the on-error logfile.

Properties
  • file_path: the absolute file path to write to
Source code in tm2py/logger.py
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
class LogCache(LogFormatter):
    """Caches all messages for later recording in on error logfile.

    Properties:
        - file_path: the absolute file path to write to
    """

    def __init__(self, file_path: str):
        """Constructor for LogCache object.

        Args:
            file_path (str): the absolute file path to write to.
        """
        super().__init__(level=0)
        self.file_path = file_path
        self._msg_cache = []

    def open(self):
        """Initialize log file (remove)."""
        if os.path.exists(self.file_path):
            os.remove(self.file_path)

    def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
        """Format and store text for later recording.

        Args:
            text (str): text to log
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts
            timestamp (str): formatted datetime as a string or None
        """
        self._msg_cache.append(
            (level, self._format_text(text, level, indent, timestamp))
        )

    def write_cache(self):
        """Write all cached messages."""
        with open(self.file_path, "w", encoding="utf8") as file:
            for level, text in self._msg_cache:
                file.write(f"{LEVELS_INT_TO_STR[level]:6} {text}\n")
        self.clear()

    def clear(self):
        """Clear message cache."""
        self._msg_cache = []
__init__(file_path)

Constructor for LogCache object.

Parameters:

Name Type Description Default
file_path str

the absolute file path to write to.

required
Source code in tm2py/logger.py
518
519
520
521
522
523
524
525
526
def __init__(self, file_path: str):
    """Constructor for LogCache object.

    Args:
        file_path (str): the absolute file path to write to.
    """
    super().__init__(level=0)
    self.file_path = file_path
    self._msg_cache = []
clear()

Clear message cache.

Source code in tm2py/logger.py
553
554
555
def clear(self):
    """Clear message cache."""
    self._msg_cache = []
log(text, level, indent, timestamp)

Format and store text for later recording.

Parameters:

Name Type Description Default
text str

text to log

required
level int

logging level

required
indent bool

if true indent text based on the number of open contexts

required
timestamp str

formatted datetime as a string or None

required
Source code in tm2py/logger.py
533
534
535
536
537
538
539
540
541
542
543
544
def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
    """Format and store text for later recording.

    Args:
        text (str): text to log
        level (int): logging level
        indent (bool): if true indent text based on the number of open contexts
        timestamp (str): formatted datetime as a string or None
    """
    self._msg_cache.append(
        (level, self._format_text(text, level, indent, timestamp))
    )
open()

Initialize log file (remove).

Source code in tm2py/logger.py
528
529
530
531
def open(self):
    """Initialize log file (remove)."""
    if os.path.exists(self.file_path):
        os.remove(self.file_path)
write_cache()

Write all cached messages.

Source code in tm2py/logger.py
546
547
548
549
550
551
def write_cache(self):
    """Write all cached messages."""
    with open(self.file_path, "w", encoding="utf8") as file:
        for level, text in self._msg_cache:
            file.write(f"{LEVELS_INT_TO_STR[level]:6} {text}\n")
    self.clear()
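
Below is a minimal sketch of driving LogCache directly; in practice the Logger constructs and drives it, and the LEVELS_STR_TO_INT import is assumed to be available from tm2py.logger as referenced in the source above.

# Minimal sketch of using LogCache on its own (the Logger normally wires this up).
from tm2py.logger import LEVELS_STR_TO_INT, LogCache

cache = LogCache("log_on_error.txt")
cache.open()  # removes any stale on-error file
cache.log("starting step", LEVELS_STR_TO_INT["INFO"], indent=False, timestamp=None)
cache.log("something went wrong", LEVELS_STR_TO_INT["ERROR"], indent=False, timestamp=None)
cache.write_cache()  # writes every cached message to the file, then clears the cache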

LogDisplay

Bases: LogFormatter

Format and print log text to console / Notebook.

Properties
  • level: the log level as an int
Source code in tm2py/logger.py
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
class LogDisplay(LogFormatter):
    """Format and print log text to console / Notebook.

    Properties:
        - level: the log level as an int
    """

    def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
        """Format and display text on screen (print).

        Args:
            text (str): text to log
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts
            timestamp (str): formatted datetime as a string or None
        """
        if level >= self.level:
            print(self._format_text(text, level, indent, timestamp))
log(text, level, indent, timestamp)

Format and display text on screen (print).

Parameters:

Name Type Description Default
text str

text to log

required
level int

logging level

required
indent bool

if true indent text based on the number of open contexts

required
timestamp str

formatted datetime as a string or None

required
Source code in tm2py/logger.py
498
499
500
501
502
503
504
505
506
507
508
def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
    """Format and display text on screen (print).

    Args:
        text (str): text to log
        level (int): logging level
        indent (bool): if true indent text based on the number of open contexts
        timestamp (str): formatted datetime as a string or None
    """
    if level >= self.level:
        print(self._format_text(text, level, indent, timestamp))

LogFile

Bases: LogFormatter

Format and write log text to file.

Properties
  • level: the log level as an int
  • file_path: the absolute file path to write to
Source code in tm2py/logger.py
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
class LogFile(LogFormatter):
    """Format and write log text to file.

    Properties:
        - level: the log level as an int
        - file_path: the absolute file path to write to
    """

    def __init__(self, level: int, file_path: str):
        """Constructor for LogFile object.

        Args:
            level (int): the log level as an int.
            file_path (str): the absolute file path to write to.
        """
        super().__init__(level)
        self.file_path = file_path
        self.log_file = None

    def open(self):
        """Open the log file for writing."""
        self.log_file = open(self.file_path, "w", encoding="utf8")

    def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
        """Log text to file and display depending upon log level and config.

        Note that log will not write to file until opened with a context.

        Args:
            text (str): text to log
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts
            timestamp (str): formatted datetime as a string or None for timestamp
        """
        if level >= self.level and self.log_file is not None:
            text = self._format_text(text, level, indent, timestamp)
            self.log_file.write(f"{text}\n")
            self.log_file.flush()

    def close(self):
        """Close the open log file."""
        self.log_file.close()
        self.log_file = None
__init__(level, file_path)

Constructor for LogFile object.

Parameters:

Name Type Description Default
level int

the log level as an int.

required
file_path str

the absolute file path to write to.

required
Source code in tm2py/logger.py
423
424
425
426
427
428
429
430
431
432
def __init__(self, level: int, file_path: str):
    """Constructor for LogFile object.

    Args:
        level (int): the log level as an int.
        file_path (str): the absolute file path to write to.
    """
    super().__init__(level)
    self.file_path = file_path
    self.log_file = None
close()

Close the open log file.

Source code in tm2py/logger.py
454
455
456
457
def close(self):
    """Close the open log file."""
    self.log_file.close()
    self.log_file = None
log(text, level, indent, timestamp)

Log text to file and display depending upon log level and config.

Note that log will not write to file until opened with a context.

Parameters:

Name Type Description Default
text str

text to log

required
level int

logging level

required
indent bool

if true indent text based on the number of open contexts

required
timestamp str

formatted datetime as a string or None for timestamp

required
Source code in tm2py/logger.py
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
def log(self, text: str, level: int, indent: bool, timestamp: Union[str, None]):
    """Log text to file and display depending upon log level and config.

    Note that log will not write to file until opened with a context.

    Args:
        text (str): text to log
        level (int): logging level
        indent (bool): if true indent text based on the number of open contexts
        timestamp (str): formatted datetime as a string or None for timestamp
    """
    if level >= self.level and self.log_file is not None:
        text = self._format_text(text, level, indent, timestamp)
        self.log_file.write(f"{text}\n")
        self.log_file.flush()
open()

Open the log file for writing.

Source code in tm2py/logger.py
434
435
436
def open(self):
    """Open the log file for writing."""
    self.log_file = open(self.file_path, "w", encoding="utf8")

LogFileLevelOverride

Bases: LogFile

Format and write log text to file.

Properties
  • level: the log level as an int
  • file_path: the absolute file path to write to
  • iter_component_level: TODO
  • controller: TODO
Source code in tm2py/logger.py
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
class LogFileLevelOverride(LogFile):
    """Format and write log text to file.

    Properties:
        - level: the log level as an int
        - file_path: the absolute file path to write to
        - iter_component_level: TODO
        - controller: TODO
    """

    def __init__(self, level, file_path, iter_component_level, controller):
        """Constructor for LogFileLevelOverride object.

        Args:
            level (_type_): TODO
            file_path (_type_): TODO
            iter_component_level (_type_): TODO
            controller (_type_): TODO
        """
        super().__init__(level, file_path)
        self.iter_component_level = iter_component_level
        self.controller = controller

    @property
    def level(self):
        """Current log level with iter_component_level config override."""
        return self.iter_component_level.get(
            self.controller.iter_component, self._level
        )
__init__(level, file_path, iter_component_level, controller)

Constructor for LogFileLevelOverride object.

Parameters:

Name Type Description Default
level _type_

TODO

required
file_path _type_

TODO

required
iter_component_level _type_

TODO

required
controller _type_

TODO

required
Source code in tm2py/logger.py
470
471
472
473
474
475
476
477
478
479
480
481
def __init__(self, level, file_path, iter_component_level, controller):
    """Constructor for LogFileLevelOverride object.

    Args:
        level (_type_): TODO
        file_path (_type_): TODO
        iter_component_level (_type_): TODO
        controller (_type_): TODO
    """
    super().__init__(level, file_path)
    self.iter_component_level = iter_component_level
    self.controller = controller
level() property

Current log level with iter_component_level config override.

Source code in tm2py/logger.py
483
484
485
486
487
488
@property
def level(self):
    """Current log level with iter_component_level config override."""
    return self.iter_component_level.get(
        self.controller.iter_component, self._level
    )

LogFormatter

Base class for recording text to log.

Properties
Source code in tm2py/logger.py
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
class LogFormatter:
    """Base class for recording text to log.

    Properties:
        indent: current indentation level for the LogFormatter
        level: log filter level (as an int)
    """

    def __init__(self, level: int):
        """Constructor for LogFormatter.

        Args:
            level (int): log filter level (as an int)
        """
        self._level = level
        self.indent = 0

    @property
    def level(self):
        """The current filter level for the LogFormatter."""
        return self._level

    def increase_indent(self, level: int):
        """Increase current indent if the log level is filtered in."""
        if level >= self.level:
            self.indent += 1

    def decrease_indent(self, level: int):
        """Decrease current indent if the log level is filtered in."""
        if level >= self.level:
            self.indent -= 1

    @abstractmethod
    def log(
        self,
        text: str,
        level: int,
        indent: bool,
        timestamp: Union[str, None],
    ):
        """Format and log message text.

        Args:
            text (str): text to log
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts
            timestamp (str): formatted datetime as a string or None
        """

    def _format_text(
        self,
        text: str,
        level: int,
        indent: bool,
        timestamp: Union[str, None],
    ):
        """Format text for logging.

        Args:
            text (str): text to format
            level (int): logging level
            indent (bool): if true indent text based on the number of open contexts and
                timestamp width
            timestamp (str): formatted datetime as a string or None for timestamp
        """
        if timestamp is None:
            timestamp = "                        " if indent else ""
        if indent:
            num_indents = self.indent
            indent = "  " * max(num_indents, 0)
        else:
            indent = ""
        level_str = "{0:>6}".format(LEVELS_INT_TO_STR[level])
        return f"{timestamp}{level_str}: {indent}{text}"
__init__(level)

Constructor for LogFormatter.

Parameters:

Name Type Description Default
level int

log filter level (as an int)

required
Source code in tm2py/logger.py
347
348
349
350
351
352
353
354
def __init__(self, level: int):
    """Constructor for LogFormatter.

    Args:
        level (int): log filter level (as an int)
    """
    self._level = level
    self.indent = 0
decrease_indent(level)

Decrease current indent if the log level is filtered in.

Source code in tm2py/logger.py
366
367
368
369
def decrease_indent(self, level: int):
    """Decrease current indent if the log level is filtered in."""
    if level >= self.level:
        self.indent -= 1
increase_indent(level)

Increase current indent if the log level is filtered in.

Source code in tm2py/logger.py
361
362
363
364
def increase_indent(self, level: int):
    """Increase current indent if the log level is filtered in."""
    if level >= self.level:
        self.indent += 1
level() property

The current filter level for the LogFormatter.

Source code in tm2py/logger.py
356
357
358
359
@property
def level(self):
    """The current filter level for the LogFormatter."""
    return self._level
log(text, level, indent, timestamp) abstractmethod

Format and log message text.

Parameters:

Name Type Description Default
text str

text to log

required
level int

logging level

required
indent bool

if true indent text based on the number of open contexts

required
timestamp str

formatted datetime as a string or None

required
Source code in tm2py/logger.py
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
@abstractmethod
def log(
    self,
    text: str,
    level: int,
    indent: bool,
    timestamp: Union[str, None],
):
    """Format and log message text.

    Args:
        text (str): text to log
        level (int): logging level
        indent (bool): if true indent text based on the number of open contexts
        timestamp (str): formatted datetime as a string or None
    """

LogStartEnd

Log the start and end time with optional message.

Used as a Component method decorator. If msg is not provided a default message is generated with the object class and method name.

Example::

    @LogStartEnd("Highway assignment and skims", level="STATUS")
    def run(self):
        pass

Properties

  • text (str): message text to use in the start and end record.
  • level (str): logging level as a string.

Source code in tm2py/logger.py
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
class LogStartEnd:
    """Log the start and end time with optional message.

    Used as a Component method decorator. If msg is not provided a default
    message is generated with the object class and method name.

    Example::
        @LogStartEnd("Highway assignment and skims", level="STATUS")
        def run(self):
            pass

    Properties:
        text (str): message text to use in the start and end record.
        level (str): logging level as a string.
    """

    def __init__(self, text: str = None, level: str = "INFO"):
        """Constructor for LogStartEnd object.

        Args:
            text (str, optional): message text to use in the start and end record.
                Defaults to None.
            level (str, optional): logging level as a string. Defaults to "INFO".
        """
        self.text = text
        self.level = level

    def __call__(self, func):
        """Ability to call logger.

        Args:
            func (_type_): _description_

        Returns:
            _type_: _description_
        """

        @functools.wraps(func)
        def wrapper(obj, *args, **kwargs):
            text = self.text or obj.__class__.__name__ + " " + func.__name__
            with obj.logger.log_start_end(text, self.level):
                value = func(obj, *args, **kwargs)
            return value

        return wrapper
__call__(func)

Ability to call logger.

Parameters:

Name Type Description Default
func _type_

description

required

Returns:

Name Type Description
_type_

description

Source code in tm2py/logger.py
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
def __call__(self, func):
    """Ability to call logger.

    Args:
        func (_type_): _description_

    Returns:
        _type_: _description_
    """

    @functools.wraps(func)
    def wrapper(obj, *args, **kwargs):
        text = self.text or obj.__class__.__name__ + " " + func.__name__
        with obj.logger.log_start_end(text, self.level):
            value = func(obj, *args, **kwargs)
        return value

    return wrapper
__init__(text=None, level='INFO')

Constructor for LogStartEnd object.

Parameters:

Name Type Description Default
text str

message text to use in the start and end record. Defaults to None.

None
level str

logging level as a string. Defaults to “INFO”.

'INFO'
Source code in tm2py/logger.py
577
578
579
580
581
582
583
584
585
586
def __init__(self, text: str = None, level: str = "INFO"):
    """Constructor for LogStartEnd object.

    Args:
        text (str, optional): message text to use in the start and end record.
            Defaults to None.
        level (str, optional): logging level as a string. Defaults to "INFO".
    """
    self.text = text
    self.level = level

Logger

Logging of message text for display, text file, and the Emme logbook, as well as notifications to Slack.

The log message level can be one of TRACE, DEBUG, DETAIL, INFO, STATUS, WARN, ERROR, or FATAL, which filters all messages of that severity and higher. See the module note on the use of descriptive level names.

logger.log("a message")
with logger.log_start_end("Running a set of steps"):
    logger.log("Message with timestamp")
    logger.log("A debug message", level="DEBUG")
    # equivalently, use the .debug:
    logger.debug("Another debug message")
    if logger.debug_enabled:
        # only generate this report if logging DEBUG
        logger.log("A debug report that takes time to produce", level="DEBUG")
    logger.notify_slack("A slack message")

Methods can also be decorated with LogStartEnd (see class for more).

Note that the Logger should only be initialized once per model run. In places where the controller is not available, the last Logger initialized can be obtained from the class method get_logger::

logger = Logger.get_logger()
Internal properties
Source code in tm2py/logger.py
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
class Logger:
    """Logging of message text for display, text file, and Emme logbook, as well as notify to slack.

    The log message levels can be one of:
    TRACE, DEBUG, DETAIL, INFO, STATUS, WARN, ERROR, FATAL
    Which will filter all messages of that severity and higher.
    See module note on use of descriptive level names.

    logger.log("a message")
    with logger.log_start_end("Running a set of steps"):
        logger.log("Message with timestamp")
        logger.log("A debug message", level="DEBUG")
        # equivalently, use the .debug:
        logger.debug("Another debug message")
        if logger.debug_enabled:
            # only generate this report if logging DEBUG
            logger.log("A debug report that takes time to produce", level="DEBUG")
        logger.notify_slack("A slack message")

    Methods can also be decorated with LogStartEnd (see class for more).

    Note that the Logger should only be initialized once per model run.
    In places where the controller is not available, the last Logger
    initialized can be obtained from the class method get_logger::

        logger = Logger.get_logger()

    Internal properties:
        _log_cache: the LogCache object
        _log_formatters: list of objects that format text and record, either
            to file, display (print to screen) or cache for log on error
        _use_emme_logbook: whether Emme logbook is enabled
        _slack_notifier: SlackNotifier object for sending messages to slack
    """

    # used to cache last initialized Logger
    _instance = None

    def __new__(cls, controller: RunController):
        """Logger __new__ method override. TODO.

        Args:
            controller (RunController): TODO.
        """
        # pylint: disable=unused-argument
        cls._instance = super(Logger, cls).__new__(cls)
        return cls._instance

    def __init__(self, controller: RunController):
        """Constructor for Logger object.

        Args:
            controller (RunController): Associated RunController instance.
        """
        self.controller = controller
        log_config = controller.config.logging
        iter_component_level = log_config.iter_component_level or []
        iter_component_level = dict(
            ((i, c), LEVELS_STR_TO_INT[l]) for i, c, l in iter_component_level
        )
        display_logger = LogDisplay(LEVELS_STR_TO_INT[log_config.display_level])
        run_log_formatter = LogFile(
            LEVELS_STR_TO_INT[log_config.run_file_level],
            os.path.join(controller.run_dir, log_config.run_file_path),
        )
        standard_log_formatter = LogFileLevelOverride(
            LEVELS_STR_TO_INT[log_config.log_file_level],
            os.path.join(controller.run_dir, log_config.log_file_path),
            iter_component_level,
            controller,
        )
        self._log_cache = LogCache(
            os.path.join(controller.run_dir, log_config.log_on_error_file_path)
        )
        self._log_formatters = [
            display_logger,
            run_log_formatter,
            standard_log_formatter,
            self._log_cache,
        ]

        self._use_emme_logbook = self.controller.config.logging.use_emme_logbook

        self._slack_notifier = SlackNotifier(self)

        # open log formatters
        for log_formatter in self._log_formatters:
            if hasattr(log_formatter, "open"):
                log_formatter.open()

    def __del__(self):
        """
        Destructor for logger object
        """
        for log_formatter in self._log_formatters:
            if hasattr(log_formatter, "close"):
                log_formatter.close()

    @classmethod
    def get_logger(cls):
        """Return the last initialized logger object."""
        return cls._instance

    def notify_slack(self, text: str):
        """Send message to slack if enabled by config.

        Args:
            text (str): text to send to slack
        """
        if self.controller.config.logging.notify_slack:
            self._slack_notifier.post_message(text)

    def log(self, text: str, level: LogLevel = "INFO", indent: bool = True):
        """Log text to file and display depending upon log level and config.

        Args:
            text (str): text to log
            level (str): logging level
            indent (bool): if true indent text based on the number of open contexts
        """
        timestamp = datetime.now().strftime("%d-%b-%Y (%H:%M:%S) ")
        for log_formatter in self._log_formatters:
            log_formatter.log(text, LEVELS_STR_TO_INT[level], indent, timestamp)
        if self._use_emme_logbook and self.controller.has_emme:
            self.controller.emme_manager.logbook_write(text)

    def trace(self, text: str, indent: bool = False):
        """Log text with level=TRACE.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "TRACE", indent)

    def debug(self, text: str, indent: bool = False):
        """Log text with level=DEBUG.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "DEBUG", indent)

    def detail(self, text: str, indent: bool = False):
        """Log text with level=DETAIL.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "DETAIL", indent)

    def info(self, text: str, indent: bool = False):
        """Log text with level=INFO.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "INFO", indent)

    def status(self, text: str, indent: bool = False):
        """Log text with level=STATUS.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "STATUS", indent)

    def warn(self, text: str, indent: bool = False):
        """Log text with level=WARN.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "WARN", indent)

    def error(self, text: str, indent: bool = False):
        """Log text with level=ERROR.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "ERROR", indent)

    def fatal(self, text: str, indent: bool = False):
        """Log text with level=FATAL.

        Args:
            text (str): text to log
            indent (bool): if true indent text based on the number of open contexts
        """
        self.log(text, "FATAL", indent)

    def _log_start(self, text: str, level: LogLevel = "INFO"):
        """Log message with timestamp and 'Start'.

        Args:
            text (str): message text
            level (str): logging level
        """
        self.log(f"Start {text}", level, indent=True)
        for log_formatter in self._log_formatters:
            log_formatter.increase_indent(LEVELS_STR_TO_INT[level])

    def _log_end(self, text: str, level: LogLevel = "INFO"):
        """Log message with timestamp and 'End'.

        Args:
            text (str): message text
            level (str): logging level
        """
        for log_formatter in self._log_formatters:
            log_formatter.decrease_indent(LEVELS_STR_TO_INT[level])
        self.log(f"End {text}", level, indent=True)

    @_context
    def log_start_end(self, text: str, level: LogLevel = "STATUS"):
        """Use with 'with' statement to log the start and end time with message.

        If using the Emme logbook (config.logging.use_emme_logbook is True), will
        also create a logbook nest in the tree view using logbook_trace.

        Args:
            text (str): message text
            level (str): logging level
        """
        with self._skip_emme_logging():
            self._log_start(text, level)
        if self._use_emme_logbook:
            with self.controller.emme_manager.logbook_trace(text):
                yield
        else:
            yield
        with self._skip_emme_logging():
            self._log_end(text, level)

    def log_dict(self, mapping: dict, level: LogLevel = "DEBUG"):
        """Format dictionary to string and log as text."""
        self.log(pformat(mapping, indent=1, width=120), level)

    @_context
    def _skip_emme_logging(self):
        """Temporary disable Emme logging (if enabled) and restore on exit.

        Intended use is with the log_start_end context and LogStartEnd decorator
        to allow use of the Emme context without double logging of the
        messages in the Emme logbook.
        """
        self._use_emme_logbook, use_emme = False, self._use_emme_logbook
        yield
        self._use_emme_logbook = use_emme

    def clear_msg_cache(self):
        """Clear all log messages from cache."""
        self._log_cache.clear()

    @property
    def debug_enabled(self) -> bool:
        """Returns True if DEBUG is currently filtered for display or print to file.

        Can be used to enable / disable debug logging which may have a performance
        impact.
        """
        debug = LEVELS_STR_TO_INT["DEBUG"]
        for log_formatter in self._log_formatters:
            if log_formatter is not self._log_cache and log_formatter.level <= debug:
                return True
        return False

    @property
    def trace_enabled(self) -> bool:
        """Returns True if TRACE is currently filtered for display or print to file.

        Can be used to enable / disable trace logging which may have a performance
        impact.
        """
        trace = LEVELS_STR_TO_INT["TRACE"]
        for log_formatter in self._log_formatters:
            if log_formatter is not self._log_cache and log_formatter.level <= trace:
                return True
        return False
__del__()

Destructor for the Logger object.

Source code in tm2py/logger.py
141
142
143
144
145
146
147
def __del__(self):
    """
    Destructor for logger object
    """
    for log_formatter in self._log_formatters:
        if hasattr(log_formatter, "close"):
            log_formatter.close()
__init__(controller)

Constructor for Logger object.

Parameters:

Name Type Description Default
controller RunController

Associated RunController instance.

required
Source code in tm2py/logger.py
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
def __init__(self, controller: RunController):
    """Constructor for Logger object.

    Args:
        controller (RunController): Associated RunController instance.
    """
    self.controller = controller
    log_config = controller.config.logging
    iter_component_level = log_config.iter_component_level or []
    iter_component_level = dict(
        ((i, c), LEVELS_STR_TO_INT[l]) for i, c, l in iter_component_level
    )
    display_logger = LogDisplay(LEVELS_STR_TO_INT[log_config.display_level])
    run_log_formatter = LogFile(
        LEVELS_STR_TO_INT[log_config.run_file_level],
        os.path.join(controller.run_dir, log_config.run_file_path),
    )
    standard_log_formatter = LogFileLevelOverride(
        LEVELS_STR_TO_INT[log_config.log_file_level],
        os.path.join(controller.run_dir, log_config.log_file_path),
        iter_component_level,
        controller,
    )
    self._log_cache = LogCache(
        os.path.join(controller.run_dir, log_config.log_on_error_file_path)
    )
    self._log_formatters = [
        display_logger,
        run_log_formatter,
        standard_log_formatter,
        self._log_cache,
    ]

    self._use_emme_logbook = self.controller.config.logging.use_emme_logbook

    self._slack_notifier = SlackNotifier(self)

    # open log formatters
    for log_formatter in self._log_formatters:
        if hasattr(log_formatter, "open"):
            log_formatter.open()
__new__(controller)

Logger __new__ method override. TODO.

Parameters:

Name Type Description Default
controller RunController

TODO.

required
Source code in tm2py/logger.py
89
90
91
92
93
94
95
96
97
def __new__(cls, controller: RunController):
    """Logger __new__ method override. TODO.

    Args:
        controller (RunController): TODO.
    """
    # pylint: disable=unused-argument
    cls._instance = super(Logger, cls).__new__(cls)
    return cls._instance
clear_msg_cache()

Clear all log messages from cache.

Source code in tm2py/logger.py
308
309
310
def clear_msg_cache(self):
    """Clear all log messages from cache."""
    self._log_cache.clear()
debug(text, indent=False)

Log text with level=DEBUG.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py/logger.py
186
187
188
189
190
191
192
193
def debug(self, text: str, indent: bool = False):
    """Log text with level=DEBUG.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "DEBUG", indent)
debug_enabled() property

Returns True if DEBUG is currently filtered for display or print to file.

Can be used to enable / disable debug logging which may have a performance impact.

Source code in tm2py/logger.py
312
313
314
315
316
317
318
319
320
321
322
323
@property
def debug_enabled(self) -> bool:
    """Returns True if DEBUG is currently filtered for display or print to file.

    Can be used to enable / disable debug logging which may have a performance
    impact.
    """
    debug = LEVELS_STR_TO_INT["DEBUG"]
    for log_formatter in self._log_formatters:
        if log_formatter is not self._log_cache and log_formatter.level <= debug:
            return True
    return False
detail(text, indent=False)

Log text with level=DETAIL.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py/logger.py
195
196
197
198
199
200
201
202
def detail(self, text: str, indent: bool = False):
    """Log text with level=DETAIL.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "DETAIL", indent)
error(text, indent=False)

Log text with level=ERROR.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py/logger.py
231
232
233
234
235
236
237
238
def error(self, text: str, indent: bool = False):
    """Log text with level=ERROR.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "ERROR", indent)
fatal(text, indent=False)

Log text with level=FATAL.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py/logger.py
240
241
242
243
244
245
246
247
def fatal(self, text: str, indent: bool = False):
    """Log text with level=FATAL.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "FATAL", indent)
get_logger() classmethod

Return the last initialized logger object.

Source code in tm2py/logger.py
149
150
151
152
@classmethod
def get_logger(cls):
    """Return the last initialized logger object."""
    return cls._instance
info(text, indent=False)

Log text with level=INFO.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py/logger.py
204
205
206
207
208
209
210
211
def info(self, text: str, indent: bool = False):
    """Log text with level=INFO.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "INFO", indent)
log(text, level='INFO', indent=True)

Log text to file and display depending upon log level and config.

Parameters:

Name Type Description Default
text str

text to log

required
level str

logging level

'INFO'
indent bool

if true indent text based on the number of open contexts

True
Source code in tm2py/logger.py
163
164
165
166
167
168
169
170
171
172
173
174
175
def log(self, text: str, level: LogLevel = "INFO", indent: bool = True):
    """Log text to file and display depending upon log level and config.

    Args:
        text (str): text to log
        level (str): logging level
        indent (bool): if true indent text based on the number of open contexts
    """
    timestamp = datetime.now().strftime("%d-%b-%Y (%H:%M:%S) ")
    for log_formatter in self._log_formatters:
        log_formatter.log(text, LEVELS_STR_TO_INT[level], indent, timestamp)
    if self._use_emme_logbook and self.controller.has_emme:
        self.controller.emme_manager.logbook_write(text)
log_dict(mapping, level='DEBUG')

Format dictionary to string and log as text.

Source code in tm2py/logger.py
292
293
294
def log_dict(self, mapping: dict, level: LogLevel = "DEBUG"):
    """Format dictionary to string and log as text."""
    self.log(pformat(mapping, indent=1, width=120), level)
log_start_end(text, level='STATUS')

Use with ‘with’ statement to log the start and end time with message.

If using the Emme logbook (config.logging.use_emme_logbook is True), will also create a logbook nest in the tree view using logbook_trace.

Parameters:

Name Type Description Default
text str

message text

required
level str

logging level

'STATUS'
Source code in tm2py/logger.py
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
@_context
def log_start_end(self, text: str, level: LogLevel = "STATUS"):
    """Use with 'with' statement to log the start and end time with message.

    If using the Emme logbook (config.logging.use_emme_logbook is True), will
    also create a logbook nest in the tree view using logbook_trace.

    Args:
        text (str): message text
        level (str): logging level
    """
    with self._skip_emme_logging():
        self._log_start(text, level)
    if self._use_emme_logbook:
        with self.controller.emme_manager.logbook_trace(text):
            yield
    else:
        yield
    with self._skip_emme_logging():
        self._log_end(text, level)
notify_slack(text)

Send message to slack if enabled by config.

Parameters:

Name Type Description Default
text str

text to send to slack

required
Source code in tm2py/logger.py
154
155
156
157
158
159
160
161
def notify_slack(self, text: str):
    """Send message to slack if enabled by config.

    Args:
        text (str): text to send to slack
    """
    if self.controller.config.logging.notify_slack:
        self._slack_notifier.post_message(text)
status(text, indent=False)

Log text with level=STATUS.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py/logger.py
213
214
215
216
217
218
219
220
def status(self, text: str, indent: bool = False):
    """Log text with level=STATUS.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "STATUS", indent)
trace(text, indent=False)

Log text with level=TRACE.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py/logger.py
177
178
179
180
181
182
183
184
def trace(self, text: str, indent: bool = False):
    """Log text with level=TRACE.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "TRACE", indent)
trace_enabled() property

Returns True if TRACE is currently filtered for display or print to file.

Can be used to enable / disable trace logging which may have a performance impact.

Source code in tm2py/logger.py
325
326
327
328
329
330
331
332
333
334
335
336
@property
def trace_enabled(self) -> bool:
    """Returns True if TRACE is currently filtered for display or print to file.

    Can be used to enable / disable trace logging which may have a performance
    impact.
    """
    trace = LEVELS_STR_TO_INT["TRACE"]
    for log_formatter in self._log_formatters:
        if log_formatter is not self._log_cache and log_formatter.level <= trace:
            return True
    return False
warn(text, indent=False)

Log text with level=WARN.

Parameters:

Name Type Description Default
text str

text to log

required
indent bool

if true indent text based on the number of open contexts

False
Source code in tm2py/logger.py
222
223
224
225
226
227
228
229
def warn(self, text: str, indent: bool = False):
    """Log text with level=WARN.

    Args:
        text (str): text to log
        indent (bool): if true indent text based on the number of open contexts
    """
    self.log(text, "WARN", indent)

SlackNotifier

Notify slack of model run status.

The Slack channel can be input directly, or is configured via a text file found at "M:\Software\Slack\TravelModel_SlackWebhook.txt" (if on an MTC server) or "C:\Software\Slack\TravelModel_SlackWebhook.txt" (if local).

Properties
  • logger (Logger): object for logging of trace messages
  • slack_webhook_url (str): optional, url to use for sending the message to slack
Source code in tm2py/logger.py
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
class SlackNotifier:
    r"""Notify slack of model run status.

    The slack channel can be input directly, or is configured via text file found at
    "M:\Software\Slack\TravelModel_SlackWebhook.txt" (if on MTC server)
    rr"C:\Software\Slack\TravelModel_SlackWebhook.txt" (if local)

    Properties:
        - logger (Logger): object for logging of trace messages
        - slack_webhook_url (str): optional, url to use for sending the message to slack
    """

    def __init__(self, logger: Logger, slack_webhook_url: str = None):
        r"""Constructor for SlackNotifier object.

        Args:
            logger (Logger): logger instance.
            slack_webhook_url (str, optional): . Defaults to None, which is replaced by either:
                - r"M:\Software\Slack\TravelModel_SlackWebhook.txt" (if on MTC server)
                - r"C:\Software\Slack\TravelModel_SlackWebhook.txt" (otherwise)
        """
        self.logger = logger
        if not logger.controller.config.logging.notify_slack:
            self._slack_webhook_url = None
            return
        if slack_webhook_url is None:
            hostname = socket.getfqdn()
            if hostname.endswith(".mtc.ca.gov"):
                slack_webhook_url_file = (
                    r"M:\Software\Slack\TravelModel_SlackWebhook.txt"
                )
                self.logger.log(
                    f"SlackNotifier running on mtc host; using {slack_webhook_url_file}",
                    level="TRACE",
                )
            else:
                slack_webhook_url_file = (
                    r"C:\Software\Slack\TravelModel_SlackWebhook.txt"
                )
                self.logger.log(
                    f"SlackNotifier running on non-mtc host; using {slack_webhook_url_file}",
                    level="TRACE",
                )
            if os.path.isfile(slack_webhook_url_file):
                with open(slack_webhook_url_file, "r", encoding="utf8") as url_file:
                    self._slack_webhook_url = url_file.read()
            else:
                self._slack_webhook_url = None
        else:
            self._slack_webhook_url = slack_webhook_url
        self.logger.log(
            f"SlackNotifier using slack webhook url {self._slack_webhook_url}",
            level="TRACE",
        )

    def post_message(self, text):
        """Posts text to the slack channel via the webhook if slack_webhook_url is found.

        Args:
           text: text message to send to slack
        """
        if self._slack_webhook_url is None:
            return
        headers = {"Content-type": "application/json"}
        data = {"text": text}
        self.logger.log(f"Sending message to slack: {text}", level="TRACE")
        response = requests.post(self._slack_webhook_url, headers=headers, json=data)
        self.logger.log(f"Receiving response: {response}", level="TRACE")
__init__(logger, slack_webhook_url=None)

Constructor for SlackNotifier object.

Parameters:

Name Type Description Default
logger Logger

logger instance.

required
slack_webhook_url str

Optional webhook URL. Defaults to None, which is replaced by either r"M:\Software\Slack\TravelModel_SlackWebhook.txt" (if on MTC server) or r"C:\Software\Slack\TravelModel_SlackWebhook.txt" (otherwise).

None
Source code in tm2py/logger.py
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
def __init__(self, logger: Logger, slack_webhook_url: str = None):
    r"""Constructor for SlackNotifier object.

    Args:
        logger (Logger): logger instance.
        slack_webhook_url (str, optional): . Defaults to None, which is replaced by either:
            - r"M:\Software\Slack\TravelModel_SlackWebhook.txt" (if on MTC server)
            - r"C:\Software\Slack\TravelModel_SlackWebhook.txt" (otherwise)
    """
    self.logger = logger
    if not logger.controller.config.logging.notify_slack:
        self._slack_webhook_url = None
        return
    if slack_webhook_url is None:
        hostname = socket.getfqdn()
        if hostname.endswith(".mtc.ca.gov"):
            slack_webhook_url_file = (
                r"M:\Software\Slack\TravelModel_SlackWebhook.txt"
            )
            self.logger.log(
                f"SlackNotifier running on mtc host; using {slack_webhook_url_file}",
                level="TRACE",
            )
        else:
            slack_webhook_url_file = (
                r"C:\Software\Slack\TravelModel_SlackWebhook.txt"
            )
            self.logger.log(
                f"SlackNotifier running on non-mtc host; using {slack_webhook_url_file}",
                level="TRACE",
            )
        if os.path.isfile(slack_webhook_url_file):
            with open(slack_webhook_url_file, "r", encoding="utf8") as url_file:
                self._slack_webhook_url = url_file.read()
        else:
            self._slack_webhook_url = None
    else:
        self._slack_webhook_url = slack_webhook_url
    self.logger.log(
        f"SlackNotifier using slack webhook url {self._slack_webhook_url}",
        level="TRACE",
    )
post_message(text)

Posts text to the slack channel via the webhook if slack_webhook_url is found.

Parameters:

Name Type Description Default
text

text message to send to slack

required
Source code in tm2py/logger.py
663
664
665
666
667
668
669
670
671
672
673
674
675
def post_message(self, text):
    """Posts text to the slack channel via the webhook if slack_webhook_url is found.

    Args:
       text: text message to send to slack
    """
    if self._slack_webhook_url is None:
        return
    headers = {"Content-type": "application/json"}
    data = {"text": text}
    self.logger.log(f"Sending message to slack: {text}", level="TRACE")
    response = requests.post(self._slack_webhook_url, headers=headers, json=data)
    self.logger.log(f"Receiving response: {response}", level="TRACE")

tm2py.tools

Tools module for common resources / shared code and “utilities” in the tm2py package.

download_unzip(url, out_base_dir, target_dir, zip_filename='test_data.zip')

Downloads and unzips a file from a URL. The zip file is removed after extraction.

Parameters:

Name Type Description Default
url str

Full URL to download from.

required
out_base_dir str

Where to unzip the file.

required
target_dir str

What to unzip the file as.

required
zip_filename str

Filename to store zip file as. Defaults to “test_data.zip”.

'test_data.zip'
Source code in tm2py/tools.py
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
def download_unzip(
    url: str, out_base_dir: str, target_dir: str, zip_filename: str = "test_data.zip"
) -> None:
    """Download and unzips a file from a URL. The zip file is removed after extraction.

    Args:
        url (str): Full URL do download from.
        out_base_dir (str): Where to unzip the file.
        target_dir (str): What to unzip the file as.
        zip_filename (str, optional): Filename to store zip file as. Defaults to "test_data.zip".
    """
    target_zip = os.path.join(out_base_dir, zip_filename)
    if not os.path.isdir(out_base_dir):
        os.makedirs(out_base_dir)
    urllib.request.Request(url)
    _download(url, target_zip)
    _unzip(target_zip, target_dir)
    os.remove(target_zip)
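
A small usage sketch follows; the URL and directory names are placeholders, not real tm2py resources.

from tm2py.tools import download_unzip

download_unzip(
    "https://example.com/tm2py_test_data.zip",  # placeholder URL
    out_base_dir="downloads",                   # the zip is saved (and later removed) here
    target_dir="downloads/test_data",           # contents are extracted here
)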

emme_context()

Return True if Emme is installed.

Source code in tm2py/tools.py
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
def emme_context():
    """Return True if Emme is installed."""
    import pkg_resources

    _inro_package = "inro-emme"
    _avail_packages = [pkg.key for pkg in pkg_resources.working_set]

    if _inro_package not in _avail_packages:
        print("Inro not found. Skipping inro setup.")
        mocked_inro_context()
        return False
    else:
        import inro

        if "MagicMock" in str(type(inro)):
            return False

    return True
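
The sketch below shows how this check might be used to guard Emme-dependent code paths (illustrative only):

from tm2py.tools import emme_context

# returns True when the inro-emme package is available; otherwise installs the
# mocked inro modules (see mocked_inro_context below) and returns False
if emme_context():
    from inro.emme.database import emmebank  # real Emme API is importable here
else:
    print("Running without Emme; inro modules are mocked for testing")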

interpolate_dfs(df, ref_points, target_point, ref_col_name='ends_with')

Interpolate for the model year assuming linear growth between the reference years.

Parameters:

Name Type Description Default
df pd.DataFrame

dataframe to interpolate on, with ref points contained in column name per ref_col_name.

required
ref_points Collection[Union[float, int]]

reference years to interpolate between

required
target_point Union[float, int]

target year

required
ref_col_name str

column name to use for reference years. Defaults to “ends_with”.

'ends_with'
Source code in tm2py/tools.py
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
def interpolate_dfs(
    df: pd.DataFrame,
    ref_points: Collection[Union[float, int]],
    target_point: Union[float, int],
    ref_col_name: str = "ends_with",
) -> pd.DataFrame:
    """Interpolate for the model year assuming linear growth between the reference years.

    Args:
        df (pd.DataFrame): dataframe to interpolate on, with ref points contained in column
            name per ref_col_name.
        ref_points (Collection[Union[float,int]]): reference years to interpolate between
        target_point (Union[float,int]): target year
        ref_col_name (str, optional): column name to use for reference years.
            Defaults to "ends_with".
    """
    if ref_col_name not in ["ends_with"]:
        raise NotImplementedError(f"{ref_col_name} not implemented")
    if len(ref_points) != 2:
        raise NotImplementedError(f"{ref_points} reference points not implemented")

    _ref_points = list(map(int, ref_points))
    _target_point = int(target_point)

    _ref_points.sort()
    _start_point, _end_point = _ref_points
    if not _start_point <= _target_point <= _end_point:
        raise ValueError(
            f"Target Point: {_target_point} not within range of \
            Reference Points: {_ref_points}"
        )

    _start_ref_df = df[[c for c in df.columns if c.endswith(f"{_start_point}")]].copy()
    _end_ref_df = df[[c for c in df.columns if c.endswith(f"{_end_point}")]].copy()

    if len(_start_ref_df.columns) != len(_end_ref_df.columns):
        raise ValueError(
            f"{_start_point} and {_end_point} have different number of columns:\n\
           {_start_point} Columns: {_start_ref_df.columns}\n\
           {_end_point} Columns: {_end_ref_df.columns}\
        "
        )

    _start_ref_df.rename(
        columns=lambda x: x.replace(f"_{_start_point}", ""), inplace=True
    )
    _end_ref_df.rename(columns=lambda x: x.replace(f"_{_end_point}", ""), inplace=True)
    _scale_factor = float(target_point - _start_point) / (_end_point - _start_point)

    interpolated_df = (1 - _scale_factor) * _start_ref_df + _scale_factor * _end_ref_df

    return interpolated_df
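
A worked sketch with made-up household counts, interpolating a 2025 value midway between 2015 and 2035 reference columns:

import pandas as pd

from tm2py.tools import interpolate_dfs

df = pd.DataFrame({"households_2015": [100.0, 200.0], "households_2035": [300.0, 400.0]})
df_2025 = interpolate_dfs(df, ref_points=[2015, 2035], target_point=2025)
print(df_2025["households"])  # halfway between the reference years: 200.0 and 300.0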

mocked_inro_context()

Mocking of modules which need to be mocked for tests.

Source code in tm2py/tools.py
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
def mocked_inro_context():
    """Mocking of modules which need to be mocked for tests."""
    import sys
    from unittest.mock import MagicMock

    sys.modules["inro.emme.database.emmebank"] = MagicMock()
    sys.modules["inro.emme.database.emmebank.path"] = MagicMock(return_value=".")
    sys.modules["inro.emme.network"] = MagicMock()
    sys.modules["inro.emme.database.scenario"] = MagicMock()
    sys.modules["inro.emme.database.matrix"] = MagicMock()
    sys.modules["inro.emme.network.node"] = MagicMock()
    sys.modules["inro.emme.desktop.app"] = MagicMock()
    sys.modules["inro"] = MagicMock()
    sys.modules["inro.modeller"] = MagicMock()
    sys.modules["tm2py.emme.manager.EmmeManager.project"] = MagicMock()
    sys.modules["tm2py.emme.manager.EmmeManager.emmebank"] = MagicMock()

run_process(commands, name='')

Run system-level commands as a blocking process and log output and error messages.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| commands | Collection[str] | list of one or more commands to execute | required |
| name | str | optional name to use for the temp bat file | '' |

Source code in tm2py/tools.py
def run_process(commands: Collection[str], name: str = ""):
    """Run system level commands as blocking process and log output and error messages.

    Args:
        commands: list of one or more commands to execute
        name: optional name to use for the temp bat file
    """
    # when merged with develop_logging branch can use get_logger
    # logger = Logger.get_logger
    logger = None
    with temp_file("w", prefix=name, suffix=".bat") as (bat_file, bat_file_path):
        bat_file.write("\n".join(commands))
        bat_file.close()
        if logger:
            # temporary file to capture output error messages generated by Java
            # Note: temp file created in the current working directory
            with temp_file(mode="w+", suffix="_error.log") as (err_file, _):
                try:
                    output = _subprocess.check_output(
                        bat_file_path, stderr=err_file, shell=True
                    )
                    logger.log(output.decode("utf-8"))
                except _subprocess.CalledProcessError as error:
                    logger.log(error.output)
                    raise
                finally:
                    err_file.seek(0)
                    error_msg = err_file.read()
                    if error_msg:
                        logger.log(error_msg)
        else:
            _subprocess.check_call(bat_file_path, shell=True)
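
A minimal usage sketch (the commands and name are illustrative): each list entry becomes one line of a temporary .bat file, which is then executed as a blocking subprocess.

```python
from tm2py.tools import run_process

# Runs the two echo commands from a temporary "example_step...bat" file
# and blocks until they complete.
run_process(["echo starting step", "echo finished"], name="example_step")
```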

temp_file(mode='w+', prefix='', suffix='')

Temp file wrapper to return open file handle and named path.

A named temporary file (using mkstemp) with specified prefix and suffix is created and opened with the specified mode. The file handle and path are returned. The file is closed and deleted on exit.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| mode | str | mode to open file, [rw][+][b] | 'w+' |
| prefix | str | optional text to start temp file name | '' |
| suffix | str | optional text to end temp file name | '' |

Source code in tm2py/tools.py
@_context
def temp_file(mode: str = "w+", prefix: str = "", suffix: str = ""):
    """Temp file wrapper to return open file handle and named path.

    A named temporary file (using mkstemp) with specified prefix and
    suffix is created and opened with the specified mode. The file
    handle and path are returned. The file is closed and deleted on exit.

    Args:
        mode: mode to open file, [rw][+][b]
        prefix: optional text to start temp file name
        suffix: optional text to end temp file name
    """
    file_ref, file_path = tempfile.mkstemp(prefix=prefix, suffix=suffix)
    file = os.fdopen(file_ref, mode=mode)
    try:
        yield file, file_path
    finally:
        if not file.closed:
            file.close()
        os.remove(file_path)
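
A minimal usage sketch: the context manager yields the open file handle and its path, and deletes the file when the block exits.

```python
from tm2py.tools import temp_file

with temp_file(mode="w", prefix="example_", suffix=".txt") as (file, path):
    file.write("some intermediate output")
    file.close()  # close before handing the path to another process
    print(path)   # file still exists here; it is deleted on exiting the block
```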

zonal_csv_to_matrices(csv_file, i_column='ORIG', j_column='DEST', value_columns=['VALUE'], default_value=0.0, fill_zones=False, max_zone=None, delimiter=',')

Read a CSV file with zonal data into a dictionary of DataFrames.

Input CSV file should have a header row specifying the I, J, and Value column names.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| csv_file | str | Path to the input CSV file. | required |
| i_column | str | Name of the I (origin) zone column. Defaults to "ORIG". | 'ORIG' |
| j_column | str | Name of the J (destination) zone column. Defaults to "DEST". | 'DEST' |
| value_columns | Collection[str] | List of columns to turn into matrices. Defaults to ["VALUE"]. | ['VALUE'] |
| default_value | float | Value to fill empty cells with. Defaults to 0.0. | 0.0 |
| fill_zones | bool | If true, fill zones without values up to max_zone with the default value. Defaults to False. | False |
| max_zone | int | If fill_zones is True, used to determine matrix size. Defaults to max(I, J). | None |
| delimiter | str | Input file delimiter. Defaults to ",". | ',' |

Returns:

| Name | Type | Description |
| --- | --- | --- |
| dict | Mapping[str, pd.DataFrame] | Dictionary of Pandas DataFrames with matrix names as keys. |

Source code in tm2py/tools.py
def zonal_csv_to_matrices(
    csv_file: str,
    i_column: str = "ORIG",
    j_column: str = "DEST",
    value_columns: Collection[str] = ["VALUE"],
    default_value: float = 0.0,
    fill_zones: bool = False,
    max_zone: int = None,
    delimiter: str = ",",
) -> Mapping[str, pd.DataFrame]:
    """Read a CSV file with zonal data into a dictionary of DataFrames.

    Input CSV file should have a header row specifying the I, J, and Value column names.

    Args:
        csv_file (str): Path to the input CSV file.
        i_column (str, optional): Name of the I (origin) zone column. Defaults to "ORIG".
        j_column (str, optional): Name of the J (destination) zone column. Defaults to "DEST".
        value_columns (Collection[str], optional): List of columns to turn into matrices.
            Defaults to ["VALUE"].
        default_value (float, optional): Value to fill empty cells with. Defaults to 0.0.
        fill_zones (bool, optional): If true, will fill zones without values to max zone with
            default value. Defaults to False.
        max_zone (int, optional): If fill_zones is True, used to determine matrix size.
            Defaults to max(I, J).
        delimiter (str, optional): Input file delimiter. Defaults to ",".

    Returns:
        dict: Dictionary of Pandas dataframes with matrix names as keys.
    """
    # TODO Create a test
    _df = pd.read_csv(csv_file, delimiter=delimiter)
    _df_idx = _df.set_index([i_column, j_column])

    # Pivot each value column from long (I, J, value) format into a square matrix.
    _dfs_dict = {
        v: _df_idx[v].unstack(fill_value=default_value) for v in value_columns
    }
    if not fill_zones:
        return _dfs_dict

    if max_zone is None:
        max_zone = _df[[i_column, j_column]].max().max()

    _zone_list = list(range(1, max_zone + 1))
    for v, _matrix_df in _dfs_dict.items():
        # reindex returns a new dataframe, so assign it back for the fill to take effect
        _dfs_dict[v] = _matrix_df.reindex(
            index=_zone_list, columns=_zone_list, fill_value=default_value
        )
    return _dfs_dict

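A minimal usage sketch (demand.csv and its column names are hypothetical): read a long-format zonal CSV and expand it to full matrices for zones 1 through 10.

```python
from tm2py.tools import zonal_csv_to_matrices

# Hypothetical file: demand.csv with columns ORIG, DEST, TRIPS
matrices = zonal_csv_to_matrices(
    "demand.csv",
    value_columns=["TRIPS"],
    fill_zones=True,
    max_zone=10,
)
trips = matrices["TRIPS"]  # 10 x 10 DataFrame of trips indexed by zone number
```
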
tm2py.examples

Download and unzip examples for tm2py, used in tests.

get_example(example_name=_DEFAULT_EXAMPLE_NAME, example_subdir=_DEFAULT_EXAMPLE_SUBDIR, root_dir=_ROOT_DIR, retrieval_url=_DEFAULT_EXAMPLE_URL)

Returns example directory; downloads if necessary from retrieval URL.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| example_name | str | Used to retrieve the sub-folder, or create it if it doesn't exist. Defaults to _DEFAULT_EXAMPLE_NAME. | _DEFAULT_EXAMPLE_NAME |
| example_subdir | str | Where to find examples within the root dir. Defaults to _DEFAULT_EXAMPLE_SUBDIR. | _DEFAULT_EXAMPLE_SUBDIR |
| root_dir | str | Root dir of the project. Defaults to _ROOT_DIR. | _ROOT_DIR |
| retrieval_url | str | URL to retrieve the example data zip from. Defaults to _DEFAULT_EXAMPLE_URL. | _DEFAULT_EXAMPLE_URL |

Raises:

| Type | Description |
| --- | --- |
| FileNotFoundError | If the files can't be found after attempting to download them. |

Returns:

| Name | Type | Description |
| --- | --- | --- |
| str | str | Path to the example data. |

Source code in tm2py/examples.py
def get_example(
    example_name: str = _DEFAULT_EXAMPLE_NAME,
    example_subdir: str = _DEFAULT_EXAMPLE_SUBDIR,
    root_dir: str = _ROOT_DIR,
    retrieval_url: str = _DEFAULT_EXAMPLE_URL,
) -> str:
    """Returns example directory; downloads if necessary from retrieval URL.

    Args:
        example_name (str, optional): Used to retrieve sub-folder or create it if doesn't exist.
            Defaults to _DEFAULT_EXAMPLE_NAME.
        example_subdir (str, optional): Where to find examples within root dir. Defaults
            to _DEFAULT_EXAMPLE_SUBDIR.
        root_dir (str, optional): Root dir of project. Defaults to _ROOT_DIR.
        retrieval_url (str, optional): URL to retrieve example data zip from. Defaults
            to _DEFAULT_EXAMPLE_URL.

    Raises:
        FileNotFoundError: If can't find the files after trying to download it.

    Returns:
        str: Path to example data.
    """
    _example_dir = os.path.join(root_dir, example_subdir)
    _this_example_dir = os.path.join(_example_dir, example_name)
    if os.path.isdir(_this_example_dir):
        return _this_example_dir

    download_unzip(retrieval_url, _example_dir, _this_example_dir)
    if not os.path.isdir(_this_example_dir):
        raise FileNotFoundError(f"example {_this_example_dir} not found")

    return _this_example_dir
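
A minimal usage sketch using the defaults (assumes network access the first time the example data is downloaded):

```python
from tm2py.examples import get_example

# Downloads and unzips the default example on first use; afterwards it simply
# returns the existing directory.
example_dir = get_example()
print(example_dir)
```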