22 changes: 11 additions & 11 deletions activitysim/core/flow.py
@@ -51,15 +51,15 @@

@contextlib.contextmanager
def logtime(tag, tag2=""):
logger.info(f"begin {tag} {tag2}")
logger.debug(f"begin {tag} {tag2}")
t0 = time.time()
try:
yield
except Exception:
logger.error(f"error in {tag} after {timedelta(seconds=time.time()-t0)} {tag2}")
raise
else:
logger.info(f"completed {tag} in {timedelta(seconds=time.time()-t0)} {tag2}")
logger.debug(f"completed {tag} in {timedelta(seconds=time.time()-t0)} {tag2}")


class TimeLogger:
@@ -271,11 +271,11 @@ def skims_mapping(
primary_origin_col_name=None,
predigitized_time_periods=False,
):
logger.info("loading skims_mapping")
logger.info(f"- orig_col_name: {orig_col_name}")
logger.info(f"- dest_col_name: {dest_col_name}")
logger.info(f"- stop_col_name: {stop_col_name}")
logger.info(f"- primary_origin_col_name: {primary_origin_col_name}")
logger.debug("loading skims_mapping")
logger.debug(f"- orig_col_name: {orig_col_name}")
logger.debug(f"- dest_col_name: {dest_col_name}")
logger.debug(f"- stop_col_name: {stop_col_name}")
logger.debug(f"- primary_origin_col_name: {primary_origin_col_name}")
skim_dataset = state.get_injectable("skim_dataset")
if zone_layer == "maz" or zone_layer is None:
odim = "omaz" if "omaz" in skim_dataset.dims else "otaz"
@@ -297,7 +297,7 @@ def skims_mapping(
else:
raise ValueError(f"unknown zone layer {zone_layer!r}")
if zone_layer:
logger.info(f"- zone_layer: {zone_layer}")
logger.debug(f"- zone_layer: {zone_layer}")
if (
orig_col_name is not None
and dest_col_name is not None
@@ -574,7 +574,7 @@ def _apply_filter(_dataset, renames: list):
if choosers is None:
logger.info(f"empty flow on {trace_label}")
else:
logger.info(f"{len(choosers)} chooser rows on {trace_label}")
logger.debug(f"{len(choosers)} chooser rows on {trace_label}")
flow_tree = sh.DataTree(df=[] if choosers is None else choosers)
idx_name = choosers.index.name or "index"
rename_dataset_cols = [
@@ -598,7 +598,7 @@ def _apply_filter(_dataset, renames: list):
)
flow_tree.root_dataset = flow_tree.root_dataset # apply the filter
else:
- logger.info(
+ logger.debug(
f"{len(choosers)} chooser rows and {len(interacts)} interact rows on {trace_label}"
)
top = sh.dataset.from_named_objects(
@@ -697,7 +697,7 @@ def _apply_filter(_dataset, renames: list):
for i, v in extra_vars.items():
readme += f"\n - {i}: {v}"

logger.info(f"setting up sharrow flow {trace_label}")
logger.debug(f"setting up sharrow flow {trace_label}")
extra_hash_data = ()
if zone_layer:
extra_hash_data += (zone_layer,)
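For context, the logtime helper changed above is a context manager that logs elapsed time around a block. A minimal usage sketch follows, assuming activitysim is installed and that the module logger follows the usual logging.getLogger(__name__) convention, so its timing messages now require DEBUG level to appear; the logger name and tags below are illustrative, not from this PR.

import logging
import time

from activitysim.core.flow import logtime

# After this change, the begin/completed messages from logtime() are emitted
# at DEBUG level, so the flow module's logger must be set to DEBUG to see them.
# The name "activitysim.core.flow" assumes the __name__ logger convention.
logging.basicConfig(level=logging.INFO)
logging.getLogger("activitysim.core.flow").setLevel(logging.DEBUG)

with logtime("demo step", "extra tag"):
    time.sleep(0.1)  # stand-in for the timed work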
4 changes: 2 additions & 2 deletions activitysim/core/skim_dataset.py
@@ -253,15 +253,15 @@ def set_df(self, df):
and np.issubdtype(df[self.time_key].dtype, np.integer)
and df[self.time_key].max() < self.dataset.dims["time_period"]
):
logger.info(f"natural use for time_period={self.time_key}")
logger.debug(f"natural use for time_period={self.time_key}")
positions["time_period"] = df[self.time_key]
elif (
df[self.time_key].dtype == "category"
and df[self.time_key].dtype == self.time_label_dtype
):
positions["time_period"] = df[self.time_key].cat.codes
else:
logger.info(f"vectorize lookup for time_period={self.time_key}")
logger.debug(f"vectorize lookup for time_period={self.time_key}")
positions["time_period"] = pd.Series(
np.vectorize(self.time_map.get, "I")(df[self.time_key], 0),
index=df.index,
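The fallback branch in set_df() above maps time-period labels to integer positions with a vectorized dict lookup. A small self-contained sketch of that pattern is below; the time_map labels and column name are illustrative stand-ins, not values taken from an actual skim dataset.

import numpy as np
import pandas as pd

# Illustrative stand-in for self.time_map; real labels come from the skim dataset.
time_map = {"EA": 0, "AM": 1, "MD": 2, "PM": 3, "EV": 4}
df = pd.DataFrame({"out_period": ["AM", "MD", "XX"]})  # unknown "XX" falls back to 0

# np.vectorize(time_map.get, "I") applies the lookup element-wise and returns
# unsigned integers; the trailing 0 is the default for keys missing from the map.
positions = pd.Series(
    np.vectorize(time_map.get, "I")(df["out_period"], 0),
    index=df.index,
)
print(positions.tolist())  # [1, 2, 0]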