Skip to content

Commit 87ec0cf

Browse files
committed
more changes
1 parent 58c8e01 commit 87ec0cf

File tree

3 files changed

+29
-13
lines changed

3 files changed

+29
-13
lines changed

src/sed/loader/cfel/buffer_handler.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -274,7 +274,8 @@ def _save_buffer_file(self, file_set, is_first_file=True, base_timestamp=None, i
274274
# Save electron resolved dataframe
275275
electron_channels = get_channels(self._config, "per_electron")
276276
dtypes = get_dtypes(self._config, df.columns.values)
277-
electron_df = df.dropna(subset=electron_channels).astype(dtypes).reset_index()
277+
# This maintains cumulative event counts across multiple files
278+
electron_df = df.dropna(subset=electron_channels).astype(dtypes)
278279
logger.debug(f"Saving electron buffer with shape: {electron_df.shape}")
279280
electron_df.to_parquet(paths["electron"])
280281

src/sed/loader/cfel/dataframe.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -136,7 +136,7 @@ def df_electron(self) -> pd.DataFrame:
136136
@property
137137
def df_train(self) -> pd.DataFrame:
138138
"""
139-
Returns a pandas DataFrame for given channel names of type [per train].
139+
Returns a pandas DataFrame for given channel names of type [per_train].
140140
141141
Returns:
142142
pd.DataFrame: The pandas DataFrame for the 'per_train' channel's data.

src/sed/loader/cfel/loader.py

Lines changed: 26 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -568,7 +568,9 @@ def get_count_rate(
568568
- time : array of global times in seconds since scan start
569569
"""
570570
mode = kwds.pop("mode", "point")
571-
return self.get_count_rate_ms(fids=fids, mode=mode, runs=runs, **kwds)
571+
# Resolve runs to fids before calling get_count_rate_ms
572+
fids_resolved = self._resolve_fids(fids=fids, runs=runs)
573+
return self.get_count_rate_ms(fids=fids_resolved, mode=mode, **kwds)
572574

573575
# -------------------------------
574576
# Time-resolved count rate (binned)
@@ -685,18 +687,33 @@ def get_elapsed_time(
685687
)
686688

687689
elapsed_per_file: list[float] = []
690+
prev_max_ts_s = None # Track previous file's max timestamp in seconds
688691

689-
for fid in fids_resolved:
692+
for i, fid in enumerate(fids_resolved):
690693
try:
691694
ts_info = file_statistics[str(fid)]["columns"][ts_alias]
692-
print(f"ts_info: {ts_info}")
693-
dt = ts_info["max"] - ts_info["min"]
694-
695-
# normalize to seconds
696-
if hasattr(dt, "total_seconds"):
697-
dt_s = dt.total_seconds()
695+
696+
max_ts = ts_info["max"]
697+
min_ts = ts_info["min"]
698+
699+
# Normalize to float seconds
700+
if hasattr(max_ts, "total_seconds"):
701+
max_ts_s = max_ts.total_seconds()
702+
else:
703+
max_ts_s = float(max_ts)
704+
705+
if hasattr(min_ts, "total_seconds"):
706+
min_ts_s = min_ts.total_seconds()
707+
else:
708+
min_ts_s = float(min_ts)
709+
710+
# Calculate elapsed time correctly for multi-file runs
711+
if i == 0:
712+
dt_s = max_ts_s - min_ts_s
698713
else:
699-
dt_s = float(dt)
714+
dt_s = max_ts_s - prev_max_ts_s
715+
716+
prev_max_ts_s = max_ts_s
700717

701718
if dt_s < 0:
702719
raise ValueError(
@@ -717,10 +734,8 @@ def get_elapsed_time(
717734
elapsed_per_file.append(dt_s)
718735

719736
if aggregate:
720-
print("aggregate is True")
721737
return sum(elapsed_per_file)
722738

723-
print(f"Elapsed time: {elapsed_per_file}")
724739
return elapsed_per_file
725740

726741
def read_dataframe(

0 commit comments

Comments (0)