58 changes: 29 additions & 29 deletions src/main.py
@@ -7,79 +7,79 @@

def process_and_visualize_video(video_path, output_root, metadata_csv, annotation_file=None):
"""
Verarbeitet ein einzelnes Video, speichert Metadaten und Ergebnisse (PDF, CSV) in einem benannten Ordner.
Processes a single video, saves metadata and results (PDF, CSV) in a named folder.

Parameters:
video_path (str): Pfad zum Video.
output_root (str): Root-Verzeichnis, in dem der Videoordner erstellt wird.
metadata_csv (str): Pfad zur zentralen Metadatendatei.
annotation_file (str): Pfad zur Excel-Datei mit manuellen Annotationen (optional).
video_path (str): Path to the video.
output_root (str): Root directory where the video folder will be created.
metadata_csv (str): Path to the central metadata file.
annotation_file (str): Path to the Excel file with manual annotations (optional).
"""
video_name = os.path.splitext(os.path.basename(video_path))[0]
video_folder = os.path.join(output_root, video_name)
os.makedirs(video_folder, exist_ok=True)

print(f"Verarbeite Video: {video_path}")
print(f"Processing video: {video_path}")

# Speichern der Metadaten
# Save metadata
# save_metadata(video_path, metadata_csv, annotation_file if annotation_file else None)

# Verarbeitung und Visualisierung
# Processing and visualization
result = process_video(video_path, num_segments=4, display_video=False)
if not result:
print(f"Fehler beim Verarbeiten von {video_path}")
print(f"Error processing {video_path}")
return

# Ergebnisse entpacken und saven
# Unpack and save results
metadata, joints_data, smoothed_data, peaks_data, step_counts_joint = result

save_metadata(metadata, video_path, metadata_csv, annotation_file if annotation_file else None)

# PDF erstellen und speichern
# Create and save PDF
pdf_path = os.path.join(video_folder, f"{video_name}.pdf")
visualize_data(joints_data, smoothed_data, peaks_data, output_path=pdf_path)

# CSV-Dateien speichern
# Save CSV files
save_step_data_to_csv(video_folder, joints_data, smoothed_data, peaks_data, step_counts_joint)

print(f"Verarbeitung abgeschlossen. Ergebnisse im Ordner: {video_folder}")
print(f"Processing completed. Results in folder: {video_folder}")


def process_all_videos_in_directory(root_dir, output_root, annotation_file=None):
"""
Durchsucht ein Verzeichnis nach Videos und verarbeitet alle, die noch nicht verarbeitet wurden.
Speichert Metadaten und Ergebnisse.
Searches a directory for videos and processes all that have not yet been processed.
Saves metadata and results.

Parameters:
root_dir (str): Verzeichnis mit Videos.
output_root (str): Zielverzeichnis für alle Ergebnisse.
annotation_file (str): Pfad zur Excel-Datei mit manuellen Annotationen (optional).
root_dir (str): Directory with videos.
output_root (str): Target directory for all results.
annotation_file (str): Path to the Excel file with manual annotations (optional).
"""
metadata_csv = os.path.join(output_root, "metadata.csv")
for video_path in video_file_generator(root_dir, metadata_csv):
process_and_visualize_video(video_path, output_root, metadata_csv, annotation_file)
print("Alle Videos verarbeitet.")
print("All videos processed.")


def main():
"""
Hauptfunktion für die Videoverarbeitung.
Main function for video processing.
"""
parser = argparse.ArgumentParser(description="Video Processing Script")
parser.add_argument(
"--action",
type=str,
choices=["save_metadata", "process_data"],
required=True,
help="Aktion: 'save_metadata' oder 'process_data'.",
help="Action: 'save_metadata' or 'process_data'.",
)
parser.add_argument(
"--root_dir", type=str, help="Verzeichnis mit Videos (benötigt für 'save_metadata' und 'process_data')."
"--root_dir", type=str, help="Directory with videos (required for 'save_metadata' and 'process_data')."
)
parser.add_argument("--video_path", type=str, help="Pfad zu einem bestimmten Video (nur für 'process_data').")
parser.add_argument("--output_root", type=str, default="output", help="Root-Verzeichnis für die Ausgabedateien.")
parser.add_argument("--video_path", type=str, help="Path to a specific video (only for 'process_data').")
parser.add_argument("--output_root", type=str, default="output", help="Root directory for output files.")
parser.add_argument(
"--annotation_file", type=str, required=False, help="Pfad zur Excel-Datei mit manuellen Annotationen."
"--annotation_file", type=str, required=False, help="Path to the Excel file with manual annotations."
)

args = parser.parse_args()
@@ -92,10 +92,10 @@ def main():
elif args.root_dir:
process_all_videos_in_directory(args.root_dir, args.output_root, args.annotation_file)
else:
print("Fehler: Bitte entweder '--video_path' oder '--root_dir' angeben.")
print("Error: Please specify either '--video_path' or '--root_dir'.")
elif args.action == "save_metadata":
if not args.root_dir:
print("Fehler: '--root_dir' ist erforderlich für 'save_metadata'.")
print("Error: '--root_dir' is required for 'save_metadata'.")
return

metadata_csv = os.path.join(args.output_root, "metadata.csv")
@@ -104,8 +104,8 @@ def main():
save_metadata(video_path, metadata_csv, annotation_file)
print_summary()
else:
print("Ungültige Aktion. Bitte 'save_metadata' oder 'process_data' wählen.")
print("Invalid action. Please choose 'save_metadata' or 'process_data'.")


if __name__ == "__main__":
main()
main()
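For orientation, here is a minimal sketch of driving this entry point through its argparse CLI. The flags mirror the parser defined above; the module import and all paths are hypothetical.

```python
# Sketch: invoking src/main.py programmatically via its CLI (assumes src/ is
# on the import path). Flags come from the argparse definitions above; the
# directories and the annotation file name are hypothetical examples.
import sys
from main import main

sys.argv = [
    "main.py",
    "--action", "process_data",
    "--root_dir", "videos",                    # hypothetical input directory
    "--output_root", "output",                 # default from the parser
    "--annotation_file", "annotations.xlsx",   # optional manual annotations
]
main()
```

The equivalent shell invocation would be `python src/main.py --action process_data --root_dir videos --output_root output`.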
46 changes: 23 additions & 23 deletions src/metadata_handler.py
@@ -6,69 +6,69 @@

def save_metadata(metadata, video_path, metadata_csv, annotation_file=None):
"""
Speichert Metadaten eines Videos in einer CSV-Datei und prüft optional die Schrittzählung mit Annotationen.
Saves metadata of a video to a CSV file and optionally checks the step count with annotations.

Parameters:
video_path (str): Pfad zur Videodatei.
metadata_csv (str): Pfad zur zentralen Metadatendatei.
annotation_file (str): Pfad zur Excel-Datei mit manuellen Schritt-Annotationen (optional).
video_path (str): Path to the video file.
metadata_csv (str): Path to the central metadata file.
annotation_file (str): Path to the Excel file with manual step annotations (optional).
"""
# Initialize or load summary from a global variable
if not hasattr(save_metadata, "summary"):
save_metadata.summary = {"matches": 0, "non_matches": [], "no_annotations": []}

summary = save_metadata.summary

# Prüfe, ob alle erforderlichen Metadaten vorhanden sind
# Check if all required metadata is present
required_keys = ["resolution", "fps", "duration_seconds", "creation_time", "num_steps"]
if not all(key in metadata for key in required_keys):
print(f"Unvollständige Metadaten für '{video_path}'. Überspringe.")
print(f"Incomplete metadata for '{video_path}'. Skipping.")
return

# Extrahiere die berechneten Schritte
# Extract the calculated steps
calculated_steps = metadata["num_steps"]

# Prüfe, ob Annotationen existieren, falls ein Annotation-File angegeben ist
# Check if annotations exist, if an annotation file is provided
if annotation_file:
if not os.path.exists(annotation_file):
print(f"Annotationsdatei '{annotation_file}' existiert nicht. Bitte erstellen Sie diese.")
print(f"Annotation file '{annotation_file}' does not exist. Please create it.")
return

# Lade die Annotationen
# Load the annotations
annotations = pd.read_excel(annotation_file)

# Basisdateiname des Videos extrahieren
# Extract the base name of the video
video_filename = os.path.basename(video_path)

# Prüfe, ob Annotationen für das Video existieren
# Check if annotations exist for the video
if video_filename in annotations["filename"].values:
# Hole die manuelle Schrittzählung
# Get the manual step count
manual_steps = annotations.loc[annotations["filename"] == video_filename, "manual_steps"].iloc[0]

# Vergleiche berechnete Schritte mit manuellen Schritten
# Compare calculated steps with manual steps
if calculated_steps == manual_steps:
print(
f"Schrittzählungen stimmen überein für '{video_filename}' (Manuell: {manual_steps}, Berechnet: {calculated_steps})."
f"Step counts match for '{video_filename}' (Manual: {manual_steps}, Calculated: {calculated_steps})."
)
summary["matches"] += 1
else:
print(
f"Schrittzählungen stimmen NICHT überein für '{video_filename}' (Manuell: {manual_steps}, Berechnet: {calculated_steps}). Überspringe."
f"Step counts do NOT match for '{video_filename}' (Manual: {manual_steps}, Calculated: {calculated_steps}). Skipping."
)
summary["non_matches"].append(
{"filename": video_filename, "manual_steps": manual_steps, "calculated_steps": calculated_steps}
)
return # Nicht speichern, wenn Schritte nicht übereinstimmen
return # Do not save if steps do not match
else:
print(f"Keine Annotation gefunden für '{video_filename}'. Verarbeite mit berechneten Schritten.")
print(f"No annotation found for '{video_filename}'. Processing with calculated steps.")

# CSV-Datei erstellen, falls nicht vorhanden
# Create CSV file if it does not exist
if not os.path.exists(metadata_csv):
with open(metadata_csv, "w", newline="") as file:
writer = csv.writer(file)
writer.writerow(["filename", "resolution", "fps", "duration_seconds", "creation_time", "num_steps"])

# Schreibe Metadaten in CSV
# Write metadata to CSV
if not is_video_in_csv(metadata_csv, os.path.basename(video_path)):
with open(metadata_csv, "a", newline="") as file:
writer = csv.writer(file)
@@ -82,9 +82,9 @@ def save_metadata(metadata, video_path, metadata_csv, annotation_file=None):
metadata["num_steps"],
]
)
print(f"Metadaten für '{video_path}' gespeichert in '{metadata_csv}'.")
print(f"Metadata for '{video_path}' saved to '{metadata_csv}'.")
else:
print(f"Metadaten für '{video_path}' sind bereits in '{metadata_csv}'. Überspringe.")
print(f"Metadata for '{video_path}' is already in '{metadata_csv}'. Skipping.")


def is_video_in_csv(csv_file, video_filename):
@@ -126,4 +126,4 @@ def print_summary():
for item in summary["non_matches"]:
print(
f" - {item['filename']}: Manual Steps = {item['manual_steps']}, Calculated Steps = {item['calculated_steps']}"
)
)
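A minimal sketch of the annotation spreadsheet that save_metadata() reads: the two column names come from the lookups above, while the file name and the rows are made up.

```python
# Sketch: building the manual-annotation Excel file expected above.
# Columns "filename" and "manual_steps" are read by save_metadata();
# the rows and the output name are hypothetical. Writing .xlsx via pandas
# requires openpyxl.
import pandas as pd

annotations = pd.DataFrame(
    {
        "filename": ["walk_01.mp4", "walk_02.mp4"],  # video basenames
        "manual_steps": [24, 31],                    # manually counted steps
    }
)
annotations.to_excel("annotations.xlsx", index=False)
```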
12 changes: 6 additions & 6 deletions src/peak_picker.py
@@ -36,11 +36,11 @@ def load_ground_truth(file_path):
# Compare detected steps to ground truth
def compare_with_ground_truth(detected_steps, ground_truth_steps, foot="both"):
"""Compares detected peaks with ground truth steps."""
print(f"👣 {foot.capitalize()} Foot - Detected Steps: {detected_steps}")
print(f"🎯 Ground Truth Steps: {ground_truth_steps}")
print(f"{foot.capitalize()} Foot - Detected Steps: {detected_steps}")
print(f"Ground Truth Steps: {ground_truth_steps}")

accuracy = (detected_steps / ground_truth_steps) * 100
print(f"📊 Accuracy: {accuracy:.2f}%")
print(f"Accuracy: {accuracy:.2f}%")

return accuracy

@@ -52,7 +52,7 @@ def process_folders(root_dir):
if not os.path.isdir(folder_path): # Skip non-directories
continue

print(f"\n📂 Processing folder: {folder_name}")
print(f"\nProcessing folder: {folder_name}")

# Paths to accelerometer data (left & right) and ground truth
left_accel_file = os.path.join(folder_path, f"{folder_name}_left_acceleration_data.csv")
@@ -61,10 +61,10 @@

# Check if files exist
if not os.path.exists(left_accel_file) or not os.path.exists(right_accel_file):
print(f"Missing accelerometer files in {folder_name}. Skipping.")
print(f"Missing accelerometer files in {folder_name}. Skipping.")
continue
if not os.path.exists(ground_truth_file):
print(f"Missing ground truth file in {folder_name}. Skipping.")
print(f"Missing ground truth file in {folder_name}. Skipping.")
continue

# Load and process left & right foot accelerometer data
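For reference, the check in compare_with_ground_truth() reduces to a percentage of the ground-truth count; a small worked example with made-up numbers:

```python
# Sketch of the accuracy computation above, with hypothetical step counts.
detected_steps = 28
ground_truth_steps = 30

accuracy = (detected_steps / ground_truth_steps) * 100
print(f"Accuracy: {accuracy:.2f}%")  # -> Accuracy: 93.33%
```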
40 changes: 20 additions & 20 deletions src/video_processing.py
@@ -97,15 +97,15 @@ def process_video(video_path, num_segments, display_video=False):
first_clap_time_sec = claps[0][0]
second_clap_time_sec = claps[1][0]

# Convert these times to the videos frames
# Convert these times to the video's frames
first_clap_frame = int(first_clap_time_sec * fps)
second_clap_frame = int(second_clap_time_sec * fps)

# Slice data in place
for joint in joints_data:
joints_data[joint] = joints_data[joint][first_clap_frame : second_clap_frame + 1]

# update duration?
# Update duration
new_frame_count = len(joints_data["right_ankle"]) # or any joint
duration = round(new_frame_count / fps, 2)

@@ -213,19 +213,19 @@ def visualize_data(joints_data, smoothed_data, peaks_data, output_path=None):

def save_step_data_to_csv(output_folder, joints_data, smoothed_data, peaks_data, step_counts_joint):
"""
Speichert Rohdaten, geglättete Daten und Schrittzählungen in separaten CSV-Dateien.
Berechnet die Gesamtanzahl der Schritte nur basierend auf 'left_foot_index' und 'right_foot_index'.
Saves raw data, smoothed data, and step counts in separate CSV files.
Calculates the total number of steps based only on 'left_foot_index' and 'right_foot_index'.

Parameters:
output_folder (str): Zielordner für die CSV-Dateien.
joints_data (dict): Rohdaten der Gelenkbewegungen.
smoothed_data (dict): Geglättete Daten der Gelenkbewegungen.
peaks_data (dict): Detektierte Peaks (Schritte) pro Gelenk.
step_counts_joint (dict): Zählung der Schritte für jedes Gelenk.
output_folder (str): Target folder for the CSV files.
joints_data (dict): Raw joint movement data.
smoothed_data (dict): Smoothed joint movement data.
peaks_data (dict): Detected peaks (steps) per joint.
step_counts_joint (dict): Step counts for each joint.
"""
os.makedirs(output_folder, exist_ok=True)

# 1. Rohdaten speichern
# 1. Save raw data
raw_data_csv = os.path.join(output_folder, "raw_data.csv")
with open(raw_data_csv, mode="w", newline="") as file:
writer = csv.writer(file)
@@ -236,9 +236,9 @@ def save_step_data_to_csv(output_folder, joints_data, smoothed_data, peaks_data,
joints_data[joint][frame] if frame < len(joints_data[joint]) else None for joint in joints_data
]
writer.writerow(row)
print(f"Rohdaten gespeichert: {raw_data_csv}")
print(f"Raw data saved: {raw_data_csv}")

# 2. Geglättete Daten speichern
# 2. Save smoothed data
smoothed_data_csv = os.path.join(output_folder, "smoothed_data.csv")
with open(smoothed_data_csv, mode="w", newline="") as file:
writer = csv.writer(file)
@@ -249,28 +249,28 @@ def save_step_data_to_csv(output_folder, joints_data, smoothed_data, peaks_data,
smoothed_data[joint][frame] if frame < len(smoothed_data[joint]) else None for joint in smoothed_data
]
writer.writerow(row)
print(f"Geglättete Daten gespeichert: {smoothed_data_csv}")
print(f"Smoothed data saved: {smoothed_data_csv}")

# 3. Schrittzählung speichern
# 3. Save step counts
steps_csv = os.path.join(output_folder, "step_counts.csv")
with open(steps_csv, mode="w", newline="") as file:
writer = csv.writer(file)
writer.writerow(["Joint", "Detected Steps", "Peaks"]) # Header

# Schreibe die Schrittzählungen für alle Gelenke außer den Total Steps
# Write the step counts for all joints except the total steps
for joint, steps in step_counts_joint.items():
if joint not in ["left_foot_index", "right_foot_index"]: # Ignoriere diese Gelenke vorerst
if joint not in ["left_foot_index", "right_foot_index"]: # Ignore these joints for now
peaks = peaks_data[joint]
writer.writerow([joint, steps, list(peaks)]) # Fügt die Peaks mit hinzu
writer.writerow([joint, steps, list(peaks)]) # Add the peaks

# Berechnung der Total Steps basierend auf 'left_foot_index' und 'right_foot_index'
# Calculate the total steps based on 'left_foot_index' and 'right_foot_index'
left_steps = len(peaks_data.get("left_foot_index", []))
right_steps = len(peaks_data.get("right_foot_index", []))
total_steps = left_steps + right_steps

# Schreibe die Einträge für 'left_foot_index', 'right_foot_index' und die Total Steps
# Write the entries for 'left_foot_index', 'right_foot_index', and the total steps
writer.writerow(["left_foot_index", left_steps, list(peaks_data.get("left_foot_index", []))])
writer.writerow(["right_foot_index", right_steps, list(peaks_data.get("right_foot_index", []))])
writer.writerow(["Total", total_steps, ""])

print(f"Schrittzählungen gespeichert: {steps_csv}")
print(f"Step counts saved: {steps_csv}")