Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
525 changes: 525 additions & 0 deletions cases_static_jsons/employee_types.json

Large diffs are not rendered by default.

18 changes: 18 additions & 0 deletions cases_static_jsons/general_settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
{
"SHIFT_NAME_TO_INDEX": {
"Early": 0,
"Late": 1,
"Night": 2
},
"qualifications": {
"2963": [
"rounds"
],
"3868": [
"rounds"
],
"791": [
"rounds"
]
}
}
113 changes: 113 additions & 0 deletions cases_static_jsons/minimal_number_of_staff.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
{
"Azubi": {
"Di": {
"F": 1,
"N": 0,
"S": 1
},
"Do": {
"F": 1,
"N": 0,
"S": 1
},
"Fr": {
"F": 1,
"N": 0,
"S": 1
},
"Mi": {
"F": 1,
"N": 0,
"S": 1
},
"Mo": {
"F": 1,
"N": 0,
"S": 1
},
"Sa": {
"F": 1,
"N": 0,
"S": 1
},
"So": {
"F": 1,
"N": 0,
"S": 1
}
},
"Fachkraft": {
"Di": {
"F": 3,
"N": 2,
"S": 2
},
"Do": {
"F": 3,
"N": 2,
"S": 2
},
"Fr": {
"F": 3,
"N": 2,
"S": 2
},
"Mi": {
"F": 4,
"N": 2,
"S": 2
},
"Mo": {
"F": 3,
"N": 2,
"S": 2
},
"Sa": {
"F": 2,
"N": 1,
"S": 2
},
"So": {
"F": 2,
"N": 1,
"S": 2
}
},
"Hilfskraft": {
"Di": {
"F": 2,
"N": 0,
"S": 2
},
"Do": {
"F": 2,
"N": 0,
"S": 2
},
"Fr": {
"F": 2,
"N": 0,
"S": 2
},
"Mi": {
"F": 2,
"N": 0,
"S": 2
},
"Mo": {
"F": 2,
"N": 0,
"S": 2
},
"Sa": {
"F": 2,
"N": 1,
"S": 2
},
"So": {
"F": 2,
"N": 1,
"S": 2
}
}
}
3 changes: 3 additions & 0 deletions cases_static_jsons/wishes_and_blocked.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"employees": []
}
55 changes: 50 additions & 5 deletions src/db/export_data.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import json
import logging
import os
import shutil
from datetime import date
from typing import Any

Expand All @@ -26,6 +27,50 @@ def get_correct_path(filename: str, planning_unit: int):
return output_path


def setup_case_folder(planning_unit: int):
    """Prepare the case folder by deleting the web folder and copying static JSON files.

    Clears the contents of ``<BASE_OUTPUT_FOLDER>/<planning_unit>/web`` (keeping
    ``jobs.json``) and copies every ``*.json`` file from ``./cases_static_jsons``
    into ``<BASE_OUTPUT_FOLDER>/<planning_unit>``. Deletion is best-effort: a
    failure on one entry is logged and the remaining entries are still processed.

    Args:
        planning_unit: ID of the planning unit to set up the folder for.

    Raises:
        ValueError: If the BASE_OUTPUT_FOLDER environment variable is not set.
    """
    base_folder = os.getenv("BASE_OUTPUT_FOLDER")
    if base_folder is None:
        raise ValueError("BASE_OUTPUT_FOLDER is not set in the environment variables.")

    # Resolve the target directory for this planning unit (relative to the
    # current working directory) and make sure it exists.
    target_dir = os.path.abspath(os.path.join("./", base_folder, str(planning_unit)))
    os.makedirs(target_dir, exist_ok=True)

    web_folder_path = os.path.join(target_dir, "web")
    # Delete all contents of the web folder if it exists, but keep the jobs.json file
    # so job metadata from previous runs survives a re-export.
    if os.path.exists(web_folder_path):
        for filename in os.listdir(web_folder_path):
            file_path = os.path.join(web_folder_path, filename)
            try:
                # Lazy %-formatting: only rendered if INFO logging is enabled.
                logging.info("Deleting file or folder: %s", file_path)
                if os.path.isfile(file_path) or os.path.islink(file_path):
                    if filename != "jobs.json":
                        os.unlink(file_path)
                elif os.path.isdir(file_path):
                    shutil.rmtree(file_path)
            except Exception as e:
                # Best-effort cleanup: log the failure and continue with the rest.
                logging.error("Failed to delete %s. Reason: %s", file_path, e)

    # Copy all static JSON files from cases_static_jsons to the target directory.
    static_jsons_dir = os.path.abspath("./cases_static_jsons")
    if os.path.exists(static_jsons_dir):
        for filename in os.listdir(static_jsons_dir):
            if filename.endswith(".json"):
                src_file = os.path.join(static_jsons_dir, filename)
                dst_file = os.path.join(target_dir, filename)
                shutil.copy2(src_file, dst_file)
                # Bug fix: the original message had no placeholder and logged the
                # literal text "(unknown)" — log the actual destination instead.
                logging.info("copied static JSON file: %s", dst_file)
    else:
        logging.warning("cases_static_jsons directory not found: %s", static_jsons_dir)


def export_planning_data(engine: Engine, planning_unit: int, from_date: date, till_date: date) -> dict[str, Any]:
"""Export relevant basic plan data for retrieving all information for the algorithm.

Expand Down Expand Up @@ -138,7 +183,7 @@ def export_shift_data_to_json(engine: Engine, planning_unit: int, filename: str
agg["end_time"] = agg["end_time"].dt.strftime("%Y-%m-%dT%H:%M:%S")

# Store JSON-file within given directory
json_output = json.dumps(agg.to_dict(orient="records"), ensure_ascii=False, indent=2)
json_output = json.dumps(agg.to_dict(orient="records"), ensure_ascii=True, indent=2)
store_path = get_correct_path(filename, planning_unit)
with open(store_path, "w", encoding="utf-8") as f:
f.write(json_output)
Expand Down Expand Up @@ -184,7 +229,7 @@ def export_personal_data_to_json(engine: Engine, planning_unit: int, plan_id: in
output_json = {"employees": employees_list}

# Store JSON-file within given directory
json_output = json.dumps(output_json, ensure_ascii=False, indent=2)
json_output = json.dumps(output_json, ensure_ascii=True, indent=2)
store_path = get_correct_path(filename, planning_unit)
with open(store_path, "w", encoding="utf-8") as f:
f.write(json_output)
Expand Down Expand Up @@ -257,7 +302,7 @@ def export_target_working_minutes_to_json(
output_json = {"employees": target_working_minutes_list}

# Store JSON-file within given directory
json_output = json.dumps(output_json, ensure_ascii=False, indent=2)
json_output = json.dumps(output_json, ensure_ascii=True, indent=2)
store_path = get_correct_path(filename, planning_unit)
with open(store_path, "w", encoding="utf-8") as f:
f.write(json_output)
Expand Down Expand Up @@ -305,7 +350,7 @@ def export_worked_sundays_to_json(
output_json = {"worked_sundays": worked_sundays}

# Store JSON-file within given directory
json_output = json.dumps(output_json, ensure_ascii=False, indent=2)
json_output = json.dumps(output_json, ensure_ascii=True, indent=2)
store_path = get_correct_path(filename, planning_unit)
with open(store_path, "w", encoding="utf-8") as f:
f.write(json_output)
Expand Down Expand Up @@ -490,7 +535,7 @@ def export_free_shift_and_vacation_days_json(
output_json = {"employees": free_shifts_and_vacation_days}

# Store JSON-file within given directory
json_output = json.dumps(output_json, ensure_ascii=False, indent=2, default=str)
json_output = json.dumps(output_json, ensure_ascii=True, indent=2, default=str)
store_path = get_correct_path(filename, planning_unit)
with open(store_path, "w", encoding="utf-8") as f:
f.write(json_output)
Expand Down
4 changes: 4 additions & 0 deletions src/db/export_main.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@
def main(planning_unit: int = 77, from_date: date = date(2024, 11, 1), till_date: date = date(2024, 11, 30)):
"""Sets up a basic connection to the TimeOffice database and exports all needed data for the algorithm."""
engine = get_db_engine()

# Setup case folder: delete web folder and copy static JSON files
export_data.setup_case_folder(planning_unit)

base_data = export_data.export_planning_data(engine, planning_unit, from_date, till_date)
export_data.export_shift_data_to_json(engine, planning_unit)

Expand Down
Loading