diff --git a/.github/workflows/apptainer-sipnet-carb.yml b/.github/workflows/apptainer-sipnet-carb.yml new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/.github/workflows/apptainer-sipnet-carb.yml @@ -0,0 +1 @@ + diff --git a/.github/workflows/run-workflow-examples.yml b/.github/workflows/run-workflow-examples.yml new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/.github/workflows/run-workflow-examples.yml @@ -0,0 +1 @@ + diff --git a/.gitignore b/.gitignore index 0558174..540bcd8 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ # data files not marked test **/IC_files/** **/data/** +**/data_raw/** **/pfts/** # R-specific files @@ -43,3 +44,4 @@ Thumbs.db # Temporary files *.tmp *.log +**/local_dev_notes/** diff --git a/2a_grass/00_fetch_s3_and_prepare_run_dir.sh b/2a_grass/00_fetch_s3_and_prepare_run_dir.sh new file mode 100755 index 0000000..13afd3a --- /dev/null +++ b/2a_grass/00_fetch_s3_and_prepare_run_dir.sh @@ -0,0 +1,213 @@ +#!/usr/bin/env bash +# 00_fetch_s3_and_prepare_run_dir.sh: fetch demo data from S3 and prepare run directory. +# Invoked by the 'get-demo-data' command (for users who do not have local data). +# S3 URLs and path keys come from the workflow manifest; run dir and paths are passed as arguments. +# +# Requires: yq (mikefarah/yq), aws CLI +# +# Options (see --help): --repo-root (required); --manifest optional, defaults to /2a_grass/workflow_manifest.yaml + +set -euo pipefail + +usage() { + cat <<'EOF' +Usage: 00_fetch_s3_and_prepare_run_dir.sh [OPTIONS] + +Fetch demo data from S3 and prepare the run directory. S3 URLs and path keys are +read from the workflow manifest. Run directory is either from --run-dir or from +run_dir in the file given by --config (relative paths resolved with --invocation-cwd). + +Required: + --repo-root PATH Repo root (workflows directory). Script changes to this directory. + +Run directory (one of): + --run-dir PATH Run directory (absolute, or relative to --repo-root). 
+ --config PATH User YAML config file; script reads run_dir from it (use with --invocation-cwd). + +Optional: + --manifest PATH Path to workflow_manifest.yaml (default: /2a_grass/workflow_manifest.yaml). + --invocation-cwd PATH Required when using --config with a relative run_dir. Paths reported relative to this. + --command NAME Command name for manifest step lookup (default: get-demo-data). + --step-index N Step index in that command (default: 0). + -h, --help Print this help and exit. +EOF +} + +RUN_DIR="" +CONFIG_FILE="" +REPO_ROOT="" +MANIFEST="" +COMMAND="get-demo-data" +STEP_INDEX="0" +INVOCATION_CWD="" + +while [[ $# -gt 0 ]]; do + case "$1" in + --run-dir) [[ $# -lt 2 ]] && { echo "00_fetch_s3_and_prepare_run_dir: --run-dir requires PATH." >&2; usage >&2; exit 1; }; RUN_DIR="$2"; shift 2 ;; + --config) [[ $# -lt 2 ]] && { echo "00_fetch_s3_and_prepare_run_dir: --config requires PATH." >&2; usage >&2; exit 1; }; CONFIG_FILE="$2"; shift 2 ;; + --repo-root) [[ $# -lt 2 ]] && { echo "00_fetch_s3_and_prepare_run_dir: --repo-root requires PATH." >&2; usage >&2; exit 1; }; REPO_ROOT="$2"; shift 2 ;; + --manifest) [[ $# -lt 2 ]] && { echo "00_fetch_s3_and_prepare_run_dir: --manifest requires PATH." >&2; usage >&2; exit 1; }; MANIFEST="$2"; shift 2 ;; + --command) [[ $# -lt 2 ]] && { echo "00_fetch_s3_and_prepare_run_dir: --command requires NAME." >&2; usage >&2; exit 1; }; COMMAND="$2"; shift 2 ;; + --step-index) [[ $# -lt 2 ]] && { echo "00_fetch_s3_and_prepare_run_dir: --step-index requires N." >&2; usage >&2; exit 1; }; STEP_INDEX="$2"; shift 2 ;; + --invocation-cwd) [[ $# -lt 2 ]] && { echo "00_fetch_s3_and_prepare_run_dir: --invocation-cwd requires PATH." >&2; usage >&2; exit 1; }; INVOCATION_CWD="$2"; shift 2 ;; + -h|--help) usage; exit 0 ;; + *) echo "00_fetch_s3_and_prepare_run_dir: Unknown option: $1" >&2; usage >&2; exit 1 ;; + esac +done + +if [[ -z "$REPO_ROOT" ]]; then echo "00_fetch_s3_and_prepare_run_dir: --repo-root is required." 
>&2; usage >&2; exit 1; fi +if [[ -z "$MANIFEST" ]]; then + MANIFEST="${REPO_ROOT}/2a_grass/workflow_manifest.yaml" +fi + +# Run directory: from --run-dir or from config file +if [[ -n "$CONFIG_FILE" ]]; then + if [[ ! -f "$CONFIG_FILE" ]]; then + echo "00_fetch_s3_and_prepare_run_dir: Config file not found: $CONFIG_FILE" >&2 + exit 1 + fi + RUN_DIR=$(yq eval '.run_dir' "$CONFIG_FILE") || { echo "00_fetch_s3_and_prepare_run_dir: yq failed to read .run_dir from config: $CONFIG_FILE" >&2; exit 1; } + if [[ -z "$RUN_DIR" || "$RUN_DIR" == "null" ]]; then + echo "00_fetch_s3_and_prepare_run_dir: run_dir not found or empty in config (expected .run_dir): $CONFIG_FILE" >&2 + exit 1 + fi + if [[ "$RUN_DIR" != /* ]]; then + if [[ -z "$INVOCATION_CWD" ]]; then + echo "00_fetch_s3_and_prepare_run_dir: --invocation-cwd is required when run_dir in config is relative." >&2 + exit 1 + fi + RUN_DIR="${INVOCATION_CWD}/${RUN_DIR}" + fi +elif [[ -z "$RUN_DIR" ]]; then + echo "00_fetch_s3_and_prepare_run_dir: Provide --run-dir or --config (with run_dir in the config file)." >&2 + usage >&2 + exit 1 +fi + +# Show path for user: relative to INVOCATION_CWD if under it, else absolute +report_path() { + local abs_path="$1" + if [[ -n "$INVOCATION_CWD" && "$abs_path" == "$INVOCATION_CWD"/* ]]; then + echo "${abs_path#"$INVOCATION_CWD"/}" + else + echo "$abs_path" + fi +} + +if [[ ! -f "$MANIFEST" ]]; then + echo "00_fetch_s3_and_prepare_run_dir: Manifest not found: $MANIFEST" >&2 + exit 1 +fi + +if ! command -v yq &>/dev/null; then + echo "00_fetch_s3_and_prepare_run_dir: yq is required to read the manifest." >&2 + exit 1 +fi + +cd "$REPO_ROOT" + +# Resolve a path relative to run_dir (RUN_DIR may be absolute or relative to REPO_ROOT). 
+resolve_run_path() { + if [[ "$RUN_DIR" == /* ]]; then + echo "${RUN_DIR}/${1}" + else + echo "${REPO_ROOT}/${RUN_DIR}/${1}" + fi +} + +# --- Read from manifest (endpoint, bucket, and per-resource key_prefix + filename) --- +s3_endpoint=$(yq eval '.s3.endpoint_url' "$MANIFEST") +s3_bucket=$(yq eval '.s3.bucket' "$MANIFEST") + +# Build S3 key from key_prefix + filename (key_prefix may be empty or null from yq) +s3_key() { + local prefix="$1" + local name="$2" + [[ "$prefix" == "null" || -z "$prefix" ]] && prefix="" + if [[ -n "$prefix" ]]; then + echo "${prefix}/${name}" + else + echo "$name" + fi +} + +# Artifact: bucket + key from s3.artifact_02 +artifact_key_prefix=$(yq eval '.s3.artifact_02.key_prefix' "$MANIFEST") +artifact_filename=$(yq eval '.s3.artifact_02.filename' "$MANIFEST") +artifact_s3_key=$(s3_key "$artifact_key_prefix" "$artifact_filename") +artifact_s3_uri="s3://${s3_bucket}/${artifact_s3_key}" + +# LandTrendr TIFs: bucket + key from s3.median_tif and s3.stdv_tif +median_key_prefix=$(yq eval '.s3.median_tif.key_prefix' "$MANIFEST") +median_filename=$(yq eval '.s3.median_tif.filename' "$MANIFEST") +stdv_key_prefix=$(yq eval '.s3.stdv_tif.key_prefix' "$MANIFEST") +stdv_filename=$(yq eval '.s3.stdv_tif.filename' "$MANIFEST") +median_s3_key=$(s3_key "$median_key_prefix" "$median_filename") +stdv_s3_key=$(s3_key "$stdv_key_prefix" "$stdv_filename") +median_s3_uri="s3://${s3_bucket}/${median_s3_key}" +stdv_s3_uri="s3://${s3_bucket}/${stdv_s3_key}" + +landtrendr_paths_raw=$(yq eval '.paths.landtrendr_raw_files' "$MANIFEST") +# Split comma-separated; first segment = median, second = stdv +landtrendr_segment_1="${landtrendr_paths_raw%%,*}" +landtrendr_segment_2="${landtrendr_paths_raw#*,}" + +# Output path keys for this step: create these dirs (from manifest step.outputs) +output_keys=$(yq eval '.steps["'"$COMMAND"'"] | .['"$STEP_INDEX"'].outputs | .[]' "$MANIFEST" 2>/dev/null || true) + +# --- Resolve absolute run directory (for downloads and extract) --- 
+RUN_DIR_ABS=$(if [[ "$RUN_DIR" = /* ]]; then echo "$RUN_DIR"; else echo "$REPO_ROOT/$RUN_DIR"; fi) + +# --- Create run directory and canonicalize so paths have no ".." (clean aws/tar output) --- +echo "00_fetch_s3_and_prepare_run_dir: Creating run directory and output dirs from manifest" +mkdir -p "$RUN_DIR_ABS" +RUN_DIR_ABS=$(cd "$RUN_DIR_ABS" && pwd) +RUN_DIR="$RUN_DIR_ABS" + +while IFS= read -r path_key; do + [[ -z "$path_key" ]] && continue + path_value=$(yq eval '.paths["'"$path_key"'"]' "$MANIFEST" 2>/dev/null) + [[ -z "$path_value" || "$path_value" == "null" ]] && continue + resolved=$(resolve_run_path "$path_value") + mkdir -p "$resolved" +done <<< "$output_keys" + +# --- Download artifact tarball into run directory and extract --- +artifact_local="${RUN_DIR_ABS}/${artifact_filename}" +artifact_report=$(report_path "$artifact_local") +if [[ -f "$artifact_local" ]]; then + echo "00_fetch_s3_and_prepare_run_dir: Artifact tarball already present in run dir: $artifact_report" +else + echo "00_fetch_s3_and_prepare_run_dir: Downloading artifact from S3 into run directory" + echo "00_fetch_s3_and_prepare_run_dir: Saving to: $artifact_report" + (cd "$RUN_DIR_ABS" && aws s3 cp --endpoint-url "$s3_endpoint" "$artifact_s3_uri" "$artifact_filename") +fi +echo "00_fetch_s3_and_prepare_run_dir: Extracting artifact into run directory" +tar -xzf "$artifact_local" -C "$RUN_DIR_ABS" + +# --- Download LandTrendr TIFs if not present (paths from manifest: first=median, second=stdv) --- +seg1=$(echo "$landtrendr_segment_1" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//') +seg2=$(echo "$landtrendr_segment_2" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//') + +download_tif() { + local seg="$1" + local s3_uri="$2" + local label="$3" + [[ -z "$seg" ]] && return 0 + resolved=$(resolve_run_path "$seg") + if [[ -f "$resolved" ]]; then + echo "00_fetch_s3_and_prepare_run_dir: Already present: $(report_path "$resolved")" + else + local dest_dir dest_name + dest_dir=$(dirname "$resolved") + 
dest_name=$(basename "$resolved") + mkdir -p "$dest_dir" + echo "00_fetch_s3_and_prepare_run_dir: Downloading $label from S3" + echo "00_fetch_s3_and_prepare_run_dir: Saving to: $(report_path "$resolved")" + (cd "$dest_dir" && aws s3 cp --endpoint-url "$s3_endpoint" "$s3_uri" "$dest_name") + fi +} +download_tif "$seg1" "$median_s3_uri" "median TIF" +download_tif "$seg2" "$stdv_s3_uri" "stdv TIF" + +echo "00_fetch_s3_and_prepare_run_dir: Done." diff --git a/2a_grass/00_stage_external_inputs.sh b/2a_grass/00_stage_external_inputs.sh new file mode 100644 index 0000000..51617ef --- /dev/null +++ b/2a_grass/00_stage_external_inputs.sh @@ -0,0 +1,194 @@ +#!/usr/bin/env bash +# 00_stage_external_inputs.sh: create run directory and stage user external inputs. +# Invoked as step 00 of the 'prepare' command. It: +# - Ensures the run directory exists. +# - Copies user-provided external files (from config.external_paths) into +# the run directory so they are available to the workflow. +# +# Requires: yq (mikefarah/yq) +# +# Options (see --help): --repo-root (required); --manifest optional, currently +# unused for staging; defaults to /2a_grass/workflow_manifest.yaml. +# Run directory is either from --run-dir or from run_dir in the file given by +# --config (relative paths resolved with --invocation-cwd). external_paths +# entries are resolved from --invocation-cwd when relative. + +set -euo pipefail + +usage() { + cat <<'EOF' +Usage: 00_stage_external_inputs.sh [OPTIONS] + +Create the run directory (if needed) and copy user-provided external files +from the config's external_paths section into the run directory so they are +available to the workflow. + +Required: + --repo-root PATH Repo root (workflows directory). Script changes to this directory. + +Run directory (one of): + --run-dir PATH Run directory (absolute, or relative to --repo-root). + --config PATH User YAML config file; script reads run_dir from it (use with --invocation-cwd). 
+ +Optional: + --manifest PATH Path to workflow_manifest.yaml (default: /2a_grass/workflow_manifest.yaml). (Currently unused.) + --invocation-cwd PATH Required when using --config with a relative run_dir or relative external_paths. + -h, --help Print this help and exit. +EOF +} + +RUN_DIR="" +CONFIG_FILE="" +REPO_ROOT="" +MANIFEST="" +INVOCATION_CWD="" + +while [[ $# -gt 0 ]]; do + case "$1" in + --run-dir) + [[ $# -lt 2 ]] && { echo "00_stage_external_inputs: --run-dir requires PATH." >&2; usage >&2; exit 1; } + RUN_DIR="$2"; shift 2 ;; + --config) + [[ $# -lt 2 ]] && { echo "00_stage_external_inputs: --config requires PATH." >&2; usage >&2; exit 1; } + CONFIG_FILE="$2"; shift 2 ;; + --repo-root) + [[ $# -lt 2 ]] && { echo "00_stage_external_inputs: --repo-root requires PATH." >&2; usage >&2; exit 1; } + REPO_ROOT="$2"; shift 2 ;; + --manifest) + [[ $# -lt 2 ]] && { echo "00_stage_external_inputs: --manifest requires PATH." >&2; usage >&2; exit 1; } + MANIFEST="$2"; shift 2 ;; + --invocation-cwd) + [[ $# -lt 2 ]] && { echo "00_stage_external_inputs: --invocation-cwd requires PATH." >&2; usage >&2; exit 1; } + INVOCATION_CWD="$2"; shift 2 ;; + -h|--help) + usage; exit 0 ;; + *) + echo "00_stage_external_inputs: Unknown option: $1" >&2 + usage >&2 + exit 1 ;; + esac +done + +if [[ -z "$REPO_ROOT" ]]; then + echo "00_stage_external_inputs: --repo-root is required." >&2 + usage >&2 + exit 1 +fi + +if [[ -z "$MANIFEST" ]]; then + MANIFEST="${REPO_ROOT}/2a_grass/workflow_manifest.yaml" +fi + +# Run directory: from --run-dir or from config file +if [[ -n "$CONFIG_FILE" ]]; then + if [[ ! 
-f "$CONFIG_FILE" ]]; then + echo "00_stage_external_inputs: Config file not found: $CONFIG_FILE" >&2 + exit 1 + fi + RUN_DIR=$(yq eval '.run_dir' "$CONFIG_FILE") || { + echo "00_stage_external_inputs: yq failed to read .run_dir from config: $CONFIG_FILE" >&2 + exit 1 + } + if [[ -z "$RUN_DIR" || "$RUN_DIR" == "null" ]]; then + echo "00_stage_external_inputs: run_dir not found or empty in config (expected .run_dir): $CONFIG_FILE" >&2 + exit 1 + fi + if [[ "$RUN_DIR" != /* ]]; then + if [[ -z "$INVOCATION_CWD" ]]; then + echo "00_stage_external_inputs: --invocation-cwd is required when run_dir in config is relative." >&2 + exit 1 + fi + RUN_DIR="${INVOCATION_CWD}/${RUN_DIR}" + fi +elif [[ -z "$RUN_DIR" ]]; then + echo "00_stage_external_inputs: Provide --run-dir or --config (with run_dir in the config file)." >&2 + usage >&2 + exit 1 +fi + +if [[ ! -f "$MANIFEST" ]]; then + echo "00_stage_external_inputs: Manifest not found: $MANIFEST" >&2 + exit 1 +fi + +if ! command -v yq &>/dev/null; then + echo "00_stage_external_inputs: yq is required to read the manifest and config." >&2 + exit 1 +fi + +cd "$REPO_ROOT" + +# Show path for user: relative to INVOCATION_CWD if under it, else absolute +report_path() { + local abs_path="$1" + if [[ -n "$INVOCATION_CWD" && "$abs_path" == "$INVOCATION_CWD"/* ]]; then + echo "${abs_path#"$INVOCATION_CWD"/}" + else + echo "$abs_path" + fi +} + +# Resolve an absolute run directory for staging. +RUN_DIR_ABS=$(if [[ "$RUN_DIR" = /* ]]; then echo "$RUN_DIR"; else echo "$REPO_ROOT/$RUN_DIR"; fi) + +echo "00_stage_external_inputs: Ensuring run directory exists" +mkdir -p "$RUN_DIR_ABS" +RUN_DIR_ABS=$(cd "$RUN_DIR_ABS" && pwd) +RUN_DIR="$RUN_DIR_ABS" +echo "00_stage_external_inputs: Run directory: $(report_path "$RUN_DIR_ABS")" + +# If no config or no external_paths, nothing more to do. +if [[ -z "$CONFIG_FILE" || ! -f "$CONFIG_FILE" ]]; then + echo "00_stage_external_inputs: No config file provided; only run directory was created." 
+ echo "00_stage_external_inputs: Done." + exit 0 +fi + +# external_paths is a mapping from arbitrary keys to source file paths. +# We do not depend on manifest paths here; we simply copy each source file +# into the run directory (flattened by basename). +# Parse the YAML block output of .external_paths line by line (yq v4 outputs plain +# scalars without quotes). Split on first ": " to get key and value. +external_block=$(yq eval '.external_paths' "$CONFIG_FILE" 2>/dev/null || echo "null") +if [[ -z "$external_block" || "$external_block" == "null" || "$external_block" == "{}" ]]; then + echo "00_stage_external_inputs: No external_paths configured; nothing to copy." + echo "00_stage_external_inputs: Done." + exit 0 +fi + +echo "00_stage_external_inputs: Staging external inputs into run directory" + +while IFS= read -r line; do + [[ -z "$line" ]] && continue + # Split on first ": " — key is everything before, value everything after. + key="${line%%: *}" + src="${line#*: }" + [[ -z "$key" || "$key" == "$line" ]] && continue # no ": " found + [[ -z "$src" || "$src" == "null" ]] && continue + # Strip surrounding quotes that yq may preserve from the YAML source. + src="${src#\"}" ; src="${src%\"}" + + # Resolve source: absolute as-is, relative to INVOCATION_CWD otherwise. + if [[ "$src" != /* ]]; then + if [[ -z "$INVOCATION_CWD" ]]; then + echo "00_stage_external_inputs: --invocation-cwd is required when external_paths entries are relative." >&2 + exit 1 + fi + src="${INVOCATION_CWD}/${src}" + fi + if [[ ! -f "$src" ]]; then + echo "00_stage_external_inputs: external_paths.${key}: source file not found: ${src}" >&2 + exit 1 + fi + + # Destination: copy into the run directory using the source basename. 
+ dest="${RUN_DIR_ABS}/$(basename "$src")" + dest_dir=$(dirname "$dest") + mkdir -p "$dest_dir" + + echo "00_stage_external_inputs: Copying $(report_path "$src") -> $(report_path "$dest")" + cp -f "$src" "$dest" +done <<< "$external_block" + +echo "00_stage_external_inputs: Done." + diff --git a/2a_grass/01_ERA5_nc_to_clim.R b/2a_grass/01_ERA5_nc_to_clim.R index 0fc5e51..56ebf16 100755 --- a/2a_grass/01_ERA5_nc_to_clim.R +++ b/2a_grass/01_ERA5_nc_to_clim.R @@ -73,21 +73,36 @@ file_info <- site_info |> dplyr::rename(site_id = id) |> dplyr::cross_join(data.frame(ens_id = 1:10)) +# stopifnot( +# length(unique(file_info$id)) == nrow(file_info), +# all(file_info$lat > 0), # just to simplify grid naming below +# all(file_info$lon < 0) +# ) +file_info <- file_info |> + dplyr::mutate( + # match locations to half-degree ERA5 grid cell centers + # CAUTION: Calculation only correct when all lats are N and all lons are W! + ERA5_grid_cell = paste0( + ((lat + 0.25) %/% 0.5) * 0.5, "N_", + ((abs(lon) + 0.25) %/% 0.5) * 0.5, "W" + ) + ) if (!dir.exists(args$site_sipnet_met_path)) { dir.create(args$site_sipnet_met_path, recursive = TRUE) } furrr::future_pwalk( file_info, - function(site_id, start_date, end_date, ens_id, ...) { + function(site_id, start_date, end_date, ens_id, ERA5_grid_cell, ...) { PEcAn.SIPNET::met2model.SIPNET( in.path = file.path( args$site_era5_path, - paste("ERA5", site_id, ens_id, sep = "_") + # paste("ERA5", site_id, ens_id, sep = "_") + paste("ERA5", ERA5_grid_cell, ens_id, sep = "_") ), start_date = args$start_date, end_date = args$end_date, in.prefix = paste0("ERA5.", ens_id), - outfolder = file.path(args$site_sipnet_met_path, site_id) + outfolder = file.path(args$site_sipnet_met_path, ERA5_grid_cell) ) } ) diff --git a/2a_grass/example_user_config.yaml b/2a_grass/example_user_config.yaml new file mode 100644 index 0000000..05fc1fe --- /dev/null +++ b/2a_grass/example_user_config.yaml @@ -0,0 +1,39 @@ +# Example user-facing config for 2a_grass workflows. 
+# Pass with: ./magic-ensemble --config workflows/2a_grass/example_user_config.yaml +# +# This file contains only overridable settings and user-provided external +# resources. Fixed paths, S3 resources, and step I/O are defined in +# workflow_manifest.yaml and are not overridden here. + +# Run directory: where outputs and run-specific data live. +# Relative to the CWD where you invoke the CLI, unless you use an absolute path. +run_dir: "config-based-rundir/" + +# Dates used by prepare and run-ensembles. +start_date: "2016-01-01" +end_date: "2023-12-31" +run_LAI_date: "2016-07-01" + +# Ensemble sizes. +n_ens: 20 +n_met: 10 +ic_ensemble_size: 100 + +# User-provided external resources. + +# Absolute paths are used as-is; relative paths are resolved from the +# directory where you invoke ./magic-ensemble. +# paths in external_paths will be localized to the run_dir before the workflow is run. +external_paths: + template_file: "2a_grass/template.xml" + +# Parallelism (e.g. for step 01 --n_cores). +n_workers: 1 + +# Run prepare steps inside Apptainer (single image from workflow manifest; pull-if-not-present). +# When true: 'module load apptainer' is attempted, then apptainer must be on PATH; SIF is +# pulled from manifest remote if not present. No user override of remote; local image only. +use_apptainer: true + +# Switch dispatch method for parallel execution (local-gnu-parallel, slurm-dispatch, etc.). 
+pecan_dispatch: local-gnu-parallel diff --git a/2a_grass/template.xml b/2a_grass/template.xml index 6d91f23..75c2c24 100644 --- a/2a_grass/template.xml +++ b/2a_grass/template.xml @@ -45,7 +45,7 @@ SIPNET git TRUE - sipnet.git + /usr/local/bin/sipnet.git cp data/events.in @RUNDIR@ @@ -67,15 +67,11 @@ localhost + sbatch -J @NAME@ -o @STDOUT@ -e @STDERR@ apptainer run ./sipnet-carb_develop.sif + Submitted batch job ([0-9]+) + + if test -z "$(squeue -h -j @JOBID@)"; then echo "DONE"; fi output/out output/run - - - squeue -j @JOBID@ || echo DONE - - parallel -j ${NCPUS:-1} --skip-first-line '{}/job.sh' :::: - - 1000 - diff --git a/2a_grass/workflow_manifest.yaml b/2a_grass/workflow_manifest.yaml new file mode 100644 index 0000000..e0b859e --- /dev/null +++ b/2a_grass/workflow_manifest.yaml @@ -0,0 +1,147 @@ +# Workflow manifest: fixed paths and step I/O (internal, not user-facing). +# CLI loads this automatically; do not pass via --config. +# +# Paths: All entries under 'paths' are inside the run directory (no paths outside run_dir). +# Keys are referenced by name in steps (inputs/outputs). At runtime the CLI resolves +# each path as run_dir + "/" + value. These are fixed, non-overrideable locations; +# user configs may only supply external source files that are copied into these +# paths before 'prepare' runs. +# +# Steps: Each command has a list of step objects. Each step has: +# script: R script path (relative to repo root) +# r_libraries: R packages to check before running this script +# inputs: List of path keys (from 'paths') this script reads (local paths only) +# outputs: List of path keys this script creates or writes + +# S3 resources (not in user config). Remote resources are localized before R runs. +# Stored as endpoint + bucket + per-resource key_prefix and filename (no full URLs). 
+s3: + endpoint_url: "https://s3.garage.ccmmf.ncsa.cloud" + bucket: "carb" + artifact_02: + key_prefix: "data_raw" + filename: "ensembles_data_artifact.tar.gz" + median_tif: + key_prefix: "data_raw" + filename: "ca_biomassfiaald_2016_median.tif" + stdv_tif: + key_prefix: "data_raw" + filename: "ca_biomassfiaald_2016_stdv.tif" + +# Dispatch options for run-ensembles. The user config selects one by name via pecan_dispatch. +# host_xml is the complete ... block to inject into the staged template.xml +# before step 03 (xml_build.R) runs. Valid values for pecan_dispatch in user config are the +# keys listed here. +pecan_dispatch: + local-gnu-parallel: + description: "Run ensemble members locally using GNU parallel (no Slurm required)" + host_xml: | + + localhost + output/out + output/run + squeue -j @JOBID@ || echo DONE + + parallel -j ${NCPUS:-1} --skip-first-line '{}/job.sh' :::: + 1000 + + + host_xml_apptainer: | + + localhost + output/out + output/run + squeue -j @JOBID@ || echo DONE + + parallel -j ${NCPUS:-1} --skip-first-line 'apptainer run @SIF@ {}/job.sh' :::: + 1000 + + + slurm-dispatch: + description: "Submit ensemble members to Slurm via sbatch" + host_xml: | + + localhost + sbatch -J @NAME@ -o @STDOUT@ -e @STDERR@ + Submitted batch job ([0-9]+) + if test -z "$(squeue -h -j @JOBID@)"; then echo "DONE"; fi + output/out + output/run + + host_xml_apptainer: | + + localhost + sbatch -J @NAME@ -o @STDOUT@ -e @STDERR@ apptainer run @SIF@ + Submitted batch job ([0-9]+) + if test -z "$(squeue -h -j @JOBID@)"; then echo "DONE"; fi + output/out + output/run + + +# Apptainer (not in user config) +apptainer: + remote: + url: "docker://hdpriest0uiuc/" + container: + name: "sipnet-carb" + tag: "develop" + sif: "sipnet-carb_develop.sif" + +# Path definitions: all contained within the run directory. +# note that these paths are the internal-workflow expected I/O paths. +# Users should not modify these values unless you know what you are doing. 
+# Values are relative to run_dir; CLI resolves as run_dir + "/" + value. +paths: + site_info_file: "site_info.csv" + site_sipnet_met_path: "data/ERA5_SIPNET" + site_era5_path: "data_raw/ERA5_nc" + field_shape_path: "data_raw/dwr_map/i15_Crop_Mapping_2018.gdb" + data_dir: "data/IC_prep" + ic_outdir: "IC_files" + pft_dir: "pfts" + landtrendr_raw_files: "data_raw/ca_biomassfiaald_2016_median.tif,data_raw/ca_biomassfiaald_2016_stdv.tif" + site_file: "site_info.csv" + template_file: "template.xml" + output_file: "settings.xml" + met_dir: "data/ERA5_SIPNET" + ic_dir: "IC_files" + settings_xml: "settings.xml" + +# Fixed workflow values (not user overrides) +params_from_pft: "SLA,leafC" +additional_params: "varname=wood_carbon_fraction,distn=norm,parama=0.48,paramb=0.005" + +# Steps per command: script path, R libs to check (empty for shell scripts), input/output path keys +steps: + get-demo-data: + - script: "2a_grass/00_fetch_s3_and_prepare_run_dir.sh" + r_libraries: [] + inputs: [] + outputs: [data_dir, ic_outdir, site_sipnet_met_path] + + prepare: + - script: "2a_grass/00_stage_external_inputs.sh" + r_libraries: [] + inputs: [] + outputs: [] + + - script: "2a_grass/01_ERA5_nc_to_clim.R" + r_libraries: [future, furrr] + inputs: [site_info_file, site_era5_path] + outputs: [site_sipnet_met_path] + + - script: "2a_grass/02_ic_build.R" + r_libraries: [tidyverse] + inputs: [site_info_file, field_shape_path, pft_dir, data_dir, landtrendr_raw_files] + outputs: [ic_outdir, data_dir] + + - script: "2a_grass/03_xml_build.R" + r_libraries: [PEcAn.settings] + inputs: [site_file, template_file, ic_dir, met_dir] + outputs: [output_file] + + run-ensembles: + - script: "2a_grass/04_run_model.R" + r_libraries: [PEcAn.all] + inputs: [settings_xml] + outputs: [] diff --git a/magic-ensemble b/magic-ensemble new file mode 100755 index 0000000..63894f4 --- /dev/null +++ b/magic-ensemble @@ -0,0 +1,529 @@ +#!/usr/bin/env bash +# magic-ensemble: minimal CLI for workflows (2a_grass). 
+# Usage: ./magic-ensemble [--verbose] --config +# Commands: help | get-demo-data | prepare | run-ensembles + +set -euo pipefail + +# --- Repo root, manifest, and invocation CWD --- +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$SCRIPT_DIR" +MANIFEST="${REPO_ROOT}/2a_grass/workflow_manifest.yaml" +INVOCATION_CWD="${INVOCATION_CWD:-$(pwd)}" + +usage() { + cat <<'EOF' +Usage: ./magic-ensemble [global options] + +Commands: + help Print this usage and help (no scripts run). + get-demo-data Fetch demo data from S3 and create run directory (for users without local data). + prepare Run preparation steps: 00 (stage external inputs), 01 (ERA5→clim), 02 (IC build), 03 (XML build). + run-ensembles Run step 04 (run model) using existing settings.xml and prepared inputs. + +Global options (after command): + --verbose Echo each command before running (including apptainer run when use_apptainer is true). + --config REQUIRED. Path to user YAML config (overridable scalar keys only; fixed paths are in workflow manifest). + Config may set use_apptainer: true to run prepare inside Apptainer (pull-if-not-present; apptainer only). + +Examples: + ./magic-ensemble help + ./magic-ensemble get-demo-data --config my_config.yaml + ./magic-ensemble prepare --config my_config.yaml --verbose + ./magic-ensemble run-ensembles --config my_config.yaml +EOF +} + +# --- Require yq (mikefarah/yq, jq-style) --- +require_yq() { + if ! command -v yq &>/dev/null; then + echo "magic-ensemble: yq is required to read YAML. Install mikefarah/yq: https://github.com/mikefarah/yq" >&2 + exit 1 + fi + if ! yq eval '.' "$MANIFEST" &>/dev/null; then + echo "magic-ensemble: Could not parse manifest with yq. This CLI requires mikefarah/yq (jq-style). Your 'yq' may be a different implementation." 
>&2 + exit 1 + fi +} + +# --- Parse arguments: command first, then global options --- +COMMAND="" +VERBOSE=0 +CONFIG_FILE="" +while [[ $# -gt 0 ]]; do + case "$1" in + help|get-demo-data|prepare|run-ensembles) + if [[ -z "$COMMAND" ]]; then COMMAND="$1"; shift; continue; fi + ;; + --verbose) VERBOSE=1; shift; continue ;; + --config) + if [[ $# -lt 2 ]]; then echo "magic-ensemble: --config requires ." >&2; usage >&2; exit 1; fi + CONFIG_FILE="$2"; shift 2; continue + ;; + -*) + echo "magic-ensemble: Unknown option: $1" >&2; usage >&2; exit 1 + ;; + *) + if [[ -z "$COMMAND" ]]; then COMMAND="$1"; shift; continue; fi + echo "magic-ensemble: Unexpected argument: $1" >&2; usage >&2; exit 1 + ;; + esac + shift +done + +# Resolve paths passed on the command line relative to CWD (use actual pwd so config is found) +if [[ -n "$CONFIG_FILE" && "$CONFIG_FILE" != /* ]]; then + CONFIG_FILE="$(pwd)/${CONFIG_FILE}" +fi + +# --- Help or no command --- +if [[ -z "$COMMAND" || "$COMMAND" == "help" ]]; then + usage + exit 0 +fi + +if [[ "$COMMAND" != "get-demo-data" && "$COMMAND" != "prepare" && "$COMMAND" != "run-ensembles" ]]; then + echo "magic-ensemble: Unknown command: $COMMAND" >&2 + usage >&2 + exit 1 +fi + +require_yq +if [[ ! -f "$MANIFEST" ]]; then + echo "magic-ensemble: Workflow manifest not found: $MANIFEST" >&2 + exit 1 +fi + +if [[ -z "$CONFIG_FILE" ]]; then + echo "magic-ensemble: --config is required for command '$COMMAND'." >&2 + usage >&2 + exit 1 +fi + +if [[ ! 
-f "$CONFIG_FILE" ]]; then
+  echo "magic-ensemble: Config file not found: $CONFIG_FILE" >&2
+  exit 1
+fi
+
+# --- Load effective config: manifest + optional user overrides ---
+# User config may contain: run_dir, start_date, end_date, run_LAI_date, n_ens, n_met, ic_ensemble_size, n_workers, pecan_dispatch
+# get_val KEY DEFAULT: echo the user-config value for KEY when present and non-null,
+# otherwise fall back to DEFAULT (matches the "Overridable defaults" contract below).
+get_val() {
+  local key="$1"
+  local from_manifest="$2"
+  if [[ -n "$CONFIG_FILE" && -f "$CONFIG_FILE" ]]; then
+    local u
+    u=$(yq eval ".$key" "$CONFIG_FILE" 2>/dev/null)
+    if [[ -n "$u" && "$u" != "null" ]]; then
+      echo "$u"
+      return
+    fi
+  fi
+  echo "$from_manifest"
+}
+
+# Read manifest paths and fixed values
+# Manifest paths are internal-workflow I/O connections. they should not be altered.
+p_site_info_file=$(yq eval '.paths.site_info_file' "$MANIFEST")
+p_site_sipnet_met_path=$(yq eval '.paths.site_sipnet_met_path' "$MANIFEST")
+p_site_era5_path=$(yq eval '.paths.site_era5_path' "$MANIFEST")
+p_field_shape_path=$(yq eval '.paths.field_shape_path' "$MANIFEST")
+p_data_dir=$(yq eval '.paths.data_dir' "$MANIFEST")
+p_ic_outdir=$(yq eval '.paths.ic_outdir' "$MANIFEST")
+p_pft_dir=$(yq eval '.paths.pft_dir' "$MANIFEST")
+p_landtrendr_raw_files=$(yq eval '.paths.landtrendr_raw_files' "$MANIFEST")
+p_site_file=$(yq eval '.paths.site_file' "$MANIFEST")
+p_template_file=$(yq eval '.paths.template_file' "$MANIFEST")
+p_output_file=$(yq eval '.paths.output_file' "$MANIFEST")
+p_met_dir=$(yq eval '.paths.met_dir' "$MANIFEST")
+p_ic_dir=$(yq eval '.paths.ic_dir' "$MANIFEST")
+p_settings_xml=$(yq eval '.paths.settings_xml' "$MANIFEST")
+params_from_pft=$(yq eval '.params_from_pft' "$MANIFEST")
+additional_params=$(yq eval '.additional_params' "$MANIFEST")
+
+# Overridable defaults (manifest may not have these; use script defaults if not in user config)
+run_dir_default="magic-ensemble-run-directory/"
+start_date_default="2016-01-01"
+end_date_default="2023-12-31"
+run_LAI_date_default="2016-07-01" +n_ens_default="20" +n_met_default="10" +ic_ensemble_size_default="100" +n_workers_default="1" +use_apptainer_default="false" + +run_dir=$(get_val "run_dir" "$run_dir_default") + +if [[ "$run_dir" == "/" ]]; then + echo "magic-ensemble: run_dir cannot be the root directory (/)." >&2 + exit 1 +fi + +# If run_dir is not absolute, resolve relative to CWD where the CLI was invoked +if [[ "$run_dir" != /* ]]; then + run_dir="${INVOCATION_CWD}/${run_dir}" +fi +# Normalize run_dir to avoid trailing slashes so joined paths do not contain "//" +run_dir="${run_dir%/}" + +start_date=$(get_val "start_date" "$start_date_default") +end_date=$(get_val "end_date" "$end_date_default") +run_LAI_date=$(get_val "run_LAI_date" "$run_LAI_date_default") +n_ens=$(get_val "n_ens" "$n_ens_default") +n_met=$(get_val "n_met" "$n_met_default") +ic_ensemble_size=$(get_val "ic_ensemble_size" "$ic_ensemble_size_default") +n_workers=$(get_val "n_workers" "$n_workers_default") +use_apptainer_raw=$(get_val "use_apptainer" "$use_apptainer_default") +# Normalize: true/yes/1 (case-insensitive) => 1; else 0 +use_apptainer=0 +case "$(echo "$use_apptainer_raw" | tr '[:upper:]' '[:lower:]')" in + true|yes|1) use_apptainer=1 ;; +esac + +pecan_dispatch=$(get_val "pecan_dispatch" "") + +# Resolve manifest paths relative to run_dir. +# Effective path = run_dir / manifest_path so R (CWD=REPO_ROOT) sees the correct file. +resolve_path() { echo "${run_dir}/${1}"; } + +# All workflow paths come from the manifest only (no user overrides). 
+site_info_file=$(resolve_path "$p_site_info_file")
+site_sipnet_met_path=$(resolve_path "$p_site_sipnet_met_path")
+site_era5_path=$(resolve_path "$p_site_era5_path")
+field_shape_path=$(resolve_path "$p_field_shape_path")
+data_dir=$(resolve_path "$p_data_dir")
+ic_outdir=$(resolve_path "$p_ic_outdir")
+pft_dir=$(resolve_path "$p_pft_dir")
+site_file=$(resolve_path "$p_site_file")
+template_file=$(resolve_path "$p_template_file")
+output_file=$(resolve_path "$p_output_file")
+met_dir=$(resolve_path "$p_met_dir")
+ic_dir=$(resolve_path "$p_ic_dir")
+settings_xml=$(resolve_path "$p_settings_xml")
+# landtrendr_raw_files is comma-separated; resolve each segment (manifest only for now)
+landtrendr_raw_files=""
+while IFS= read -r segment; do
+  # Trim surrounding whitespace from each comma-separated entry before joining.
+  segment=$(echo "$segment" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
+  [[ -z "$segment" ]] && continue
+  [[ -n "$landtrendr_raw_files" ]] && landtrendr_raw_files="${landtrendr_raw_files},"
+  landtrendr_raw_files="${landtrendr_raw_files}${run_dir}/${segment}"
+done < <(yq eval '.paths.landtrendr_raw_files' "$MANIFEST" | tr ',' '\n')
+
+# --- Pre-execution: AWS S3 tools check ---
+check_aws() {
+  if ! command -v aws &>/dev/null; then
+    echo "magic-ensemble: AWS CLI (aws) not found on PATH; required for S3 access." >&2
+    exit 1
+  fi
+}
+
+# --- Apptainer: ensure apptainer is available (module load then PATH); apptainer only, no singularity ---
+ensure_apptainer_available() {
+  if command -v apptainer &>/dev/null; then
+    return 0
+  fi
+  # On HPC hosts apptainer may only be reachable after an environment-modules load.
+  if command -v module &>/dev/null; then
+    if module load apptainer 2>/dev/null; then
+      if command -v apptainer &>/dev/null; then
+        return 0
+      fi
+    fi
+  fi
+  echo "magic-ensemble: use_apptainer is true but apptainer is not available. Run 'module load apptainer' or ensure apptainer is on PATH. (Singularity is not supported.)" >&2
+  exit 1
+}
+
+# --- Apptainer: resolve SIF path from manifest (in run_dir); pull from remote if not present (no user override of remote) ---
+# On success sets the global APPTAINER_SIF to the local SIF path.
+ensure_sif_present() {
+  local sif_name sif_path remote_base container_name tag uri
+  sif_name=$(yq eval '.apptainer.sif' "$MANIFEST")
+  sif_path="${run_dir}/${sif_name}"
+  if [[ -f "$sif_path" ]]; then
+    APPTAINER_SIF="$sif_path"
+    return 0
+  fi
+  ensure_apptainer_available
+  remote_base=$(yq eval '.apptainer.remote.url' "$MANIFEST")
+  remote_base="${remote_base%/}"
+  container_name=$(yq eval '.apptainer.container.name' "$MANIFEST")
+  tag=$(yq eval '.apptainer.tag' "$MANIFEST")
+  uri="${remote_base}/${container_name}:${tag}"
+  echo "magic-ensemble: SIF not found at $sif_path; pulling from $uri" >&2
+  mkdir -p "$run_dir"
+  if ! apptainer pull "$sif_path" "$uri"; then
+    echo "magic-ensemble: Failed to pull container to $sif_path" >&2
+    exit 1
+  fi
+  APPTAINER_SIF="$sif_path"
+}
+
+# --- Get list of script paths for current command (from manifest steps) ---
+get_steps() {
+  yq eval '.steps["'"$COMMAND"'"] | .[].script' "$MANIFEST"
+}
+
+# --- Populate STEPS array for current command (from manifest) ---
+get_steps_array() {
+  STEPS=()
+  while IFS= read -r s; do
+    [[ -n "$s" ]] && STEPS+=("$s")
+  done < <(get_steps)
+}
+
+# --- R library check for step at index i (reads r_libraries from manifest step; skip if empty or .sh) ---
+check_r_libs_for_step() {
+  local i="$1"
+  local script="${STEPS[i]}"
+  # Shell steps declare no R dependencies; nothing to verify.
+  [[ "$script" == *.sh ]] && return 0
+  local lib
+  while IFS= read -r lib; do
+    [[ -z "$lib" || "$lib" == "null" ]] && continue
+    if ! (cd "$REPO_ROOT" && Rscript -e "library(\"$lib\")") 2>/dev/null; then
+      echo "magic-ensemble: R library check failed: library(\"$lib\") not available. Install it or activate the correct environment." >&2
+      exit 1
+    fi
+  done < <(yq eval '.steps["'"$COMMAND"'"] | .['"$i"'].r_libraries | .[]?' "$MANIFEST" 2>/dev/null || true)
+}
+
+# --- R library check for step at index i inside Apptainer (APPTAINER_SIF must be set) ---
+check_r_libs_for_step_in_apptainer() {
+  local i="$1"
+  local script="${STEPS[i]}"
+  [[ "$script" == *.sh ]] && return 0
+  local lib
+  while IFS= read -r lib; do
+    [[ -z "$lib" || "$lib" == "null" ]] && continue
+    if ! apptainer run --bind "$REPO_ROOT:$REPO_ROOT" --bind "$run_dir:$run_dir" --pwd "$REPO_ROOT" "$APPTAINER_SIF" Rscript -e "library(\"$lib\")" 2>/dev/null; then
+      echo "magic-ensemble: R library check failed inside container: library(\"$lib\") not available in image $APPTAINER_SIF" >&2
+      exit 1
+    fi
+  done < <(yq eval '.steps["'"$COMMAND"'"] | .['"$i"'].r_libraries | .[]?' "$MANIFEST" 2>/dev/null || true)
+}
+
+# --- Run R script with args. Optional leading args: --apptainer, --cwd DIR (default DIR = REPO_ROOT). ---
+run_script() {
+  # NOTE: this local deliberately shadows the global use_apptainer flag;
+  # callers opt in per-call via the --apptainer option.
+  local use_apptainer=0
+  local script_cwd="$REPO_ROOT"
+  while [[ $# -gt 0 ]]; do
+    case "${1:-}" in
+      --apptainer) use_apptainer=1; shift ;;
+      --cwd)
+        if [[ $# -lt 2 ]]; then
+          echo "magic-ensemble: run_script --cwd requires DIR." >&2
+          exit 1
+        fi
+        script_cwd="$2"
+        shift 2
+        ;;
+      *) break ;;
+    esac
+  done
+  local script="$1"
+  shift
+  local script_path="${REPO_ROOT}/${script}"
+  if [[ ! -f "$script_path" ]]; then
+    echo "magic-ensemble: Script not found: $script_path" >&2
+    exit 1
+  fi
+  if [[ $use_apptainer -eq 1 ]]; then
+    # APPTAINER_SIF and run_dir must be set (ensure_sif_present and run_dir resolved earlier).
+ echo "magic-ensemble: Rscript (inside apptainer: $APPTAINER_SIF)" + if [[ $VERBOSE -eq 1 ]]; then + echo "apptainer run --bind \"$REPO_ROOT:$REPO_ROOT\" --bind \"$run_dir:$run_dir\" --cwd \"$script_cwd\" \"$APPTAINER_SIF\" Rscript \"$script_path\" $*" >&2 + fi + apptainer run --bind "$REPO_ROOT:$REPO_ROOT" --bind "$run_dir:$run_dir" --cwd "$script_cwd" "$APPTAINER_SIF" Rscript "$script_path" "$@" + else + echo "magic-ensemble: Rscript: $(command -v Rscript)" + if [[ $VERBOSE -eq 1 ]]; then + echo "(cd \"$script_cwd\" && Rscript \"$script_path\" $*)" >&2 + fi + (cd "$script_cwd" && Rscript "$script_path" "$@") + fi +} + +# --- Run shell script; CWD = REPO_ROOT. Step scripts receive documented CLI arguments. --- +run_shell_script() { + local script="$1" + local step_index="${2:-0}" + local script_path="${REPO_ROOT}/${script}" + if [[ ! -f "$script_path" ]]; then + echo "magic-ensemble: Script not found: $script_path" >&2 + exit 1 + fi + local script_basename="${script##*/}" + if [[ "$script_basename" == "00_fetch_s3_and_prepare_run_dir.sh" || "$script_basename" == "00_stage_external_inputs.sh" ]]; then + # Step 00 helpers: documented arguments (see each script's --help). 
+ local args=(--repo-root "$REPO_ROOT") + if [[ "$script_basename" == "00_fetch_s3_and_prepare_run_dir.sh" ]]; then + args+=(--command "$COMMAND" --step-index "$step_index") + fi + if [[ -n "$CONFIG_FILE" && -f "$CONFIG_FILE" ]]; then + args+=(--config "$CONFIG_FILE" --invocation-cwd "$INVOCATION_CWD") + else + args+=(--run-dir "$run_dir" --invocation-cwd "$INVOCATION_CWD") + fi + if [[ $VERBOSE -eq 1 ]]; then + echo "bash $script_path ${args[*]}" >&2 + fi + (cd "$REPO_ROOT" && bash "$script_path" "${args[@]}") + else + # Other shell steps: pass args if/when they are added; no env vars + if [[ $VERBOSE -eq 1 ]]; then + echo "bash $script_path" >&2 + fi + (cd "$REPO_ROOT" && bash "$script_path") + fi +} + +# --- Validate pecan_dispatch value against manifest options --- +validate_pecan_dispatch() { + if ! yq eval ".pecan_dispatch | has(\"$pecan_dispatch\")" "$MANIFEST" | grep -q '^true$'; then + echo "magic-ensemble: Unknown pecan_dispatch value '$pecan_dispatch'. Valid options:" >&2 + yq eval '.pecan_dispatch | keys | .[]' "$MANIFEST" >&2 + exit 1 + fi +} + +# --- Patch ... block in staged template.xml with chosen dispatch XML --- +# Selects host_xml_apptainer when use_apptainer=1 (with @SIF@ substituted); falls back to host_xml. +patch_dispatch() { + if ! command -v python3 &>/dev/null; then + echo "magic-ensemble: python3 is required to patch dispatch in template.xml." >&2 + exit 1 + fi + local template_path="${run_dir}/$(yq eval '.paths.template_file' "$MANIFEST")" + if [[ ! -f "$template_path" ]]; then + echo "magic-ensemble: staged template.xml not found at $template_path" >&2 + exit 1 + fi + + # Select apptainer variant when available and requested; otherwise plain host_xml. 
+ local host_xml_key="host_xml" + if [[ $use_apptainer -eq 1 ]]; then + local has_apptainer_variant + has_apptainer_variant=$(yq eval ".pecan_dispatch[\"$pecan_dispatch\"] | has(\"host_xml_apptainer\")" "$MANIFEST") + if [[ "$has_apptainer_variant" == "true" ]]; then + host_xml_key="host_xml_apptainer" + fi + fi + + if [[ $VERBOSE -eq 1 ]]; then + echo "magic-ensemble: patching block in $template_path (pecan_dispatch=$pecan_dispatch, xml_key=$host_xml_key)" >&2 + fi + + local sif_name host_xml + sif_name=$(yq eval '.apptainer.sif' "$MANIFEST") + # Substitute @SIF@ with the SIF filename (relative to run_dir, as jobs execute there). + host_xml=$(yq eval ".pecan_dispatch[\"$pecan_dispatch\"].$host_xml_key" "$MANIFEST" \ + | sed "s|@SIF@|./${sif_name}|g") + + python3 "${REPO_ROOT}/tools/patch_xml.py" "$template_path" "host" "$host_xml" --block +} + +# --- Get-demo-data: run steps from manifest (shell script only) --- +run_get_demo_data() { + get_steps_array + check_aws + for i in "${!STEPS[@]}"; do + check_r_libs_for_step "$i" + run_shell_script "${STEPS[i]}" "$i" + done +} + +# --- Prepare: run steps from manifest (hard-coded sequence for this workflow); optionally inside Apptainer --- +run_prepare() { + get_steps_array + check_aws + validate_pecan_dispatch + + local apptainer_arg="" + if [[ $use_apptainer -eq 1 ]]; then + ensure_apptainer_available + ensure_sif_present + apptainer_arg="--apptainer" + for i in "${!STEPS[@]}"; do + check_r_libs_for_step_in_apptainer "$i" + done + else + for i in "${!STEPS[@]}"; do + check_r_libs_for_step "$i" + done + fi + + for i in "${!STEPS[@]}"; do + step_num=$((i + 1)) + script="${STEPS[i]}" + echo "magic-ensemble: prepare step $step_num of ${#STEPS[@]}: $script" + if [[ "$script" == *.sh ]]; then + run_shell_script "$script" "$i" + if [[ "$i" -eq 0 ]]; then + echo "magic-ensemble: patching template.xml with dispatch: $pecan_dispatch" + patch_dispatch + fi + else + case "$i" in + 1) run_script $apptainer_arg "$script" \ + 
--site_era5_path "$site_era5_path" \ + --site_sipnet_met_path "$site_sipnet_met_path" \ + --site_info_file "$site_info_file" \ + --start_date "$start_date" \ + --end_date "$end_date" \ + --n_cores "$n_workers" \ + --parallel_strategy "multisession" ;; + 2) run_script $apptainer_arg "$script" \ + --site_info_path "$site_info_file" \ + --field_shape_path "$field_shape_path" \ + --ic_ensemble_size "$ic_ensemble_size" \ + --run_start_date "$start_date" \ + --run_LAI_date "$run_LAI_date" \ + --ic_outdir "$ic_outdir" \ + --data_dir "$data_dir" \ + --pft_dir "$pft_dir" \ + --params_read_from_pft "$params_from_pft" \ + --landtrendr_raw_files "$landtrendr_raw_files" \ + --additional_params "$additional_params" ;; + 3) run_script $apptainer_arg "$script" \ + --n_ens "$n_ens" \ + --n_met "$n_met" \ + --start_date "$start_date" \ + --end_date "$end_date" \ + --ic_dir "$ic_dir" \ + --met_dir "$met_dir" \ + --site_file "$site_file" \ + --template_file "$template_file" \ + --output_file "$output_file" ;; + *) echo "magic-ensemble: No argument mapping for prepare step index $i (script $script)" >&2; exit 1 ;; + esac + fi + echo "magic-ensemble: prepare step $step_num completed" + done + echo "magic-ensemble: prepare finished (all ${#STEPS[@]} steps)" +} + +# --- Run-ensembles: run single step from manifest (04); never inside Apptainer. +# When use_apptainer=1, the SIF must be present for dispatched jobs (already patched +# into template.xml via patch_dispatch during prepare); 04_run_model.R itself always +# runs on the host so it can submit further jobs to Slurm. 
+run_run_ensembles() {
+  get_steps_array
+  check_aws
+
+  # Container only needs to exist for jobs dispatched via the patched template.xml;
+  # the runner script itself executes on the host.
+  if [[ $use_apptainer -eq 1 ]]; then
+    ensure_apptainer_available
+    ensure_sif_present
+  fi
+  check_r_libs_for_step 0
+
+  # CWD is run_dir (not REPO_ROOT) so outputs land next to the staged inputs.
+  run_script --cwd "$run_dir" "${STEPS[0]}" \
+    --settings "$settings_xml" \
+    --continue "FALSE"
+}
+
+# --- Main ---
+case "$COMMAND" in
+  get-demo-data) run_get_demo_data ;;
+  prepare) run_prepare ;;
+  run-ensembles) run_run_ensembles ;;
+  *) echo "magic-ensemble: Unknown command: $COMMAND" >&2; exit 1 ;;
+esac
diff --git a/tools/patch_xml.py b/tools/patch_xml.py
new file mode 100644
index 0000000..9a346ce
--- /dev/null
+++ b/tools/patch_xml.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+"""
+patch_xml.py — in-place XML element patcher.
+
+Usage:
+    patch_xml.py <template_path> <xml_path> <new_content> [--block]
+
+Arguments:
+    template_path  Path to the XML file to patch (modified in-place).
+    xml_path       Slash-separated element path, e.g. "host" or "model/binary".
+                   The last segment is the target tag; an optional leading segment
+                   constrains the match to within that parent element.
+    new_content    Replacement value. Without --block, replaces the text node only.
+                   With --block, replaces the entire element including its tags.
+    --block        Replace the full <tag>...</tag> element rather than just its text.
+
+Exit codes:
+    0  Success.
+    1  Usage error, file I/O error, or element not found.
+"""
+
+import sys
+import re
+
+
+def usage(msg=None):
+    # Print an optional error message, then the module docstring as help, and exit 1.
+    if msg:
+        print(f"patch_xml: {msg}", file=sys.stderr)
+    print(__doc__, file=sys.stderr)
+    sys.exit(1)
+
+
+def patch_element(text, tag, new_content, replace_block):
+    # Regex-based: assumes tags have no attributes (e.g. <host>, not <host attr="x">).
+    # This holds for PEcAn template.xml but would need revision for attributed tags.
+ if replace_block: + patched, n = re.subn( + r'<' + tag + r'>.*?', + new_content, text, count=1, flags=re.DOTALL, + ) + else: + patched, n = re.subn( + r'(<' + tag + r'>)[^<]*()', + r'\g<1>' + new_content + r'\g<2>', + text, count=1, + ) + return patched, n + + +def main(): + args = sys.argv[1:] + replace_block = '--block' in args + args = [a for a in args if a != '--block'] + + if len(args) != 3: + usage(f"expected 3 positional arguments, got {len(args)}") + + template_path, xml_path, new_content = args + parts = xml_path.split('/') + tag = parts[-1] + parent = parts[0] if len(parts) > 1 else None + + try: + content = open(template_path).read() + except OSError as e: + print(f"patch_xml: {e}", file=sys.stderr) + sys.exit(1) + + if parent: + total_replaced = 0 + + def replacer(m): + nonlocal total_replaced + patched, n = patch_element(m.group(0), tag, new_content, replace_block) + total_replaced += n + return patched + + result = re.sub( + r'<' + parent + r'>.*?', + replacer, content, count=1, flags=re.DOTALL, + ) + else: + result, total_replaced = patch_element(content, tag, new_content, replace_block) + + if total_replaced == 0: + print(f"patch_xml: no element matched path '{xml_path}' in {template_path}", file=sys.stderr) + sys.exit(1) + + try: + open(template_path, 'w').write(result) + except OSError as e: + print(f"patch_xml: {e}", file=sys.stderr) + sys.exit(1) + + +if __name__ == '__main__': + main()