forked from Qi-YOU/CEGE0049-GSS-Dassl-CoOp
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathrun-grid-search.sh
More file actions
99 lines (82 loc) · 3.56 KB
/
run-grid-search.sh
File metadata and controls
99 lines (82 loc) · 3.56 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
#!/bin/bash
# Run a grid search to find the best hyperparameter combination for CLIP MHAdapter.
# Loops over datasets and parameter ranges (loss, class weight, blend ratio, num_heads)
# using run_experiment(), logs results, and outputs a summary for analysis.
# NOTE: This script performs a limited search and does not cover the entire hyperparameter space.

# Fail on unset variables and broken pipelines. '-e' is deliberately omitted:
# a single failed training run should not abort the remaining grid-search cells.
set -uo pipefail

# === Summary log setup ===
summary_file="train_summary.txt"
echo "==== TRAINING SUMMARY ====" > "$summary_file"
echo "Start time: $(date)" >> "$summary_file"
echo "" >> "$summary_file"
#######################################
# Run one training experiment and append its result to the summary log.
# Globals:
#   summary_file (appended to)
# Arguments:
#   $1 - dataset name (matches CoOp/configs/datasets/<name>.yaml)
#   $2 - loss function name (e.g. "ce")
#   $3 - class weighting scheme ("inverse" | "uniform")
#   $4 - trainer config file stem (under configs/)
#   $5 - blend ratio, e.g. "0.2"
#   $6 - number of attention heads
#   $7 - optional gamma (currently unused; kept for interface compatibility)
# Outputs:
#   progress and timing on stdout; one summary entry in $summary_file
#######################################
run_experiment() {
  local dataset=$1
  local loss=$2
  local class_weight=$3
  local config=$4
  local blend_ratio=$5
  local num_heads=$6
  local gamma=${7:-""} # optional
  local blend_tag=${blend_ratio/./} # "0.2" -> "02" for the directory tag
  # For Linux users (uncomment this line and comment the Windows one)
  local outdir="/root/autodl-tmp/results/${dataset}/clip-vitb16-mh_${num_heads}-${loss}-${class_weight}-br_${blend_tag}"
  # For Windows users (uncomment this line and comment the Linux one)
  # local outdir="../autodl-tmp/results/${dataset}/clip-vitb16-mh_${num_heads}-${loss}-${class_weight}-br_${blend_tag}"
  echo "Running ${dataset} with CLIP MHAdapter + Multihead Attention Heads #=${num_heads}..."
  echo "=== Loss: ${loss} | Weighting: ${class_weight} | Blend: ${blend_ratio} | Num_heads: ${num_heads} | Config: ${config} ==="
  # Split declaration from assignment so the substitution's status isn't masked.
  local start_time
  start_time=$(date +%s)
  # Build the command as an array rather than a string fed to eval: each
  # argument stays a single word even if it ever contains spaces or globs.
  local -a cmd=(python CoOp/train.py
    --trainer CLIP_MHAdapter
    --dataset-config-file "CoOp/configs/datasets/${dataset}.yaml"
    --config-file "configs/${config}.yaml"
    --output-dir "${outdir}"
    --seed 42
    TRAINER.LOSS.NAME "${loss}"
    TRAINER.LOSS.CLASS_WEIGHTING "${class_weight}"
    MODEL.BLEND_RATIO "${blend_ratio}"
    MODEL.NUM_HEADS "${num_heads}")
  echo ">> Running command:"
  printf '%q ' "${cmd[@]}"; printf '\n'  # shell-quoted, copy-pasteable
  "${cmd[@]}"
  local end_time
  end_time=$(date +%s)
  local elapsed=$((end_time - start_time))
  printf "Elapsed time for %s (loss=%s, weight=%s, br=%s, heads=%d): %02d:%02d:%02d\n\n" \
    "$dataset" "$loss" "$class_weight" "$blend_ratio" "$num_heads" \
    $((elapsed/3600)) $(((elapsed%3600)/60)) $((elapsed%60))
  # --- Log to summary ---
  {
    echo "[$dataset] vitb16-mh-${loss}_${class_weight}"
    echo "Tag: H${num_heads}_BR${blend_ratio}"
    printf "Elapsed: %02d:%02d:%02d\n" $((elapsed/3600)) $(((elapsed%3600)/60)) $((elapsed%60))
    echo ""
  } >> "$summary_file"
}
# Grid-search space: every combination below is trained exactly once.
readonly -a DATASET_LIST=("glare" "lighting_condition" "pano_status" "platform" "quality" "reflection" "view_direction" "weather")
readonly LOSS_FN="ce"
readonly CONFIG_NAME="vit_b16-adamw"
readonly -a WEIGHT_SCHEMES=("inverse" "uniform")
readonly -a BLEND_VALUES=("0.2" "0.8")
readonly -a HEAD_COUNTS=(4 8 16)

grid_start=$(date +%s)
for ds in "${DATASET_LIST[@]}"; do
  for cw in "${WEIGHT_SCHEMES[@]}"; do
    for br in "${BLEND_VALUES[@]}"; do
      for nh in "${HEAD_COUNTS[@]}"; do
        run_experiment "$ds" "$LOSS_FN" "$cw" "$CONFIG_NAME" "$br" "$nh"
      done
    done
  done
done
grid_end=$(date +%s)

# Break the total wall-clock time into H:M:S for the final report.
grid_elapsed=$((grid_end - grid_start))
hh=$((grid_elapsed / 3600))
mm=$(((grid_elapsed % 3600) / 60))
ss=$((grid_elapsed % 60))
{
  echo "================================================"
  printf "Total elapsed time for all experiments: %02d:%02d:%02d\n" "$hh" "$mm" "$ss"
  echo "End time: $(date)"
  echo "==== END OF TRAINING ===="
} >> "$summary_file"
echo "================================================"
printf "Total elapsed time for all experiments: %02d:%02d:%02d\n" "$hh" "$mm" "$ss"
echo "================================================"