-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathdeploy.sh
More file actions
88 lines (69 loc) · 2.81 KB
/
deploy.sh
File metadata and controls
88 lines (69 loc) · 2.81 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
#!/bin/bash
#
# Deploy AMI Connect Airflow to the environment's EC2 host, using connection
# details read from the environment's Terraform output file.
#
# Usage: deploy.sh <environment> [restart]
set -euo pipefail

########################################
# CONFIG
########################################
if [[ $# -lt 1 ]]; then
  # Diagnostics go to stderr so they are visible even when stdout is captured.
  echo "ERROR: Missing required argument: ENVIRONMENT" >&2
  echo "Usage: $0 <environment> [restart] where <environment> matches the name of your terraform environment so the script can pull details from your terraform output files." >&2
  echo "Example: $0 cadc" >&2
  echo "Or for full restart: $0 cadc restart" >&2
  exit 1
fi
ENVIRONMENT="$1"

# Pass in value "restart" to do a full restart of Airflow services, which kills running DAGs.
FULL_RESTART_ARG="${2:-false}"
if [[ "$FULL_RESTART_ARG" == "restart" ]]; then
  FULL_RESTART="true"
else
  FULL_RESTART="false"
fi

AMI_CONNECT_REPO="California-Data-Collaborative/ami-connect.git"
# If you include a private neptune adapter in your deploy,
# set the AMI_CONNECT_NEPTUNE_REPO_URL environment variable before running this script
# Defaults to empty string if not set.
AMI_CONNECT_NEPTUNE_REPO_URL="${AMI_CONNECT_NEPTUNE_REPO_URL:-}"

TERRAFORM_OUTPUT_FILE="./amideploy/configuration/$ENVIRONMENT-output.json"
REMOTE_USER="ec2-user"
REMOTE_DIR="/home/ec2-user/build"
SSH_KEY="./amideploy/configuration/$ENVIRONMENT-airflow-key.pem"

# Fail fast with a clear message if the Terraform outputs haven't been generated
# for this environment, instead of letting jq produce a confusing error.
if [[ ! -f "$TERRAFORM_OUTPUT_FILE" ]]; then
  echo "ERROR: Terraform output file not found: $TERRAFORM_OUTPUT_FILE" >&2
  exit 1
fi

# Read Terraform outputs. jq -e exits non-zero when the value is null or
# missing, which aborts the script (set -e) rather than silently deploying
# with the literal string "null". Paths are quoted against spaces (SC2086).
AIRFLOW_HOST=$(jq -er '.airflow_server_ip.value' "$TERRAFORM_OUTPUT_FILE")
DB_HOST=$(jq -er '.airflow_db_host.value' "$TERRAFORM_OUTPUT_FILE")
DB_PASSWORD=$(jq -er '.airflow_db_password.value' "$TERRAFORM_OUTPUT_FILE")
AIRFLOW_SITE_URL=$(jq -er '.airflow_site_url.value' "$TERRAFORM_OUTPUT_FILE")
AIRFLOW_DB_CONN="postgresql+psycopg2://airflow_user:$DB_PASSWORD@$DB_HOST/airflow_db"
########################################
# UTILITY FUNCTIONS
########################################
#######################################
# Print a timestamped log line to stdout.
# Arguments: $* - message text
#######################################
log() {
  local stamp
  stamp=$(date '+%Y-%m-%d %H:%M:%S')
  printf '[%s] %s\n' "$stamp" "$*"
}
#######################################
# Execute a shell command on the Airflow host over SSH.
# Globals:   SSH_KEY, REMOTE_USER, AIRFLOW_HOST (read)
# Arguments: $1 - command string to run remotely
# Returns:   exit status of the remote command
#######################################
run_ssh() {
  local remote_cmd=$1
  ssh -i "$SSH_KEY" \
    -o StrictHostKeyChecking=no \
    "${REMOTE_USER}@${AIRFLOW_HOST}" \
    "$remote_cmd"
}
#######################################
# Recursively sync a local directory tree to the Airflow host via rsync.
# Globals:   SSH_KEY, REMOTE_USER, AIRFLOW_HOST (read)
# Arguments: $1 - local source directory
#            $2 - remote destination directory
# Returns:   exit status of rsync
#######################################
copy_tree() {
  # Single-quote the key path inside the remote-shell string: rsync
  # word-splits the -e command itself, so an unquoted $SSH_KEY containing
  # spaces would break the ssh invocation.
  rsync -avz -e "ssh -i '$SSH_KEY' -o StrictHostKeyChecking=no" \
    "$1/" "$REMOTE_USER@$AIRFLOW_HOST:$2/"
}
########################################
# DEPLOYMENT STEPS
########################################
# Banner: record what is being deployed and where, up front.
log "===== AMI Connect Airflow Deploy ====="
log "Environment: $ENVIRONMENT"
log "Server: $REMOTE_USER@$AIRFLOW_HOST"
log "Remote directory: $REMOTE_DIR"
# Step 1: make sure the build directory exists on the remote host
# (mkdir -p is idempotent, so re-runs are safe).
log "Ensuring remote directory exists..."
run_ssh "mkdir -p $REMOTE_DIR"
# Step 2: rsync the local deploy scripts/config into the remote build dir.
log "Syncing deployment files..."
copy_tree "./amideploy/deploy" "$REMOTE_DIR"
# Step 3: run the synced remote-deploy.sh on the host, passing connection
# and restart settings through its environment.
# NOTE(review): the env values below are wrapped in single quotes inside the
# remote command, so a DB password containing a single quote would break the
# quoting — confirm the Terraform-generated password charset excludes "'".
log "Running remote deployment script with FULL_RESTART=$FULL_RESTART..."
run_ssh "cd $REMOTE_DIR && \
AMI_CONNECT__AIRFLOW_METASTORE_CONN='$AIRFLOW_DB_CONN' \
AMI_CONNECT__AIRFLOW_SITE_URL='$AIRFLOW_SITE_URL' \
FULL_RESTART='$FULL_RESTART' \
AMI_CONNECT_REPO='$AMI_CONNECT_REPO' \
AMI_CONNECT_NEPTUNE_REPO_URL='$AMI_CONNECT_NEPTUNE_REPO_URL' \
bash remote-deploy.sh"
log "===== Deployment complete ====="