docker-entrypoint.sh
#!/usr/bin/env bash
# Original file:
# https://github.com/puckel/docker-airflow/blob/master/script/entrypoint.sh
# Copyright 2019 Slalom Build
# Modifications:
# - Added snowflake_default connection
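# Number of attempts made while waiting for Postgres/Redis before giving up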
TRY_LOOP="20"
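# Connection defaults; each ":=" below only takes effect when the variable is unset or empty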
: "${REDIS_HOST:="redis"}"
: "${REDIS_PORT:="6379"}"
: "${REDIS_PASSWORD:=""}"
: "${POSTGRES_HOST:="postgres"}"
: "${POSTGRES_PORT:="5432"}"
: "${POSTGRES_USER:="airflow"}"
: "${POSTGRES_PASSWORD:="airflow"}"
: "${POSTGRES_DB:="airflow"}"
: "${SNOWFLAKE_CONN_ID:="snowflake_default"}"
: "${SNOWFLAKE_HOST:="snowflakecomputing.com"}"
: "${SNOWFLAKE_USER:="snowflake_user"}"
: "${SNOWFLAKE_SCHEMA:="public"}"
: "${SNOWFLAKE_ACCOUNT:="ACCOUNT_NOT_SET"}"
: "${SNOWFLAKE_PASSWORD:="PASSWORD_NOT_SET"}"
: "${SNOWFLAKE_REGION:="us-west-2"}"
: "${SNOWFLAKE_ROLE:="etl"}"
# Defaults and back-compat
: "${AIRFLOW__CORE__FERNET_KEY:=${FERNET_KEY:=$(python -c "from cryptography.fernet import Fernet; FERNET_KEY = Fernet.generate_key().decode(); print(FERNET_KEY)")}}"
: "${AIRFLOW__CORE__SECRET_KEY:=${SECRET_KEY:=$(python -c "from cryptography.fernet import Fernet; SECRET_KEY = Fernet.generate_key().decode(); print(SECRET_KEY)")}}"
: "${AIRFLOW__CORE__EXECUTOR:=${EXECUTOR:-Sequential}Executor}"
export \
  AIRFLOW__CELERY__BROKER_URL \
  AIRFLOW__CELERY__RESULT_BACKEND \
  AIRFLOW__CORE__EXECUTOR \
  AIRFLOW__CORE__FERNET_KEY \
  AIRFLOW__CORE__LOAD_EXAMPLES \
  AIRFLOW__CORE__SECRET_KEY \
  AIRFLOW__CORE__SQL_ALCHEMY_CONN
# Disable the example DAGs unless LOAD_EX=y or AIRFLOW__CORE__LOAD_EXAMPLES is set explicitly
if [[ -z "$AIRFLOW__CORE__LOAD_EXAMPLES" && "${LOAD_EX:=n}" == n ]]
then
  AIRFLOW__CORE__LOAD_EXAMPLES=False
fi
# Install custom python packages if requirements.txt is present
if [ -e "/requirements.txt" ]; then
  $(which pip) install --user -r /requirements.txt
fi
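# Build the optional ":password@" prefix used in the Redis broker URL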
if [ -n "$REDIS_PASSWORD" ]; then
  REDIS_PREFIX=:${REDIS_PASSWORD}@
else
  REDIS_PREFIX=
fi
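# Poll a TCP port with nc every 5 seconds; fail the container if it never becomes reachable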
wait_for_port() {
  local name="$1" host="$2" port="$3"
  local j=0
  while ! nc -z "$host" "$port" >/dev/null 2>&1 < /dev/null; do
    j=$((j+1))
    if [ "$j" -ge "$TRY_LOOP" ]; then
      echo >&2 "$(date) - $host:$port still not reachable, giving up"
      exit 1
    fi
    echo "$(date) - waiting for $name... $j/$TRY_LOOP"
    sleep 5
  done
}
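# Any executor other than SequentialExecutor needs Postgres for the metadata DB and result backend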
if [ "$AIRFLOW__CORE__EXECUTOR" != "SequentialExecutor" ]; then
AIRFLOW__CORE__SQL_ALCHEMY_CONN="postgresql+psycopg2://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB"
AIRFLOW__CELERY__RESULT_BACKEND="db+postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB"
wait_for_port "Postgres" "$POSTGRES_HOST" "$POSTGRES_PORT"
fi
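# CeleryExecutor additionally needs Redis as the message broker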
if [ "$AIRFLOW__CORE__EXECUTOR" = "CeleryExecutor" ]; then
AIRFLOW__CELERY__BROKER_URL="redis://$REDIS_PREFIX$REDIS_HOST:$REDIS_PORT/1"
wait_for_port "Redis" "$REDIS_HOST" "$REDIS_PORT"
fi
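# Dispatch on the first container argument: airflow roles get special handling, anything else is exec'd as-is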
case "$1" in
  webserver)
    airflow initdb
    airflow connections -a --conn_id "$SNOWFLAKE_CONN_ID" --conn_type Snowflake --conn_host "$SNOWFLAKE_HOST" \
      --conn_schema "$SNOWFLAKE_SCHEMA" --conn_login "$SNOWFLAKE_USER" --conn_password "$SNOWFLAKE_PASSWORD" \
      --conn_extra '{"account": "'"$SNOWFLAKE_ACCOUNT"'", "region": "'"$SNOWFLAKE_REGION"'", "role": "'"$SNOWFLAKE_ROLE"'"}'
    if [ "$AIRFLOW__CORE__EXECUTOR" = "LocalExecutor" ] || [ "$AIRFLOW__CORE__EXECUTOR" = "SequentialExecutor" ]; then
      # With the "Local" and "Sequential" executors everything runs in one container.
      airflow scheduler &
    fi
    exec airflow webserver
    ;;
  worker|scheduler)
    # Give the webserver time to run initdb first.
    sleep 10
    exec airflow "$@"
    ;;
  flower)
    sleep 10
    exec airflow "$@"
    ;;
  version)
    exec airflow "$@"
    ;;
  *)
    # The command is something like bash, not an airflow subcommand. Just run it in the right environment.
    exec "$@"
    ;;
esac
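# Example invocations (the image name "my-airflow" is hypothetical):
#   docker run my-airflow webserver                   # initdb, register snowflake_default, start webserver
#   docker run -e EXECUTOR=Celery my-airflow worker   # runs "airflow worker" after waiting for the webserver
#   docker run my-airflow bash                        # falls through to the catch-all branch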