.env.template
# _____ _ ______ _ ___ _ __ _
# |_ _| | | | ___ \ | / _ \(_) / _| |
# | | ___ ___| |__ | |_/ / | ___ ___ / /_\ \_ _ __| |_| | _____ __
# | |/ _ \/ __| '_ \ | ___ \ |/ _ \ / __| | _ | | '__| _| |/ _ \ \ /\ / /
# | | __/ (__| | | | | |_/ / | (_) | (__ | | | | | | | | | | (_) \ V V /
# \_/\___|\___|_| |_| \____/|_|\___/ \___| \_| |_/_|_| |_| |_|\___/ \_/\_/
########################################################################################
# Airflow Settings
########################################################################################
# Navbar color
AIRFLOW__WEBSERVER__NAVBAR_COLOR="#FFF"
# Disabled by default to make development easier
# (enabled in prod for security)
AIRFLOW__CORE__HIDE_SENSITIVE_VAR_CONN_FIELDS=False
# Use the following python code to generate a fernet key for production
# python -c "import base64, os; print(base64.urlsafe_b64encode(os.urandom(32)).decode())"
# AIRFLOW__CORE__FERNET_KEY=
# CSRF key https://airflow.apache.org/docs/apache-airflow/stable/configurations-ref.html#secret-key
AIRFLOW__WEBSERVER__SECRET_KEY=sample-secret-key=
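# One way to generate a suitably random secret key for production (any
# sufficiently random string works; this is just a suggestion):
# python -c "import secrets; print(secrets.token_urlsafe(32))"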
# Executor to use
AIRFLOW__CORE__EXECUTOR=LocalExecutor
# Environment this instance is being run in
AIRFLOW_VAR_ENVIRONMENT=dev
# Logging settings
AIRFLOW__LOGGING__REMOTE_LOGGING=True
AIRFLOW__LOGGING__REMOTE_LOG_CONN_ID=aws_default
AIRFLOW__LOGGING__REMOTE_BASE_LOG_FOLDER=s3://techbloc-airflow-logs
########################################################################################
# Connection/Variable info
########################################################################################
# Airflow primary metadata database
# Change the following line in prod to use the appropriate DB
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@postgres:5432/airflow
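# An illustrative prod value (placeholders only, adjust to your actual database):
# AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://<user>:<password>@<host>:5432/<db>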
# Remote logging connection ID
# Replace "test_key" and "test_secret" with the real values. The secret key must be URL-encoded
AIRFLOW_CONN_AWS_DEFAULT=aws://test_key:test_secret@?region_name=us-east-1&endpoint_url=http%3A%2F%2Fs3%3A5000
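# One way to URL-encode a secret key that contains special characters:
# python -c "import urllib.parse; print(urllib.parse.quote('REPLACE_ME', safe=''))"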
# SSH connections
AIRFLOW_CONN_SSH_MONOLITH=ssh://user@service
# HTTP connections
AIRFLOW_CONN_MATRIX_WEBHOOK=https://matrix-webhook
AIRFLOW_VAR_MATRIX_WEBHOOK_API_KEY=api_key
S3_LOCAL_ENDPOINT=http://s3:5000
AWS_CONN_ID=aws_default
########################################################################################
# Other config
########################################################################################
# Version of the airflow docker image to use. Defaults to `latest` and is not used for
# local development (since the image is always built locally). See available tags at
# https://ghcr.io/orcacollective/techbloc-airflow
DOCKER_IMAGE_TAG=latest
# External port the Airflow webserver will be exposed on
AIRFLOW_PORT=9090
# AWS/S3 configuration - does not need to be changed for development
AWS_ACCESS_KEY=test_key
AWS_SECRET_KEY=test_secret
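# These match the test credentials embedded in AIRFLOW_CONN_AWS_DEFAULT above;
# if you change one, the other likely needs to be updated to stay in sync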