# Makefile for managing Airflow Docker services

# Variables
DOCKER_COMPOSE_FILE = docker-compose.yml
DOCKER_COMPOSE_CMD = docker-compose -f $(DOCKER_COMPOSE_FILE)
ENV_FILE = .env
FOLDERS = ./dags ./logs ./plugins ./config

# Targets
.PHONY: all create-folders init-env airflow-init init build up down restart logs clean sanitize ps version

all: up

# Create the necessary folders
create-folders:
	mkdir -p $(FOLDERS)
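
# Note: in the standard Airflow docker-compose setup these folders are
# bind-mounted into the containers (dags, logs, plugins, custom config);
# adjust the list if your docker-compose.yml mounts different paths.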

# Initialize the environment file
init-env:
	echo "AIRFLOW_UID=$(shell id -u)" > $(ENV_FILE)
	echo "POSTGRES_USER=airflow" >> $(ENV_FILE)
	echo "POSTGRES_PASSWORD=airflow" >> $(ENV_FILE)
	echo "POSTGRES_DB=airflow" >> $(ENV_FILE)
	echo "AIRFLOW__CORE__FERNET_KEY=UKMzEm3yIuFYEq1y3-2FxPNWSVwRASpahmQ9kQfEr8E=" >> $(ENV_FILE)
	echo "AIRFLOW__CORE__EXECUTOR=CeleryExecutor" >> $(ENV_FILE)
	echo "AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION=True" >> $(ENV_FILE)
	echo "AIRFLOW__CORE__LOAD_EXAMPLES=False" >> $(ENV_FILE)
	echo "AIRFLOW__DATABASE__SQL_ALCHEMY_CONN=postgresql+psycopg2://airflow:airflow@postgres/airflow" >> $(ENV_FILE)
	echo "AIRFLOW__DATABASE__LOAD_DEFAULT_CONNECTIONS=False" >> $(ENV_FILE)
	echo "LIMTER_STORAGE_URI=redis://redis:6379/0" >> $(ENV_FILE)
	echo "_AIRFLOW_DB_MIGRATE=True" >> $(ENV_FILE)
	echo "_AIRFLOW_WWW_USER_CREATE=True" >> $(ENV_FILE)
	echo "_AIRFLOW_WWW_USER_USERNAME=airflow" >> $(ENV_FILE)
	echo "_AIRFLOW_WWW_USER_PASSWORD=airflow" >> $(ENV_FILE)

# Initialize the Airflow database (runs the one-off airflow-init service)
airflow-init:
	$(DOCKER_COMPOSE_CMD) run --rm airflow-init

# Initialize the project: create folders, write the env file, and initialize the database
init: create-folders init-env airflow-init

# Build all Docker images
build:
	$(DOCKER_COMPOSE_CMD) build

# Start all services
up: build
	$(DOCKER_COMPOSE_CMD) up airflow-init
	$(DOCKER_COMPOSE_CMD) up -d
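
# Note: "up" runs the one-off airflow-init service in the foreground first, so that
# database migration and admin user creation (driven by the _AIRFLOW_* variables in
# .env in the standard Airflow compose setup) finish before the rest of the stack
# is started detached.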

# Stop and remove all services
down:
	$(DOCKER_COMPOSE_CMD) down

# Restart all services
restart: down up

# View logs for all services
logs:
	$(DOCKER_COMPOSE_CMD) logs -f

# View logs for a specific service (e.g., airflow-webserver)
logs-%:
	$(DOCKER_COMPOSE_CMD) logs -f $*

# Open a bash shell inside a running container
exec-%:
	$(DOCKER_COMPOSE_CMD) exec $* /bin/bash

# Run pytest inside a running container
test-%:
	$(DOCKER_COMPOSE_CMD) exec $* pytest
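
# Example usage of the pattern targets (service names assume the standard Airflow
# docker-compose services; adjust them to match your docker-compose.yml):
#   make logs-airflow-webserver    # follow webserver logs
#   make exec-airflow-scheduler    # open a shell in the scheduler container
#   make test-airflow-worker       # run pytest in the worker (pytest must be installed in the image)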

# Clean up stopped containers, unused networks, dangling images, and build cache
clean:
	docker system prune -f

# Sanitize: remove all unused images, plus unused containers, networks, and volumes
sanitize:
	docker image prune -a -f
	docker system prune -f
	docker volume prune -f

# List all containers
ps:
	docker ps -a

# Show Docker Compose version
version:
	docker-compose --version
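
# Typical workflow (assumes a docker-compose.yml providing the usual Airflow
# services such as postgres, redis, airflow-webserver, airflow-scheduler, etc.):
#   make init      # create folders, write .env, initialize the Airflow database
#   make up        # build images and start the stack in the background
#   make logs      # follow logs for all services
#   make down      # stop and remove the stack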