airflow_extract.py
import configparser
import csv

import boto3
import psycopg2
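# pipeline.conf is expected to sit next to this script. The section and key
# names below match the parser.get() calls in this file; the values shown are
# placeholders for illustration only, not real credentials:
#
#   [aws_creds]
#   database = dev
#   user = rs_user
#   password = ...
#   host = example-cluster.abc123.us-east-1.redshift.amazonaws.com
#   port = 5439
#
#   [airflowdb_config]
#   database = airflow
#   username = airflow_user
#   password = ...
#   host = localhost
#   port = 5432
#
#   [aws_boto_credentials]
#   access_key = ...
#   secret_key = ...
#   bucket_name = example-pipeline-bucket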
# get db Redshift connection info
parser = configparser.ConfigParser(interpolation=None)
parser.read("pipeline.conf")
dbname = parser.get("aws_creds", "database")
user = parser.get("aws_creds", "user")
password = parser.get("aws_creds", "password")
host = parser.get("aws_creds", "host")
port = parser.get("aws_creds", "port")
# connect to the redshift cluster
rs_conn = psycopg2.connect(
    dbname=dbname,
    user=user,
    password=password,
    host=host,
    port=port
)
rs_sql = """SELECT COALESCE(MAX(id), -1)
    FROM dag_run_history;"""
rs_cursor = rs_conn.cursor()
rs_cursor.execute(rs_sql)
result = rs_cursor.fetchone()
# there's only one row and column returned
last_id = result[0]
rs_cursor.close()
rs_conn.close()
# connect to the airflow db
parser = configparser.ConfigParser()
parser.read("pipeline.conf")
dbname = parser.get("airflowdb_config", "database")
user = parser.get("airflowdb_config", "username")
password = parser.get("airflowdb_config", "password")
host = parser.get("airflowdb_config", "host")
port = parser.get("airflowdb_config", "port")
conn = psycopg2.connect(
    dbname=dbname,
    user=user,
    password=password,
    host=host,
    port=port
)
# get any new DAG runs; ignore DAGs that are still running
m_query = """SELECT
        id,
        dag_id,
        execution_date,
        state,
        run_id,
        external_trigger,
        end_date,
        start_date
    FROM dag_run
    WHERE id > %s
        AND state <> 'running';
    """
m_cursor = conn.cursor()
m_cursor.execute(m_query, (last_id,))
results = m_cursor.fetchall()
local_filename = "dag_run_extract.csv"
# write the rows out pipe-delimited; newline='' avoids blank lines on Windows
with open(local_filename, 'w', newline='') as fp:
    csv_w = csv.writer(fp, delimiter='|')
    csv_w.writerows(results)
m_cursor.close()
conn.close()
# load the aws_boto_credentials values
parser = configparser.ConfigParser()
parser.read("pipeline.conf")
access_key = parser.get("aws_boto_credentials", "access_key")
secret_key = parser.get("aws_boto_credentials", "secret_key")
bucket_name = parser.get("aws_boto_credentials", "bucket_name")
# upload the local CSV to the S3 bucket
s3 = boto3.client(
    's3',
    aws_access_key_id=access_key,
    aws_secret_access_key=secret_key)
s3_file = local_filename
s3.upload_file(local_filename, bucket_name, s3_file)
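# Downstream, a separate load step would typically COPY this extract from S3
# into the dag_run_history table that the incremental query above keys on.
# Illustrative sketch only; the bucket path and IAM role are assumptions, not
# part of this script:
#
#   COPY dag_run_history
#   FROM 's3://<bucket_name>/dag_run_extract.csv'
#   IAM_ROLE '<redshift-iam-role-arn>'
#   DELIMITER '|';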