-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathtest_ormcache_sequence.py
117 lines (96 loc) · 3.76 KB
/
test_ormcache_sequence.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
import psycopg2
import timeit
# Global multiplier applied to every row count below; raise it to scale up
# the benchmark workload.
FACTOR = 1
# Benchmark workload: sequence name -> number of rows of that type.
sequences = {
    'registry': 50,
    'default': 100_000,  # number of rows of that type
    'assets': 100,
    'templates': 500,
    'routing': 100,
    'groups': 1_500,
}
# Tear down any leftover benchmark objects so the script can be re-run
# from a clean slate.
with psycopg2.connect('dbname=master') as con:
    with con.cursor() as cr:
        print('Drop tables')
        # Version 1: one dedicated table per sequence type
        for name in sequences:
            cr.execute(f'DROP TABLE IF EXISTS signaling_{name}')
        # Version 2: a single shared table keyed by sequence name
        cr.execute('DROP TABLE IF EXISTS signaling')
        for name in sequences:
            cr.execute(f'DROP SEQUENCE IF EXISTS seq_signaling_{name}')
# (Re)create the benchmark schema and populate it with FACTOR-scaled data.
with psycopg2.connect('dbname=master') as con:
    with con.cursor() as cr:
        print('Create tables')
        # Version 1: one dedicated table per sequence type
        for name in sequences:
            cr.execute(
                f'''CREATE TABLE signaling_{name} (
id SERIAL PRIMARY KEY,
create_datetime timestamp NOT NULL DEFAULT NOW()
)'''
            )
        # Version 2: a single shared table keyed by sequence name
        cr.execute(
            '''CREATE TABLE signaling (
key VARCHAR NOT NULL,
seq_id INTEGER NOT NULL,
create_datetime timestamp NOT NULL DEFAULT NOW(),
PRIMARY KEY (key, seq_id)
)'''
        )
        # One server-side sequence per key feeds the shared table's seq_id.
        for name in sequences:
            cr.execute(f'CREATE SEQUENCE seq_signaling_{name}')
        # Fill all tables
        print('Fill tables')
        # Version 1: default-valued rows, one per generate_series element
        for name, row_count in sequences.items():
            cr.execute(f'INSERT INTO "signaling_{name}" SELECT FROM generate_series(1, %s)', [row_count * FACTOR])
        # Version 2: each row draws its seq_id from the key's own sequence
        for name, row_count in sequences.items():
            cr.execute(f'INSERT INTO signaling (key, seq_id) SELECT %s, nextval(%s) FROM generate_series(1, %s) n', [name, f'seq_signaling_{name}', row_count * FACTOR])
        con.commit()
# Refresh planner statistics so the benchmark queries below get realistic
# execution plans (ANALYSE is PostgreSQL's accepted alias for ANALYZE).
with psycopg2.connect('dbname=master') as con:
    with con.cursor() as cr:
        cr.execute('ANALYSE')
def solution_1(cr):
    """Version 1: fetch MAX(id) from each per-sequence table in one query.

    :param cr: an open psycopg2 cursor
    :return: dict mapping sequence name -> current max id
             (None for a table with no rows)
    """
    subqueries = [
        f"(SELECT MAX(id) FROM signaling_{seq})"
        for seq in sequences
    ]
    cr.execute(f"""
        SELECT {','.join(subqueries)}
    """)
    # The query returns exactly ONE row with one column per sequence.
    # Zip the keys against that row's columns via fetchone(); the previous
    # code zipped against fetchall() — a one-element list of rows — which
    # paired only the first key with the entire row tuple.
    return dict(zip(sequences, cr.fetchone()))
# PostgreSQL has no loose index scan, so a plain GROUP BY key + MAX(seq_id)
# would scan the whole index; instead probe each key with its own subquery.
# https://wiki.postgresql.org/wiki/Loose_indexscan
def solution_2(cr):
    """Version 2: fetch MAX(seq_id) per key from the single shared table.

    :param cr: an open psycopg2 cursor
    :return: dict mapping sequence name -> current max seq_id
    """
    placeholders = ','.join('%s' for _ in sequences)
    cr.execute(f"""
        SELECT key, (SELECT MAX(signaling.seq_id) FROM signaling WHERE signaling.key = keys.key)
        FROM (VALUES {placeholders}) AS keys (key)
    """, [(k,) for k in sequences])
    return {key: value for key, value in cr.fetchall()}
print('Launch test')
with psycopg2.connect('dbname=master') as con:
    with con.cursor() as cr:
        # Sanity check: both strategies should report the same maxima.
        print(solution_1(cr))
        print(solution_2(cr))
        # Show the exact SQL each solution sends (mogrify renders the
        # parameters into the statement text).
        subqueries = [
            f"(SELECT MAX(id) FROM signaling_{seq})"
            for seq in sequences
        ]
        print("SOL1:", cr.mogrify(f"SELECT {','.join(subqueries)}").decode())
        # NOTE: a second, identical rebuild of `subqueries` used to sit here,
        # but the SOL2 statement below never references it — dead code, removed.
        print("SOL2:", cr.mogrify(f"""
            SELECT key, (SELECT MAX(signaling.seq_id)
            FROM signaling WHERE signaling.key = keys.key) FROM (VALUES {','.join(['%s'] * len(sequences))}) AS keys (key)
        """, [(key,) for key in sequences]).decode())
        # Time each strategy over 1000 runs on the same open cursor.
        print("TIME solution 1:")
        print(timeit.timeit('solution_1(cr)', globals={'cr': cr, 'solution_1': solution_1}, number=1000))
        print("TIME solution 2:")
        print(timeit.timeit('solution_2(cr)', globals={'cr': cr, 'solution_2': solution_2}, number=1000))