forked from Rapptz/RoboDanny
-
Notifications
You must be signed in to change notification settings - Fork 0
/
launcher.py
330 lines (256 loc) · 9.51 KB
/
launcher.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
from __future__ import annotations
from typing import TypedDict
import re
import os
import sys
import json
import uuid
import click
import logging
import asyncio
import asyncpg
import discord
import datetime
import contextlib
from bot import RoboDanny
from pathlib import Path
from logging.handlers import RotatingFileHandler
import config
import traceback
# Optionally use uvloop's faster event loop implementation when it is installed.
try:
    import uvloop  # type: ignore
except ImportError:
    # uvloop is optional; fall back to the default asyncio event loop.
    pass
else:
    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
class Revisions(TypedDict):
    """Schema of the migrations metadata file (``migrations/revisions.json``)."""

    # The version key represents the current activated version
    # So v1 means v1 is active and the next revision should be v2
    # In order for this to work the number has to be monotonically increasing
    # and have no gaps
    version: int
    database_uri: str
# Matches revision filenames such as ``V12__add_users_table.sql``.
# NOTE(review): only 'V' files are produced by ``Migrations.create_revision``;
# the 'U' kind is accepted here but never created in this file — confirm its intent.
REVISION_FILE = re.compile(r'(?P<kind>V|U)(?P<version>[0-9]+)__(?P<description>.+).sql')
class Revision:
    """A single SQL migration file together with its parsed metadata."""

    __slots__ = ('kind', 'version', 'description', 'file')

    def __init__(self, *, kind: str, version: int, description: str, file: Path) -> None:
        self.kind: str = kind
        self.version: int = version
        self.description: str = description
        self.file: Path = file

    @classmethod
    def from_match(cls, match: re.Match[str], file: Path):
        """Build a revision from a ``REVISION_FILE`` regex match on a filename."""
        groups = match.groupdict()
        return cls(
            kind=groups['kind'],
            version=int(groups['version']),
            description=groups['description'],
            file=file,
        )
class Migrations:
    """Tracks and applies the SQL migrations living next to the metadata file.

    State (active version and database URI) is persisted as JSON in
    ``migrations/revisions.json`` by default; the ``*.sql`` revision files
    live in the same directory.
    """

    def __init__(self, *, filename: str = 'migrations/revisions.json'):
        self.filename: str = filename
        self.root: Path = Path(filename).parent
        self.revisions: dict[int, Revision] = self.get_revisions()
        self.load()

    def ensure_path(self) -> None:
        # Create the migrations directory if it does not exist yet.
        self.root.mkdir(exist_ok=True)

    def load_metadata(self) -> Revisions:
        """Read the metadata file, falling back to defaults when it is absent."""
        try:
            with open(self.filename, 'r', encoding='utf-8') as fp:
                return json.load(fp)
        except FileNotFoundError:
            # No file yet: version 0 means no revision has been applied.
            return {
                'version': 0,
                'database_uri': discord.utils.MISSING,
            }

    def get_revisions(self) -> dict[int, Revision]:
        """Scan the migrations directory for revision files, keyed by version."""
        result: dict[int, Revision] = {}
        for file in self.root.glob('*.sql'):
            match = REVISION_FILE.match(file.name)
            if match is not None:
                rev = Revision.from_match(match, file)
                result[rev.version] = rev
        return result

    def dump(self) -> Revisions:
        """Serialize the current state into the metadata dict shape."""
        return {
            'version': self.version,
            'database_uri': self.database_uri,
        }

    def load(self) -> None:
        """Load persisted state into ``self.version`` and ``self.database_uri``."""
        self.ensure_path()
        data = self.load_metadata()
        self.version: int = data['version']
        self.database_uri: str = data['database_uri']

    def save(self):
        """Persist current state atomically (temp file + ``os.replace``)."""
        temp = f'{self.filename}.{uuid.uuid4()}.tmp'
        with open(temp, 'w', encoding='utf-8') as tmp:
            json.dump(self.dump(), tmp)

        # atomically move the file
        os.replace(temp, self.filename)

    def is_next_revision_taken(self) -> bool:
        # True when a revision file for version + 1 already exists on disk.
        return self.version + 1 in self.revisions

    @property
    def ordered_revisions(self) -> list[Revision]:
        # All known revisions sorted oldest-first by version number.
        return sorted(self.revisions.values(), key=lambda r: r.version)

    def create_revision(self, reason: str, *, kind: str = 'V') -> Revision:
        """Write a stub ``.sql`` file for the next version and return its Revision.

        Whitespace in *reason* is replaced with underscores to form the filename.
        """
        cleaned = re.sub(r'\s', '_', reason)
        filename = f'{kind}{self.version + 1}__{cleaned}.sql'
        path = self.root / filename

        stub = (
            f'-- Revises: V{self.version}\n'
            f'-- Creation Date: {datetime.datetime.utcnow()} UTC\n'
            f'-- Reason: {reason}\n\n'
        )

        with open(path, 'w', encoding='utf-8', newline='\n') as fp:
            fp.write(stub)

        self.save()
        return Revision(kind=kind, description=reason, version=self.version + 1, file=path)

    async def upgrade(self, connection: asyncpg.Connection) -> int:
        """Apply all revisions newer than the current version in one transaction.

        Returns the number of revisions applied; the new version is persisted
        only after the transaction commits.
        """
        ordered = self.ordered_revisions
        successes = 0
        async with connection.transaction():
            for revision in ordered:
                if revision.version > self.version:
                    sql = revision.file.read_text('utf-8')
                    await connection.execute(sql)
                    successes += 1

        self.version += successes
        self.save()
        return successes

    def display(self) -> None:
        """Echo (without executing) the SQL of all pending revisions."""
        ordered = self.ordered_revisions
        for revision in ordered:
            if revision.version > self.version:
                sql = revision.file.read_text('utf-8')
                click.echo(sql)
class RemoveNoise(logging.Filter):
    """Filters out discord.state's noisy 'referencing an unknown ...' warnings."""

    def __init__(self):
        super().__init__(name='discord.state')

    def filter(self, record: logging.LogRecord) -> bool:
        # Suppress only the specific WARNING-level noise; let everything else through.
        is_noise = record.levelname == 'WARNING' and 'referencing an unknown' in record.msg
        return not is_noise
@contextlib.contextmanager
def setup_logging():
    """Context manager configuring logging for the bot's lifetime.

    Installs discord.py's default (console) logging, quiets chatty loggers,
    attaches a rotating file handler on the root logger, and on exit closes
    and removes every root handler.
    """
    log = logging.getLogger()

    try:
        discord.utils.setup_logging()
        # __enter__
        max_bytes = 32 * 1024 * 1024  # 32 MiB per log file before rotation
        logging.getLogger('discord').setLevel(logging.INFO)
        logging.getLogger('discord.http').setLevel(logging.WARNING)
        logging.getLogger('discord.state').addFilter(RemoveNoise())

        log.setLevel(logging.INFO)
        handler = RotatingFileHandler(filename='rdanny.log', encoding='utf-8', mode='w', maxBytes=max_bytes, backupCount=5)
        dt_fmt = '%Y-%m-%d %H:%M:%S'
        fmt = logging.Formatter('[{asctime}] [{levelname:<7}] {name}: {message}', dt_fmt, style='{')
        handler.setFormatter(fmt)
        log.addHandler(handler)

        yield
    finally:
        # __exit__
        # Iterate over a copy since removeHandler mutates log.handlers.
        handlers = log.handlers[:]
        for hdlr in handlers:
            hdlr.close()
            log.removeHandler(hdlr)
async def create_pool() -> asyncpg.Pool:
    """Create the PostgreSQL connection pool with jsonb<->object codecs installed."""

    def _jsonb_encoder(value):
        return json.dumps(value)

    def _jsonb_decoder(value):
        return json.loads(value)

    async def _init_connection(con):
        # Transparently encode/decode jsonb columns as Python objects.
        await con.set_type_codec(
            'jsonb',
            schema='pg_catalog',
            encoder=_jsonb_encoder,
            decoder=_jsonb_decoder,
            format='text',
        )

    return await asyncpg.create_pool(
        config.postgresql,
        init=_init_connection,
        command_timeout=300,
        max_size=20,
        min_size=20,
    )  # type: ignore
async def run_bot():
    """Create the database pool and run the bot until it stops."""
    log = logging.getLogger()
    try:
        pool = await create_pool()
    except Exception:
        failure = 'Could not set up PostgreSQL. Exiting.'
        click.echo(failure, file=sys.stderr)
        log.exception(failure)
        return

    async with RoboDanny() as bot:
        bot.pool = pool
        await bot.start()
@click.group(invoke_without_command=True, options_metavar='[options]')
@click.pass_context
def main(ctx):
    """Launches the bot."""
    # With no subcommand, run the bot itself; subcommands (e.g. `db`) are
    # maintenance utilities and skip bot startup entirely.
    if ctx.invoked_subcommand is None:
        with setup_logging():
            asyncio.run(run_bot())
@main.group(short_help='database stuff', options_metavar='[options]')
def db():
    # Container group for the migration subcommands (init/migrate/upgrade/...).
    # Intentionally no docstring so click's help shows only short_help.
    pass
async def ensure_uri_can_run() -> bool:
    """Sanity-check that the configured PostgreSQL URI accepts connections."""
    conn: asyncpg.Connection = await asyncpg.connect(config.postgresql)
    await conn.close()
    return True
@db.command()
def init():
    """Initializes the database and runs all the current migrations"""
    # Fail fast if the configured URI cannot even connect.
    asyncio.run(ensure_uri_can_run())

    migrations = Migrations()
    migrations.database_uri = config.postgresql

    try:
        applied = asyncio.run(run_upgrade(migrations))
    except Exception:
        traceback.print_exc()
        click.secho('failed to initialize and apply migrations due to error', fg='red')
    else:
        # Fixed message typo: 'revisions(s)' -> 'revision(s)'.
        click.secho(f'Successfully initialized and applied {applied} revision(s)', fg='green')
@db.command()
@click.option('--reason', '-r', help='The reason for this revision.', required=True)
def migrate(reason):
    """Creates a new revision for you to edit."""
    migrations = Migrations()
    blocked = migrations.is_next_revision_taken()
    if blocked:
        # Refuse to stack a second unapplied revision on top of a pending one.
        click.echo('an unapplied migration already exists for the next version, exiting')
        click.secho('hint: apply pending migrations with the `upgrade` command', bold=True)
        return

    revision = migrations.create_revision(reason)
    click.echo(f'Created revision V{revision.version!r}')
async def run_upgrade(migrations: Migrations) -> int:
    """Connect to the stored database URI and apply all pending migrations.

    Returns the number of revisions applied. The connection is always
    closed, even when a migration fails (the original leaked it).
    """
    connection: asyncpg.Connection = await asyncpg.connect(migrations.database_uri)  # type: ignore
    try:
        return await migrations.upgrade(connection)
    finally:
        await connection.close()
@db.command()
@click.option('--sql', help='Print the SQL instead of executing it', is_flag=True)
def upgrade(sql):
    """Upgrades the database at the given revision (if any)."""
    migrations = Migrations()

    # --sql is a dry run: show the pending SQL and exit without touching the DB.
    if sql:
        migrations.display()
        return

    try:
        applied = asyncio.run(run_upgrade(migrations))
    except Exception:
        traceback.print_exc()
        click.secho('failed to apply migrations due to error', fg='red')
    else:
        # Fixed message typo: 'revisions(s)' -> 'revision(s)'.
        click.secho(f'Applied {applied} revision(s)', fg='green')
@db.command()
def current():
    """Shows the current active revision version"""
    click.echo(f'Version {Migrations().version}')
@db.command()
@click.option('--reverse', help='Print in reverse order (oldest first).', is_flag=True)
def log(reverse):
    """Displays the revision history"""
    migrations = Migrations()
    # ordered_revisions is already oldest-first; the default view is newest-first.
    oldest_first = migrations.ordered_revisions
    revs = oldest_first if reverse else list(reversed(oldest_first))
    for rev in revs:
        version_label = click.style(f'V{rev.version:>03}', fg='yellow')
        click.echo(f'{version_label} {rev.description.replace("_", " ")}')
if __name__ == '__main__':
    # click dispatches to `main` (bot launch) or one of the `db` subcommands.
    main()