diff --git a/migrations/dimensions-config/1fae088c80b6.py b/migrations/dimensions-config/1fae088c80b6.py
index b164414..ce90131 100644
--- a/migrations/dimensions-config/1fae088c80b6.py
+++ b/migrations/dimensions-config/1fae088c80b6.py
@@ -53,6 +53,7 @@ def _lock_exposure_table(ctx: _Context) -> None:
         # Sqlite does not support table locks
         return
 
+    _LOG.info("Locking exposure table")
     schema = ""
     if ctx.schema:
         schema = f"{ctx.schema}."
@@ -61,6 +62,7 @@ def _lock_exposure_table(ctx: _Context) -> None:
 
 def _migrate_groups(ctx: _Context) -> None:
     # Create group table
+    _LOG.info("Creating group table")
     check_constraints = []
     if ctx.is_sqlite:
         check_constraints = [_make_string_check_constraint("instrument", 32, "group_len_instrument")]
@@ -78,6 +80,7 @@ def _migrate_groups(ctx: _Context) -> None:
     )
 
     # Populate group table based on the data in the exposure table.
+    _LOG.info("Populating group table")
     exposure_table = ctx.get_table("exposure")
     select = sa.select(
         exposure_table.columns["instrument"],
@@ -94,6 +97,7 @@ def _migrate_groups(ctx: _Context) -> None:
     )
 
     # Update the exposure table to reference the group table.
+    _LOG.info("Updating exposure table to reference group table")
     with op.batch_alter_table("exposure", schema=ctx.schema) as batch_op:
         batch_op.alter_column("group_name", new_column_name="group", nullable=False)
         batch_op.drop_column("group_id")
@@ -118,9 +122,11 @@ def _migrate_day_obs(ctx: _Context) -> None:
     # Before doing anything else, generate the rows for the new day_obs table
     # from the data in the exposure table. This is prone to failure due to the
     # need to import instrument classes.
+    _LOG.info("Generating data for day_obs table from exposure_table")
     day_obs_rows = list(_generate_day_obs_rows(ctx))
 
     # Create day_obs table
+    _LOG.info("Creating day_obs table")
     timespan_columns: list[sa.Column]
     if ctx.is_postgres:
         # Postgres uses a non-standard range datatype for representing timespans.
@@ -150,9 +156,11 @@ def _migrate_day_obs(ctx: _Context) -> None:
     )
 
     # Populate the day_obs table based on the data in the exposure table.
+    _LOG.info("Populating day_obs table")
     op.bulk_insert(table, day_obs_rows)
 
     # Update exposure table to reference day_obs table
+    _LOG.info("Updating exposure table to reference day_obs table")
     with op.batch_alter_table("exposure", schema=ctx.schema) as batch_op:
         batch_op.alter_column("day_obs", nullable=False)
         batch_op.create_foreign_key(
@@ -165,6 +173,7 @@ def _migrate_day_obs(ctx: _Context) -> None:
 
 
 def _migrate_dimensions_json(ctx: _Context) -> None:
+    _LOG.info("Updating dimensions.json in ButlerAttributes")
     attributes = ButlerAttributes(ctx.bind, ctx.schema)
     attributes.update("config:dimensions.json", _dimensions_json_universe_6)
 
@@ -255,8 +264,9 @@ def get_instrument(self, name: str) -> _Instrument:
         rows = self._ctx.bind.execute(
             sa.select(instrument_table.c.class_name).where(instrument_table.c.name == name)
         ).all()
-        assert len(rows) == 1
+        assert len(rows) == 1, f"Should be exactly one class name for instrument {name}"
         class_name = rows[0][0]
+        _LOG.info(f"Loading instrument definition {name} from class {class_name}")
         instrument = doImportType(class_name)()
         self._instruments[name] = instrument
         return instrument
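
Note (editor's sketch): the added messages rely on a module-level _LOG logger already being defined in this migration module; the patch does not show that definition. Below is a minimal sketch of that assumption and of how a caller could surface the new INFO-level progress output; the format string is illustrative, not taken from the repository.

import logging

# Assumed module-level logger that the patch's _LOG.info(...) calls resolve against.
_LOG = logging.getLogger(__name__)

# The new progress messages are only visible if the process running the migration
# has logging configured at INFO level or below, for example:
logging.basicConfig(level=logging.INFO, format="%(levelname)s %(name)s: %(message)s")
_LOG.info("Locking exposure table")  # emits e.g.: INFO __main__: Locking exposure table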