From 4bb2e2f8aff8d0168f1f4788dcd3b8481274ab8f Mon Sep 17 00:00:00 2001
From: Maxime Beauchemin
Date: Wed, 12 Jun 2024 13:04:42 -0700
Subject: [PATCH] chore: enable ruff lint rule TRY201 and B904 to improve
 `raise` stack traces (#29166)

---
 pyproject.toml | 9 +++++-
 superset/app.py | 4 +--
 superset/charts/data/api.py | 5 +--
 .../commands/chart/importers/dispatcher.py | 8 ++---
 .../dashboard/importers/dispatcher.py | 8 ++---
 superset/commands/database/create.py | 4 +--
 .../commands/database/importers/dispatcher.py | 8 ++---
 .../commands/database/ssh_tunnel/create.py | 5 +--
 superset/commands/database/tables.py | 4 +--
 superset/commands/database/test_connection.py | 4 +--
 superset/commands/database/update.py | 4 +--
 .../commands/dataset/importers/dispatcher.py | 8 ++---
 superset/commands/importers/v1/__init__.py | 4 +--
 superset/commands/importers/v1/utils.py | 2 +-
 .../commands/query/importers/dispatcher.py | 4 +--
 superset/commands/report/execute.py | 8 ++---
 superset/commands/security/create.py | 2 +-
 superset/commands/security/update.py | 2 +-
 superset/commands/sql_lab/execute.py | 8 ++---
 superset/common/query_object.py | 2 +-
 superset/connectors/sqla/models.py | 4 +--
 superset/databases/utils.py | 4 +--
 superset/db_engine_specs/base.py | 4 +--
 superset/db_engine_specs/impala.py | 2 +-
 superset/db_engine_specs/ocient.py | 4 +--
 superset/db_engine_specs/snowflake.py | 2 +-
 superset/db_engine_specs/trino.py | 2 +-
 ..._migrate_num_period_compare_and_period_.py | 4 +--
 ...ff221_migrate_filter_sets_to_new_format.py | 8 ++---
 ...95_migrate_native_filters_to_new_schema.py | 8 ++---
 ...migrate_pivot_table_v2_heatmaps_to_new_.py | 4 +--
 ...1_rename_big_viz_total_form_data_fields.py | 8 ++---
 ...move_pivot_table_v2_legacy_order_by_to_.py | 8 ++---
 ...ea966691069_cross_filter_global_scoping.py | 8 ++---
 ...8_migrate_can_view_and_drill_permission.py | 2 +-
 ...43_5ad7321c2169_mig_new_csv_upload_perm.py | 2 +-
 ..._d60591c5515f_mig_new_excel_upload_perm.py | 2 +-
 ...33124c18ad_mig_new_columnar_upload_perm.py | 2 +-
 ..._update_charts_with_old_time_comparison.py | 4 +--
 superset/models/core.py | 10 +++---
 superset/models/helpers.py | 4 +--
 superset/sql_lab.py | 4 +--
 superset/sql_validators/presto_db.py | 2 +-
 superset/sqllab/sql_json_executer.py | 4 +--
 superset/tasks/async_queries.py | 13 ++++----
 superset/utils/decorators.py | 7 ++--
 superset/utils/json.py | 6 ++--
 superset/utils/webdriver.py | 24 +++++++-------
 superset/views/base_api.py | 2 +-
 .../dashboards/superset_factory_util.py | 32 +++++++++----------
 50 files changed, 152 insertions(+), 141 deletions(-)
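A quick primer on the two rules being enabled, since the diff itself does not explain them: TRY201 flags `raise ex` inside an `except SomeError as ex:` handler, where a bare `raise` does the same job without touching the traceback, and B904 flags raising a new exception inside a handler without chaining it via `from err` (or explicitly suppressing the context via `from None`). A minimal sketch of the two compliant shapes, with illustrative names that are not taken from this patch:

    def do_work() -> None:
        raise ValueError("boom")

    def reraise_current() -> None:
        try:
            do_work()
        except ValueError:
            # TRY201-compliant: a bare `raise` re-raises the active
            # exception with its original traceback intact.
            raise

    def wrap_with_cause() -> None:
        try:
            do_work()
        except ValueError as ex:
            # B904-compliant: `from ex` records the ValueError as the
            # __cause__ of the new exception.
            raise RuntimeError("work failed") from ex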
-select = ["E4", "E7", "E9", "F"] +select = [ + "B904", + "E4", + "E7", + "E9", + "F", + "TRY201", +] ignore = [] extend-select = ["I"] diff --git a/superset/app.py b/superset/app.py index a2192b8966d5e..a3d02648e1837 100644 --- a/superset/app.py +++ b/superset/app.py @@ -42,9 +42,9 @@ def create_app(superset_config_module: Optional[str] = None) -> Flask: return app # Make sure that bootstrap errors ALWAYS get logged - except Exception as ex: + except Exception: logger.exception("Failed to create app") - raise ex + raise class SupersetApp(Flask): diff --git a/superset/charts/data/api.py b/superset/charts/data/api.py index 932a666738757..963edc2b3c6df 100644 --- a/superset/charts/data/api.py +++ b/superset/charts/data/api.py @@ -446,5 +446,6 @@ def _create_query_context_from_form( return ChartDataQueryContextSchema().load(form_data) except KeyError as ex: raise ValidationError("Request is incorrect") from ex - except ValidationError as error: - raise error + except ValidationError: # pylint: disable=try-except-raise + # Make sure to bubble this up + raise diff --git a/superset/commands/chart/importers/dispatcher.py b/superset/commands/chart/importers/dispatcher.py index 6d2d31ccf4d77..227dcaa81f481 100644 --- a/superset/commands/chart/importers/dispatcher.py +++ b/superset/commands/chart/importers/dispatcher.py @@ -55,14 +55,14 @@ def run(self) -> None: return except IncorrectVersionError: logger.debug("File not handled by command, skipping") - except (CommandInvalidError, ValidationError) as exc: + except (CommandInvalidError, ValidationError): # found right version, but file is invalid logger.info("Command failed validation") - raise exc - except Exception as exc: + raise + except Exception: # validation succeeded but something went wrong logger.exception("Error running import command") - raise exc + raise raise CommandInvalidError("Could not find a valid command to import file") diff --git a/superset/commands/dashboard/importers/dispatcher.py b/superset/commands/dashboard/importers/dispatcher.py index 061558cce95fe..250815295cad0 100644 --- a/superset/commands/dashboard/importers/dispatcher.py +++ b/superset/commands/dashboard/importers/dispatcher.py @@ -58,14 +58,14 @@ def run(self) -> None: return except IncorrectVersionError: logger.debug("File not handled by command, skipping") - except (CommandInvalidError, ValidationError) as exc: + except (CommandInvalidError, ValidationError): # found right version, but file is invalid logger.info("Command failed validation") - raise exc - except Exception as exc: + raise + except Exception: # validation succeeded but something went wrong logger.exception("Error running import command") - raise exc + raise raise CommandInvalidError("Could not find a valid command to import file") diff --git a/superset/commands/database/create.py b/superset/commands/database/create.py index b45107ca85cc2..811c2b205eb94 100644 --- a/superset/commands/database/create.py +++ b/superset/commands/database/create.py @@ -68,7 +68,7 @@ def run(self) -> Model: engine=self._properties.get("sqlalchemy_uri", "").split(":")[0], ) # So we can show the original message - raise ex + raise except Exception as ex: event_logger.log_with_context( action=f"db_creation_failed.{ex.__class__.__name__}", @@ -141,7 +141,7 @@ def run(self) -> Model: engine=self._properties.get("sqlalchemy_uri", "").split(":")[0], ) # So we can show the original message - raise ex + raise except ( DAOCreateFailedError, DatabaseInvalidError, diff --git a/superset/commands/database/importers/dispatcher.py 
diff --git a/superset/commands/database/importers/dispatcher.py b/superset/commands/database/importers/dispatcher.py
index bdf487a75893f..beb24df2d10b1 100644
--- a/superset/commands/database/importers/dispatcher.py
+++ b/superset/commands/database/importers/dispatcher.py
@@ -53,14 +53,14 @@ def run(self) -> None:
 
                 return
             except IncorrectVersionError:
                 logger.debug("File not handled by command, skipping")
-            except (CommandInvalidError, ValidationError) as exc:
+            except (CommandInvalidError, ValidationError):
                 # found right version, but file is invalid
                 logger.info("Command failed validation")
-                raise exc
-            except Exception as exc:
+                raise
+            except Exception:
                 # validation succeeded but something went wrong
                 logger.exception("Error running import command")
-                raise exc
+                raise
 
         raise CommandInvalidError("Could not find a valid command to import file")
diff --git a/superset/commands/database/ssh_tunnel/create.py b/superset/commands/database/ssh_tunnel/create.py
index 287accc5aa56f..ea38aa52bfa4f 100644
--- a/superset/commands/database/ssh_tunnel/create.py
+++ b/superset/commands/database/ssh_tunnel/create.py
@@ -51,8 +51,9 @@ def run(self) -> Model:
             return ssh_tunnel
         except DAOCreateFailedError as ex:
             raise SSHTunnelCreateFailedError() from ex
-        except SSHTunnelInvalidError as ex:
-            raise ex
+        except SSHTunnelInvalidError:  # pylint: disable=try-except-raise
+            # Make sure to bubble this up
+            raise
 
     def validate(self) -> None:
         # TODO(hughhh): check to make sure the server port is not localhost
diff --git a/superset/commands/database/tables.py b/superset/commands/database/tables.py
index b16fcfc504efb..80f174889846a 100644
--- a/superset/commands/database/tables.py
+++ b/superset/commands/database/tables.py
@@ -127,8 +127,8 @@ def run(self) -> dict[str, Any]:
 
             payload = {"count": len(tables) + len(views), "result": options}
             return payload
-        except SupersetException as ex:
-            raise ex
+        except SupersetException:
+            raise
         except Exception as ex:
             raise DatabaseTablesUnexpectedError(ex) from ex
 
diff --git a/superset/commands/database/test_connection.py b/superset/commands/database/test_connection.py
index 6bf69bbb8741d..7c38ab68a3aeb 100644
--- a/superset/commands/database/test_connection.py
+++ b/superset/commands/database/test_connection.py
@@ -212,7 +212,7 @@ def ping(engine: Engine) -> bool:
                 engine=database.db_engine_spec.__name__,
             )
             # bubble up the exception to return a 408
-            raise ex
+            raise
         except SSHTunnelingNotEnabledError as ex:
             event_logger.log_with_context(
                 action=get_log_connection_action(
@@ -221,7 +221,7 @@ def ping(engine: Engine) -> bool:
                 engine=database.db_engine_spec.__name__,
             )
             # bubble up the exception to return a 400
-            raise ex
+            raise
         except Exception as ex:
             event_logger.log_with_context(
                 action=get_log_connection_action(
diff --git a/superset/commands/database/update.py b/superset/commands/database/update.py
index c59984238cfe4..61b0d51ed826d 100644
--- a/superset/commands/database/update.py
+++ b/superset/commands/database/update.py
@@ -85,9 +85,9 @@ def run(self) -> Model:
                 database.set_sqlalchemy_uri(database.sqlalchemy_uri)
                 ssh_tunnel = self._handle_ssh_tunnel(database)
                 self._refresh_catalogs(database, original_database_name, ssh_tunnel)
-        except SSHTunnelError as ex:
+        except SSHTunnelError:  # pylint: disable=try-except-raise
             # allow exception to bubble for debugging information
-            raise ex
+            raise
         except (DAOUpdateFailedError, DAOCreateFailedError) as ex:
             raise DatabaseUpdateFailedError() from ex
 
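Several handlers in this patch keep a body that is nothing but `raise`, plus a `# pylint: disable=try-except-raise` pragma. Pylint considers a handler that only re-raises to be useless, but here it is load-bearing: it stops a broader handler below it from swallowing and re-wrapping a specific exception. A rough sketch of the pattern, with stubbed exception classes that are not Superset's actual definitions:

    class SSHTunnelInvalidError(Exception):
        """Stub standing in for Superset's validation error."""

    class SSHTunnelCreateFailedError(Exception):
        """Stub standing in for Superset's wrapped DAO error."""

    def create_tunnel() -> None:
        raise SSHTunnelInvalidError("bad tunnel config")

    def run() -> None:
        try:
            create_tunnel()
        except SSHTunnelInvalidError:  # pylint: disable=try-except-raise
            # Re-raise unchanged so the broad handler below cannot
            # re-wrap this specific, user-facing error.
            raise
        except Exception as ex:
            raise SSHTunnelCreateFailedError() from ex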
diff --git a/superset/commands/dataset/importers/dispatcher.py b/superset/commands/dataset/importers/dispatcher.py
index 9138d4f971cb8..2d711506725e4 100644
--- a/superset/commands/dataset/importers/dispatcher.py
+++ b/superset/commands/dataset/importers/dispatcher.py
@@ -58,14 +58,14 @@ def run(self) -> None:
 
                 return
             except IncorrectVersionError:
                 logger.debug("File not handled by command, skipping")
-            except (CommandInvalidError, ValidationError) as exc:
+            except (CommandInvalidError, ValidationError):
                 # found right version, but file is invalid
                 logger.info("Command failed validation")
-                raise exc
-            except Exception as exc:
+                raise
+            except Exception:
                 # validation succeeded but something went wrong
                 logger.exception("Error running import command")
-                raise exc
+                raise
 
         raise CommandInvalidError("Could not find a valid command to import file")
diff --git a/superset/commands/importers/v1/__init__.py b/superset/commands/importers/v1/__init__.py
index 6c86faabeb546..25b8b8790f046 100644
--- a/superset/commands/importers/v1/__init__.py
+++ b/superset/commands/importers/v1/__init__.py
@@ -74,9 +74,9 @@ def run(self) -> None:
         try:
             self._import(self._configs, self.overwrite)
             db.session.commit()
-        except CommandException as ex:
+        except CommandException:
             db.session.rollback()
-            raise ex
+            raise
         except Exception as ex:
             db.session.rollback()
             raise self.import_error() from ex
diff --git a/superset/commands/importers/v1/utils.py b/superset/commands/importers/v1/utils.py
index 912a4d1be5a77..51ab99271c82c 100644
--- a/superset/commands/importers/v1/utils.py
+++ b/superset/commands/importers/v1/utils.py
@@ -75,7 +75,7 @@ def load_metadata(contents: dict[str, str]) -> dict[str, str]:
             # otherwise we raise the validation error
             ex.messages = {METADATA_FILE_NAME: ex.messages}
 
-        raise ex
+        raise
 
     return metadata
 
diff --git a/superset/commands/query/importers/dispatcher.py b/superset/commands/query/importers/dispatcher.py
index 438ea8351f722..c89a6963b13bb 100644
--- a/superset/commands/query/importers/dispatcher.py
+++ b/superset/commands/query/importers/dispatcher.py
@@ -55,10 +55,10 @@ def run(self) -> None:
 
                 return
             except IncorrectVersionError:
                 logger.debug("File not handled by command, skipping")
-            except (CommandInvalidError, ValidationError) as exc:
+            except (CommandInvalidError, ValidationError):
                 # found right version, but file is invalid
                 logger.exception("Error running import command")
-                raise exc
+                raise
 
         raise CommandInvalidError("Could not find a valid command to import file")
diff --git a/superset/commands/report/execute.py b/superset/commands/report/execute.py
index 1540fa70d8096..637898a7a0a50 100644
--- a/superset/commands/report/execute.py
+++ b/superset/commands/report/execute.py
@@ -599,7 +599,7 @@ def next(self) -> None:
                 self.update_report_schedule_and_log(
                     ReportState.ERROR, error_message=second_error_message
                 )
-                raise first_ex
+                raise
 
 
 class ReportWorkingState(BaseReportState):
@@ -662,7 +662,7 @@ def next(self) -> None:
                 ReportState.ERROR,
                 error_message=REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER,
             )
-            raise ex
+            raise
 
         try:
             self.send()
@@ -737,8 +737,8 @@ def run(self) -> None:
             ReportScheduleStateMachine(
                 self._execution_id, self._model, self._scheduled_dttm
             ).run()
-        except CommandException as ex:
-            raise ex
+        except CommandException:
+            raise
         except Exception as ex:
             raise ReportScheduleUnexpectedError(str(ex)) from ex
 
diff --git a/superset/commands/security/create.py b/superset/commands/security/create.py
index 618c7331581e4..d70bbb7111a87 100644
--- a/superset/commands/security/create.py
+++ b/superset/commands/security/create.py
@@ -42,7 +42,7 @@ def run(self) -> Any:
             return RLSDAO.create(attributes=self._properties)
         except DAOCreateFailedError as ex:
             logger.exception(ex.exception)
-            raise ex
+            raise
 
     def validate(self) -> None:
         roles = populate_roles(self._roles)
diff --git a/superset/commands/security/update.py b/superset/commands/security/update.py
index f3a6cea607bd7..54d7a66a2a238 100644
--- a/superset/commands/security/update.py
+++ b/superset/commands/security/update.py
@@ -47,7 +47,7 @@ def run(self) -> Any:
             rule = RLSDAO.update(self._model, self._properties)
         except DAOUpdateFailedError as ex:
             logger.exception(ex.exception)
-            raise ex
+            raise
 
         return rule
 
diff --git a/superset/commands/sql_lab/execute.py b/superset/commands/sql_lab/execute.py
index 533264fb28a4b..911424af51ce4 100644
--- a/superset/commands/sql_lab/execute.py
+++ b/superset/commands/sql_lab/execute.py
@@ -115,10 +115,10 @@ def run(  # pylint: disable=too-many-statements,useless-suppression
                 "status": status,
                 "payload": self._execution_context_convertor.serialize_payload(),
             }
-        except (SupersetErrorException, SupersetErrorsException) as ex:
+        except (SupersetErrorException, SupersetErrorsException):
             # to make sure we're raising the original
             # SupersetErrorException || SupersetErrorsException
-            raise ex
+            raise
         except Exception as ex:
             raise SqlLabException(self._execution_context, exception=ex) from ex
 
@@ -158,9 +158,9 @@ def _run_sql_json_exec_from_scratch(self) -> SqlJsonExecutionStatus:
             return self._sql_json_executor.execute(
                 self._execution_context, rendered_query, self._log_params
            )
-        except Exception as ex:
+        except Exception:
             self._query_dao.update(query, {"status": QueryStatus.FAILED})
-            raise ex
+            raise
 
     def _get_the_query_db(self) -> Database:
         mydb: Any = self._database_dao.find_by_id(self._execution_context.database_id)
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index 209e6f0029d68..299947d27756a 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -276,7 +276,7 @@ def validate(
             return None
         except QueryObjectValidationError as ex:
             if raise_exceptions:
-                raise ex
+                raise
             return ex
 
     def _validate_no_have_duplicate_labels(self) -> None:
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 587a184e17904..11ad95bb4425d 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -1727,11 +1727,11 @@ def assign_column_label(df: pd.DataFrame) -> pd.DataFrame | None:
                 self.schema or None,
                 mutator=assign_column_label,
             )
-        except (SupersetErrorException, SupersetErrorsException) as ex:
+        except (SupersetErrorException, SupersetErrorsException):
             # SupersetError(s) exception should not be captured; instead, they should
             # bubble up to the Flask error handler so they are returned as proper SIP-40
             # errors. This is particularly important for database OAuth2, see SIP-85.
-            raise ex
+            raise
         except Exception as ex:  # pylint: disable=broad-except
             # TODO (betodealmeida): review exception handling while querying the external
             # database. Ideally we'd expect and handle external database error, but
diff --git a/superset/databases/utils.py b/superset/databases/utils.py
index dfd75eb2233f4..526cf8020246a 100644
--- a/superset/databases/utils.py
+++ b/superset/databases/utils.py
@@ -123,8 +123,8 @@ def make_url_safe(raw_url: str | URL) -> URL:
         url = raw_url.strip()
         try:
             return make_url(url)  # noqa
-        except Exception:
-            raise DatabaseInvalidError()  # pylint: disable=raise-missing-from
+        except Exception as ex:
+            raise DatabaseInvalidError() from ex
     else:
         return raw_url
 
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index cd37e4e60218e..a9a5cf76556e3 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -2001,7 +2001,7 @@ def get_extra_params(database: Database) -> dict[str, Any]:
                 extra = json.loads(database.extra)
             except json.JSONDecodeError as ex:
                 logger.error(ex, exc_info=True)
-                raise ex
+                raise
         return extra
 
     @staticmethod
@@ -2022,7 +2022,7 @@ def update_params_from_encrypted_extra(  # pylint: disable=invalid-name
                 params.update(encrypted_extra)
             except json.JSONDecodeError as ex:
                 logger.error(ex, exc_info=True)
-                raise ex
+                raise
 
     @classmethod
     def is_readonly_query(cls, parsed_query: ParsedQuery) -> bool:
diff --git a/superset/db_engine_specs/impala.py b/superset/db_engine_specs/impala.py
index d7d1862aafd96..62360e77bbd10 100644
--- a/superset/db_engine_specs/impala.py
+++ b/superset/db_engine_specs/impala.py
@@ -104,7 +104,7 @@ def execute(
         try:
             cursor.execute_async(query)
         except Exception as ex:
-            raise cls.get_dbapi_mapped_exception(ex)
+            raise cls.get_dbapi_mapped_exception(ex) from ex
 
     @classmethod
     def handle_cursor(cls, cursor: Any, query: Query) -> None:
diff --git a/superset/db_engine_specs/ocient.py b/superset/db_engine_specs/ocient.py
index 02d19add82429..e5826097ef2e7 100644
--- a/superset/db_engine_specs/ocient.py
+++ b/superset/db_engine_specs/ocient.py
@@ -315,12 +315,12 @@ def fetch_data(
     ) -> list[tuple[Any, ...]]:
         try:
             rows: list[tuple[Any, ...]] = super().fetch_data(cursor, limit)
-        except Exception as exception:
+        except Exception:
             with OcientEngineSpec.query_id_mapping_lock:
                 del OcientEngineSpec.query_id_mapping[
                     getattr(cursor, "superset_query_id")
                 ]
-            raise exception
+            raise
 
         # TODO: Unsure if we need to verify that we are receiving rows:
         if len(rows) > 0 and type(rows[0]).__name__ == "Row":
diff --git a/superset/db_engine_specs/snowflake.py b/superset/db_engine_specs/snowflake.py
index 3d394385ee376..72116cfc321bd 100644
--- a/superset/db_engine_specs/snowflake.py
+++ b/superset/db_engine_specs/snowflake.py
@@ -368,7 +368,7 @@ def update_params_from_encrypted_extra(
             encrypted_extra = json.loads(database.encrypted_extra)
         except json.JSONDecodeError as ex:
             logger.error(ex, exc_info=True)
-            raise ex
+            raise
         auth_method = encrypted_extra.get("auth_method", None)
         auth_params = encrypted_extra.get("auth_params", {})
         if not auth_method:
diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py
index 600f236b487c9..350337c6b2cc7 100644
--- a/superset/db_engine_specs/trino.py
+++ b/superset/db_engine_specs/trino.py
@@ -342,7 +342,7 @@ def update_params_from_encrypted_extra(
                 connect_args["auth"] = trino_auth(**auth_params)
         except json.JSONDecodeError as ex:
             logger.error(ex, exc_info=True)
-            raise ex
+            raise
 
     @classmethod
     def get_dbapi_exception_mapping(cls) -> dict[type[Exception], type[Exception]]:
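The engine-spec hunks above show the B904 half of the change: `raise cls.get_dbapi_mapped_exception(ex)` becomes `raise ... from ex`. Even without `from`, Python records the original error as `__context__` and prints "During handling of the above exception, another exception occurred"; the explicit `from ex` sets `__cause__` instead, which prints "The above exception was the direct cause of the following exception" and states intent. A dependency-free sketch, with a made-up class name standing in for the mapped error:

    class MappedDatabaseError(Exception):
        """Stand-in for an engine-spec-mapped Superset error."""

    def run_query() -> None:
        raise OSError("socket closed by peer")

    def fetch() -> None:
        try:
            run_query()
        except OSError as ex:
            # `from ex` marks the driver error as the direct cause of the
            # mapped error, keeping it reachable via __cause__.
            raise MappedDatabaseError("query failed") from ex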
diff --git a/superset/migrations/versions/2018-07-05_15-19_3dda56f1c4c6_migrate_num_period_compare_and_period_.py b/superset/migrations/versions/2018-07-05_15-19_3dda56f1c4c6_migrate_num_period_compare_and_period_.py
index 685c45c631319..18547f3cf3091 100644
--- a/superset/migrations/versions/2018-07-05_15-19_3dda56f1c4c6_migrate_num_period_compare_and_period_.py
+++ b/superset/migrations/versions/2018-07-05_15-19_3dda56f1c4c6_migrate_num_period_compare_and_period_.py
@@ -118,13 +118,13 @@ def compute_time_compare(granularity, periods):
 
     try:
         obj = isodate.parse_duration(granularity) * periods
-    except isodate.isoerror.ISO8601Error:
+    except isodate.isoerror.ISO8601Error as ex:
         # if parse_human_timedelta can parse it, return it directly
         delta = f"{periods} {granularity}{'s' if periods > 1 else ''}"
         obj = parse_human_timedelta(delta)
         if obj:
             return delta
-        raise Exception(f"Unable to parse: {granularity}")
+        raise Exception(f"Unable to parse: {granularity}") from ex
 
     if isinstance(obj, isodate.duration.Duration):
         return isodate_duration_to_string(obj)
diff --git a/superset/migrations/versions/2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format.py b/superset/migrations/versions/2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format.py
index a7f85e510680c..cb1aaa26da996 100644
--- a/superset/migrations/versions/2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format.py
+++ b/superset/migrations/versions/2021-04-12_12-38_fc3a3a8ff221_migrate_filter_sets_to_new_format.py
@@ -197,9 +197,9 @@ def upgrade():
 
             dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
 
-        except Exception as e:
+        except Exception:
             print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
-            raise e
+            raise
 
     session.commit()
     session.close()
@@ -225,9 +225,9 @@ def downgrade():
                 changed_filter_sets += 1
                 changed_filters += downgrade_filter_set(filter_set)
             dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
-        except Exception as e:
+        except Exception:
             print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
-            raise e
+            raise
 
     session.commit()
     session.close()
diff --git a/superset/migrations/versions/2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema.py b/superset/migrations/versions/2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema.py
index ab60f9d00c1f4..5ea4188cd4598 100644
--- a/superset/migrations/versions/2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema.py
+++ b/superset/migrations/versions/2021-04-29_15-32_f1410ed7ec95_migrate_native_filters_to_new_schema.py
@@ -111,9 +111,9 @@ def upgrade():
             changed_filters += upgrades[0]
             changed_filter_sets += upgrades[1]
             dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
-        except Exception as e:
+        except Exception:
             print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
-            raise e
+            raise
 
     session.commit()
     session.close()
@@ -151,9 +151,9 @@ def downgrade():
             changed_filters += downgrades[0]
             changed_filter_sets += downgrades[1]
             dashboard.json_metadata = json.dumps(json_metadata, sort_keys=True)
-        except Exception as e:
+        except Exception:
             print(f"Parsing json_metadata for dashboard {dashboard.id} failed.")
-            raise e
+            raise
 
     session.commit()
     session.close()
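One nuance worth noting in the 2018 migration above: binding `as ex` exists only so the final `raise Exception(...) from ex` can chain the original parse error. B904 also accepts the opposite choice, `from None`, for cases where the inner error would only add noise; a small sketch of that alternative (illustrative function, not from this patch):

    def parse_periods(value: str) -> int:
        try:
            return int(value)
        except ValueError:
            # `from None` also satisfies B904: it deliberately suppresses
            # the inner ValueError instead of chaining it.
            raise ValueError(f"Unable to parse: {value!r}") from None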
diff --git a/superset/migrations/versions/2021-08-03_15-36_143b6f2815da_migrate_pivot_table_v2_heatmaps_to_new_.py b/superset/migrations/versions/2021-08-03_15-36_143b6f2815da_migrate_pivot_table_v2_heatmaps_to_new_.py
index f49a37a105159..186bdc043d103 100644
--- a/superset/migrations/versions/2021-08-03_15-36_143b6f2815da_migrate_pivot_table_v2_heatmaps_to_new_.py
+++ b/superset/migrations/versions/2021-08-03_15-36_143b6f2815da_migrate_pivot_table_v2_heatmaps_to_new_.py
@@ -92,9 +92,9 @@ def upgrade():
                 ]
                 changed_slices += 1
                 slice.params = json.dumps(params, sort_keys=True)
-        except Exception as e:
+        except Exception:
             print(f"Parsing json_metadata for slice {slice.id} failed.")
-            raise e
+            raise
 
     session.commit()
     session.close()
diff --git a/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py b/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
index f2e8407add4f2..8d676f625a292 100644
--- a/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
+++ b/superset/migrations/versions/2021-12-13_14-06_fe23025b9441_rename_big_viz_total_form_data_fields.py
@@ -63,12 +63,12 @@ def upgrade():
                 if header_timestamp_format:
                     params["time_format"] = header_timestamp_format
                 slc.params = json.dumps(params, sort_keys=True)
-            except Exception as e:
+            except Exception:
                 logger.exception(
                     f"An error occurred: parsing params for slice {slc.id} failed."
                     f"You need to fix it before upgrading your DB."
                 )
-                raise e
+                raise
 
     session.commit()
     session.close()
@@ -89,12 +89,12 @@ def downgrade():
                 if force_timestamp_formatting:
                     params["header_format_selector"] = force_timestamp_formatting
                 slc.params = json.dumps(params, sort_keys=True)
-            except Exception as e:
+            except Exception:
                 logger.exception(
                     f"An error occurred: parsing params for slice {slc.id} failed. "
                     "You need to fix it before downgrading your DB."
                 )
-                raise e
+                raise
 
     session.commit()
     session.close()
diff --git a/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py b/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
index f9ee00b7052b5..3849403159d30 100644
--- a/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
+++ b/superset/migrations/versions/2021-12-17_16-56_31bb738bd1d2_move_pivot_table_v2_legacy_order_by_to_.py
@@ -61,12 +61,12 @@ def upgrade():
                 if legacy_order_by:
                     params["series_limit_metric"] = legacy_order_by
                 slc.params = json.dumps(params, sort_keys=True)
-            except Exception as e:
+            except Exception:
                 logger.exception(
                     f"An error occurred: parsing params for slice {slc.id} failed."
                     f"You need to fix it before upgrading your DB."
                 )
-                raise e
+                raise
 
     session.commit()
     session.close()
@@ -84,12 +84,12 @@ def downgrade():
                 if series_limit_metric:
                     params["legacy_order_by"] = series_limit_metric
                 slc.params = json.dumps(params, sort_keys=True)
-            except Exception as e:
+            except Exception:
                 logger.exception(
                     f"An error occurred: parsing params for slice {slc.id} failed. "
                     "You need to fix it before downgrading your DB."
                 )
-                raise e
+                raise
 
     session.commit()
     session.close()
diff --git a/superset/migrations/versions/2023-05-11_12-41_4ea966691069_cross_filter_global_scoping.py b/superset/migrations/versions/2023-05-11_12-41_4ea966691069_cross_filter_global_scoping.py
index 3eb7af7bb9c66..cd03ea7cc24ce 100644
--- a/superset/migrations/versions/2023-05-11_12-41_4ea966691069_cross_filter_global_scoping.py
+++ b/superset/migrations/versions/2023-05-11_12-41_4ea966691069_cross_filter_global_scoping.py
@@ -90,9 +90,9 @@ def upgrade():
 
             if needs_upgrade:
                 dashboard.json_metadata = json.dumps(json_metadata)
-        except Exception as e:
+        except Exception:
             logger.exception("Failed to run up migration")
-            raise e
+            raise
 
     session.commit()
     session.close()
@@ -127,9 +127,9 @@ def downgrade():
 
             dashboard.json_metadata = json.dumps(json_metadata)
 
-        except Exception as e:
+        except Exception:
             logger.exception("Failed to run down migration")
-            raise e
+            raise
 
     session.commit()
     session.close()
diff --git a/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py b/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py
index c15379df6da6c..175822cc25e99 100644
--- a/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py
+++ b/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py
@@ -68,7 +68,7 @@ def upgrade():
         session.commit()
     except SQLAlchemyError as ex:
         session.rollback()
-        raise Exception(f"An error occurred while upgrading permissions: {ex}")
+        raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex
 
 
 def downgrade():
diff --git a/superset/migrations/versions/2024-04-08_15-43_5ad7321c2169_mig_new_csv_upload_perm.py b/superset/migrations/versions/2024-04-08_15-43_5ad7321c2169_mig_new_csv_upload_perm.py
index 3a846c42dc106..e5c0121a1d0b7 100644
--- a/superset/migrations/versions/2024-04-08_15-43_5ad7321c2169_mig_new_csv_upload_perm.py
+++ b/superset/migrations/versions/2024-04-08_15-43_5ad7321c2169_mig_new_csv_upload_perm.py
@@ -68,7 +68,7 @@ def upgrade():
         session.commit()
     except SQLAlchemyError as ex:
         session.rollback()
-        raise Exception(f"An error occurred while upgrading permissions: {ex}")
+        raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex
 
 
 def downgrade():
diff --git a/superset/migrations/versions/2024-04-17_14-04_d60591c5515f_mig_new_excel_upload_perm.py b/superset/migrations/versions/2024-04-17_14-04_d60591c5515f_mig_new_excel_upload_perm.py
index 56e6373094b36..69465d1b8972b 100644
--- a/superset/migrations/versions/2024-04-17_14-04_d60591c5515f_mig_new_excel_upload_perm.py
+++ b/superset/migrations/versions/2024-04-17_14-04_d60591c5515f_mig_new_excel_upload_perm.py
@@ -70,7 +70,7 @@ def upgrade():
         session.commit()
     except SQLAlchemyError as ex:
         session.rollback()
-        raise Exception(f"An error occurred while upgrading permissions: {ex}")
+        raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex
 
 
 def downgrade():
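The four permission migrations in this patch all converge on the same rollback-then-chain shape. A stubbed, runnable sketch of that shape (FakeSession and the error class here are stand-ins, not SQLAlchemy's):

    class SQLAlchemyError(Exception):
        """Stand-in for sqlalchemy.exc.SQLAlchemyError."""

    class FakeSession:
        def commit(self) -> None:
            raise SQLAlchemyError("constraint violated")

        def rollback(self) -> None:
            print("rolled back")

    def upgrade(session: FakeSession) -> None:
        try:
            session.commit()
        except SQLAlchemyError as ex:
            session.rollback()
            # `from ex` keeps the database-level traceback attached as the
            # cause of the migration failure instead of discarding it.
            raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex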
diff --git a/superset/migrations/versions/2024-04-26_12-36_4a33124c18ad_mig_new_columnar_upload_perm.py b/superset/migrations/versions/2024-04-26_12-36_4a33124c18ad_mig_new_columnar_upload_perm.py
index 614bb1d886bb7..57c129db33f0f 100644
--- a/superset/migrations/versions/2024-04-26_12-36_4a33124c18ad_mig_new_columnar_upload_perm.py
+++ b/superset/migrations/versions/2024-04-26_12-36_4a33124c18ad_mig_new_columnar_upload_perm.py
@@ -71,7 +71,7 @@ def upgrade():
         session.commit()
     except SQLAlchemyError as ex:
         session.rollback()
-        raise Exception(f"An error occurred while upgrading permissions: {ex}")
+        raise Exception(f"An error occurred while upgrading permissions: {ex}") from ex
 
 
 def downgrade():
diff --git a/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py b/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py
index dcdc3f0b44877..431d46799fb4a 100644
--- a/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py
+++ b/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py
@@ -111,7 +111,7 @@ def upgrade():
                     f"An error occurred: Upgrading params for slice {slc.id} failed."
                     f"You need to fix it before upgrading your DB."
                 )
-                raise Exception(f"An error occurred while upgrading slice: {ex}")
+                raise Exception(f"An error occurred while upgrading slice: {ex}") from ex
 
     session.commit()
     session.close()
@@ -206,7 +206,7 @@ def downgrade():
                     f"An error occurred: Downgrading params for slice {slc.id} failed."
                     f"You need to fix it before downgrading your DB."
                 )
-                raise Exception(f"An error occurred while downgrading slice: {ex}")
+                raise Exception(f"An error occurred while downgrading slice: {ex}") from ex
 
     session.commit()
     session.close()
diff --git a/superset/models/core.py b/superset/models/core.py
index c8c875e4358f6..78bbf55cdf2b9 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -538,7 +538,7 @@ def _get_sqla_engine(  # pylint: disable=too-many-locals
         try:
             return create_engine(sqlalchemy_url, **params)
         except Exception as ex:
-            raise self.db_engine_spec.get_dbapi_mapped_exception(ex)
+            raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
 
     @contextmanager
     def get_raw_connection(
@@ -570,7 +570,7 @@ def get_raw_connection(
             except Exception as ex:
                 if self.is_oauth2_enabled() and self.db_engine_spec.needs_oauth2(ex):
                     self.db_engine_spec.start_oauth2_dance(self)
-                raise ex
+                raise
 
     def get_default_catalog(self) -> str | None:
         """
@@ -769,7 +769,7 @@ def get_all_table_names_in_schema(
                 )
             }
         except Exception as ex:
-            raise self.db_engine_spec.get_dbapi_mapped_exception(ex)
+            raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
 
     @cache_util.memoized_func(
         key="db:{self.id}:schema:{schema}:view_list",
@@ -803,7 +803,7 @@ def get_all_view_names_in_schema(
                 )
             }
         except Exception as ex:
-            raise self.db_engine_spec.get_dbapi_mapped_exception(ex)
+            raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
 
     @contextmanager
     def get_inspector(
@@ -906,7 +906,7 @@ def get_encrypted_extra(self) -> dict[str, Any]:
                 encrypted_extra = json.loads(self.encrypted_extra)
             except json.JSONDecodeError as ex:
                 logger.error(ex, exc_info=True)
-                raise ex
+                raise
         return encrypted_extra
 
     # pylint: disable=invalid-name
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 48b95566af9bd..a044ff75e1370 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -310,7 +310,7 @@ def import_from_dict(
         try:
             obj_query = db.session.query(cls).filter(and_(*filters))
             obj = obj_query.one_or_none()
-        except MultipleResultsFound as ex:
+        except MultipleResultsFound:
             logger.error(
                 "Error importing %s \n %s \n %s",
                 cls.__name__,
@@ -318,7 +318,7 @@ def import_from_dict(
                 yaml.safe_dump(dict_rep),
                 exc_info=True,
             )
-            raise ex
+            raise
 
         if not obj:
             is_new_obj = True
diff --git a/superset/sql_lab.py b/superset/sql_lab.py
index 0e77c5a902312..cb2cbe455cce1 100644
--- a/superset/sql_lab.py
+++ b/superset/sql_lab.py
@@ -312,9 +312,9 @@ def execute_sql_statement(  # pylint: disable=too-many-statements
                 level=ErrorLevel.ERROR,
             )
         ) from ex
-    except OAuth2RedirectError as ex:
+    except OAuth2RedirectError:
         # user needs to authenticate with OAuth2 in order to run query
-        raise ex
+        raise
     except Exception as ex:
         # query is stopped in another thread/worker
         # stopping raises expected exceptions which we should skip
diff --git a/superset/sql_validators/presto_db.py b/superset/sql_validators/presto_db.py
index 06bee217cf22a..010272ea3eb73 100644
--- a/superset/sql_validators/presto_db.py
+++ b/superset/sql_validators/presto_db.py
@@ -138,7 +138,7 @@ def validate_statement(
             )
         except Exception as ex:
             logger.exception("Unexpected error running validation query: %s", str(ex))
-            raise ex
+            raise
 
     @classmethod
     def validate(
diff --git a/superset/sqllab/sql_json_executer.py b/superset/sqllab/sql_json_executer.py
index 246154f47b775..fde73aef0a86e 100644
--- a/superset/sqllab/sql_json_executer.py
+++ b/superset/sqllab/sql_json_executer.py
@@ -97,8 +97,8 @@ def execute(
             )
             self._query_dao.update_saved_query_exec_info(query_id)
             execution_context.set_execution_result(data)
-        except SupersetTimeoutException as ex:
-            raise ex
+        except SupersetTimeoutException:
+            raise
         except Exception as ex:
             logger.exception("Query %i failed unexpectedly", query_id)
             raise SupersetGenericDBErrorException(
diff --git a/superset/tasks/async_queries.py b/superset/tasks/async_queries.py
index b804847cd84e3..5be7acc8ccafe 100644
--- a/superset/tasks/async_queries.py
+++ b/superset/tasks/async_queries.py
@@ -55,8 +55,9 @@ def _create_query_context_from_form(form_data: dict[str, Any]) -> QueryContext:
         return ChartDataQueryContextSchema().load(form_data)
     except KeyError as ex:
         raise ValidationError("Request is incorrect") from ex
-    except ValidationError as error:
-        raise error
+    except ValidationError:  # pylint: disable=try-except-raise
+        # Make sure to bubble this up
+        raise
 
 
 def _load_user_from_job_metadata(job_metadata: dict[str, Any]) -> User:
@@ -96,7 +97,7 @@ def load_chart_data_into_cache(
         )
     except SoftTimeLimitExceeded as ex:
         logger.warning("A timeout occurred while loading chart data, error: %s", ex)
-        raise ex
+        raise
    except Exception as ex:
         # TODO: QueryContext should support SIP-40 style errors
         error = str(ex.message if hasattr(ex, "message") else ex)
@@ -104,7 +105,7 @@ def load_chart_data_into_cache(
         async_query_manager.update_job(
             job_metadata, async_query_manager.STATUS_ERROR, errors=errors
         )
-        raise ex
+        raise
 
 
 @celery_app.task(name="load_explore_json_into_cache", soft_time_limit=query_timeout)
@@ -162,7 +163,7 @@ def load_explore_json_into_cache(  # pylint: disable=too-many-locals
         logger.warning(
             "A timeout occurred while loading explore json, error: %s", ex
         )
-        raise ex
+        raise
    except Exception as ex:
         if isinstance(ex, SupersetVizException):
             errors = ex.errors
@@ -173,4 +174,4 @@ def load_explore_json_into_cache(  # pylint: disable=too-many-locals
         async_query_manager.update_job(
             job_metadata, async_query_manager.STATUS_ERROR, errors=errors
         )
-        raise ex
+        raise
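The async-task hunks above follow a common shape: update the job's status first, then re-raise so the task runner still sees the original failure. A dependency-free sketch of that shape (helper names are invented for illustration):

    def run_job(job_id: str) -> None:
        raise TimeoutError(f"job {job_id} exceeded its soft time limit")

    def mark_job_errored(job_id: str) -> None:
        print(f"job {job_id} marked as errored")

    def load_data(job_id: str) -> None:
        try:
            run_job(job_id)
        except Exception:
            mark_job_errored(job_id)
            # Bare raise: the caller (e.g. a Celery worker) records the
            # original exception and traceback, not this handler's frame.
            raise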
diff --git a/superset/utils/decorators.py b/superset/utils/decorators.py
index 70fb3058794c3..8e54541e90cde 100644
--- a/superset/utils/decorators.py
+++ b/superset/utils/decorators.py
@@ -57,7 +57,7 @@ def wrapped(*args: Any, **kwargs: Any) -> Any:
                 current_app.config["STATS_LOGGER"].gauge(
                     f"{metric_prefix_}.error", 1
                 )
-                raise ex
+                raise
 
         return wrapped
 
@@ -146,8 +146,9 @@ def stats_timing(stats_key: str, stats_logger: BaseStatsLogger) -> Iterator[floa
     start_ts = now_as_float()
     try:
         yield start_ts
-    except Exception as ex:
-        raise ex
+    except Exception:  # pylint: disable=try-except-raise
+        # Make sure to bubble this up
+        raise
     finally:
         stats_logger.timing(stats_key, now_as_float() - start_ts)
 
diff --git a/superset/utils/json.py b/superset/utils/json.py
index 0d7e31b9cd8af..50a76d1a7cb50 100644
--- a/superset/utils/json.py
+++ b/superset/utils/json.py
@@ -122,11 +122,11 @@ def json_iso_dttm_ser(obj: Any, pessimistic: bool = False) -> Any:
     try:
         return base_json_conv(obj)
-    except TypeError as ex:
+    except TypeError:
         if pessimistic:
             logger.error("Failed to serialize %s", obj)
             return f"Unserializable [{type(obj)}]"
 
-        raise ex
+        raise
 
 
 def pessimistic_json_iso_dttm_ser(obj: Any) -> Any:
@@ -249,4 +249,4 @@ def loads(
         )
     except JSONDecodeError as ex:
         logger.error("JSON is not valid %s", str(ex), exc_info=True)
-        raise ex
+        raise
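The `stats_timing` hunk above combines all three clauses: the bare `raise` lets the caller's exception escape unchanged while `finally` still records the elapsed time. A simplified, runnable sketch, with a print standing in for the stats logger:

    import time
    from contextlib import contextmanager
    from typing import Iterator

    @contextmanager
    def stats_timing(stats_key: str) -> Iterator[float]:
        start_ts = time.monotonic()
        try:
            yield start_ts
        except Exception:  # pylint: disable=try-except-raise
            # Bubble the caller's exception up unchanged...
            raise
        finally:
            # ...while still recording the timing on both paths.
            print(f"{stats_key}: {time.monotonic() - start_ts:.3f}s")

    with stats_timing("example.block"):
        sum(range(1_000_000))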
diff --git a/superset/utils/webdriver.py b/superset/utils/webdriver.py
index d79a9c7463d7f..3e4705da6ee7e 100644
--- a/superset/utils/webdriver.py
+++ b/superset/utils/webdriver.py
@@ -180,9 +180,9 @@ def get_screenshot(  # pylint: disable=too-many-locals, too-many-statements
             )
             element = page.locator(f".{element_name}")
             element.wait_for()
-        except PlaywrightTimeout as ex:
+        except PlaywrightTimeout:
            logger.exception("Timed out requesting url %s", url)
-            raise ex
+            raise
 
         try:
             # chart containers didn't render
@@ -191,12 +191,12 @@ def get_screenshot(  # pylint: disable=too-many-locals, too-many-statements
             slice_container_locator.first.wait_for()
             for slice_container_elem in slice_container_locator.all():
                 slice_container_elem.wait_for()
-        except PlaywrightTimeout as ex:
+        except PlaywrightTimeout:
             logger.exception(
                 "Timed out waiting for chart containers to draw at url %s",
                 url,
             )
-            raise ex
+            raise
         try:
             # charts took too long to load
             logger.debug(
@@ -204,11 +204,11 @@ def get_screenshot(  # pylint: disable=too-many-locals, too-many-statements
             )
             for loading_element in page.locator(".loading").all():
                 loading_element.wait_for(state="detached")
-        except PlaywrightTimeout as ex:
+        except PlaywrightTimeout:
             logger.exception(
                 "Timed out waiting for charts to load at url %s", url
             )
-            raise ex
+            raise
 
         selenium_animation_wait = current_app.config[
             "SCREENSHOT_SELENIUM_ANIMATION_WAIT"
@@ -366,9 +366,9 @@ def get_screenshot(self, url: str, element_name: str, user: User) -> bytes | Non
             element = WebDriverWait(driver, self._screenshot_locate_wait).until(
                 EC.presence_of_element_located((By.CLASS_NAME, element_name))
             )
-        except TimeoutException as ex:
+        except TimeoutException:
             logger.exception("Selenium timed out requesting url %s", url)
-            raise ex
+            raise
 
         try:
             # chart containers didn't render
@@ -378,12 +378,12 @@ def get_screenshot(self, url: str, element_name: str, user: User) -> bytes | Non
                     (By.CLASS_NAME, "chart-container")
                 )
             )
-        except TimeoutException as ex:
+        except TimeoutException:
             logger.exception(
                 "Selenium timed out waiting for chart containers to draw at url %s",
                 url,
             )
-            raise ex
+            raise
 
         try:
             # charts took too long to load
@@ -393,11 +393,11 @@ def get_screenshot(self, url: str, element_name: str, user: User) -> bytes | Non
             WebDriverWait(driver, self._screenshot_load_wait).until_not(
                 EC.presence_of_all_elements_located((By.CLASS_NAME, "loading"))
             )
-        except TimeoutException as ex:
+        except TimeoutException:
             logger.exception(
                 "Selenium timed out waiting for charts to load at url %s", url
             )
-            raise ex
+            raise
 
         selenium_animation_wait = current_app.config[
             "SCREENSHOT_SELENIUM_ANIMATION_WAIT"
diff --git a/superset/views/base_api.py b/superset/views/base_api.py
index a62e963149392..9436ce6390ed7 100644
--- a/superset/views/base_api.py
+++ b/superset/views/base_api.py
@@ -124,7 +124,7 @@ def wraps(self: BaseSupersetApiMixin, *args: Any, **kwargs: Any) -> Response:
                 self.incr_stats("warning", func_name)
             else:
                 self.incr_stats("error", func_name)
-            raise ex
+            raise
         self.send_stats_metrics(response, func_name, duration)
         return response
 
diff --git a/tests/integration_tests/dashboards/superset_factory_util.py b/tests/integration_tests/dashboards/superset_factory_util.py
index aeae6171dfcde..c21b666eec679 100644
--- a/tests/integration_tests/dashboards/superset_factory_util.py
+++ b/tests/integration_tests/dashboards/superset_factory_util.py
@@ -198,15 +198,15 @@ def delete_all_inserted_dashboards():
         for dashboard in dashboards_to_delete:
             try:
                 delete_dashboard(dashboard, False)
-            except Exception as ex:
+            except Exception:
                 logger.error(f"failed to delete {dashboard.id}", exc_info=True)
-                raise ex
+                raise
         if len(inserted_dashboards_ids) > 0:
             db.session.commit()
             inserted_dashboards_ids.clear()
-    except Exception as ex2:
+    except Exception:
         logger.error("delete_all_inserted_dashboards failed", exc_info=True)
-        raise ex2
+        raise
 
 
 def delete_dashboard(dashboard: Dashboard, do_commit: bool = False) -> None:
@@ -245,15 +245,15 @@ def delete_all_inserted_slices():
         for slice in slices_to_delete:
             try:
                 delete_slice(slice, False)
-            except Exception as ex:
+            except Exception:
                 logger.error(f"failed to delete {slice.id}", exc_info=True)
-                raise ex
+                raise
         if len(inserted_slices_ids) > 0:
             db.session.commit()
             inserted_slices_ids.clear()
-    except Exception as ex2:
+    except Exception:
         logger.error("delete_all_inserted_slices failed", exc_info=True)
-        raise ex2
+        raise
 
 
 def delete_slice(slice_: Slice, do_commit: bool = False) -> None:
@@ -278,15 +278,15 @@ def delete_all_inserted_tables():
         for table in tables_to_delete:
             try:
                 delete_sqltable(table, False)
-            except Exception as ex:
+            except Exception:
                 logger.error(f"failed to delete {table.id}", exc_info=True)
-                raise ex
+                raise
         if len(inserted_sqltables_ids) > 0:
             db.session.commit()
             inserted_sqltables_ids.clear()
-    except Exception as ex2:
+    except Exception:
         logger.error("delete_all_inserted_tables failed", exc_info=True)
-        raise ex2
+        raise
 
 
 def delete_sqltable(table: SqlaTable, do_commit: bool = False) -> None:
@@ -313,15 +313,15 @@ def delete_all_inserted_dbs():
        for database in databases_to_delete:
             try:
                 delete_database(database, False)
-            except Exception as ex:
+            except Exception:
                 logger.error(f"failed to delete {database.id}", exc_info=True)
-                raise ex
+                raise
         if len(inserted_databases_ids) > 0:
             db.session.commit()
             inserted_databases_ids.clear()
-    except Exception as ex2:
+    except Exception:
         logger.error("delete_all_inserted_databases failed", exc_info=True)
-        raise ex2
+        raise
 
 
 def delete_database(database: Database, do_commit: bool = False) -> None:
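To reproduce these findings locally without editing pyproject.toml, ruff accepts the rule codes directly on the command line (flag behavior as of recent ruff releases):

    ruff check --select TRY201,B904 superset/ tests/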