fix(imports): import query_context for imports with charts #30887

Merged · 2 commits · Nov 21, 2024
19 changes: 7 additions & 12 deletions superset/commands/chart/importers/v1/__init__.py
@@ -26,6 +26,7 @@
from superset.commands.database.importers.v1.utils import import_database
from superset.commands.dataset.importers.v1.utils import import_dataset
from superset.commands.importers.v1 import ImportModelsCommand
from superset.commands.utils import update_chart_config_dataset
from superset.connectors.sqla.models import SqlaTable
from superset.daos.chart import ChartDAO
from superset.databases.schemas import ImportV1DatabaseSchema
@@ -86,16 +87,10 @@ def _import(configs: dict[str, Any], overwrite: bool = False) -> None:

# update datasource id, type, and name
dataset = datasets[config["dataset_uuid"]]
config.update(
{
"datasource_id": dataset.id,
"datasource_type": "table",
"datasource_name": dataset.table_name,
}
)
config["params"].update({"datasource": dataset.uid})

if "query_context" in config:
config["query_context"] = None

dataset_dict = {
"datasource_id": dataset.id,
"datasource_type": "table",
"datasource_name": dataset.table_name,
}
config = update_chart_config_dataset(config, dataset_dict)
import_chart(config, overwrite=overwrite)
7 changes: 2 additions & 5 deletions superset/commands/dashboard/importers/v1/__init__.py
@@ -34,6 +34,7 @@
from superset.commands.database.importers.v1.utils import import_database
from superset.commands.dataset.importers.v1.utils import import_dataset
from superset.commands.importers.v1 import ImportModelsCommand
from superset.commands.utils import update_chart_config_dataset
from superset.daos.dashboard import DashboardDAO
from superset.dashboards.schemas import ImportV1DashboardSchema
from superset.databases.schemas import ImportV1DatabaseSchema
@@ -113,11 +114,7 @@ def _import(configs: dict[str, Any], overwrite: bool = False) -> None:
):
# update datasource id, type, and name
dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict)
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
if "query_context" in config:
config["query_context"] = None
config = update_chart_config_dataset(config, dataset_dict)

chart = import_chart(config, overwrite=False)
charts.append(chart)
7 changes: 2 additions & 5 deletions superset/commands/importers/v1/assets.py
@@ -39,6 +39,7 @@
validate_metadata_type,
)
from superset.commands.query.importers.v1.utils import import_saved_query
from superset.commands.utils import update_chart_config_dataset
from superset.dashboards.schemas import ImportV1DashboardSchema
from superset.databases.schemas import ImportV1DatabaseSchema
from superset.datasets.schemas import ImportV1DatasetSchema
@@ -113,11 +114,7 @@ def _import(configs: dict[str, Any]) -> None:
for file_name, config in configs.items():
if file_name.startswith("charts/"):
dataset_dict = dataset_info[config["dataset_uuid"]]
config.update(dataset_dict)
dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}"
config["params"].update({"datasource": dataset_uid})
if "query_context" in config:
config["query_context"] = None
config = update_chart_config_dataset(config, dataset_dict)
chart = import_chart(config, overwrite=True)
charts.append(chart)
chart_ids[str(chart.uuid)] = chart.id
43 changes: 42 additions & 1 deletion superset/commands/utils.py
@@ -17,7 +17,7 @@
from __future__ import annotations

from collections import Counter
from typing import Optional, TYPE_CHECKING
from typing import Any, Optional, TYPE_CHECKING

from flask import g
from flask_appbuilder.security.sqla.models import Role, User
@@ -34,6 +34,7 @@
from superset.daos.exceptions import DatasourceNotFound
from superset.daos.tag import TagDAO
from superset.tags.models import ObjectType, Tag, TagType
from superset.utils import json
from superset.utils.core import DatasourceType, get_user_id

if TYPE_CHECKING:
@@ -185,3 +186,43 @@
TagDAO.create_custom_tagged_objects(
object_type, object_id, [tag.name for tag in tags_to_add]
)


def update_chart_config_dataset(
config: dict[str, Any], dataset_info: dict[str, Any]
) -> dict[str, Any]:
"""
Update the chart configuration and query_context with new dataset information

:param config: The original chart configuration
:param dataset_info: Dict with datasource_id, datasource_type, and datasource_name
:return: The updated chart configuration
"""
# Update datasource id, type, and name
config.update(dataset_info)

dataset_uid = f"{dataset_info['datasource_id']}__{dataset_info['datasource_type']}"
config["params"].update({"datasource": dataset_uid})

if "query_context" in config and config["query_context"] is not None:
try:
query_context = json.loads(config["query_context"])

query_context["datasource"] = {
"id": dataset_info["datasource_id"],
"type": dataset_info["datasource_type"],
}

if "form_data" in query_context:
query_context["form_data"]["datasource"] = dataset_uid

if "queries" in query_context:
for query in query_context["queries"]:
if "datasource" in query:
query["datasource"] = query_context["datasource"]

config["query_context"] = json.dumps(query_context)
except json.JSONDecodeError:
config["query_context"] = None

return config
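
For reference, a minimal usage sketch of the new helper. The slice name, ids, and dataset name below are invented for illustration, stdlib json is used only to build and inspect the sample payload, and the snippet assumes a Superset environment where superset.commands.utils is importable.

import json

from superset.commands.utils import update_chart_config_dataset

# Exported chart config, still pointing at the dataset it had in the source
# environment (id 1). All values here are illustrative.
config = {
    "slice_name": "Example chart",
    "params": {"datasource": "1__table", "viz_type": "table"},
    "query_context": json.dumps(
        {
            "datasource": {"id": 1, "type": "table"},
            "form_data": {"datasource": "1__table"},
            "queries": [{"datasource": {"id": 1, "type": "table"}, "metrics": []}],
        }
    ),
}

# Dataset information resolved during import in the target environment.
dataset_dict = {
    "datasource_id": 42,
    "datasource_type": "table",
    "datasource_name": "imported_table",
}

config = update_chart_config_dataset(config, dataset_dict)

# The importers used to set query_context to None at this point; now every
# datasource reference inside it points at the newly imported dataset.
query_context = json.loads(config["query_context"])
assert query_context["datasource"] == {"id": 42, "type": "table"}
assert query_context["form_data"]["datasource"] == "42__table"
assert query_context["queries"][0]["datasource"] == {"id": 42, "type": "table"}
assert config["params"]["datasource"] == "42__table"

Where each importer previously nulled out query_context, the shared helper preserves it and repoints its datasource references at the newly imported dataset, which is what the updated integration tests below assert.
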
24 changes: 23 additions & 1 deletion tests/integration_tests/commands_test.py
@@ -139,7 +139,29 @@ def test_import_assets(self, mock_add_permissions):
dataset = chart.table
assert str(dataset.uuid) == dataset_config["uuid"]

assert chart.query_context is None
assert json.loads(chart.query_context) == {
"datasource": {"id": dataset.id, "type": "table"},
"force": False,
"queries": [
{
"annotation_layers": [],
"applied_time_extras": {},
"columns": [],
"custom_form_data": {},
"custom_params": {},
"extras": {"having": "", "time_grain_sqla": None, "where": ""},
"filters": [],
"metrics": [],
"order_desc": True,
"row_limit": 5000,
"time_range": " : ",
"timeseries_limit": 0,
"url_params": {},
}
],
"result_format": "json",
"result_type": "full",
}
assert json.loads(chart.params)["datasource"] == dataset.uid

database = dataset.database
24 changes: 23 additions & 1 deletion tests/integration_tests/dashboards/commands_tests.py
@@ -620,7 +620,29 @@ def test_import_v1_dashboard(self, mock_add_permissions, sm_g, utils_g):
dataset = chart.table
assert str(dataset.uuid) == dataset_config["uuid"]

assert chart.query_context is None
assert json.loads(chart.query_context) == {
"datasource": {"id": dataset.id, "type": "table"},
"force": False,
"queries": [
{
"annotation_layers": [],
"applied_time_extras": {},
"columns": [],
"custom_form_data": {},
"custom_params": {},
"extras": {"having": "", "time_grain_sqla": None, "where": ""},
"filters": [],
"metrics": [],
"order_desc": True,
"row_limit": 5000,
"time_range": " : ",
"timeseries_limit": 0,
"url_params": {},
}
],
"result_format": "json",
"result_type": "full",
}
assert json.loads(chart.params)["datasource"] == dataset.uid

database = dataset.database