Change filter spec to use YML instead of JSON (#290)
Changed filter spec to use YML instead of JSON.
Changed relevant parts of the documentation.
bishwajit-db authored Sep 18, 2024
1 parent 621647f commit 95a6563
Showing 7 changed files with 104 additions and 79 deletions.
35 changes: 18 additions & 17 deletions docs/dashboards.md
@@ -15,6 +15,7 @@
* [Overrides](#overrides)
* [`.md` files](#md-files)
* [Markdown header arguments](#markdown-header-arguments)
* [`.filter.yml` file(s)](#filteryml-files)
* [`dashboard.yml` file](#dashboardyml-file)
* [Using as library](#using-as-library)
* [Configuration precedence](#configuration-precedence)
@@ -247,30 +248,30 @@ The following text tile arguments are supported:

[[back to top](#dashboards-as-code)]

## `.filter.json` files
## `.filter.yml` file(s)

The filter files contain the definition of a filter that is linked to multiple widgets. The filter is applied to all widgets that have the given column. The schema is defined as follows:

| Flag | Description | Type | Optional |
|---------------|----------------------------------------------------|------------|----------|
| column | The columns the filter is applied. | str. | No* |
| columns | The columns the filter is applied. | list[str] | No* |
| type | The filter type, by default multi-select drop down | str | Yes |
| title | The filter title | str | Yes |
| description | The filter description | str | Yes |
| order | The widget order | str | Yes |
| id | The widget id | str | Yes |
| Flag        | Description                           | Type      | Optional |
|-------------|---------------------------------------|-----------|----------|
| column      | The column the filter is applied to.  | str       | No*      |
| columns     | The columns the filter is applied to. | list[str] | No*      |
| type        | The filter type, by default DROPDOWN  | str       | Yes      |
| title       | The filter title                      | str       | Yes      |
| description | The filter description                | str       | Yes      |
| order       | The widget order                      | str       | Yes      |
| id          | The widget id                         | str       | Yes      |

> *column and columns are exclusive, one is required.
> \* **column** and **columns** are mutually exclusive; exactly one of them is required.
> **type** can be one of DROPDOWN, MULTI_SELECT, or DATE_RANGE_PICKER.

An example filter would be:

```json
{
"column": "Country",
"title": "Countries",
"description": "Filter which countries are vizualized"
}
```

```yml
column: Country
title: Countries
description: Filter which countries are visualized
```
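A filter can also target several columns at once via the `columns` list form. A minimal sketch, mirroring the category filter fixtures added in this commit (with no `type` set, the widget falls back to the default DROPDOWN):

```yml
columns:
- product_category
- category
title: Category
description: Filter by category
```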
## `dashboard.yml` file
8 changes: 4 additions & 4 deletions src/databricks/labs/lsql/dashboards.py
@@ -201,7 +201,7 @@ class FilterHandler(BaseHandler):
    def _parse_header(self, header: str) -> dict:
        if not header:
            return {}
        metadata = json.loads(header)
        metadata = yaml.safe_load(header) or {}
        # The user can either provide a single filter column as a string or a list of filter columns
        # Only one of column or columns should be set
        filter_col = metadata.pop("column", None)
@@ -211,7 +211,7 @@ def _parse_header(self, header: str) -> dict:
        if filter_col and filter_cols:
            raise ValueError(f"Both column and columns set in {self._path}")
        # If a single column is provided, convert it to a list of one column
        # Please note that column/columns key in .filter.yml files are mapped to the filters key in the TileMetadata
        metadata["filters"] = [filter_col] if filter_col else filter_cols
        metadata["widget_type"] = WidgetType(metadata.pop("type", "DROPDOWN").upper())
        return metadata
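For context, a minimal standalone sketch of what the new parsing yields for a YAML header. It assumes PyYAML is installed and uses an illustrative header plus a simplified version of the column/columns mapping above, not the library's full code path:

```python
import yaml

# Illustrative header, in the shape a `.filter.yml` file would contain.
header = """
column: date
title: Date Filter
type: DATE_RANGE_PICKER
"""

metadata = yaml.safe_load(header) or {}
filter_col = metadata.pop("column", None)
filter_cols = metadata.pop("columns", None)
# Mirror the column/columns -> filters mapping from the handler above.
metadata["filters"] = [filter_col] if filter_col else filter_cols
print(metadata)  # {'title': 'Date Filter', 'type': 'DATE_RANGE_PICKER', 'filters': ['date']}
```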
@@ -301,7 +301,7 @@ def is_query(self) -> bool:
        return self.path is not None and self.path.suffix == ".sql"

    def is_filter(self) -> bool:
        return self.path is not None and self.path.name.endswith(".filter.json")
        return self.path is not None and self.path.name.endswith(".filter.yml")

    @property
    def handler(self) -> BaseHandler:
@@ -1041,7 +1041,7 @@ def _from_dashboard_folder(cls, folder: Path) -> "DashboardMetadata":
        """Read the dashboard metadata from the tile files."""
        tiles = []
        for path in folder.iterdir():
            if not path.name.endswith((".sql", ".md", ".filter.json")):
            if not path.name.endswith((".sql", ".md", ".filter.yml")):
                continue
            tile_metadata = TileMetadata.from_path(path)
            tile = Tile.from_tile_metadata(tile_metadata)
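Note that with the new suffix check, any leftover `*.filter.json` specs no longer match and are skipped during folder discovery. A small sketch with hypothetical file names illustrates the match:

```python
# Hypothetical tile file names, filtered with the same suffix tuple as above.
names = ["counter.sql", "notes.md", "country.filter.yml", "country.filter.json"]
kept = [name for name in names if name.endswith((".sql", ".md", ".filter.yml"))]
print(kept)  # ['counter.sql', 'notes.md', 'country.filter.yml']
```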
@@ -0,0 +1,6 @@
columns:
- sales_date
- inventory_date
title: Date
description: Filter by date
type: DATE_RANGE_PICKER
@@ -0,0 +1,5 @@
columns:
- product_category
- category
title: Category
description: Filter by category
@@ -0,0 +1,6 @@
columns:
- sales_date
- inventory_date
title: Date
description: Filter by date
type: DATE_RANGE_PICKER
@@ -0,0 +1,5 @@
columns:
- product_category
- category
title: Category
description: Filter by category
118 changes: 60 additions & 58 deletions tests/unit/test_dashboards.py
@@ -1009,11 +1009,11 @@ def test_query_tile_creates_database_with_database_and_catalog_overwrite(
def test_query_tile_fills_up_size(tmp_path, width, height, filters, axes):
    query_path = tmp_path / "counter.sql"
    query_path.write_text("SELECT 1")

    dashboard_metadata = DashboardMetadata.from_path(tmp_path)
    widget_metadata = TileMetadata(query_path, width=width, height=height, filters=list(filters))
    query_tile = QueryTile(widget_metadata)

    positions = [layout.position for layout in query_tile.get_layouts([])]
    positions = [layout.position for layout in query_tile.get_layouts(dashboard_metadata)]

    assert sum(p.width * p.height for p in positions) == width * height

@@ -1594,31 +1594,43 @@ def test_dashboards_save_to_folder_replaces_counter_names(ugly_dashboard, tmp_pa
def test_filter_spec_validate_absent_column(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1, from test.test_metrics")
    filter_spec = """
{
"title": "Date Filter",
"description": "Filter by date",
"type": "DATE_RANGE_PICKER"
}
title: Date Filter
description: Filter by date
type: DATE_RANGE_PICKER
""".lstrip()
    (tmp_path / "filter_spec.filter.json").write_text(filter_spec)
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)

    with pytest.raises(ValueError) as e:
        DashboardMetadata.from_path(tmp_path)
    assert "Neither column nor columns set" in str(e.value)


def test_filter_spec_validate_invalid_type(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1, from test.test_metrics")
    filter_spec = """
title: Date Filter
column: date
description: Filter by date
type: TABLE
""".lstrip()
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)
    dashboard_metadata = DashboardMetadata.from_path(tmp_path)
    with pytest.raises(ValueError) as e:
        dashboard_metadata.validate()
    assert "Filter tile has an invalid widget type" in str(e.value)


def test_filter_spec_validate_both_column_keys_present(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1, from test.test_metrics")
    filter_spec = """
{
"column": "date",
"columns": ["date"],
"title": "Date Filter",
"description": "Filter by date",
"type": "DATE_RANGE_PICKER"
}
column: date
columns:
- date
title: Date Filter
description: Filter by date
type: DATE_RANGE_PICKER
""".lstrip()
    (tmp_path / "filter_spec.filter.json").write_text(filter_spec)
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)

    with pytest.raises(ValueError) as e:
        DashboardMetadata.from_path(tmp_path)
@@ -1628,14 +1640,12 @@ def test_filter_spec_validate_both_column_keys_present(tmp_path):
def test_filter_load_filter_tile(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1, from test.test_metrics")
    filter_spec = """
{
"column": "date",
"title": "Date Filter",
"description": "Filter by date",
"type": "DATE_RANGE_PICKER"
}
column: date
title: Date Filter
description: Filter by date
type: DATE_RANGE_PICKER
""".lstrip()
    (tmp_path / "filter_spec.filter.json").write_text(filter_spec)
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)

    dashboard_metadata = DashboardMetadata.from_path(tmp_path)
    assert len(dashboard_metadata.tiles) == 2
@@ -1644,29 +1654,27 @@ def test_filter_load_filter_tile_no_applicable_column(tmp_path):
def test_filter_load_filter_tile_no_applicable_column(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1, from test.test_metrics")
    filter_spec = """
{
"column": "timestamp",
"title": "Date Filter",
"description": "Filter by date",
"type": "DATE_RANGE_PICKER"
}
column: timestamp
title: Date Filter
description: Filter by date
type: DATE_RANGE_PICKER
""".lstrip()
    (tmp_path / "filter_spec.filter.json").write_text(filter_spec)
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)

    dashboard_metadata = DashboardMetadata.from_path(tmp_path)
    assert len(dashboard_metadata.tiles) == 2
    with pytest.raises(ValueError) as e:
        dashboard_metadata.as_lakeview()
    assert "Filter tile has no matching dataset columns" in str(e.value)


def test_filter_widget_spec_defaults_to_dropdown(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1 from test.test_metrics")
    filter_spec = """
{
"column": "dimension_1",
"title": "Dimension Filter",
"description": "Filter by dimension"
}
column: dimension_1
title: Dimension Filter
description: Filter by dimension
""".lstrip()
    (tmp_path / "filter_spec.filter.json").write_text(filter_spec)
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)

    dashboard_metadata = DashboardMetadata.from_path(tmp_path)
    dashboard = dashboard_metadata.as_lakeview()
@@ -1678,14 +1686,12 @@ def test_filter_widget_spec_multi_select(tmp_path):
def test_filter_widget_spec_multi_select(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1 from test.test_metrics")
    filter_spec = """
{
"column": "dimension_1",
"title": "Dimension Filter",
"description": "Filter by dimension",
"type": "MULTI_SELECT"
}
column: dimension_1
title: Dimension Filter
description: Filter by dimension
type: MULTI_SELECT
""".lstrip()
    (tmp_path / "filter_spec.filter.json").write_text(filter_spec)
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)

    dashboard_metadata = DashboardMetadata.from_path(tmp_path)
    dashboard = dashboard_metadata.as_lakeview()
@@ -1697,14 +1703,12 @@ def test_filter_widget_spec_date_range(tmp_path):
def test_filter_widget_spec_date_range(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1 from test.test_metrics")
    filter_spec = """
{
"column": "date",
"title": "Date Filter",
"description": "Filter by date",
"type": "DATE_RANGE_PICKER"
}
column: date
title: Date Filter
description: Filter by date
type: DATE_RANGE_PICKER
""".lstrip()
    (tmp_path / "filter_spec.filter.json").write_text(filter_spec)
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)

    dashboard_metadata = DashboardMetadata.from_path(tmp_path)
    dashboard = dashboard_metadata.as_lakeview()
@@ -1716,14 +1720,12 @@ def test_filter_widget_with_title_and_description(tmp_path):
def test_filter_widget_with_title_and_description(tmp_path):
    (tmp_path / "query.sql").write_text("select id, date, dimension_1, metric_1 from test.test_metrics")
    filter_spec = """
{
"column": "date",
"title": "Date Filter",
"description": "Filter by date",
"type": "DATE_RANGE_PICKER"
}
column: date
title: Date Filter
description: Filter by date
type: DATE_RANGE_PICKER
""".lstrip()
    (tmp_path / "filter_spec.filter.json").write_text(filter_spec)
    (tmp_path / "filter_spec.filter.yml").write_text(filter_spec)

    dashboard_metadata = DashboardMetadata.from_path(tmp_path)
    dashboard = dashboard_metadata.as_lakeview()
