Commit 985ec4c

Add review comments and questions and small changes

antonymilne committed Nov 25, 2024
1 parent a03aa45 commit 985ec4c
Showing 3 changed files with 34 additions and 21 deletions.
3 changes: 3 additions & 0 deletions vizro-core/src/vizro/actions/_actions_utils.py
@@ -282,12 +282,15 @@ def _get_modified_page_figures(
**_get_parametrized_config(ctds_parameter=ctds_parameter, target=target, data_frame=False),
)

# AM comment: please check this comment I added!
for target in control_targets:
ctd_filter = [item for item in ctds_filter if item["id"] == model_manager[target].selector.id]

# This only covers the case of cross-page actions where the Filter is an output of the action but not an input to it.
current_value = ctd_filter[0]["value"] if ctd_filter else None

# target_to_data_frame contains all targets, including some which might not be relevant for the filter in
# question. We filter to use just the relevant targets in Filter.__call__.
outputs[target] = model_manager[target](target_to_data_frame=target_to_data_frame, current_value=current_value)

return outputs
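
A minimal sketch of the lookup above, using hypothetical ctds_filter entries (ids and values invented purely for illustration; the real entries come from the Dash callback context):

    # Hypothetical callback-trigger entries; the real ones come from the Dash callback context.
    ctds_filter = [
        {"id": "filter_species_selector", "value": ["setosa"]},
        {"id": "filter_petal_width_selector", "value": [0.5, 2.5]},
    ]

    selector_id = "filter_petal_width_selector"
    ctd_filter = [item for item in ctds_filter if item["id"] == selector_id]

    # Cross-page case: when the Filter is an output but not an input, there is no matching entry.
    current_value = ctd_filter[0]["value"] if ctd_filter else None
    print(current_value)  # [0.5, 2.5]
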
16 changes: 11 additions & 5 deletions vizro-core/src/vizro/models/_components/form/dropdown.py
@@ -88,10 +88,12 @@ def validate_multi(cls, multi, values):
raise ValueError("Please set multi=True if providing a list of default values.")
return multi

# AM comment: please remove _build_static and change it into __call__ everywhere, unless you think there's
# a good reason not to do so.
def __call__(self, options):
return self._build_static(options)

def _build_static(self, options):
# AM comment: this is the main confusing thing about the current approach I think: every time we run
# __call__ we override the value set below, which sounds wrong because we say that we never change the value.
# From what I remember this is a workaround for the Dash bug? Maybe add a comment explaining this.
full_options, default_value = get_options_and_default(options=options, multi=self.multi)
option_height = _calculate_option_height(full_options)

@@ -114,13 +116,17 @@ def _build_dynamic_placeholder(self):
# Setting self.value here acts as a kind of pre_build method for the Dropdown: it sets self.value only the
# first time, when it is still None. We cannot create a pre_build for the Dropdown because it would have to run
# after vm.Filter.pre_build, but nothing guarantees that ordering. We could call Filter.selector.pre_build()
# from the Filter.pre_build() method if we decide to do that.
# TODO: move this to pre_build once we have better control of the ordering.
if self.value is None:
self.value = get_options_and_default(self.options, self.multi)[1]
_, default_value = get_options_and_default(self.options, self.multi)
self.value = default_value
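
A toy illustration of the "set only the first time, when still None" behaviour described above; get_default is a hypothetical stand-in for get_options_and_default:

    def get_default(options, multi=False):
        # Hypothetical stand-in for get_options_and_default: first option, wrapped in a list when multi=True.
        return [options[0]] if multi else options[0]

    options = ["setosa", "versicolor", "virginica"]
    value = None

    for build in (1, 2):  # the first build sets the default; the second keeps the already-stored value
        if value is None:
            value = get_default(options)
        print(build, value)  # 1 setosa / 2 setosa
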

# TODO-NEXT: Replace this with the "universal Vizro placeholder" component.
return html.Div(
children=[
dbc.Label(self.title, html_for=self.id) if self.title else None,
# AM question: why do we want opacity: 0? If we don't want it to appear on screen then normally we do
# this with visibility or display.
dmc.DateRangePicker(
id=self.id, value=self.value, persistence=True, persistence_type="session", style={"opacity": 0}
),
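
For context on the opacity question above, the three usual ways of hiding a component behave differently (general CSS behaviour, not specific to dmc or Vizro):

    # Illustrative style dicts only; pick based on whether layout space and events should be kept.
    hidden_but_interactive = {"opacity": 0}        # invisible, still occupies space and can receive events
    hidden_keeps_space = {"visibility": "hidden"}  # invisible, keeps its layout space, ignores pointer events
    hidden_removed = {"display": "none"}           # removed from the layout flow entirely
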
@@ -129,4 +135,4 @@ def _build_dynamic_placeholder(self):

@_log_call
def build(self):
return self._build_dynamic_placeholder() if self._dynamic else self._build_static(self.options)
return self._build_dynamic_placeholder() if self._dynamic else self.__call__(self.options)
36 changes: 20 additions & 16 deletions vizro-core/src/vizro/models/_controls/filter.py
@@ -148,6 +148,7 @@ def pre_build(self):
# 2. Propagate values from the model_manager and relax the limitation of requiring argument default values.
# 3. Skip the pre-build and do everything in the build method (if possible).
# Find more about the mentioned limitation at: https://github.com/mckinsey/vizro/pull/879/files#r1846609956
# Even if the solution changes for dynamic data, static data should still use {} as the arguments here.
multi_data_source_name_load_kwargs: list[tuple[DataSourceName, dict[str, Any]]] = [
(model_manager[target]["data_frame"], {}) for target in proposed_targets
]
@@ -170,11 +171,12 @@
)

# Check if the filter is dynamic. Dynamic filter means that the filter is updated when the page is refreshed
# which causes that "options" for categorical or "min" and "max" for numerical/temporal selectors are updated.
# The filter is dynamic if mentioned attributes ("options"/"min"/"max") are not explicitly provided and if the
# filter targets at least one figure that uses the dynamic data source.
# which causes "options" for categorical or "min" and "max" for numerical/temporal selectors to be updated.
# The filter is dynamic iff mentioned attributes ("options"/"min"/"max") are not explicitly provided and
# the filter targets at least one figure that uses a dynamic data source. Note that min or max = 0 are falsy
# values but should still count as manually set.
if isinstance(self.selector, DYNAMIC_SELECTORS) and (
not getattr(self.selector, "options", None)
not getattr(self.selector, "options", [])
and getattr(self.selector, "min", None) is None
and getattr(self.selector, "max", None) is None
):
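
A tiny illustration of the falsy-zero point made in the comment above: a plain truthiness check on min would misclassify an explicitly set min=0, while the "is None" check does not:

    explicit_min = 0             # the user explicitly set min=0 on the selector
    print(not explicit_min)      # True  -> a truthiness check would wrongly treat the filter as dynamic
    print(explicit_min is None)  # False -> correctly counted as manually set
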
@@ -269,6 +271,12 @@ def _get_min_max(targeted_data: pd.DataFrame, current_value=None) -> tuple[float
_max = targeted_data.max(axis=None)

# Convert to datetime if the column is datetime64
# AM question: I think this could be simplified:
# * The column type is already stored in _column_type and validated in __call__ with validate_column_type,
# so we can just do isinstance(self.selector, SELECTORS["temporal"]).
# * Why do we need to do this type conversion at all? When we set min/max for the datepicker in pre_build we
# don't do any type conversions.
if targeted_data.apply(is_datetime64_any_dtype).all():
_min = pd.to_datetime(_min)
_max = pd.to_datetime(_max)
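
A minimal check of what the dtype guard above evaluates, on an invented DataFrame (for illustration only):

    import pandas as pd
    from pandas.api.types import is_datetime64_any_dtype

    targeted_data = pd.DataFrame(
        {
            "x": pd.to_datetime(["2024-01-01", "2024-06-01"]),
            "y": pd.to_datetime(["2023-12-31", "2024-03-01"]),
        }
    )
    # True only when every targeted column is datetime64, which is when _min/_max get converted.
    print(targeted_data.apply(is_datetime64_any_dtype).all())  # True
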
@@ -280,6 +288,7 @@ def _get_min_max(targeted_data: pd.DataFrame, current_value=None) -> tuple[float
# Use item() to convert the scalar from a numpy type to a Python type. This isn't needed during pre_build
# because pydantic will coerce the type, but it is necessary in __call__ where we don't update model field
# values and instead just pass straight to the Dash component.
# AM question: why could AttributeError be raised here?
with suppress(AttributeError):
_min = _min.item()
with suppress(AttributeError):
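
On the question above, one plausible explanation (an assumption, not verified against every code path): numpy scalars have an .item() method while plain Python numbers do not, so suppress(AttributeError) makes the conversion a no-op for values that are already Python types:

    from contextlib import suppress

    import numpy as np

    for value in (np.int64(3), 4.5):  # numpy scalar vs plain Python float
        with suppress(AttributeError):  # plain Python numbers have no .item(), so this is skipped for them
            value = value.item()
        print(type(value))  # <class 'int'> then <class 'float'>
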
@@ -294,20 +303,15 @@ def _get_min_max(targeted_data: pd.DataFrame, current_value=None) -> tuple[float

@staticmethod
def _get_options(targeted_data: pd.DataFrame, current_value=None) -> list[Any]:
# Use tolist() to convert the scalars from numpy types to Python types. This isn't needed during pre_build
# because pydantic will coerce the type, but it is necessary in __call__ where we don't update model field
# values and instead just pass straight to the Dash component.
# The dropna() isn't strictly required here but will be in future pandas versions when the behavior of stack
# changes. See https://pandas.pydata.org/docs/whatsnew/v2.1.0.html#whatsnew-210-enhancements-new-stack.
# Also setting the dtype for the current_value_series to the dtype of the targeted_data_series to ensure it
# works when it's empty. See: https://pandas.pydata.org/docs/whatsnew/v2.1.0.html#other-deprecations
options = set(targeted_data.stack().dropna()) # noqa: PD013

# Remove ALL_OPTION from the string or list of currently selected value for the categorical filters.
current_value = [] if current_value in (None, ALL_OPTION) else current_value
if isinstance(current_value, list) and ALL_OPTION in current_value:
current_value.remove(ALL_OPTION)
# AM comment: I refactored this function to work analogously to _get_min_max.
# Completely untested though so please do check!!
if current_value is not None:
current_value = set(current_value) if isinstance(current_value, list) else {current_value}
options = options | current_value - {ALL_OPTION}

targeted_data_series = targeted_data.stack().dropna() # noqa: PD013
current_value_series = pd.Series(current_value).astype(targeted_data_series.dtypes)
return sorted(options)

return sorted(pd.concat([targeted_data_series, current_value_series]).unique())
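
A quick, hedged check of the set-based logic in the refactor above, with ALL_OPTION assumed to be the "ALL" sentinel string (note that - binds tighter than |, so ALL_OPTION is stripped from current_value before the union):

    ALL_OPTION = "ALL"  # assumed sentinel value for the "select everything" option

    options = {"setosa", "versicolor"}
    current_value = ["ALL", "virginica"]

    if current_value is not None:
        current_value = set(current_value) if isinstance(current_value, list) else {current_value}
        options = options | current_value - {ALL_OPTION}  # "-" binds tighter than "|"

    print(sorted(options))  # ['setosa', 'versicolor', 'virginica']
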
