diff --git a/frontend/taipy-gui/packaging/taipy-gui.d.ts b/frontend/taipy-gui/packaging/taipy-gui.d.ts index dbd409324c..bf07e4786b 100644 --- a/frontend/taipy-gui/packaging/taipy-gui.d.ts +++ b/frontend/taipy-gui/packaging/taipy-gui.d.ts @@ -126,16 +126,20 @@ export interface TableProps extends TaipyPaginatedTableProps { } export declare const Table: (props: TableProps) => JSX.Element; +export interface FilterColumnDesc extends ColumnDesc { + params?: number[]; +} export interface FilterDesc { col: string; action: string; value: string | number | boolean | Date; type: string; + params?: number[]; } export interface TableFilterProps { fieldHeader?: string; fieldHeaderTooltip?: string; - columns: Record; + columns: Record; colsOrder?: Array; onValidate: (data: Array) => void; appliedFilters?: Array; @@ -144,15 +148,19 @@ export interface TableFilterProps { } export declare const TableFilter: (props: TableFilterProps) => JSX.Element; +export interface SortColumnDesc extends ColumnDesc { + params?: number[]; +} export interface SortDesc { col: string; order: boolean; + params?: number[]; } export interface TableSortProps { fieldHeader?: string; fieldHeaderTooltip?: string; - columns: Record; + columns: Record; colsOrder?: Array; onValidate: (data: Array) => void; appliedSorts?: Array; diff --git a/frontend/taipy-gui/src/components/Taipy/TableFilter.tsx b/frontend/taipy-gui/src/components/Taipy/TableFilter.tsx index 9be19f0289..b9ecaeabff 100644 --- a/frontend/taipy-gui/src/components/Taipy/TableFilter.tsx +++ b/frontend/taipy-gui/src/components/Taipy/TableFilter.tsx @@ -35,10 +35,14 @@ import { getDateTime, getTypeFromDf } from "../../utils"; import { getSuffixedClassNames } from "./utils"; import { MatchCase } from "../icons/MatchCase"; +export interface FilterColumnDesc extends ColumnDesc { + params?: number[]; +} + interface TableFilterProps { fieldHeader?: string; fieldHeaderTooltip?: string; - columns: Record; + columns: Record; colsOrder?: Array; onValidate: 
(data: Array) => void; appliedFilters?: Array; @@ -51,7 +55,7 @@ interface FilterRowProps { fieldHeader?: string; fieldHeaderTooltip?: string; filter?: FilterDesc; - columns: Record; + columns: Record; colsOrder: Array; setFilter: (idx: number, fd: FilterDesc, remove?: boolean) => void; } @@ -98,7 +102,7 @@ const getActionsByType = (colType?: string) => (colType === "any" ? { ...actionsByType.string, ...actionsByType.number } : actionsByType.string); const getFilterDesc = ( - columns: Record, + columns: Record, colId?: string, act?: string, val?: string, @@ -118,13 +122,14 @@ const getFilterDesc = ( ? colType === "number" ? parseFloat(val) : colType === "boolean" - ? val === "1" - : colType === "date" - ? getDateTime(val) - : val + ? val === "1" + : colType === "date" + ? getDateTime(val) + : val : val, type: colType, matchCase: !!matchCase, + params: columns[colId].params, } as FilterDesc; } catch (e) { console.info("Could not parse value ", val, e); @@ -231,7 +236,11 @@ const FilterRow = (props: FilterRowProps) => { {fieldHeader} - } + > {colsOrder.map((col) => columns[col].filter ? 
( @@ -352,7 +361,7 @@ const TableFilter = (props: TableFilterProps) => { onValidate, appliedFilters, className = "", - filteredCount + filteredCount, } = props; const [showFilter, setShowFilter] = useState(false); diff --git a/frontend/taipy-gui/src/components/Taipy/TableSort.tsx b/frontend/taipy-gui/src/components/Taipy/TableSort.tsx index 3de0432c18..98ee71c61b 100644 --- a/frontend/taipy-gui/src/components/Taipy/TableSort.tsx +++ b/frontend/taipy-gui/src/components/Taipy/TableSort.tsx @@ -34,12 +34,17 @@ import { getSuffixedClassNames } from "./utils"; export interface SortDesc { col: string; order: boolean; + params?: number[]; +} + +export interface SortColumnDesc extends ColumnDesc { + params?: number[]; } interface TableSortProps { fieldHeader?: string; fieldHeaderTooltip?: string; - columns: Record; + columns: Record; colsOrder?: Array; onValidate: (data: Array) => void; appliedSorts?: Array; @@ -73,12 +78,13 @@ const badgeSx = { }; const orderCaptionSx = { ml: 1 }; -const getSortDesc = (columns: Record, colId?: string, asc?: boolean) => +const getSortDesc = (columns: Record, colId?: string, asc?: boolean) => colId && asc !== undefined ? 
({ - col: columns[colId].dfid, - order: !!asc, - } as SortDesc) + col: columns[colId].dfid, + order: !!asc, + params: columns[colId].params, + } as SortDesc) : undefined; const SortRow = (props: SortRowProps) => { @@ -138,7 +144,11 @@ const SortRow = (props: SortRowProps) => { Column - } + > {cols.map((col) => ( {columns[col].title || columns[col].dfid} @@ -183,7 +193,7 @@ const TableSort = (props: TableSortProps) => { columns, onValidate, appliedSorts, - className = "" + className = "", } = props; const [showSort, setShowSort] = useState(false); diff --git a/frontend/taipy-gui/src/components/Taipy/tableUtils.tsx b/frontend/taipy-gui/src/components/Taipy/tableUtils.tsx index 13769e8928..3ef9c88453 100644 --- a/frontend/taipy-gui/src/components/Taipy/tableUtils.tsx +++ b/frontend/taipy-gui/src/components/Taipy/tableUtils.tsx @@ -223,6 +223,7 @@ export interface FilterDesc { value: string | number | boolean | Date; type: string; matchcase?: boolean; + params?: number[]; } export const defaultColumns = {} as Record; diff --git a/frontend/taipy-gui/src/extensions/exports.ts b/frontend/taipy-gui/src/extensions/exports.ts index c42d6697cc..ac0f32a7b4 100644 --- a/frontend/taipy-gui/src/extensions/exports.ts +++ b/frontend/taipy-gui/src/extensions/exports.ts @@ -17,10 +17,10 @@ import FileSelector from "../components/Taipy/FileSelector"; import Login from "../components/Taipy/Login"; import Router from "../components/Router"; import Table from "../components/Taipy/Table"; -import TableFilter from "../components/Taipy/TableFilter"; +import TableFilter, { FilterColumnDesc } from "../components/Taipy/TableFilter"; import { FilterDesc } from "../components/Taipy/tableUtils"; -import TableSort, { SortDesc } from "../components/Taipy/TableSort"; -import {getComponentClassName} from "../components/Taipy/TaipyStyle"; +import TableSort, { SortColumnDesc, SortDesc } from "../components/Taipy/TableSort"; +import { getComponentClassName } from "../components/Taipy/TaipyStyle"; import 
Metric from "../components/Taipy/Metric"; import { useLovListMemo, LoV, LoVElt } from "../components/Taipy/lovUtils"; import { LovItem } from "../utils/lov"; @@ -72,12 +72,14 @@ export { export type { ColumnDesc, + FilterColumnDesc, FilterDesc, LoV, LoVElt, LovItem, RowType, RowValue, + SortColumnDesc, SortDesc, TaipyStore as Store, TaipyState as State, diff --git a/frontend/taipy/src/CoreSelector.tsx b/frontend/taipy/src/CoreSelector.tsx index 59418caf9c..3f4c6c80b6 100644 --- a/frontend/taipy/src/CoreSelector.tsx +++ b/frontend/taipy/src/CoreSelector.tsx @@ -37,40 +37,41 @@ import { SimpleTreeView } from "@mui/x-tree-view/SimpleTreeView"; import { TreeItem } from "@mui/x-tree-view/TreeItem"; import { - useDispatch, - useModule, - getUpdateVar, + createRequestUpdateAction, createSendUpdateAction, + getSuffixedClassNames, + getUpdateVar, + useClassNames, + useDispatch, useDispatchRequestUpdateOnFirstRender, - createRequestUpdateAction, + useModule, useDynamicProperty, - ColumnDesc, + FilterColumnDesc, FilterDesc, - TableFilter, + SortColumnDesc, SortDesc, + TableFilter, TableSort, - useClassNames, - getSuffixedClassNames, } from "taipy-gui"; -import { Cycles, Cycle, DataNodes, NodeType, Scenarios, Scenario, DataNode, Sequence, Sequences } from "./utils/types"; +import { Cycle, Cycles, DataNode, DataNodes, NodeType, Scenario, Scenarios, Sequence, Sequences } from "./utils/types"; import { Cycle as CycleIcon, Datanode as DatanodeIcon, - Sequence as SequenceIcon, Scenario as ScenarioIcon, + Sequence as SequenceIcon, } from "./icons"; import { + getUpdateVarNames, + iconLabelSx, + tinyIconButtonSx, + tinySelPinIconButtonSx, BadgePos, BadgeSx, BaseTreeViewSx, CoreProps, FlagSx, ParentItemSx, - getUpdateVarNames, - iconLabelSx, - tinyIconButtonSx, - tinySelPinIconButtonSx, } from "./utils"; export interface EditProps { @@ -282,7 +283,7 @@ const filterTree = (entities: Entities, search: string, leafType: NodeType, coun count.nb++; return emptyEntity; }) - .filter((i) => 
(i as unknown[]).length !== 0); + .filter((item) => (item as unknown[]).length > 3 && (item[3] == leafType || !item[2] || item[2].length > 0)); if (top && count.nb == 0) { return entities; } @@ -502,21 +503,22 @@ const CoreSelector = (props: CoreSelectorProps) => { const colFilters = useMemo(() => { try { const res = props.filter - ? (JSON.parse(props.filter) as Array<[string, string, string, string[]]>) + ? (JSON.parse(props.filter) as Array<[string, string, string, string[], number[]]>) : undefined; return Array.isArray(res) - ? res.reduce((pv, [name, id, coltype, lov], idx) => { + ? res.reduce((pv, [name, id, colType, lov, params], idx) => { pv[name] = { dfid: id, title: name, - type: coltype, + type: colType, index: idx, filter: true, - lov: lov, + lov, freeLov: !!lov, + params }; return pv; - }, {} as Record) + }, {} as Record) : undefined; } catch { return undefined; @@ -531,18 +533,20 @@ const CoreSelector = (props: CoreSelectorProps) => { if (old.length != filters.length || JSON.stringify(old) != jsonFilters) { localStoreSet(jsonFilters, id, lovPropertyName, "filter"); const filterVar = getUpdateVar(updateCoreVars, "filter"); - const lovVar = getUpdateVarNames(updateVars, lovPropertyName); - Promise.resolve().then(() => - dispatch( - createRequestUpdateAction( - id, - module, - lovVar, - true, - filterVar ? { [filterVar]: filters } : undefined + if (filterVar) { + const lovVar = getUpdateVarNames(updateVars, lovPropertyName); + Promise.resolve().then(() => + dispatch( + createRequestUpdateAction( + id, + module, + lovVar, + true, + { [filterVar]: filters } + ) ) - ) - ); + ); + } return filters; } return old; @@ -554,12 +558,12 @@ const CoreSelector = (props: CoreSelectorProps) => { // sort const colSorts = useMemo(() => { try { - const res = props.sort ? (JSON.parse(props.sort) as Array<[string, string]>) : undefined; + const res = props.sort ? (JSON.parse(props.sort) as Array<[string, string, number[]]>) : undefined; return Array.isArray(res) - ? 
res.reduce((pv, [name, id], idx) => { - pv[name] = { dfid: id, title: name, type: "str", index: idx }; + ? res.reduce((pv, [name, id, params], idx) => { + pv[name] = { dfid: id, title: name, type: "str", index: idx, params }; return pv; - }, {} as Record) + }, {} as Record) : undefined; } catch { return undefined; @@ -574,15 +578,17 @@ const CoreSelector = (props: CoreSelectorProps) => { if (old.length != sorts.length || JSON.stringify(old) != jsonSorts) { localStoreSet(jsonSorts, id, lovPropertyName, "sort"); const sortVar = getUpdateVar(updateCoreVars, "sort"); - dispatch( - createRequestUpdateAction( - id, - module, - getUpdateVarNames(updateVars, lovPropertyName), - true, - sortVar ? { [sortVar]: sorts } : undefined - ) - ); + if (sortVar) { + dispatch( + createRequestUpdateAction( + id, + module, + getUpdateVarNames(updateVars, lovPropertyName), + true, + { [sortVar]: sorts } + ) + ); + } return sorts; } return old; diff --git a/taipy/core/config/data_node_config.py b/taipy/core/config/data_node_config.py index 0b095fc297..8edc6ce171 100644 --- a/taipy/core/config/data_node_config.py +++ b/taipy/core/config/data_node_config.py @@ -288,6 +288,7 @@ def __init__( suggest="exposed_type='pandas'", ) properties["exposed_type"] = DataNodeConfig._EXPOSED_TYPE_PANDAS + self._ranks: Dict[str, int] = {} def __copy__(self): return DataNodeConfig(self.id, self._storage_type, self._scope, self._validity_period, **copy(self._properties)) diff --git a/taipy/core/config/scenario_config.py b/taipy/core/config/scenario_config.py index b948591173..f7b19a97ed 100644 --- a/taipy/core/config/scenario_config.py +++ b/taipy/core/config/scenario_config.py @@ -13,6 +13,8 @@ from copy import copy from typing import Any, Callable, Dict, List, Optional, Union +import networkx as nx + from taipy.common.config import Config from taipy.common.config._config import _Config from taipy.common.config.common._template_handler import _TemplateHandler as _tpl @@ -85,6 +87,7 @@ def __init__( else: 
self.comparators[_validate_id(k)].append(v) super().__init__(id, **properties) + self.__build_datanode_configs_ranks() def __copy__(self): comp = None if self.comparators is None else self.comparators @@ -179,6 +182,55 @@ def default_config(cls) -> "ScenarioConfig": """ return ScenarioConfig(cls._DEFAULT_KEY, [], [], None, {}) + def draw(self, file_path: Optional[str] = None) -> None: + """ + Export the scenario configuration graph as a PNG file. + + This function uses the `matplotlib` library to draw the scenario configuration graph. + `matplotlib` must be installed independently of `taipy` as it is not a dependency. + If `matplotlib` is not installed, the function will log an error message, and do nothing. + + Arguments: + file_path (Optional[str]): The path to save the PNG file. + If not provided, the file will be saved with the scenario configuration id. + """ + from importlib import util + + from taipy.common.logger._taipy_logger import _TaipyLogger + logger = _TaipyLogger._get_logger() + + if not util.find_spec("matplotlib"): + logger.error("Cannot draw the scenario configuration as `matplotlib` is not installed.") + return + import matplotlib.pyplot as plt + + from taipy.core._entity._dag import _DAG + graph = self.__build_nx_dag() + positioned_nodes = _DAG(graph).nodes.values() + pos = {node.entity: (node.x, node.y) for node in positioned_nodes} + labls = {node.entity: node.entity.id for node in positioned_nodes} + + # Draw the graph + plt.figure(figsize=(10, 10)) + nx.draw_networkx_nodes(graph, pos, + nodelist=[node for node in graph.nodes if isinstance(node, DataNodeConfig)], + node_color="skyblue", + node_shape="s", + node_size=2000) + nx.draw_networkx_nodes(graph, pos, + nodelist=[node for node in graph.nodes if isinstance(node, TaskConfig)], + node_color="orange", + node_shape="D", + node_size=2000) + nx.draw_networkx_labels(graph, pos, labels=labls) + nx.draw_networkx_edges(graph, pos, node_size=2000, edge_color="black", arrowstyle="->", arrowsize=25) 
+ + # Save the graph as a PNG file + path = file_path or f"{self.id}.png" + plt.savefig(path) + plt.close() # Close the plot to avoid display + logger.info(f"The graph image of the scenario configuration `{self.id}` is exported: {path}") + def _clean(self): self._tasks = [] self._additional_data_nodes = [] @@ -198,7 +250,8 @@ def _to_dict(self) -> Dict[str, Any]: } @classmethod - def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config] = None) -> "ScenarioConfig": # type: ignore + def _from_dict(cls, as_dict: Dict[str, Any], id: str, + config: Optional[_Config] = None) -> "ScenarioConfig": # type: ignore as_dict.pop(cls._ID_KEY, id) tasks = cls.__get_task_configs(as_dict.pop(cls._TASKS_KEY, []), config) @@ -223,34 +276,6 @@ def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config] **as_dict, ) - def __get_all_unique_data_nodes(self) -> List[DataNodeConfig]: - data_node_configs = set(self._additional_data_nodes) - for task in self._tasks: - data_node_configs.update(task.inputs) - data_node_configs.update(task.outputs) - - return list(data_node_configs) - - @staticmethod - def __get_task_configs(task_config_ids: List[str], config: Optional[_Config]): - task_configs = set() - if config: - if task_config_section := config._sections.get(TaskConfig.name): - for task_config_id in task_config_ids: - if task_config := task_config_section.get(task_config_id, None): - task_configs.add(task_config) - return list(task_configs) - - @staticmethod - def __get_additional_data_node_configs(additional_data_node_ids: List[str], config: Optional[_Config]): - additional_data_node_configs = set() - if config: - if data_node_config_section := config._sections.get(DataNodeConfig.name): - for additional_data_node_id in additional_data_node_ids: - if additional_data_node_config := data_node_config_section.get(additional_data_node_id): - additional_data_node_configs.add(additional_data_node_config) - return list(additional_data_node_configs) - def 
_update(self, as_dict: Dict[str, Any], default_section=None): self._tasks = as_dict.pop(self._TASKS_KEY, self._tasks) if self._tasks is None and default_section: @@ -373,63 +398,62 @@ def _set_default_configuration( Config._register(section) return Config.sections[ScenarioConfig.name][_Config.DEFAULT_KEY] - def draw(self, file_path: Optional[str]=None) -> None: - """ - Export the scenario configuration graph as a PNG file. - - This function uses the `matplotlib` library to draw the scenario configuration graph. - `matplotlib` must be installed independently of `taipy` as it is not a dependency. - If `matplotlib` is not installed, the function will log an error message, and do nothing. - - Arguments: - file_path (Optional[str]): The path to save the PNG file. - If not provided, the file will be saved with the scenario configuration id. - """ - from importlib import util - - from taipy.common.logger._taipy_logger import _TaipyLogger - logger = _TaipyLogger._get_logger() + def __get_all_unique_data_nodes(self) -> List[DataNodeConfig]: + data_node_configs = set(self._additional_data_nodes) + for task in self._tasks: + data_node_configs.update(task.inputs) + data_node_configs.update(task.outputs) - if not util.find_spec("matplotlib"): - logger.error("Cannot draw the scenario configuration as `matplotlib` is not installed.") - return - import matplotlib.pyplot as plt - import networkx as nx + return list(data_node_configs) - from taipy.core._entity._dag import _DAG + @staticmethod + def __get_task_configs(task_config_ids: List[str], config: Optional[_Config]): + task_configs = set() + if config: + if task_config_section := config._sections.get(TaskConfig.name): + for task_config_id in task_config_ids: + if task_config := task_config_section.get(task_config_id, None): + task_configs.add(task_config) + return list(task_configs) - def build_dag() -> nx.DiGraph: - g = nx.DiGraph() - for task in set(self.tasks): - if has_input := task.inputs: - for predecessor in task.inputs: 
- g.add_edges_from([(predecessor, task)]) - if has_output := task.outputs: - for successor in task.outputs: - g.add_edges_from([(task, successor)]) - if not has_input and not has_output: - g.add_node(task) - return g - graph = build_dag() - dag = _DAG(graph) - pos = {node.entity: (node.x, node.y) for node in dag.nodes.values()} - labls = {node.entity: node.entity.id for node in dag.nodes.values()} + @staticmethod + def __get_additional_data_node_configs(additional_data_node_ids: List[str], config: Optional[_Config]): + additional_data_node_configs = set() + if config: + if data_node_config_section := config._sections.get(DataNodeConfig.name): + for additional_data_node_id in additional_data_node_ids: + if additional_data_node_config := data_node_config_section.get(additional_data_node_id): + additional_data_node_configs.add(additional_data_node_config) + return list(additional_data_node_configs) - # Draw the graph - plt.figure(figsize=(10, 10)) - nx.draw_networkx_nodes(graph, pos, - nodelist=[node for node in graph.nodes if isinstance(node, DataNodeConfig)], - node_color="skyblue", - node_shape="s", - node_size=2000) - nx.draw_networkx_nodes(graph, pos, - nodelist=[node for node in graph.nodes if isinstance(node, TaskConfig)], - node_color="orange", - node_shape="D", - node_size=2000) - nx.draw_networkx_labels(graph, pos, labels=labls) - nx.draw_networkx_edges(graph, pos, node_size=2000, edge_color="black", arrowstyle="->", arrowsize=25) - path = file_path or f"{self.id}.png" - plt.savefig(path) - plt.close() # Close the plot to avoid display - logger.info(f"The graph image of the scenario configuration `{self.id}` is exported: {path}") + def __build_nx_dag(self) -> nx.DiGraph: + g = nx.DiGraph() + for task in set(self.tasks): + if has_input := task.inputs: + for predecessor in task.inputs: + g.add_edges_from([(predecessor, task)]) + if has_output := task.outputs: + for successor in task.outputs: + g.add_edges_from([(task, successor)]) + if not has_input and not 
has_output: + g.add_node(task) + return g + + def __build_datanode_configs_ranks(self): + # build the DAG + dag = self.__build_nx_dag() + # Remove tasks with no input + to_remove = [t for t, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(t, TaskConfig)] + dag.remove_nodes_from(to_remove) + # get data nodes in the dag + dn_cfgs = [nodes for nodes in nx.topological_generations(dag) if (DataNodeConfig in (type(n) for n in nodes))] + + # assign ranks to data nodes configs starting from 1 + rank = 1 + for same_rank_datanode_cfgs in dn_cfgs: + for dn_cfg in same_rank_datanode_cfgs: + dn_cfg._ranks[self.id] = rank + rank += 1 + # additional data nodes (not in the dag) have a rank of 0 + for add_dn_cfg in self._additional_data_nodes: + add_dn_cfg._ranks[self.id] = 0 diff --git a/taipy/core/data/data_node.py b/taipy/core/data/data_node.py index ad15e61072..2b9d3bba05 100644 --- a/taipy/core/data/data_node.py +++ b/taipy/core/data/data_node.py @@ -15,10 +15,11 @@ import uuid from abc import abstractmethod from datetime import datetime, timedelta -from typing import Any, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Dict, List, Optional, Set, Tuple, Union, cast import networkx as nx +from taipy.common.config import Config from taipy.common.config.common._validate_id import _validate_id from taipy.common.config.common.scope import Scope from taipy.common.logger._taipy_logger import _TaipyLogger @@ -104,9 +105,9 @@ class DataNode(_Entity, _Labeled): _ID_PREFIX = "DATANODE" __ID_SEPARATOR = "_" + _MANAGER_NAME: str = "data" _logger = _TaipyLogger._get_logger() _REQUIRED_PROPERTIES: List[str] = [] - _MANAGER_NAME: str = "data" _PATH_KEY = "path" __EDIT_TIMEOUT = 30 @@ -177,7 +178,7 @@ def config_id(self) -> str: @property def owner_id(self) -> Optional[str]: - """The identifier of the owner (sequence_id, scenario_id, cycle_id) or None.""" + """The identifier of the owner (scenario_id, cycle_id or None).""" return self._owner_id @property # 
type: ignore @@ -211,7 +212,7 @@ def edits(self, val): def last_edit_date(self) -> Optional[datetime]: """The date and time of the last modification.""" last_modified_datetime = self._get_last_modified_datetime(self._properties.get(self._PATH_KEY, None)) - if last_modified_datetime and last_modified_datetime > self._last_edit_date: # type: ignore + if last_modified_datetime and last_modified_datetime > self._last_edit_date: # type: ignore return last_modified_datetime else: return self._last_edit_date @@ -319,7 +320,7 @@ def editor_expiration_date(self, val): @_self_reload(_MANAGER_NAME) def job_ids(self) -> List[JobId]: """List of the jobs having edited this data node.""" - return [edit.get("job_id") for edit in self.edits if edit.get("job_id")] + return [job_id for edit in self.edits if (job_id := edit.get("job_id"))] @property def properties(self): @@ -379,7 +380,7 @@ def is_up_to_date(self) -> bool: if ( isinstance(ancestor_node, DataNode) and ancestor_node.last_edit_date - and ancestor_node.last_edit_date > self.last_edit_date + and ancestor_node.last_edit_date > cast(datetime, self.last_edit_date) ): return False return True @@ -450,6 +451,8 @@ def write(self, **kwargs: Any): """Write some data to this data node. + once the data is written, the data node is unlocked and the edit is tracked. + Arguments: data (Any): The data to write to this data node. job_id (JobId): An optional identifier of the job writing the data. @@ -605,6 +608,35 @@ def get_last_edit(self) -> Optional[Edit]: """ return self._edits[-1] if self._edits else None + def _get_rank(self, scenario_config_id: str) -> int: + """Get the data node rank for given scenario config. + + The rank corresponds to the order of appearance of the data nodes in a scenario config DAG. + + Arguments: + scenario_config_id (str): The identifier of the scenario config used to + get the data node rank. + + Returns: + The int value representing the rank of the data node config in the scenario config DAG. 
+ If the scenario config is None or an empty string, 0xfffb is returned.
+ If the data node config is not found, 0xfffd is returned. This case cannot + happen in a normal situation.
+ If the data node config has no precomputed ranks, 0xfffe is returned. This case + cannot happen in a normal situation.
+ If the data node config is not part of the scenario config, 0xfffc is returned as an infinite rank. + """ + if not scenario_config_id: + return 0xfffb + dn_config = Config.data_nodes.get(self._config_id, None) + if not dn_config: + self._logger.error(f"Data node config `{self.config_id}` for data node `{self.id}` is not found.") + return 0xfffd + if not dn_config._ranks: + self._logger.error(f"Data node config `{self.config_id}` for data node `{self.id}` has no rank.") + return 0xfffe + return dn_config._ranks.get(scenario_config_id, 0xfffc) + @abstractmethod def _read(self): raise NotImplementedError diff --git a/taipy/gui_core/_adapters.py b/taipy/gui_core/_adapters.py index 6d9a238ece..ff712792ec 100644 --- a/taipy/gui_core/_adapters.py +++ b/taipy/gui_core/_adapters.py @@ -17,7 +17,7 @@ from abc import ABC, abstractmethod from collections.abc import Iterable from dataclasses import dataclass -from datetime import date, datetime +from datetime import date, datetime, time from enum import Enum from operator import attrgetter, contains, eq, ge, gt, le, lt, ne @@ -44,7 +44,7 @@ from taipy.gui.gui import _DoNotUpdate from taipy.gui.utils import _is_boolean, _is_true, _TaipyBase -from .filters import DataNodeFilter, ScenarioFilter, _Filter +from .filters import DataNodeFilter, ParamType, ScenarioFilter, _Filter # prevent gui from trying to push scenario instances to the front-end @@ -360,7 +360,7 @@ def _invoke_action( return True -def _get_entity_property(col: str, *types: t.Type): +def _get_entity_property(col: str, *types: t.Type, params: t.Optional[t.List[t.Any]] = None): col_parts = col.split("(", 2) # handle the case where the col is a method (ie get_simple_label()) col_fn = ( next( @@ -372,25 +372,24 @@ def _get_entity_property(col: str, *types: t.Type): ) def sort_key(entity: t.Union[Scenario, Cycle, Sequence, DataNode]): + val: t.Any = "Z" # we compare only strings if isinstance(entity, types): if isinstance(entity, Cycle): - the_col = "creation_date" + 
the_col = "creation_date" if col == "creation_date" else None the_fn = None else: the_col = col the_fn = col_fn - try: - val = attrgetter(the_fn or the_col)(entity) - if the_fn: - val = val() - except AttributeError as e: - if _is_debugging(): - _warn(f"sort_key({entity.id}):", e) - val = "" - else: - val = "" - return val.isoformat() if isinstance(val, (datetime, date)) else str(val) + if the_col: + try: + val = attrgetter(the_fn or the_col)(entity) + if the_fn: + val = val(*params) if params else val() + except Exception as e: + if _is_debugging(): + _warn(f"sort_key({entity.id}):", e) + return val.isoformat() if isinstance(val, (datetime, date, time)) else str(val) return sort_key @@ -450,9 +449,10 @@ def get(self): attr.get_property(), attr.get_type(), self.get_enums().get(attr.get_property()), + [p.value for p in attr.get_params() or []], ) if self.full_desc() - else (attr.label, attr.get_property()) + else (attr.label, attr.get_property(), [p.value for p in attr.get_params() or []]) for attr in f_list ] ) @@ -567,6 +567,9 @@ class _GuiCoreDatanodeProperties(_GuiCoreProperties): _GuiCorePropDesc(DataNodeFilter("Last edit date", datetime, "last_edit_date"), for_sort=True), _GuiCorePropDesc(DataNodeFilter("Expiration date", datetime, "expiration_date"), extended=True, for_sort=True), _GuiCorePropDesc(DataNodeFilter("Expired", bool, "is_expired"), extended=True), + _GuiCorePropDesc( + DataNodeFilter("Rank", int, "_get_rank()", [ParamType.ScenarioConfigId]), for_sort=True + ), ] __DN_VALIDITY = None diff --git a/taipy/gui_core/_context.py b/taipy/gui_core/_context.py index 739929ec5e..8928d589af 100644 --- a/taipy/gui_core/_context.py +++ b/taipy/gui_core/_context.py @@ -71,7 +71,7 @@ _invoke_action, ) from ._utils import _ClientStatus -from .filters import CustomScenarioFilter +from .filters import CustomScenarioFilter, ParamType class _GuiCoreContext(CoreEventConsumerBase): @@ -697,6 +697,25 @@ def get_filtered_datanode_list( # remove empty cycles return [e for 
e in filtered_list if isinstance(e, DataNode) or (isinstance(e, (tuple, list)) and len(e[2]))] + @staticmethod + def _get_sort_params(params: t.Optional[t.List[t.Any]] = None, parent: t.Optional[Scenario] = None): + args: t.Optional[t.List[t.Any]] = None + if params: + args = [] + for param in params: + if param == ParamType.ScenarioConfigId.value: + args.append( + parent.config_id + if isinstance(parent, Scenario) + else next(filter(lambda id: id != "default", iter(Config.scenarios)), None) # type: ignore[arg-type] + ) + elif param == ParamType.ScenarioId.value: + args.append(parent.id if isinstance(parent, Scenario) else None) + else: + args.append(None) + return args + + def get_sorted_datanode_list( self, entities: t.Union[ @@ -704,6 +723,7 @@ def get_sorted_datanode_list( ], sorts: t.Optional[t.List[t.Dict[str, t.Any]]], adapt_dn=False, + parent: t.Optional[Scenario] = None, ): if not entities: return entities @@ -712,7 +732,10 @@ def get_sorted_datanode_list( for sd in reversed(sorts): col = sd.get("col", "") order = sd.get("order", True) - sorted_list = sorted(sorted_list, key=_get_entity_property(col, DataNode), reverse=not order) + args = self._get_sort_params(t.cast(t.List[int], sd.get("params")), parent) + sorted_list = sorted( + sorted_list, key=_get_entity_property(col, DataNode, params=args), reverse=not order + ) else: sorted_list = entities return [self.data_node_adapter(e, sorts, adapt_dn) for e in sorted_list] @@ -731,26 +754,39 @@ def get_datanodes_tree( sorts: t.Optional[t.List[t.Dict[str, t.Any]]], ): self.__lazy_start() - base_list = [] + base_list: t.List[t.Union[Cycle, Scenario, DataNode]] = [] + parent: t.Optional[Scenario] = None with self.lock: self.__do_datanodes_tree() if datanodes is None: if scenarios is None: - base_list = (self.data_nodes_by_owner or {}).get(None, []) + ( - self.get_scenarios(None, None, None) or [] - ) + tree: t.List[t.Union[Cycle, Scenario]] = [] + with self.lock: + # always needed to get scenarios for a cycle in 
cycle_adapter + if self.scenario_by_cycle is None: + self.scenario_by_cycle = get_cycles_scenarios() + for cycle, c_scenarios in self.scenario_by_cycle.items(): + if cycle is None: + tree.extend(c_scenarios) + else: + tree.append(cycle) + base_list = (self.data_nodes_by_owner or {}).get(None, []) + tree else: if isinstance(scenarios, (list, tuple)) and len(scenarios) > 1: base_list = list(scenarios) else: + parent = ( + scenarios[0] + if scenarios and isinstance(scenarios, (list, tuple)) + else t.cast(Scenario, scenarios) + ) if self.data_nodes_by_owner: - owners = scenarios if isinstance(scenarios, (list, tuple)) else [scenarios] - base_list = [d for owner in owners for d in (self.data_nodes_by_owner).get(owner.id, [])] + base_list = t.cast(list, self.data_nodes_by_owner.get(parent.id, [])) else: base_list = [] else: - base_list = datanodes - adapted_list = self.get_sorted_datanode_list(t.cast(list, base_list), sorts) + base_list = t.cast(list, datanodes) + adapted_list = self.get_sorted_datanode_list(t.cast(list, base_list), sorts, parent=parent) return self.get_filtered_datanode_list(t.cast(list, adapted_list), filters) def data_node_adapter( @@ -763,8 +799,12 @@ def data_node_adapter( if isinstance(data, tuple): raise NotImplementedError if isinstance(data, list): - if data[2] and isinstance(t.cast(list, data[2])[0], (Cycle, Scenario, Sequence, DataNode)): - data[2] = self.get_sorted_datanode_list(t.cast(list, data[2]), sorts, False) + if ( + data[2] + and (parent := t.cast(Scenario, t.cast(list, data[2])[0])) + and isinstance(parent, (Cycle, Scenario, Sequence, DataNode)) + ): + data[2] = self.get_sorted_datanode_list(t.cast(list, data[2]), sorts, False, parent=parent) return data try: if hasattr(data, "id") and is_readable(data.id) and core_get(data.id) is not None: @@ -779,14 +819,15 @@ def data_node_adapter( self.__do_datanodes_tree() if self.data_nodes_by_owner: if isinstance(data, Cycle): + scenarios = (self.scenario_by_cycle or {}).get(data, []) return [ 
data.id, data.get_simple_label(), self.get_sorted_datanode_list( - self.data_nodes_by_owner.get(data.id, []) - + (self.scenario_by_cycle or {}).get(data, []), + self.data_nodes_by_owner.get(data.id, []) + scenarios, sorts, False, + parent=scenarios[0] if scenarios else None, ), _EntityType.CYCLE.value, False, @@ -799,6 +840,7 @@ def data_node_adapter( t.cast(list, self.data_nodes_by_owner.get(data.id, []) + list(data.sequences.values())), sorts, False, + parent=data, ), _EntityType.SCENARIO.value, data.is_primary, @@ -808,7 +850,14 @@ def data_node_adapter( return [ data.id, data.get_simple_label(), - self.get_sorted_datanode_list(datanodes, sorts, False), + self.get_sorted_datanode_list( + datanodes, + sorts, + False, + parent=t.cast( + Scenario, core_get(t.cast(ScenarioId, data.owner_id)) if data.owner_id else None + ), + ), _EntityType.SEQUENCE.value, ] except Exception as e: @@ -1050,7 +1099,7 @@ def update_data(self, state: State, id: str, payload: t.Dict[str, str]): if data.get("type") == "float" else data.get("value"), editor_id=self.gui._get_client_id(), - comment=t.cast(dict, data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT)), + comment=t.cast(str, data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT)), ) _GuiCoreContext.__assign_var(state, error_var, "") except Exception as e: @@ -1135,9 +1184,11 @@ def tabular_data_edit(self, state: State, var_name: str, payload: dict): # noqa "Error updating data node tabular value: type does not support at[] indexer.", ) if new_data is not None: - datanode.write(new_data, - editor_id=self.gui._get_client_id(), - comment=user_data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT)) + datanode.write( + new_data, + editor_id=self.gui._get_client_id(), + comment=user_data.get(_GuiCoreContext.__PROP_ENTITY_COMMENT), + ) _GuiCoreContext.__assign_var(state, error_var, "") except Exception as e: _GuiCoreContext.__assign_var(state, error_var, f"Error updating data node tabular value. 
{e}") @@ -1250,7 +1301,7 @@ def on_file_action(self, state: State, id: str, payload: t.Dict[str, t.Any]): act_payload.get("path", ""), t.cast(t.Callable[[str, t.Any], bool], checker) if callable(checker) else None, editor_id=self.gui._get_client_id(), - comment=None + comment=None, ) ): state.assign(error_id, f"Data unavailable: {reason.reasons}") diff --git a/taipy/gui_core/filters.py b/taipy/gui_core/filters.py index 7a299f8f35..defe352146 100644 --- a/taipy/gui_core/filters.py +++ b/taipy/gui_core/filters.py @@ -12,11 +12,17 @@ import typing as t from dataclasses import dataclass from datetime import date, datetime +from enum import Enum from taipy.core import Scenario from taipy.gui.gui import _DoNotUpdate +class ParamType(Enum): + ScenarioConfigId = 0 + ScenarioId = 1 + + @dataclass class _Filter(_DoNotUpdate): label: str @@ -36,6 +42,9 @@ def get_type(self): return "str" return "any" + def get_params(self) -> t.Optional[t.List[ParamType]]: + return None + @dataclass class ScenarioFilter(_Filter): @@ -96,6 +105,10 @@ class DataNodeFilter(_Filter): """ property_id: str + params: t.Optional[t.List[ParamType]] = None def get_property(self): return self.property_id + + def get_params(self): + return self.params diff --git a/tests/core/config/test_scenario_config.py b/tests/core/config/test_scenario_config.py index 0b58942f1b..9b31ed4b45 100644 --- a/tests/core/config/test_scenario_config.py +++ b/tests/core/config/test_scenario_config.py @@ -99,7 +99,7 @@ def test_scenario_creation(): dn_config_4 = Config.configure_data_node("dn4") task_config_1 = Config.configure_task("task1", sum, [dn_config_1, dn_config_2], dn_config_3) task_config_2 = Config.configure_task("task2", print, dn_config_3) - scenario = Config.configure_scenario( + scenario_cfg = Config.configure_scenario( "scenarios1", [task_config_1, task_config_2], [dn_config_4], @@ -107,10 +107,69 @@ def test_scenario_creation(): sequences={"sequence": []}, ) - assert list(Config.scenarios) == ["default", 
scenario.id] + assert list(Config.scenarios.keys()) == ["default", scenario_cfg.id] scenario2 = Config.configure_scenario("scenarios2", [task_config_1], frequency=Frequency.MONTHLY) - assert list(Config.scenarios) == ["default", scenario.id, scenario2.id] + assert list(Config.scenarios.keys()) == ["default", scenario_cfg.id, scenario2.id] + + +def test_datanode_config_ranks(): + dn_config_1 = Config.configure_data_node("dn1") + dn_config_2 = Config.configure_data_node("dn2") + dn_config_3 = Config.configure_data_node("dn3") + dn_config_4 = Config.configure_data_node("dn4") + dn_config_5 = Config.configure_data_node("dn5") + dn_config_6 = Config.configure_data_node("dn6") + + task_config_1 = Config.configure_task("task1", sum, dn_config_1, dn_config_2) + task_config_2 = Config.configure_task("task2", sum, dn_config_2, dn_config_3) + task_config_3 = Config.configure_task("task3", sum, [dn_config_1, dn_config_2], dn_config_3) + task_config_4 = Config.configure_task("task4", sum, dn_config_3, [dn_config_4, dn_config_5]) + task_config_5 = Config.configure_task("task5", sum, dn_config_5, dn_config_6) + + # s1 additional: dn3 + # s1 dag: dn1 -> dn2 + Config.configure_scenario("s1", [task_config_1],[dn_config_3]) + # s2 additional: dn4 + # s2 dag: dn2 -> dn3 + Config.configure_scenario("s2", [task_config_2],[dn_config_4]) + # s3 additional: None + # s3 dag: dn1 -> dn2 -> dn3 + Config.configure_scenario("s3", [task_config_1, task_config_2]) + # s4 additional: None + # s4 dag: dn1 -- --> dn4 + # \ / + # |----> dn3 ---| + # / \ + # dn2 -- --> dn5 ---> dn6 + Config.configure_scenario("s4", [task_config_3, task_config_4, task_config_5]) + + assert len(dn_config_1._ranks) == 3 + assert dn_config_1._ranks["s1"] == 1 + assert dn_config_1._ranks["s3"] == 1 + assert dn_config_1._ranks["s4"] == 1 + + assert len(dn_config_2._ranks) == 4 + assert dn_config_2._ranks["s1"] == 2 + assert dn_config_2._ranks["s2"] == 1 + assert dn_config_2._ranks["s3"] == 2 + assert dn_config_2._ranks["s4"] 
== 1 + + assert len(dn_config_3._ranks) == 4 + assert dn_config_3._ranks["s1"] == 0 + assert dn_config_3._ranks["s2"] == 2 + assert dn_config_3._ranks["s3"] == 3 + assert dn_config_3._ranks["s4"] == 2 + + assert len(dn_config_4._ranks) == 2 + assert dn_config_4._ranks["s2"] == 0 + assert dn_config_4._ranks["s4"] == 3 + + assert len(dn_config_5._ranks) == 1 + assert dn_config_5._ranks["s4"] == 3 + + assert len(dn_config_6._ranks) == 1 + assert dn_config_6._ranks["s4"] == 4 def test_scenario_count(): diff --git a/tests/core/data/test_data_node.py b/tests/core/data/test_data_node.py index 5442b63065..5101177644 100644 --- a/tests/core/data/test_data_node.py +++ b/tests/core/data/test_data_node.py @@ -83,6 +83,26 @@ def test_create_with_default_values(self): assert not dn.is_ready_for_reading assert len(dn.properties) == 0 + def test_create_with_ranks(self): + # Test _rank is propagated from the config + cfg = Config.configure_data_node("foo_bar") + cfg._ranks = {"A": 1, "B": 2, "C": 0} + + dn = DataNode("foo_bar") + assert dn.config_id == "foo_bar" + assert dn.scope == Scope.SCENARIO + assert dn.id is not None + assert dn.name is None + assert dn.owner_id is None + assert dn.parent_ids == set() + assert dn.last_edit_date is None + assert dn.job_ids == [] + assert not dn.is_ready_for_reading + assert len(dn.properties) == 0 + assert dn._get_rank("A") == 1 + assert dn._get_rank("B") == 2 + assert dn._get_rank("C") == 0 + def test_is_up_to_date_when_not_written(self): dn_confg_1 = Config.configure_in_memory_data_node("dn_1", default_data="a") dn_confg_2 = Config.configure_in_memory_data_node("dn_2")