Merge #4616
4616: Reducing qcodes import times r=jenshnielsen a=edumur

Hi all,

Following this discussion #4565 and this issue #4543, I am proposing this pull request to work on reducing the qcodes loading time.

I have started with pandas.
I tried to propagate what `@jenshnielsen` did to make the pandas import lazy.
I am not sure I have succeeded, since I still see pandas being imported when running `importtime-waterfall`.
I would gladly improve the pull request if someone helps me with this.

If this becomes a successful endeavor, I will continue by making other imports lazy.
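For reference, below is a minimal, self-contained sketch of the lazy-import pattern this PR is trying to apply (the module and function names are illustrative only, not actual qcodes code): pandas is referenced under `TYPE_CHECKING` for annotations and imported inside the function body at runtime. Whether the top-level import is really gone can then be checked with `importtime-waterfall` or `python -X importtime -c "import qcodes"`.

```python
from __future__ import annotations

from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Seen only by type checkers; not executed at import time,
    # so it does not slow down `import qcodes`.
    import pandas as pd


def to_dataframe(data: dict[str, Any]) -> pd.DataFrame:
    # Deferred import: pandas is loaded on the first call only and is
    # served from sys.modules on every later call.
    import pandas as pd  # pylint: disable=import-outside-toplevel

    return pd.DataFrame(data)
```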

Co-authored-by: Etienne Dumur <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Jens H. Nielsen <[email protected]>
4 people authored Oct 4, 2022
2 parents ce454db + f8718ed commit 77f1609
Showing 20 changed files with 220 additions and 145 deletions.
4 changes: 4 additions & 0 deletions docs/changes/newsfragments/4616.breaking
@@ -0,0 +1,4 @@
The `test_instrument` and `test_instruments` functions are no longer available
from the top-level qcodes namespace.
If you require these functions, they must be imported from
`qcodes.instrument_drivers.test`.
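With this change, the old and new import paths look like this (a usage sketch based on the lines above):

```python
# No longer available:
# from qcodes import test_instrument, test_instruments

# Import from the instrument driver test module instead:
from qcodes.instrument_drivers.test import test_instrument, test_instruments
```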
1 change: 0 additions & 1 deletion qcodes/__init__.py
@@ -55,7 +55,6 @@
VisaInstrument,
find_or_create_instrument,
)
from qcodes.instrument_drivers.test import test_instrument, test_instruments
from qcodes.monitor import Monitor
from qcodes.parameters import (
ArrayParameter,
23 changes: 14 additions & 9 deletions qcodes/data/data_set.py
@@ -1,14 +1,17 @@
"""DataSet class and factory functions."""
from __future__ import annotations

import logging
import time
from collections import OrderedDict
from copy import deepcopy
from traceback import format_exc
from typing import Any, Callable, Dict, List, Optional
from typing import TYPE_CHECKING, Any, Callable, Dict

import numpy as np
import xarray as xr

if TYPE_CHECKING:
import xarray as xr

from qcodes.data.data_array import (
DataArray,
@@ -165,7 +168,7 @@ class DataSet(DelegateAttributes):
default_formatter = GNUPlotFormat()
location_provider = FormatLocation()

background_functions: Dict[str, Callable[..., Any]] = OrderedDict()
background_functions: dict[str, Callable[..., Any]] = OrderedDict()
"""
The value ``fn`` is a callable accepting no
arguments, and ``key`` is a name to identify the function and help
@@ -423,7 +426,7 @@ def store(self, loop_indices, ids_values):
# else:
# log.debug('.store method: This is not the right time to write')

def default_parameter_name(self, paramname: Optional[str] = None) -> Optional[str]:
def default_parameter_name(self, paramname: str | None = None) -> str | None:
"""Return name of default parameter for plotting
The default parameter is determined by looking into
@@ -694,7 +697,7 @@ def to_xarray(self) -> xr.Dataset:
return qcodes_dataset_to_xarray_dataset(self)

@classmethod
def from_xarray(cls, xarray_dataset: xr.Dataset) -> 'DataSet':
def from_xarray(cls, xarray_dataset: xr.Dataset) -> DataSet:
""" Convert the dataset to an xarray DataSet """
return xarray_dataset_to_qcodes_dataset(xarray_dataset)

@@ -718,7 +721,7 @@ def _indent(self, s):

def dataset_to_xarray_dictionary(
data_set: DataSet, include_metadata: bool = True
) -> Dict[str, Any]:
) -> dict[str, Any]:
"""Convert QcodesDataSet to dictionary.
Args:
Expand All @@ -729,7 +732,7 @@ def dataset_to_xarray_dictionary(
Returns:
Dictionary containing the serialized data.
"""
data_dictionary: Dict[str, Any] = {
data_dictionary: dict[str, Any] = {
"dims": {},
"attrs": {},
"coords": {},
@@ -763,13 +766,15 @@ def qcodes_dataset_to_xarray_dataset(
data_set: DataSet,
) -> xr.Dataset:
""" Convert QCoDeS gridded dataset to xarray dataset """
import xarray as xr

xarray_dictionary = dataset_to_xarray_dictionary(data_set)
xarray_dataset = xr.Dataset.from_dict(xarray_dictionary)
return xarray_dataset


def xarray_dictionary_to_dataset(
xarray_dictionary: Dict[str, Any],
xarray_dictionary: dict[str, Any],
) -> DataSet:
"""Convert xarray dictionary to Qcodes DataSet.
@@ -782,7 +787,7 @@ def xarray_dictionary_to_dataset(
dataset = new_data()
dataset.metadata.update(xarray_dictionary["attrs"])

grid_coords: List[Any] = []
grid_coords: list[Any] = []
set_array_names = []

coordinate_names = list(xarray_dictionary["data_vars"].values())[0]["dims"]
13 changes: 5 additions & 8 deletions qcodes/dataset/dond/do_0d.py
@@ -1,25 +1,22 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING

from qcodes import config
from qcodes.parameters import ParameterBase

from ..descriptions.detect_shapes import detect_shape_of_measurement
from ..descriptions.versioning.rundescribertypes import Shapes
from ..experiment_container import Experiment
from ..measurements import Measurement
from ..threading import process_params_meas
from .do_nd_utils import (
AxesTupleListWithDataSet,
ParamMeasT,
_handle_plotting,
_register_parameters,
_set_write_period,
)
from .do_nd_utils import _handle_plotting, _register_parameters, _set_write_period

LOG = logging.getLogger(__name__)

if TYPE_CHECKING:
from ..descriptions.versioning.rundescribertypes import Shapes
from .do_nd_utils import AxesTupleListWithDataSet, ParamMeasT

def do0d(
*param_meas: ParamMeasT,
16 changes: 10 additions & 6 deletions qcodes/dataset/dond/do_1d.py
@@ -3,20 +3,15 @@
import logging
import sys
import time
from typing import Sequence
from typing import TYPE_CHECKING, Sequence

import numpy as np
from tqdm.auto import tqdm

from qcodes import config
from qcodes.dataset.descriptions.detect_shapes import detect_shape_of_measurement
from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes
from qcodes.dataset.dond.do_nd_utils import (
ActionsT,
AxesTupleListWithDataSet,
BreakConditionInterrupt,
BreakConditionT,
ParamMeasT,
_catch_interrupts,
_handle_plotting,
_register_actions,
@@ -34,6 +29,15 @@

LOG = logging.getLogger(__name__)

if TYPE_CHECKING:
from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes
from qcodes.dataset.dond.do_nd_utils import (
ActionsT,
AxesTupleListWithDataSet,
BreakConditionT,
ParamMeasT,
)


def do1d(
param_set: ParameterBase,
15 changes: 9 additions & 6 deletions qcodes/dataset/dond/do_2d.py
@@ -3,20 +3,15 @@
import logging
import sys
import time
from typing import Sequence
from typing import TYPE_CHECKING, Sequence

import numpy as np
from tqdm.auto import tqdm

from qcodes import config
from qcodes.dataset.descriptions.detect_shapes import detect_shape_of_measurement
from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes
from qcodes.dataset.dond.do_nd_utils import (
ActionsT,
AxesTupleListWithDataSet,
BreakConditionInterrupt,
BreakConditionT,
ParamMeasT,
_catch_interrupts,
_handle_plotting,
_register_actions,
@@ -34,6 +29,14 @@

LOG = logging.getLogger(__name__)

if TYPE_CHECKING:
from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes
from qcodes.dataset.dond.do_nd_utils import (
ActionsT,
AxesTupleListWithDataSet,
BreakConditionT,
ParamMeasT,
)

def do2d(
param_set1: ParameterBase,
18 changes: 11 additions & 7 deletions qcodes/dataset/dond/do_nd.py
@@ -5,22 +5,16 @@
import time
from contextlib import ExitStack
from dataclasses import dataclass
from typing import Any, Mapping, Sequence, Tuple, Union, cast
from typing import TYPE_CHECKING, Any, Mapping, Sequence, Tuple, Union, cast

import numpy as np
from tqdm.auto import tqdm
from typing_extensions import TypedDict

from qcodes import config
from qcodes.dataset.descriptions.detect_shapes import detect_shape_of_measurement
from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes
from qcodes.dataset.dond.do_nd_utils import (
ActionsT,
AxesTupleListWithDataSet,
BreakConditionInterrupt,
BreakConditionT,
MultiAxesTupleListWithDataSet,
ParamMeasT,
_catch_interrupts,
_handle_plotting,
_register_actions,
@@ -41,6 +35,16 @@

LOG = logging.getLogger(__name__)

if TYPE_CHECKING:
from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes
from qcodes.dataset.dond.do_nd_utils import (
ActionsT,
AxesTupleListWithDataSet,
BreakConditionT,
MultiAxesTupleListWithDataSet,
ParamMeasT,
)

SweepVarType = Any


34 changes: 23 additions & 11 deletions qcodes/dataset/dond/do_nd_utils.py
@@ -1,35 +1,47 @@
from __future__ import annotations

from contextlib import contextmanager
from typing import Callable, Iterator, List, Optional, Sequence, Tuple, Union

import matplotlib.axes
import matplotlib.colorbar
from typing import (
TYPE_CHECKING,
Callable,
Iterator,
List,
Optional,
Sequence,
Tuple,
Union,
)

if TYPE_CHECKING:
import matplotlib.axes
import matplotlib.colorbar

from qcodes.dataset.data_set_protocol import DataSetProtocol
from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes
from qcodes.dataset.measurements import Measurement
from qcodes.dataset.plotting import plot_and_save_image
from qcodes.parameters import MultiParameter, ParameterBase

if TYPE_CHECKING:
from qcodes.dataset.descriptions.versioning.rundescribertypes import Shapes

ActionsT = Sequence[Callable[[], None]]
BreakConditionT = Callable[[], bool]

ParamMeasT = Union[ParameterBase, Callable[[], None]]

AxesTuple = Tuple[matplotlib.axes.Axes, matplotlib.colorbar.Colorbar]
AxesTuple = Tuple["matplotlib.axes.Axes", "matplotlib.colorbar.Colorbar"]
AxesTupleList = Tuple[
List[matplotlib.axes.Axes], List[Optional[matplotlib.colorbar.Colorbar]]
List["matplotlib.axes.Axes"], List[Optional["matplotlib.colorbar.Colorbar"]]
]
AxesTupleListWithDataSet = Tuple[
DataSetProtocol,
List[matplotlib.axes.Axes],
List[Optional[matplotlib.colorbar.Colorbar]],
List["matplotlib.axes.Axes"],
List[Optional["matplotlib.colorbar.Colorbar"]],
]
MultiAxesTupleListWithDataSet = Tuple[
Tuple[DataSetProtocol, ...],
Tuple[List[matplotlib.axes.Axes], ...],
Tuple[List[Optional[matplotlib.colorbar.Colorbar]], ...],
Tuple[List["matplotlib.axes.Axes"], ...],
Tuple[List[Optional["matplotlib.colorbar.Colorbar"]], ...],
]
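As a side note on the quoting above: `from __future__ import annotations` only defers evaluation of annotations, not of these module-level alias assignments, so the matplotlib names inside the `Tuple[...]` aliases are written as strings (forward references) to keep matplotlib out of the runtime import chain. A minimal standalone sketch of the same idea, with illustrative names:

```python
from __future__ import annotations

from typing import TYPE_CHECKING, Tuple

if TYPE_CHECKING:
    # Imported for type checkers only; never at runtime.
    import matplotlib.axes
    import matplotlib.colorbar

# String (forward-reference) arguments are stored unevaluated, so building
# this alias does not require matplotlib to be importable at runtime.
AxesTuple = Tuple["matplotlib.axes.Axes", "matplotlib.colorbar.Colorbar"]


def describe(axes_tuple: AxesTuple) -> str:
    ax, cbar = axes_tuple
    return f"{type(ax).__name__} with {type(cbar).__name__}"
```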


2 changes: 1 addition & 1 deletion qcodes/dataset/exporters/export_to_csv.py
@@ -22,7 +22,7 @@ def dataframe_to_csv(
single_file: bool = False,
single_file_name: str | None = None,
) -> None:
import pandas as pd
import pandas as pd # pylint: disable=import-outside-toplevel

dfs_to_save = list()
for parametername, df in dfdict.items():
6 changes: 3 additions & 3 deletions qcodes/dataset/exporters/export_to_pandas.py
@@ -21,7 +21,7 @@ def load_to_dataframe_dict(datadict: ParameterData) -> dict[str, pd.DataFrame]:


def load_to_concatenated_dataframe(datadict: ParameterData) -> pd.DataFrame:
import pandas as pd
import pandas as pd # pylint: disable=import-outside-toplevel

if not _same_setpoints(datadict):
warnings.warn(
Expand All @@ -40,7 +40,7 @@ def load_to_concatenated_dataframe(datadict: ParameterData) -> pd.DataFrame:
def _data_to_dataframe(
data: Mapping[str, np.ndarray], index: pd.Index | pd.MultiIndex | None
) -> pd.DataFrame:
import pandas as pd
import pandas as pd # pylint: disable=import-outside-toplevel
if len(data) == 0:
return pd.DataFrame()
dependent_col_name = list(data.keys())[0]
Expand All @@ -63,7 +63,7 @@ def _generate_pandas_index(
) -> pd.Index | pd.MultiIndex | None:
# the first element in the dict given by parameter_tree is always the dependent
# parameter and the index is therefore formed from the rest
import pandas as pd
import pandas as pd # pylint: disable=import-outside-toplevel
keys = list(data.keys())
if len(data) <= 1:
index = None
(Diffs for the remaining changed files are not shown.)
