Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/generic_loader' into generic_loader
Browse files Browse the repository at this point in the history
# Conflicts:
#	sasdata/data_io/io_base.py
  • Loading branch information
krzywon committed Feb 21, 2024
2 parents 21ae1f0 + ebfccb8 commit 9c9aff6
Show file tree
Hide file tree
Showing 5 changed files with 81 additions and 3 deletions.
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Data import/export requirements
h5py
lxml
lxml < 5.0

# Calculation
numpy
Expand Down
60 changes: 60 additions & 0 deletions sasdata/data/condition_meta.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
from dataclasses import dataclass
from typing import Union, Optional

import numpy as np

from sasdata.data_util.nxsunit import Converter, standardize_units

# Primitive types a single Scalar condition value may take.
SCALAR_TYPE = Union[float, int, str]
# Types accepted for compound values and uncertainties (including array-valued ones).
VALUE_TYPE = Union[str, float, list[str], np.ndarray]


class ConditionBase:
    """Marker base class for measurement-condition types.

    Currently empty; presumably a common interface for Scalar/Vector
    conditions will be hoisted here — TODO confirm intended contract.
    """
    pass


@dataclass
class Scalar:
    """A single named condition value with an optional unit and uncertainty.

    Instances are constructed with keyword arguments WITHOUT the leading
    underscore (``name=``, ``value=``, ``unit=``, ``uncertainty=``), matching
    the call site in ``Vector``.
    """
    # Condition name (e.g. 'temperature')
    _name: str
    # The condition value itself
    _value: SCALAR_TYPE
    # Unit string the value was recorded in, if any
    _unit: Optional[str]
    # Uncertainty associated with the value, if any
    _uncertainty: Optional[VALUE_TYPE]
    # Unit the caller wants the value expressed in (defaults to _unit)
    _desired_unit: Optional[str]

    def __init__(self, **kwargs):
        """Assign fields from un-prefixed keyword arguments.

        This hand-written ``__init__`` replaces the dataclass-generated one,
        so the fields must be assigned explicitly here.  The previous
        implementation forwarded ``**kwargs`` to ``object.__init__``, which
        raises ``TypeError`` for any non-empty kwargs and never set the
        fields at all.
        """
        self._name = kwargs.get('name', '')
        self._value = kwargs.get('value', 0.0)
        self._unit = kwargs.get('unit')
        self._uncertainty = kwargs.get('uncertainty')
        self._converter = Converter(self._unit)
        self._desired_unit = self._unit

    def convert(self, new_unit: str):
        """Convert the stored value to *new_unit* if the units are compatible."""
        if self.are_units_sensible(new_unit):
            self._desired_unit = new_unit
            # Bug fix: keep the converted value — the old code called the
            # converter and discarded its return, leaving _value unchanged.
            # Assumes Converter instances are callable and return the
            # converted value — confirm against nxsunit.Converter.
            self._value = self._converter(self._value, new_unit)
        else:
            # TODO: warn the user in a meaningful way (no Exceptions!)
            pass

    def are_units_sensible(self, units: str):
        """A check to see if the units passed to the method make sense based on the condition type used."""
        # No converter (falsy) means there is nothing to validate against,
        # so any unit is accepted.
        if self._converter:
            compatible = self._converter.get_compatible_units()
            std_units = standardize_units(units)
            # Element count must match, and each standardized unit must be
            # found within the corresponding compatible-unit group.
            if len(compatible) == len(std_units):
                for comp, unit in zip(compatible, std_units):
                    if unit not in comp:
                        return False
            else:
                return False
        return True


class Vector:
    """An ordered collection of Scalar conditions built from a nested mapping.

    ``vals`` maps each condition name to a dict that may carry 'value',
    'unit', and 'uncertainty' entries; missing entries fall back to
    0.0 / None / None respectively.
    """

    def __init__(self, vals: dict[str, dict[str, VALUE_TYPE]]):
        self.values: list[Scalar] = [
            Scalar(
                name=cond_name,
                value=details.get('value', 0.0),
                unit=details.get('unit', None),
                uncertainty=details.get('uncertainty', None),
            )
            for cond_name, details in vals.items()
        ]
17 changes: 17 additions & 0 deletions sasdata/data/data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
from dataclasses import dataclass

import numpy as np


@dataclass
class DataMixin:
    """Class for storing as-loaded data from any source."""
    # An array of dependent values (n-dimensional)
    dependent: np.ndarray
    # An array of signal values
    signal: np.ndarray
    # An array of uncertainties for the signal values where np.shape(noise) == np.shape(signal)
    noise: np.ndarray
    # An array of uncertainties for the dependent variables
    resolution: np.ndarray
    # Plottable dependent values
    # NOTE(review): the comment above has no associated field — the class
    # appears truncated; confirm whether a plottable-values field was intended.
2 changes: 1 addition & 1 deletion sasdata/data_io/importer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""
This is the base file importer class most importers should inherit from.
This is the base file importer class all importers should inherit from.
All generic functionality required for file import is built into this class.
"""

Expand Down
3 changes: 2 additions & 1 deletion sasdata/data_io/open.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from typing import Union, Optional

PATHLIKE = Union[str, Path, path]
FILELIKE = Union[BytesIO, FileIO, StringIO]


class CustomFileOpen:
Expand Down Expand Up @@ -45,7 +46,7 @@ class CustomFileOpen:
Closes any active file handles.
"""

def __init__(self, file: Union[PATHLIKE, BytesIO, FileIO, StringIO], mode: Optional[str] = 'rb',
def __init__(self, file: Union[PATHLIKE, FILELIKE], mode: Optional[str] = 'rb',
full_path: Optional[PATHLIKE] = None):
"""Create an instance of the file handler.
Expand Down

0 comments on commit 9c9aff6

Please sign in to comment.