Skip to content

Commit

Permalink
Bump [skip actions]
Browse files Browse the repository at this point in the history
  • Loading branch information
bjhardcastle committed May 22, 2024
1 parent bc32c6c commit 5358309
Show file tree
Hide file tree
Showing 10 changed files with 71 additions and 38 deletions.
8 changes: 4 additions & 4 deletions pdm.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "npc_sessions"
version = "0.0.222"
version = "0.0.223"
description = "Tools and interfaces for working with behavior and ephys sessions from the Mindscope Neuropixels team, in the cloud."
authors = [
{ name = "Ben Hardcastle", email = "[email protected]" },
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ ndx-events==0.2.0
ndx-pose==0.1.1
neo==0.13.0
nest-asyncio==1.6.0
npc-ephys==0.1.14
npc-ephys==0.1.15
npc-io==0.1.26
npc-lims==0.1.150
npc-mvr==0.1.6
Expand Down
4 changes: 3 additions & 1 deletion src/npc_sessions/scripts/write_notebooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,9 @@ def write_notebooks(
reversed: bool = False,
) -> None:
t0 = time.time()
session_infos = utils.get_session_infos(session_type=session_type, reversed=reversed)
session_infos = utils.get_session_infos(
session_type=session_type, reversed=reversed
)

helper_opts = {
"version": version,
Expand Down
4 changes: 3 additions & 1 deletion src/npc_sessions/scripts/write_sessions_to_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,9 @@ def write_sessions_to_cache(
reversed: bool = False,
) -> None:
t0 = time.time()
session_infos = utils.get_session_infos(session_type=session_type, reversed=reversed)
session_infos = utils.get_session_infos(
session_type=session_type, reversed=reversed
)

helper_opts = {
"skip_existing": skip_existing,
Expand Down
4 changes: 3 additions & 1 deletion src/npc_sessions/scripts/write_sessions_to_nwb.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,9 @@ def write_sessions_to_cache(
reversed: bool = False,
) -> None:
t0 = time.time()
session_infos = utils.get_session_infos(session_type=session_type, reversed=reversed)
session_infos = utils.get_session_infos(
session_type=session_type, reversed=reversed
)

helper_opts = {
"skip_existing": skip_existing,
Expand Down
25 changes: 18 additions & 7 deletions src/npc_sessions/sessions.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ class DynamicRoutingSession:
excluded_stim_file_names = ["DynamicRouting1_670248_20230802_120703"]
"""File names (or substrings) that should never be considered as valid stim
files, for example they are known to be corrupt and cannot be opened"""

mvr_to_nwb_camera_name = {
"eye": "eye_camera",
"face": "front_camera",
Expand Down Expand Up @@ -1129,14 +1129,19 @@ def get_epoch_record(stim_file: npc_io.PathLike) -> dict[str, Any]:
interval_names = []
if "RFMapping" in stim_name:
interval_names.extend(
[utils.get_taskcontrol_intervals_table_name(n) for n in ("VisRFMapping", "AudRFMapping")]
[
utils.get_taskcontrol_intervals_table_name(n)
for n in ("VisRFMapping", "AudRFMapping")
]
)
else:
interval_names.append(utils.get_taskcontrol_intervals_table_name(stim_name))
interval_names.append(
utils.get_taskcontrol_intervals_table_name(stim_name)
)
if self.task_stim_name in stim_file.name and self.is_task:
interval_names.append("performance")
interval_names = list(dict.fromkeys(interval_names).keys())

invalid_times_notes: list[str] = []
if not self.is_sync:
# only one stim, so we use its frame times as recorded on stim computer
Expand Down Expand Up @@ -2413,18 +2418,24 @@ def probe_letters_with_sorted_data(self) -> tuple[npc_session.ProbeRecord, ...]:
if not self.is_sorted:
return ()
return self.sorted_data.probes

@property
def probe_letters_skipped_by_sorting(self) -> tuple[npc_session.ProbeRecord, ...]:
if not self.is_sorted:
return ()
return tuple(npc_session.ProbeRecord(p) for p in 'ABCDEF' if p not in self.sorted_data.probes)
return tuple(
npc_session.ProbeRecord(p)
for p in "ABCDEF"
if p not in self.sorted_data.probes
)

@property
def probe_letters_to_skip(self) -> tuple[npc_session.ProbeRecord, ...]:
"""Includes probes skipped by sorting"""
if (v := getattr(self, "_probe_letters_to_skip", None)) is not None:
probe_letters_to_skip = tuple(npc_session.ProbeRecord(letter) for letter in v)
probe_letters_to_skip = tuple(
npc_session.ProbeRecord(letter) for letter in v
)
else:
probe_letters_to_skip = ()
return probe_letters_to_skip + self.probe_letters_skipped_by_sorting
Expand Down
8 changes: 5 additions & 3 deletions src/npc_sessions/trials/TaskControl/DynamicRouting1.py
Original file line number Diff line number Diff line change
Expand Up @@ -761,16 +761,18 @@ def parse_wavelengths(
if not devices:
return (np.nan,) # type: ignore
if devices in ("led_1", "led_2"):
return (470, ) # behavior box cannulae test experiments
return (470,) # behavior box cannulae test experiments
try:
value = int(devices.split("_")[-1])
except ValueError as exc:
raise ValueError(
f"Invalid opto devices string (expected 'laser_488' format): {devices}"
) from exc
else:
assert 300 < value < 1000, f"Unexpected wavelength parsed from `trialOptoDevice`: {value}"
return (value, )
assert (
300 < value < 1000
), f"Unexpected wavelength parsed from `trialOptoDevice`: {value}"
return (value,)
result: tuple[int | np.floating, ...] = ()
for device in devices:
result = result + parse_wavelengths(device)
Expand Down
48 changes: 30 additions & 18 deletions src/npc_sessions/trials/TaskControl/OptoTagging.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@

from __future__ import annotations

from collections.abc import Iterable
import datetime
from collections.abc import Iterable

import DynamicRoutingTask.TaskUtils
import npc_io
Expand Down Expand Up @@ -46,11 +46,11 @@ def __init__(
super().__init__(
hdf5, sync, ephys_recording_dirs=ephys_recording_dirs, **kwargs
)

@npc_io.cached_property
def _datetime(self) -> datetime.datetime:
return npc_session.DatetimeRecord(self._hdf5['startTime'].asstr()[()]).dt
return npc_session.DatetimeRecord(self._hdf5["startTime"].asstr()[()]).dt

@npc_io.cached_property
def _device(self) -> str:
"""If multiple devices were used, this should be ignored."""
Expand Down Expand Up @@ -79,20 +79,27 @@ def get_trial_opto_device(self, trial_idx: int) -> str:
return devices[0]

def assert_is_single_device(self) -> None:
assert self._hdf5.get("trialOptoDevice") is None, f"Multiple optotagging devices found for {self._datetime} session - update `get_trial_opto_device` method to handle multiple devices"
assert (
self._hdf5.get("trialOptoDevice") is None
), f"Multiple optotagging devices found for {self._datetime} session - update `get_trial_opto_device` method to handle multiple devices"

@npc_io.cached_property
def _trial_opto_device(self) -> tuple[str, ...]:
## for multiple devices:
#return tuple(self.get_trial_opto_device(idx) for idx in range(self._len))
# return tuple(self.get_trial_opto_device(idx) for idx in range(self._len))
self.assert_is_single_device()
## for single device:
return (self._device,) * self._len

def get_stim_recordings_from_sync(self, line_label: str = "laser_488") -> tuple[npc_samstim.StimRecording, ...] | None:

def get_stim_recordings_from_sync(
self, line_label: str = "laser_488"
) -> tuple[npc_samstim.StimRecording, ...] | None:
try:
recordings = npc_samstim.get_stim_latencies_from_sync(
self._hdf5, self._sync, waveform_type="opto", line_index_or_label=npc_sync.get_sync_line_for_stim_onset(line_label)
self._hdf5,
self._sync,
waveform_type="opto",
line_index_or_label=npc_sync.get_sync_line_for_stim_onset(line_label),
)
except IndexError:
return None
Expand All @@ -101,15 +108,15 @@ def get_stim_recordings_from_sync(self, line_label: str = "laser_488") -> tuple[
), f"{recordings.count(None) = } encountered: expected a recording of stim onset for every trial"
# TODO check this works for all older sessions
return tuple(_ for _ in recordings if _ is not None)

@npc_io.cached_property
def _stim_recordings_488(self) -> tuple[npc_samstim.StimRecording, ...] | None:
return self.get_stim_recordings_from_sync("laser_488")

@npc_io.cached_property
def _stim_recordings_633(self) -> tuple[npc_samstim.StimRecording, ...] | None:
return self.get_stim_recordings_from_sync("laser_633")

@npc_io.cached_property
def _stim_recordings(self) -> tuple[npc_samstim.StimRecording, ...]:
rec_488 = self._stim_recordings_488 or ()
Expand All @@ -124,12 +131,12 @@ def _stim_recordings(self) -> tuple[npc_samstim.StimRecording, ...]:
else:
raise NotImplementedError(f"Unexpected opto device: {device}")
return tuple(rec)

@npc_io.cached_property
def _len(self) -> int:
"""Number of trials"""
return len(self.trial_index)

@npc_io.cached_property
def trial_index(self) -> npt.NDArray[np.int32]:
"""0-indexed"""
Expand Down Expand Up @@ -197,7 +204,7 @@ def _location(self) -> npt.NDArray[np.str_]:
[label[np.all(xy == v, axis=1)][0] for v in self._bregma_xy], dtype=str
)[self.trial_index]
raise ValueError("No known optotagging location data found")

@npc_io.cached_property
def power(self) -> npt.NDArray[np.float64]:
calibration_data = self._hdf5["optoPowerCalibrationData"]
Expand Down Expand Up @@ -225,10 +232,15 @@ def parse_wavelength(device: str) -> int:
f"Invalid opto device string (expected 'laser_488' format): {device}"
) from exc
else:
assert 300 < value < 1000, f"Unexpected wavelength parsed from `trialOptoDevice`: {value}"
assert (
300 < value < 1000
), f"Unexpected wavelength parsed from `trialOptoDevice`: {value}"
return value
return np.array([parse_wavelength(device) for device in self._trial_opto_device])


return np.array(
[parse_wavelength(device) for device in self._trial_opto_device]
)


if __name__ == "__main__":
import doctest
Expand Down
4 changes: 3 additions & 1 deletion src/npc_sessions/utils/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,9 @@ def write_nwb_component_to_cache(
else:
df = _remove_pynwb_containers_from_table(component)
if df.empty:
logger.debug(f"{session_id} {component_name} is empty - but we're writing it to cache so we don't have to check again")
logger.debug(
f"{session_id} {component_name} is empty - but we're writing it to cache so we don't have to check again"
)
df = add_session_metadata(df, session_id)
_write_df_to_cache(
session_id=session_id,
Expand Down

0 comments on commit 5358309

Please sign in to comment.