From aeff4b789ec2b148563eb58ab23505712cf12647 Mon Sep 17 00:00:00 2001 From: Robert Ennis Date: Mon, 16 Sep 2024 22:30:43 +0200 Subject: [PATCH 1/7] minor typos/formatting --- alpha-lab/imu-transformations/index.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/alpha-lab/imu-transformations/index.md b/alpha-lab/imu-transformations/index.md index 0d158e347..d587bcdcb 100644 --- a/alpha-lab/imu-transformations/index.md +++ b/alpha-lab/imu-transformations/index.md @@ -32,7 +32,7 @@ As you work through this guide, you may want to check out the [Application Examp ## Rotation between the IMU and the World -The IMU data includes a description of how the IMU is rotated in relation to the world. Concretely, the IMU data contains quaternions that define a rotation transformation between the [the world coordinate system](http://docs.pupil-labs.com/neon/data-collection/data-streams/#movement-imu-data) and the IMU's local coordinate system at different points in time. +The IMU data includes a description of how the IMU is rotated in relation to the world. Concretely, the IMU data contains quaternions that define a rotation transformation between [the world coordinate system](http://docs.pupil-labs.com/neon/data-collection/data-streams/#movement-imu-data) and the IMU's local coordinate system at different points in time. The `transform_imu_to_world` function below demonstrates how to use these quaternions to transform data from the IMU's local coordinate system to the world coordinate system. @@ -75,7 +75,7 @@ Neutral orientation (i.e. an identity rotation in the quaternion) of the IMU wou ### Example: Acceleration in World Coordinates -The IMU’s translational acceleration data is given in the IMU's local coordinate system. To understand how the observer is accelerating through the world it can be helpful to transform the data into the world coordinate system: +The IMU’s translational acceleration data is given in the IMU's local coordinate system. To understand how the observer is accelerating through the world, it can be helpful to transform the data into the world coordinate system: ```python accelerations_in_world = transform_imu_to_world( @@ -121,7 +121,7 @@ def transform_scene_to_imu(coords_in_scene, translation_in_imu=np.array([0.0, -1 return coords_in_imu.T ``` -Combining the `transform_scene_to_imu` function with the `transform_imu_to_world` function allows us to go all the way from scene camera coordinate system to world coordinate system +Combining the `transform_scene_to_imu` function with the `transform_imu_to_world` function allows us to go all the way from the scene camera coordinate system to the world coordinate system. ```python def transform_scene_to_world(coords_in_scene, imu_quaternions, translation_in_imu=np.array([0.0, -1.3, -6.62])): @@ -131,10 +131,10 @@ def transform_scene_to_world(coords_in_scene, imu_quaternions, translation_in_im ### Example: Eyestate in World Coordinates -The `transform_scene_to_world` function allows us easily convert [eye state data](https://docs.pupil-labs.com/neon/data-collection/data-streams/#_3d-eye-states) given in scene camera coordinates to world coordinates. +The `transform_scene_to_world` function allows us to easily convert [eye state data](https://docs.pupil-labs.com/neon/data-collection/data-streams/#_3d-eye-states) given in scene camera coordinates to world coordinates. ::: warning -Note, to do this right in practice you need to make sure you sample the quaternions and eye state data from the same timestamps. 
Since both data streams are generated independently and do not share the same set of timestamps, this is a challenge in itself. +Note, to do this right in practice you need to make sure you sample the quaternions and eye state data at the same timestamps. Since both data streams are generated independently and do not share the same set of timestamps, this is a challenge in itself. We are glossing over this here, but one possible solution to this is interpolating the IMU data to match the timestamps of the eye state data, which is demonstrated [here](http://docs.pupil-labs.com/alpha-lab/imu-transformations/#application-example). ::: @@ -206,9 +206,9 @@ def gaze_3d_to_world(gaze_elevation, gaze_azimuth, imu_quaternions): ``` ## World Spherical Coordinates -Using the transformations introduced above, we can transform various data into cartesian world coordinates. For some things it is more intuitive to have the data in spherical coordinates though. For instance, you might want to know when someone’s gaze or heading deviates from parallel with the horizon, i.e. if they are looking/facing upwards or downwards. +Using the transformations introduced above, we can transform various data into cartesian world coordinates. For some purposes, it is more intuitive to have the data in spherical coordinates though. For instance, you might want to know when someone’s gaze or heading deviates from parallel with the horizon, i.e. if they are looking/facing upwards or downwards. -Converting data into spherical world coordinates makes these things obvious. When wearing Neon, an elevation and azimuth of 0 degrees corresponds to a neutral orientation: i.e., aimed at magnetic North and parallel to the horizon. A positive elevation corresponds to looking upwards, and a negative elevation corresponds to looking downwards. +Converting data into spherical world coordinates makes this obvious. When wearing Neon, an elevation and azimuth of 0 degrees corresponds to a neutral orientation: i.e., aimed at magnetic North and parallel to the horizon. A positive elevation corresponds to looking upwards, and a negative elevation corresponds to looking downwards. The [Euler angles from the IMU](https://docs.pupil-labs.com/neon/data-collection/data-streams/#euler-angles) are already in a compatible format. For gaze data in world coordinates, the `cartesian_to_spherical_world` function below will do the necessary transformation. From f3455401a972e2d7eef78645e2a223a806e605af Mon Sep 17 00:00:00 2001 From: Robert Ennis Date: Wed, 18 Sep 2024 23:03:36 +0200 Subject: [PATCH 2/7] grammar --- alpha-lab/imu-transformations/index.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/alpha-lab/imu-transformations/index.md b/alpha-lab/imu-transformations/index.md index d587bcdcb..69eb5493e 100644 --- a/alpha-lab/imu-transformations/index.md +++ b/alpha-lab/imu-transformations/index.md @@ -173,15 +173,15 @@ def spherical_to_cartesian_scene(elevations, azimuths): elevations_rad = np.deg2rad(elevations) azimuths_rad = np.deg2rad(azimuths) - # Elevation of 0 in Neon system corresponds to Y = 0, but + # Elevation of 0 in Neon's system corresponds to Y = 0, but # an elevation of 0 in traditional spherical coordinates would # correspond to Y = 1, so we convert elevation to the # more traditional format. 
elevations_rad += np.pi / 2 - # Azimuth of 0 in Neon system corresponds to X = 0, but + # Azimuth of 0 in Neon's system corresponds to X = 0, but # an azimuth of 0 in traditional spherical coordinates would - # correspond to X = 1. Also, azimuth to the right in Neon is + # correspond to X = 1. Also, azimuth to the right for Neon is # more positive, whereas it is more negative in traditional # spherical coordiantes. So, we convert azimuth to the more # traditional format. From 619ba75cb4c238d5794f7a8851de42fafb68ebb3 Mon Sep 17 00:00:00 2001 From: Dominic Canare Date: Mon, 21 Oct 2024 16:26:14 -0400 Subject: [PATCH 3/7] Format fixes --- neon/data-collection/psychopy/index.md | 28 +++++++++++++------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/neon/data-collection/psychopy/index.md b/neon/data-collection/psychopy/index.md index 55a8f74cf..ee3fdb031 100644 --- a/neon/data-collection/psychopy/index.md +++ b/neon/data-collection/psychopy/index.md @@ -1,26 +1,26 @@ # PsychoPy -[PsychoPy](https://psychopy.org/) is widely used open-source software for creating and running psychophysics experiments. +[PsychoPy](https://psychopy.org/) is widely used open-source software for creating and running psychophysics experiments. It enables users to present stimuli, collect data, and interface with a variety of hardware and software applications. -We have created a dedicated plugin for PsychoPy that enables Neon to be used in PsychoPy experiments. PsychoPy +We have created a dedicated plugin for PsychoPy that enables Neon to be used in PsychoPy experiments. PsychoPy users have two options for designing their experiments, both of which can be used alongside Neon: - [Builder](https://www.psychopy.org/builder/) – Gives users a graphical interface with little or no need to write code - although it does support custom code when necessary. - [Coder](https://psychopy.org/coder/index.html) – Gives users the option to generate experiments or do other things programmatically, [using Psychopy like any other Python package](https://psychopy.org/api/). ## Using PsychoPy with Neon -When using PsychoPy with Neon, you can save eyetracking data in PsychoPy's hdf5 format, by enabling the "Save hdf5 file" -option within the experiment settings. But we also recommend recording in the Neon Companion app for the duration of +When using PsychoPy with Neon, you can save eyetracking data in PsychoPy's hdf5 format, by enabling the "Save hdf5 file" +option within the experiment settings. But we also recommend recording in the Neon Companion app for the duration of the experiment for data redundancy. PsychoPy’s standard "Eyetracker Record" component can be used to start and stop recordings on the Companion Device accordingly. -For experiments that only require pupillometry/eye state, make sure the "Compute Eye State" setting is enabled in the companion app. +For experiments that only require pupillometry/eye state, make sure the "Compute Eye State" setting is enabled in the companion app. For experiments that do not require screen-based gaze coordinates, this is all that is required. -To use Neon for screen-based work in PsychoPy, the screen needs to be robustly located within the scene camera’s field of view, -and Neon’s gaze data subsequently transformed from scene camera-based coordinates to screen-based coordinates. 
The plugin for -PsychoPy achieves this with the use of AprilTag Markers and the +To use Neon for screen-based work in PsychoPy, the screen needs to be robustly located within the scene camera’s field of view, +and Neon’s gaze data subsequently transformed from scene camera-based coordinates to screen-based coordinates. The plugin for +PsychoPy achieves this with the use of AprilTag Markers and the [real-time-screen-gaze](https://github.com/pupil-labs/real-time-screen-gaze) Python package (installed automatically with the plugin). ## Builder @@ -51,21 +51,21 @@ Two new Builder components will be available in the components list under the Ey ### Data Format -[PsychoPy saves eyetracking data in its own format](https://psychopy.org/hardware/eyeTracking.html#what-about-the-data). +[PsychoPy saves eyetracking data in its own format](https://psychopy.org/hardware/eyeTracking.html#what-about-the-data). -When processing eyetracking data in PsychoPy's data format, please note that PsychoPy doesn’t have distinct record types -for gaze data versus eye state. If you’re collecting screen-gaze coordinates and pupillometry data, their records they will +When processing eyetracking data in PsychoPy's data format, please note that PsychoPy doesn’t have distinct record types +for gaze data versus eye state. If you’re collecting screen-gaze coordinates and pupillometry data, their records they will be intermixed, but they can be distinguished. - For screen gaze records - `[left|right]_gaze_[x|y]` will be the screen coordinates in PsychoPy’s display units `[left|right]_gaze_z` will be `0` - `[left|right]_eye_cam_[x|y|z]` will be `0` - - `left_pupil_measure1` and `left_pupil_measure1_type` will be `0` + - `[left|right]_pupil_measure1` and `[left|right]_pupil_measure1_type` will be `0` - For eye state records - `[left|right]_gaze_[x|y|z]` will be the optical axis vector - `[left|right]_eye_cam_[x|y|z]` will be eye position - - `left_pupil_measure1` will be pupil diameter in mm - - `left_pupil_measure1_type` will be `77` + - `[left|right]_pupil_measure1` will be pupil diameter in mm + - `[left|right]_pupil_measure1_type` will be `77` ### Example Builder Experiment From 7ed3b3a10f03c17c269349d44d1845066ea99afa Mon Sep 17 00:00:00 2001 From: Dominic Canare Date: Mon, 21 Oct 2024 16:26:40 -0400 Subject: [PATCH 4/7] Update coder example to use iohub instead of rt-api --- neon/data-collection/psychopy/index.md | 72 ++++++++++++++++++-------- 1 file changed, 49 insertions(+), 23 deletions(-) diff --git a/neon/data-collection/psychopy/index.md b/neon/data-collection/psychopy/index.md index ee3fdb031..87727b72e 100644 --- a/neon/data-collection/psychopy/index.md +++ b/neon/data-collection/psychopy/index.md @@ -73,26 +73,36 @@ Check out our simple but complete [gaze contingent demo designed in PsychoPy Bui ## Coder -To use Neon with PsychoPy coder, we recommend interfacing directly with the [real-time API](https://docs.pupil-labs.com/neon/real-time-api/tutorials/) -and, for screen-based tasks, using the [real-time-screen-gaze](https://github.com/pupil-labs/real-time-screen-gaze) package. -`AprilTagFrameStim` and `AprilTagStim` classes are provided to more easily display screen markers and configure a screen-based gaze mapper. +To use Neon with PsychoPy coder, you'll need to configure ioHub, add AprilTag markers to the screen, and register the screen surface with the eyetracker. The example below shows how to collect realtime gaze position and pupil diameter in PsychoPy Coder. 
### Example Coder Experiment ```python -import numpy as np - from psychopy import visual, event +from psychopy.core import getTime +from psychopy.iohub import launchHubServer from psychopy.tools.monitorunittools import convertToPix + +import numpy as np + from psychopy_eyetracker_pupil_labs.pupil_labs.stimuli import AprilTagFrameStim -from pupil_labs.realtime_api.simple import discover_one_device -from pupil_labs.real_time_screen_gaze.gaze_mapper import GazeMapper -win = visual.Window(fullscr=True, units='height', checkTiming=False) +# Set up iohub +iohub_config = { + 'eyetracker.hw.pupil_labs.neon.EyeTracker': { + 'name': 'tracker', + 'runtime_settings': { + 'companion_address': '192.168.1.228', + 'companion_port': 8080, + }, + } +} -gaze_circle = visual.Circle(win, radius=.02, color="red") -text = visual.TextStim(win, text='Press "ESCAPE" to exit', height=0.05) +win = visual.Window(fullscr=True, units='height', checkTiming=False, color='black') +io = launchHubServer(window=win, **iohub_config) +eyetracker = io.devices.tracker +# Add a frame of AprilTag markers to the screen tag_frame = AprilTagFrameStim( win=win, name='tag_frame', units='norm', @@ -101,26 +111,42 @@ tag_frame = AprilTagFrameStim( marker_size=0.125, marker_units='height', contrast=1.0, ) + +# Use a red circle to show the gaze location +gaze_circle = visual.Circle(win, radius=.02, color="red") + +# Register the screen surface with the eyetracker win_size_pix = convertToPix(np.array([2, 2]), [0, 0], 'norm', win) +eyetracker.register_surface(tag_frame.marker_verts, win_size_pix) + +# Start a recording +eyetracker.setRecordingState(True) -neon_device = discover_one_device() -gaze_mapper = GazeMapper(neon_device.get_calibration()) -screen_surface = gaze_mapper.add_surface(tag_frame.marker_verts, win_size_pix) +# Run for 30 seconds +start_time = getTime() +while getTime() - start_time < 30: + # exit on escape key + if event.getKeys(keyList=['escape']): + break -while True: - frame, gaze = neon_device.receive_matched_scene_video_frame_and_gaze() - result = gaze_mapper.process_frame(frame, gaze) + # Update gaze circle radius to reflect pupil diameter + for eye_event in eyetracker.getEvents(): + if eye_event.left_pupil_measure1_type == 77: + mean_pupil_diameter = (eye_event.left_pupil_measure1 + eye_event.right_pupil_measure1) / 2 + gaze_circle.radius = (mean_pupil_diameter**1.5) / 100 - for surface_gaze in result.mapped_gaze[screen_surface.uid]: - gaze_circle.pos = surface_gaze.x, surface_gaze.y - gaze_circle.draw() + # Update gaze circle position to reflect gaze position + gaze_circle.pos = eyetracker.getLastGazePosition() - text.draw() + # Update the screen + gaze_circle.draw() tag_frame.draw() win.flip() - if 'escape' in event.getKeys(): - break +# Stop recording +eyetracker.setRecordingState(False) + +win.close() +io.quit() -neon_device.close() ``` From 1d852cd9d0509310861e4408ce21ac5f3be5b64d Mon Sep 17 00:00:00 2001 From: Dominic Canare Date: Tue, 22 Oct 2024 15:41:35 -0400 Subject: [PATCH 5/7] We now generate mono events for gaze and bino events for eye state in psychopy --- neon/data-collection/psychopy/index.md | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/neon/data-collection/psychopy/index.md b/neon/data-collection/psychopy/index.md index 87727b72e..867bd0241 100644 --- a/neon/data-collection/psychopy/index.md +++ b/neon/data-collection/psychopy/index.md @@ -51,20 +51,13 @@ Two new Builder components will be available in the components list under the Ey ### Data Format -[PsychoPy 
saves eyetracking data in its own format](https://psychopy.org/hardware/eyeTracking.html#what-about-the-data). +[PsychoPy saves eyetracking data in its own format](https://psychopy.org/hardware/eyeTracking.html#what-about-the-data). Screen gaze data will be saved as `MonocularEyeSampleEvent` records (even when using the binocular gaze mode). Eye state data, if enabled, will appear in `BinocularEyeSampleEvent` records. -When processing eyetracking data in PsychoPy's data format, please note that PsychoPy doesn’t have distinct record types -for gaze data versus eye state. If you’re collecting screen-gaze coordinates and pupillometry data, their records they will -be intermixed, but they can be distinguished. - -- For screen gaze records - - `[left|right]_gaze_[x|y]` will be the screen coordinates in PsychoPy’s display units `[left|right]_gaze_z` will be `0` - - `[left|right]_eye_cam_[x|y|z]` will be `0` - - `[left|right]_pupil_measure1` and `[left|right]_pupil_measure1_type` will be `0` +For eye state data in`BinocularEyeSampleEvent` records: - For eye state records - - `[left|right]_gaze_[x|y|z]` will be the optical axis vector - - `[left|right]_eye_cam_[x|y|z]` will be eye position - - `[left|right]_pupil_measure1` will be pupil diameter in mm + - `[left|right]_gaze_[x|y|z]` will be the optical axis vectors + - `[left|right]_eye_cam_[x|y|z]` will be eye positions + - `[left|right]_pupil_measure1` will be pupil diameters in mm - `[left|right]_pupil_measure1_type` will be `77` ### Example Builder Experiment From 0d65886a925690f6306712bc9ef8427bc3e0c245 Mon Sep 17 00:00:00 2001 From: Dominic Canare Date: Wed, 30 Oct 2024 06:15:53 -0400 Subject: [PATCH 6/7] Adds info about sending events --- neon/data-collection/psychopy/index.md | 35 +++++++++++++------------- 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/neon/data-collection/psychopy/index.md b/neon/data-collection/psychopy/index.md index 867bd0241..b550d37ef 100644 --- a/neon/data-collection/psychopy/index.md +++ b/neon/data-collection/psychopy/index.md @@ -1,27 +1,18 @@ # PsychoPy -[PsychoPy](https://psychopy.org/) is widely used open-source software for creating and running psychophysics experiments. -It enables users to present stimuli, collect data, and interface with a variety of hardware and software applications. +[PsychoPy](https://psychopy.org/) is widely used open-source software for creating and running psychophysics experiments. It enables users to present stimuli, collect data, and interface with a variety of hardware and software applications. -We have created a dedicated plugin for PsychoPy that enables Neon to be used in PsychoPy experiments. PsychoPy -users have two options for designing their experiments, both of which can be used alongside Neon: +We have created a dedicated plugin for PsychoPy that enables Neon to be used in PsychoPy experiments. PsychoPy users have two options for designing their experiments, both of which can be used alongside Neon: - [Builder](https://www.psychopy.org/builder/) – Gives users a graphical interface with little or no need to write code - although it does support custom code when necessary. - [Coder](https://psychopy.org/coder/index.html) – Gives users the option to generate experiments or do other things programmatically, [using Psychopy like any other Python package](https://psychopy.org/api/). 
## Using PsychoPy with Neon -When using PsychoPy with Neon, you can save eyetracking data in PsychoPy's hdf5 format, by enabling the "Save hdf5 file" -option within the experiment settings. But we also recommend recording in the Neon Companion app for the duration of -the experiment for data redundancy. PsychoPy’s standard "Eyetracker Record" component can be used to start and stop recordings -on the Companion Device accordingly. +When using PsychoPy with Neon, you can save eyetracking data in PsychoPy's hdf5 format, by enabling the "Save hdf5 file" option within the experiment settings. But we also recommend recording in the Neon Companion app for the duration of the experiment for data redundancy. PsychoPy’s standard "Eyetracker Record" component can be used to start and stop recordings on the Companion Device accordingly. If desired, custom timestamped events can be triggered from PsychoPy and saved in the Neon recording. -For experiments that only require pupillometry/eye state, make sure the "Compute Eye State" setting is enabled in the companion app. -For experiments that do not require screen-based gaze coordinates, this is all that is required. +* For experiments that only require pupillometry/eye state, make sure the "Compute Eye State" setting is enabled in the companion app. For experiments that do not require screen-based gaze coordinates, this is all that is required. -To use Neon for screen-based work in PsychoPy, the screen needs to be robustly located within the scene camera’s field of view, -and Neon’s gaze data subsequently transformed from scene camera-based coordinates to screen-based coordinates. The plugin for -PsychoPy achieves this with the use of AprilTag Markers and the -[real-time-screen-gaze](https://github.com/pupil-labs/real-time-screen-gaze) Python package (installed automatically with the plugin). +* To use Neon for screen-based work in PsychoPy, the screen needs to be robustly located within the scene camera’s field of view, and Neon’s gaze data subsequently transformed from scene camera-based coordinates to screen-based coordinates. The plugin for PsychoPy achieves this with the use of AprilTag Markers and the [real-time-screen-gaze](https://github.com/pupil-labs/real-time-screen-gaze) Python package (installed automatically with the plugin). ## Builder @@ -42,12 +33,18 @@ PsychoPy achieves this with the use of AprilTag Markers and the The standard "Eyetracker Record" and "Region of Interest" components work with Neon. Because Neon is calibration-free, the Calibration and Validation components are unused. -Two new Builder components will be available in the components list under the Eyetracking section: "April Tag Frame" and "April Tag" (necessary for screen-based work). +Three new Builder components will be available in the components list under the Eyetracking section. -- April Tag Frame: this component is recommended for most users. Using it in your Builder experiment will display an array of AprilTag markers around the edge of the screen. You can configure the number of markers to display along the horizontal and vertical edges of the screen, the size and contrast of the markers, and (optionally) the marker IDs. A minimum of four markers (2 horizontally by 2 vertically) is recommended, but more markers will provide more robust detection and accurate mapping. -![AprilTag Frame](./apriltag-frame.png) +- April Tag Markers: for screen-based work, you will need to render AprilTag markers on your display. These components make it easy to do so. 
We recommend at least four markers, but more markers will improve gaze mapping. -- April Tag: this component will add a single AprilTag marker to your display. It is intended for use when the April Tag Frame component cannot be used (e.g., you need to display stimuli on the edges of the display where the April Tag Frame component would place markers in the way). + - **April Tag Frame**: this component is recommended for most users. Using it in your Builder experiment will display an array of AprilTag markers around the edge of the screen. You can configure the number of markers to display along the horizontal and vertical edges of the screen, the size and contrast of the markers, and (optionally) the marker IDs. A minimum of four markers (2 horizontally by 2 vertically) is recommended, but more markers will provide more robust detection and accurate mapping. Marker IDs are automatically chosen but can be manually specified if needed. + ![AprilTag Frame](./apriltag-frame.png) + + - **April Tag**: this component will add a single AprilTag marker to your display. It is intended for use when the April Tag Frame component cannot be used (e.g., you need to display stimuli on the edges of the display where the April Tag Frame component would place markers in the way). Using this component will give you control over the size and position of each marker. You will need to ensure that a unique marker ID is assigned to each AprilTag marker. + +- **Neon Event**: use this component to send a timestamped event annotation to the Neon Recording. You can mark the start and end of an experiment, the start and end of a trial, the timing of a stimulus presentation, etc. A timestamp can be manually specified or, if set to `0`, automatically assigned when the component start is triggered. + + Events can only be saved to an active recording. You can use PsychoPy's standard "Eyetracking Record" component to start/stop a recording or manually start a recording from the Companion App. 
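For Coder users, the same event annotations can be sent programmatically. The snippet below is only an illustrative sketch, not part of the patched files: the event names are arbitrary, and it assumes the ioHub Neon tracker device set up via `launchHubServer()` as in the Coder example on this page, reusing its `setRecordingState` and `send_event` calls to show that events are only stored while a recording is active.

```python
# Illustrative sketch only: `eyetracker` is the ioHub Neon tracker device
# obtained via launchHubServer(), as in the Coder example on this page.
eyetracker.setRecordingState(True)       # events are only saved to an active recording

eyetracker.send_event('trial-1-start')   # arbitrary example name, timestamped automatically
# ... present stimuli, collect responses ...
eyetracker.send_event('trial-1-end')

eyetracker.setRecordingState(False)
```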
### Data Format @@ -116,10 +113,12 @@ eyetracker.register_surface(tag_frame.marker_verts, win_size_pix) eyetracker.setRecordingState(True) # Run for 30 seconds +eyetracker.send_event('exp-start') start_time = getTime() while getTime() - start_time < 30: # exit on escape key if event.getKeys(keyList=['escape']): + eyetracker.send_event('user-exit') break # Update gaze circle radius to reflect pupil diameter From 50252b1d2bb79ebe7731916f87bbe8fed690a7e4 Mon Sep 17 00:00:00 2001 From: Robert Ennis Date: Mon, 4 Nov 2024 13:13:37 +0100 Subject: [PATCH 7/7] - change how quaternions are passed to `from_quat` to be consistent with rest of Neon ecosystem & docs --- .../imu_heading_visualization.ipynb | 11 +++++++---- alpha-lab/imu-transformations/index.md | 11 +++++++---- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/alpha-lab/imu-transformations/imu_heading_visualization.ipynb b/alpha-lab/imu-transformations/imu_heading_visualization.ipynb index d8eb6cf9f..e3505cc8c 100644 --- a/alpha-lab/imu-transformations/imu_heading_visualization.ipynb +++ b/alpha-lab/imu-transformations/imu_heading_visualization.ipynb @@ -16,7 +16,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -26,7 +26,10 @@ "def transform_imu_to_world(imu_coordinates, imu_quaternions):\n", " # This array contains a timeseries of transformation matrices,\n", " # as calculated from the IMU's timeseries of quaternions values.\n", - " imu_to_world_matrices = R.from_quat(imu_quaternions).as_matrix()\n", + " imu_to_world_matrices = R.from_quat(\n", + " imu_quaternions,\n", + " scalar_first=True,\n", + " ).as_matrix()\n", "\n", " if np.ndim(imu_coordinates) == 1:\n", " return imu_to_world_matrices @ imu_coordinates\n", @@ -123,7 +126,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -157,10 +160,10 @@ "# and the video render.\n", "quaternions_resampled = np.array(\n", " [\n", + " np.interp(relative_demo_video_ts, imu[\"relative ts [s]\"], imu[\"quaternion w\"]),\n", " np.interp(relative_demo_video_ts, imu[\"relative ts [s]\"], imu[\"quaternion x\"]),\n", " np.interp(relative_demo_video_ts, imu[\"relative ts [s]\"], imu[\"quaternion y\"]),\n", " np.interp(relative_demo_video_ts, imu[\"relative ts [s]\"], imu[\"quaternion z\"]),\n", - " np.interp(relative_demo_video_ts, imu[\"relative ts [s]\"], imu[\"quaternion w\"]),\n", " ]\n", ").T\n", "\n", diff --git a/alpha-lab/imu-transformations/index.md b/alpha-lab/imu-transformations/index.md index 69eb5493e..1d29a7e9e 100644 --- a/alpha-lab/imu-transformations/index.md +++ b/alpha-lab/imu-transformations/index.md @@ -42,8 +42,11 @@ from scipy.spatial.transform import Rotation as R def transform_imu_to_world(imu_coordinates, imu_quaternions): # This array contains a timeseries of transformation matrices, # as calculated from the IMU's timeseries of quaternions values. - imu_to_world_matrices = R.from_quat(imu_quaternions).as_matrix() - + imu_to_world_matrices = R.from_quat( + imu_quaternions, + scalar_first=True, + ).as_matrix() + if np.ndim(imu_coordinates) == 1: return imu_to_world_matrices @ imu_coordinates else: @@ -210,13 +213,13 @@ Using the transformations introduced above, we can transform various data into c Converting data into spherical world coordinates makes this obvious. 
When wearing Neon, an elevation and azimuth of 0 degrees corresponds to a neutral orientation: i.e., aimed at magnetic North and parallel to the horizon. A positive elevation corresponds to looking upwards, and a negative elevation corresponds to looking downwards. -The [Euler angles from the IMU](https://docs.pupil-labs.com/neon/data-collection/data-streams/#euler-angles) are already in a compatible format. For gaze data in world coordinates, the `cartesian_to_spherical_world` function below will do the necessary transformation. +The [Euler angles from the IMU](https://docs.pupil-labs.com/neon/data-collection/data-streams/#euler-angles) are already in a compatible format. For gaze data in world coordinates, the `cartesian_to_spherical_world` function below will do the necessary transformation. ```python def cartesian_to_spherical_world(world_points_3d): """ Convert points in 3D Cartesian world coordinates to spherical coordinates. - + For elevation: - Neutral orientation = 0 (i.e., parallel with horizon) - Upwards is positive
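The intent of this final patch — passing the quaternion columns in (w, x, y, z) order together with `scalar_first=True`, rather than in SciPy's default scalar-last order — can be seen in a small standalone sketch. This is not part of the patched files; it only illustrates the equivalence, and it assumes a SciPy release recent enough to support the `scalar_first` argument of `Rotation.from_quat`.

```python
import numpy as np
from scipy.spatial.transform import Rotation as R

# Illustrative quaternion only: a 90 degree rotation about the z-axis,
# written in the (w, x, y, z) ordering used in the Neon ecosystem.
w, x, y, z = np.cos(np.pi / 4), 0.0, 0.0, np.sin(np.pi / 4)

# With scalar_first=True, the quaternion can be passed scalar-first ...
r_scalar_first = R.from_quat([w, x, y, z], scalar_first=True)

# ... which is equivalent to reordering it into SciPy's default
# scalar-last (x, y, z, w) convention.
r_scalar_last = R.from_quat([x, y, z, w])

assert np.allclose(r_scalar_first.as_matrix(), r_scalar_last.as_matrix())
```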