Merge pull request #72 from biomarkersParkinson/inversion_axes
Inversion axes
nienketimmermans authored Oct 18, 2024
2 parents 5a35c83 + 2564c65 commit 73fd59e
Showing 26 changed files with 88 additions and 88 deletions.
32 changes: 14 additions & 18 deletions docs/notebooks/gait/gait_analysis.ipynb
@@ -13,20 +13,16 @@
"metadata": {},
"outputs": [],
"source": [
"# Automatically reload modules\n",
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"import os\n",
"from paradigma.preprocessing_config import IMUPreprocessingConfig\n",
"from paradigma.gait.gait_analysis import extract_gait_features_io, detect_gait_io, extract_arm_swing_features_io, detect_arm_swing_io, quantify_arm_swing_io\n",
"from paradigma.gait.gait_analysis_config import GaitFeatureExtractionConfig, GaitDetectionConfig, ArmSwingFeatureExtractionConfig, ArmSwingDetectionConfig, ArmSwingQuantificationConfig\n",
"from paradigma.gait.gait_analysis import extract_gait_features_io, detect_gait_io, extract_arm_activity_features_io, detect_other_arm_activities_io, quantify_arm_swing_io\n",
"from paradigma.gait.gait_analysis_config import GaitFeatureExtractionConfig, GaitDetectionConfig, ArmActivityFeatureExtractionConfig, FilteringGaitConfig, ArmSwingQuantificationConfig\n",
"from paradigma.imu_preprocessing import preprocess_imu_data_io"
]
},
{
"cell_type": "code",
"execution_count": 13,
"execution_count": 8,
"metadata": {
"tags": [
"parameters"
@@ -38,7 +34,7 @@
"sensor = 'imu'\n",
"\n",
"path_to_data = '../../../tests/data'\n",
"path_to_classifier = os.path.join(path_to_data, '0.classifiers', branch)\n",
"path_to_classifier = os.path.join(path_to_data, '0.classification', branch)\n",
"path_to_sensor_data = os.path.join(path_to_data, '1.sensor_data', sensor)\n",
"path_to_preprocessed_data = os.path.join(path_to_data, '2.preprocessed_data', sensor)\n",
"path_to_extracted_features = os.path.join(path_to_data, '3.extracted_features', branch)\n",
@@ -57,7 +53,7 @@
},
{
"cell_type": "code",
"execution_count": 14,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
@@ -74,7 +70,7 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
@@ -91,7 +87,7 @@
},
{
"cell_type": "code",
"execution_count": 16,
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
@@ -103,7 +99,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## Extract arm swing features"
"## Extract arm actvitiy features"
]
},
{
@@ -112,15 +108,15 @@
"metadata": {},
"outputs": [],
"source": [
"config = ArmSwingFeatureExtractionConfig()\n",
"extract_arm_swing_features_io(path_to_preprocessed_data, path_to_extracted_features, config)"
"config = ArmActivityFeatureExtractionConfig()\n",
"extract_arm_activity_features_io(path_to_preprocessed_data, path_to_extracted_features, config)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Detect arm swing"
"## Filter gait"
]
},
{
@@ -129,8 +125,8 @@
"metadata": {},
"outputs": [],
"source": [
"config = ArmSwingDetectionConfig()\n",
"detect_arm_swing_io(path_to_extracted_features, path_to_predictions, path_to_classifier, config)"
"config = FilteringGaitConfig()\n",
"detect_other_arm_activities_io(path_to_extracted_features, path_to_predictions, path_to_classifier, config)"
]
},
{
@@ -142,7 +138,7 @@
},
{
"cell_type": "code",
"execution_count": 19,
"execution_count": 20,
"metadata": {},
"outputs": [],
"source": [
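
For reference, a minimal sketch of how the renamed gait-notebook steps fit together after this change. The imports and the two renamed calls follow the diff above; the `path_to_*` variables are the ones set earlier in the notebook, and the no-argument config constructors match the cells shown here.

```python
# Sketch of the renamed notebook steps; names follow the diff above, the path_to_*
# variables are assumed to be defined as in the earlier notebook cells.
from paradigma.gait.gait_analysis import (
    extract_gait_features_io,
    detect_gait_io,
    extract_arm_activity_features_io,   # was extract_arm_swing_features_io
    detect_other_arm_activities_io,     # was detect_arm_swing_io
    quantify_arm_swing_io,
)
from paradigma.gait.gait_analysis_config import (
    GaitFeatureExtractionConfig,
    GaitDetectionConfig,
    ArmActivityFeatureExtractionConfig,  # was ArmSwingFeatureExtractionConfig
    FilteringGaitConfig,                 # was ArmSwingDetectionConfig
    ArmSwingQuantificationConfig,
)

# Extract arm activity features, then filter gait (the step formerly called "detect arm swing"):
extract_arm_activity_features_io(path_to_preprocessed_data, path_to_extracted_features,
                                 ArmActivityFeatureExtractionConfig())
detect_other_arm_activities_io(path_to_extracted_features, path_to_predictions,
                               path_to_classifier, FilteringGaitConfig())
```

The old entry points (`extract_arm_swing_features_io`, `detect_arm_swing_io`) and their configs are removed by this commit.
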
17 changes: 7 additions & 10 deletions docs/notebooks/tremor/tremor.ipynb
@@ -9,26 +9,23 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"import os\n",
"from paradigma.preprocessing_config import IMUPreprocessingConfig\n",
"from paradigma.imu_preprocessing import preprocess_imu_data_io"
]
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"path_to_data = '../../tests/data'\n",
"path_to_classifier = os.path.join(path_to_data, '0.classifiers', 'tremor')\n",
"path_to_data = '../../../tests/data'\n",
"path_to_classifier = os.path.join(path_to_data, '0.classification', 'tremor')\n",
"path_to_sensor_data = os.path.join(path_to_data, '1.sensor_data', 'imu')\n",
"path_to_preprocessed_data = os.path.join(path_to_data, '2.preprocessed_data', 'imu')\n",
"path_to_extracted_features = os.path.join(path_to_data, '3.extracted_features', 'tremor')\n",
@@ -44,7 +41,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
@@ -55,7 +52,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "paradigma-cfWEGyqZ-py3.11",
"display_name": "paradigma-Fn6RLG4_-py3.11",
"language": "python",
"name": "python3"
},
@@ -69,7 +66,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.8"
"version": "3.11.5"
}
},
"nbformat": 4,
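
The tremor notebook picks up the same data-layout fix: the relative path gains one level and classifiers move from `0.classifiers` to `0.classification`. A sketch of the corrected path setup, exactly as in the cell above:

```python
# Corrected path setup from the tremor notebook after this change.
import os

path_to_data = '../../../tests/data'
path_to_classifier = os.path.join(path_to_data, '0.classification', 'tremor')
path_to_sensor_data = os.path.join(path_to_data, '1.sensor_data', 'imu')
path_to_preprocessed_data = os.path.join(path_to_data, '2.preprocessed_data', 'imu')
```
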
4 changes: 2 additions & 2 deletions src/paradigma/constants.py
@@ -21,8 +21,8 @@ class DataColumns():
GRAV_ACCELEROMETER_Z : str = "grav_accelerometer_z"
PRED_GAIT_PROBA: str = "pred_gait_proba"
PRED_GAIT : str = "pred_gait"
PRED_ARM_SWING_PROBA: str = "pred_arm_swing_proba"
PRED_ARM_SWING : str = "pred_arm_swing"
PRED_OTHER_ARM_ACTIVITY_PROBA: str = "pred_other_arm_activity_proba"
PRED_OTHER_ARM_ACTIVITY : str = "pred_other_arm_activity"
ANGLE : str = "angle"
ANGLE_SMOOTH : str = "angle_smooth"
VELOCITY : str = "velocity"
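
The prediction column constants are renamed from arm swing to other arm activity. Illustrative only: downstream code can reference the renamed columns through `DataColumns` rather than string literals, so it follows this rename automatically; the small DataFrame is made up for the example.

```python
# Accessing the renamed prediction columns via DataColumns (names as in the diff above).
import pandas as pd
from paradigma.constants import DataColumns

proba_col = DataColumns.PRED_OTHER_ARM_ACTIVITY_PROBA   # "pred_other_arm_activity_proba"
label_col = DataColumns.PRED_OTHER_ARM_ACTIVITY         # "pred_other_arm_activity"

df = pd.DataFrame({proba_col: [0.2, 0.7, 0.9]})         # toy data for illustration
df[label_col] = df[proba_col] >= 0.5                     # same thresholding pattern used in detect_gait
```
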
60 changes: 30 additions & 30 deletions src/paradigma/gait/gait_analysis.py
@@ -10,7 +10,7 @@

from paradigma.constants import DataColumns
from paradigma.gait.gait_analysis_config import GaitFeatureExtractionConfig, GaitDetectionConfig, \
ArmSwingFeatureExtractionConfig, ArmSwingDetectionConfig, ArmSwingQuantificationConfig
ArmActivityFeatureExtractionConfig, FilteringGaitConfig, ArmSwingQuantificationConfig
from paradigma.gait.feature_extraction import extract_temporal_domain_features, \
extract_spectral_domain_features, pca_transform_gyroscope, compute_angle, \
remove_moving_average_angle, extract_angle_extremes, extract_range_of_motion, \
@@ -66,8 +66,8 @@ def extract_gait_features_io(input_path: Union[str, Path], output_path: Union[st

def detect_gait(df: pd.DataFrame, config: GaitDetectionConfig, path_to_classifier_input: Union[str, Path]) -> pd.DataFrame:
# Initialize the classifier
clf = pd.read_pickle(os.path.join(path_to_classifier_input, config.classifier_file_name))
with open(os.path.join(path_to_classifier_input, config.thresholds_file_name), 'r') as f:
clf = pd.read_pickle(os.path.join(path_to_classifier_input, 'classifiers', config.classifier_file_name))
with open(os.path.join(path_to_classifier_input, 'thresholds', config.thresholds_file_name), 'r') as f:
threshold = float(f.read())

# Prepare the data
@@ -80,8 +80,8 @@ def detect_gait(df: pd.DataFrame, config: GaitDetectionConfig, path_to_classifie
X = df.loc[:, clf.feature_names_in_]

# Make prediction
df['pred_gait_proba'] = clf.predict_proba(X)[:, 1]
df['pred_gait'] = df['pred_gait_proba'] >= threshold
df[DataColumns.PRED_GAIT_PROBA] = clf.predict_proba(X)[:, 1]
df[DataColumns.PRED_GAIT] = df[DataColumns.PRED_GAIT_PROBA] >= threshold

return df

@@ -107,7 +107,7 @@ def detect_gait_io(input_path: Union[str, Path], output_path: Union[str, Path],
write_df_data(metadata_time, metadata_samples, output_path, 'gait_meta.json', df)


def extract_arm_swing_features(df: pd.DataFrame, config: ArmSwingFeatureExtractionConfig) -> pd.DataFrame:
def extract_arm_activity_features(df: pd.DataFrame, config: ArmActivityFeatureExtractionConfig) -> pd.DataFrame:
# temporarily add "random" predictions
df[config.pred_gait_colname] = np.concatenate([np.repeat([1], df.shape[0]//3), np.repeat([0], df.shape[0]//3), np.repeat([1], df.shape[0] + 1 - 2*df.shape[0]//3)], axis=0)

@@ -252,7 +252,7 @@ def extract_arm_swing_features(df: pd.DataFrame, config: ArmSwingFeatureExtracti
return df_windowed


def extract_arm_swing_features_io(input_path: Union[str, Path], output_path: Union[str, Path], config: ArmSwingFeatureExtractionConfig) -> None:
def extract_arm_activity_features_io(input_path: Union[str, Path], output_path: Union[str, Path], config: ArmActivityFeatureExtractionConfig) -> None:
# load accelerometer and gyroscope data
l_dfs = []
for sensor in ['accelerometer', 'gyroscope']:
@@ -268,26 +268,26 @@ def extract_arm_swing_features_io(input_path: Union[str, Path], output_path: Uni

df = pd.merge(l_dfs[0], l_dfs[1], on=config.time_colname)

df_windowed = extract_arm_swing_features(df, config)
df_windowed = extract_arm_activity_features(df, config)

end_iso8601 = get_end_iso8601(metadata_samples.start_iso8601,
df_windowed[config.time_colname][-1:].values[0] + config.window_length_s)

metadata_samples.end_iso8601 = end_iso8601
metadata_samples.file_name = 'arm_swing_values.bin'
metadata_samples.file_name = 'arm_activity_values.bin'
metadata_time.end_iso8601 = end_iso8601
metadata_time.file_name = 'arm_swing_time.bin'
metadata_time.file_name = 'arm_activity_time.bin'

metadata_samples.channels = list(config.d_channels_values.keys())
metadata_samples.units = list(config.d_channels_values.values())

metadata_time.channels = [config.time_colname]
metadata_time.units = ['relative_time_ms']

write_df_data(metadata_time, metadata_samples, output_path, 'arm_swing_meta.json', df_windowed)
write_df_data(metadata_time, metadata_samples, output_path, 'arm_activity_meta.json', df_windowed)


def detect_arm_swing(df: pd.DataFrame, config: ArmSwingDetectionConfig, clf: Union[LogisticRegression, RandomForestClassifier]) -> pd.DataFrame:
def detect_other_arm_activities(df: pd.DataFrame, config: FilteringGaitConfig, clf: Union[LogisticRegression, RandomForestClassifier]) -> pd.DataFrame:

# Prepare the data
clf.feature_names_in_ = ['std_norm_acc'] + [f'{x}_power_below_gait' for x in config.l_accelerometer_cols] + \
@@ -302,66 +302,66 @@ def detect_arm_swing(df: pd.DataFrame, config: ArmSwingDetectionConfig, clf: Uni
X = df.loc[:, clf.feature_names_in_]

# Make prediction
df[DataColumns.PRED_ARM_SWING_PROBA] = clf.predict_proba(X)[:, 1]
df[DataColumns.PRED_OTHER_ARM_ACTIVITY_PROBA] = clf.predict_proba(X)[:, 1]

return df

def detect_arm_swing_io(input_path: Union[str, Path], output_path: Union[str, Path], path_to_classifier_input: Union[str, Path], config: ArmSwingDetectionConfig) -> None:
def detect_other_arm_activities_io(input_path: Union[str, Path], output_path: Union[str, Path], path_to_classifier_input: Union[str, Path], config: FilteringGaitConfig) -> None:
# Load the data
metadata_time, metadata_samples = read_metadata(input_path, config.meta_filename, config.time_filename, config.values_filename)
df = tsdf.load_dataframe_from_binaries([metadata_time, metadata_samples], tsdf.constants.ConcatenationType.columns)

# Load the classifier
clf = pd.read_pickle(os.path.join(path_to_classifier_input, config.classifier_file_name))
clf = pd.read_pickle(os.path.join(path_to_classifier_input, 'classifiers', config.classifier_file_name))

df = detect_arm_swing(df, config, clf)
df = detect_other_arm_activities(df, config, clf)

# Prepare the metadata
metadata_samples.file_name = 'arm_swing_values.bin'
metadata_time.file_name = 'arm_swing_time.bin'
metadata_samples.file_name = 'arm_activity_values.bin'
metadata_time.file_name = 'arm_activity_time.bin'

metadata_samples.channels = [DataColumns.PRED_ARM_SWING_PROBA]
metadata_samples.channels = [DataColumns.PRED_OTHER_ARM_ACTIVITY_PROBA]
metadata_samples.units = ['probability']

metadata_time.channels = [DataColumns.TIME]
metadata_time.units = ['relative_time_ms']

write_df_data(metadata_time, metadata_samples, output_path, 'arm_swing_meta.json', df)
write_df_data(metadata_time, metadata_samples, output_path, 'arm_activity_meta.json', df)


def quantify_arm_swing(df: pd.DataFrame, config: ArmSwingQuantificationConfig) -> pd.DataFrame:

# temporarily for testing: manually determine predictions
df[DataColumns.PRED_ARM_SWING_PROBA] = np.concatenate([np.repeat([1], df.shape[0]//3), np.repeat([0], df.shape[0]//3), np.repeat([1], df.shape[0] - 2*df.shape[0]//3)], axis=0)
df[DataColumns.PRED_OTHER_ARM_ACTIVITY_PROBA] = np.concatenate([np.repeat([1], df.shape[0]//3), np.repeat([0], df.shape[0]//3), np.repeat([1], df.shape[0] - 2*df.shape[0]//3)], axis=0)

# keep only predicted arm swing
# TODO: Aggregate overlapping windows for probabilities
df_arm_swing = df.loc[df[DataColumns.PRED_ARM_SWING_PROBA]>=0.5].copy().reset_index(drop=True)
df_filtered = df.loc[df[DataColumns.PRED_OTHER_ARM_ACTIVITY_PROBA]>=0.5].copy().reset_index(drop=True)

del df

# create peak angular velocity
df_arm_swing.loc[:, 'peak_ang_vel'] = df_arm_swing.loc[:, ['forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean']].mean(axis=1)
df_arm_swing = df_arm_swing.drop(columns=['forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean'])
df_filtered.loc[:, 'peak_ang_vel'] = df_filtered.loc[:, ['forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean']].mean(axis=1)
df_filtered = df_filtered.drop(columns=['forward_peak_ang_vel_mean', 'backward_peak_ang_vel_mean'])

# Segmenting


df_arm_swing[DataColumns.SEGMENT_NR] = create_segments(
df=df_arm_swing,
df_filtered[DataColumns.SEGMENT_NR] = create_segments(
df=df_filtered,
time_column_name=DataColumns.TIME,
gap_threshold_s=config.segment_gap_s
)
df_arm_swing = discard_segments(
df=df_arm_swing,
df_filtered = discard_segments(
df=df_filtered,
segment_nr_colname=DataColumns.SEGMENT_NR,
min_length_segment_s=config.min_segment_length_s,
sampling_frequency=config.sampling_frequency
)

# Quantify arm swing
df_aggregates = aggregate_segments(
df=df_arm_swing,
df=df_filtered,
time_colname=DataColumns.TIME,
segment_nr_colname=DataColumns.SEGMENT_NR,
window_step_size_s=config.window_step_size,
Expand Down Expand Up @@ -398,7 +398,7 @@ def quantify_arm_swing_io(path_to_feature_input: Union[str, Path], path_to_predi
df_features = df_features[l_feature_cols]

# Concatenate features and predictions
df = pd.concat([df_features, df_predictions[DataColumns.PRED_ARM_SWING_PROBA]], axis=1)
df = pd.concat([df_features, df_predictions[DataColumns.PRED_OTHER_ARM_ACTIVITY_PROBA]], axis=1)

df_aggregates = quantify_arm_swing(df, config)

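
Besides the `*_io` wrappers, the renamed in-memory functions can be called directly. A minimal sketch under stated assumptions: `path_to_classifier` points at the test classifier folder (which, per the `os.path.join` calls above, now contains a `classifiers` subfolder), and `df_windowed` is the output of `extract_arm_activity_features`.

```python
# Sketch of the renamed gait-filtering step on an in-memory DataFrame.
# path_to_classifier and df_windowed are assumed to exist as described above.
import os
import pandas as pd

from paradigma.constants import DataColumns
from paradigma.gait.gait_analysis import detect_other_arm_activities
from paradigma.gait.gait_analysis_config import FilteringGaitConfig

config = FilteringGaitConfig()
clf = pd.read_pickle(os.path.join(path_to_classifier, 'classifiers', config.classifier_file_name))

df_windowed = detect_other_arm_activities(df_windowed, config, clf)

# Probabilities land in the renamed column:
probas = df_windowed[DataColumns.PRED_OTHER_ARM_ACTIVITY_PROBA]
```
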
18 changes: 9 additions & 9 deletions src/paradigma/gait/gait_analysis_config.py
@@ -136,14 +136,14 @@ class GaitDetectionConfig(IMUConfig):

def __init__(self) -> None:
super().__init__()
self.classifier_file_name = "gd_classifier.pkl"
self.thresholds_file_name = "gd_threshold.txt"
self.classifier_file_name = "gait_detection_classifier.pkl"
self.thresholds_file_name = "gait_detection_threshold.txt"

self.set_filenames_values("gait")



class ArmSwingFeatureExtractionConfig(IMUConfig):
class ArmActivityFeatureExtractionConfig(IMUConfig):

def initialize_window_length_fields(self, window_length_s: int) -> None:
self.window_length_s = window_length_s
@@ -241,24 +241,24 @@ def __init__(self) -> None:
self.d_channels_values[f"cc_{cc_coef}_{sensor}"] = DataUnits.GRAVITY


class ArmSwingDetectionConfig(IMUConfig):
class FilteringGaitConfig(IMUConfig):

def __init__(self) -> None:
super().__init__()
self.classifier_file_name = "asd_classifier.pkl"
self.classifier_file_name = "gait_filtering_classifier.pkl"

self.set_filenames_values("arm_swing")
self.set_filenames_values("arm_activity")



class ArmSwingQuantificationConfig(IMUConfig):

def __init__(self) -> None:
super().__init__()
self.set_filenames_values("arm_swing")
self.set_filenames_values("arm_activity")

self.pred_arm_swing_proba_colname = DataColumns.PRED_ARM_SWING_PROBA
self.pred_arm_swing_colname = DataColumns.PRED_ARM_SWING
self.pred_other_arm_activity_proba_colname = DataColumns.PRED_OTHER_ARM_ACTIVITY_PROBA
self.pred_other_arm_activity_colname = DataColumns.PRED_OTHER_ARM_ACTIVITY

self.window_length_s = 3
self.window_step_size = 0.75
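
A short sketch of the renamed configuration classes; the attributes shown are the ones introduced in the diff above (classifier filename, renamed prediction-column names), everything else about these configs is left untouched by this commit.

```python
# Inspecting the renamed configs; attribute names follow the diff above.
from paradigma.gait.gait_analysis_config import (
    FilteringGaitConfig,          # was ArmSwingDetectionConfig
    ArmSwingQuantificationConfig,
)

filtering_config = FilteringGaitConfig()
print(filtering_config.classifier_file_name)                 # gait_filtering_classifier.pkl

quant_config = ArmSwingQuantificationConfig()
print(quant_config.pred_other_arm_activity_proba_colname)    # pred_other_arm_activity_proba
print(quant_config.pred_other_arm_activity_colname)          # pred_other_arm_activity
```
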