Start to add type hints (as per #12)
benlansdell committed Feb 2, 2024
1 parent 40b76a6 commit 3812c08
Showing 10 changed files with 87 additions and 83 deletions.
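The pattern repeated across the hunks below is mechanical: keyword arguments gain PEP 484 annotations while their defaults stay unchanged. A hypothetical before/after sketch of that pattern (an illustration, not a function from the repo):

import numpy as np

# Before: def smooth(x, window=5): ...
# After, in the style this commit adds:
def smooth(x: np.ndarray, window: int = 5) -> np.ndarray:
    return np.convolve(x, np.ones(window) / window, mode="same")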
2 changes: 1 addition & 1 deletion ethome/config.py
@@ -2,7 +2,7 @@

# TODO
# Add support for the user to change these.

global_config = {
    "make_movie__y_offset": 60,
    "make_movie__y_inc": 30,
60 changes: 31 additions & 29 deletions ethome/features/cnn1d.py
@@ -1,18 +1,19 @@
import numpy as np
import pandas as pd

+from typing import List, Callable
from ..utils import check_keras
from .mars_features import make_features_mars, make_features_mars_distr


def build_baseline_model(
-    input_dim,
-    layer_channels=(512, 256),
-    dropout_rate=0.0,
-    learning_rate=1e-3,
-    conv_size=5,
-    num_classes=4,
-    class_weight=None,
+    input_dim: tuple,
+    layer_channels: tuple =(512, 256),
+    dropout_rate: float =0.0,
+    learning_rate: float =1e-3,
+    conv_size: int =5,
+    num_classes: int=4,
+    class_weight:tuple = None,
):
    if not check_keras():
        raise RuntimeError(
@@ -48,7 +49,7 @@ def add_conv_bn_activate(model, out_dim, activation="relu", conv_size=3, drop=0.
    return model
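With the annotated signature, a call site might look like the sketch below. This assumes TensorFlow/Keras is installed (so check_keras() passes); the input_dim layout and the values are illustrative, not taken from the repo.

# Hypothetical usage of the newly annotated build_baseline_model.
from ethome.features.cnn1d import build_baseline_model

model = build_baseline_model(
    input_dim=(9, 28),          # assumed (stacked frames, features) layout
    layer_channels=(512, 256),
    dropout_rate=0.1,
    learning_rate=1e-3,
    num_classes=4,
)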


-def make_df(pts, colnames=None): # pragma: no cover
+def make_df(pts, colnames: List[str] =None): # pragma: no cover
    df = []
    for idx in range(len(pts)):
        data = pts[idx].flatten()
@@ -59,30 +60,31 @@ def make_df(pts, colnames=None): # pragma: no cover
    return pd.DataFrame(df)


-def features_identity(inputs): # pragma: no cover
+def features_identity(inputs: np.ndarray): # pragma: no cover

    return inputs, inputs.shape[1:]


-def features_via_sklearn(inputs, featurizer): # pragma: no cover
+def features_via_sklearn(inputs: np.ndarray, featurizer: Callable): # pragma: no cover
    # Use the ML functions to turn this into a pandas data table
    df = make_df(inputs)
    features_df, _, _ = featurizer(df)
    features = np.array(features_df)
    return features, features.shape


-def features_mars(x): # pragma: no cover
+def features_mars(x: np.ndarray): # pragma: no cover
    return features_via_sklearn(x, make_features_mars)


# #features_mars_no_shift = lambda x: features_via_sklearn(x, make_features_mars_no_shift)


-def features_mars_distr(x): # pragma: no cover
+def features_mars_distr(x: np.ndarray): # pragma: no cover
    return features_via_sklearn(x, make_features_mars_distr)


-def features_distances(inputs):
+def features_distances(inputs: np.ndarray):
    # inputs.shape (4509, 2,7,2) = (frame, mouse ID, body part, x/y)

    features = []
@@ -109,7 +111,7 @@ def features_distances(inputs):
    return features, features.shape[1:]
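The body of features_distances is collapsed above; conceptually it flattens each frame's keypoints and takes distances between them. A minimal sketch of that idea (an illustration, not the repo's implementation):

import numpy as np

def pairwise_distances(frame: np.ndarray) -> np.ndarray:
    # frame: (mouse ID, body part, x/y) -> (n_points, n_points) distance matrix
    pts = frame.reshape(-1, 2)                 # flatten to a list of keypoints
    diffs = pts[:, None, :] - pts[None, :, :]  # pairwise coordinate deltas
    return np.linalg.norm(diffs, axis=-1)      # Euclidean distances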


-def features_distances_normalized(inputs): # pragma: no cover
+def features_distances_normalized(inputs: np.ndarray): # pragma: no cover
    # inputs.shape (4509, 2,7,2) = (frame, mouse ID, body part, x/y)

    features = []
@@ -142,19 +144,19 @@
class MABe_Generator:
    def __init__(
        self,
-        pose_dict,
-        batch_size,
-        dim,
-        use_conv,
-        num_classes,
-        augment=False,
-        class_to_number=None,
-        past_frames=0,
-        future_frames=0,
-        frame_gap=1,
-        shuffle=False,
-        mode="fit",
-        featurize=features_identity,
+        pose_dict: dict,
+        batch_size: int,
+        dim: tuple,
+        use_conv: bool,
+        num_classes: int,
+        augment: bool =False,
+        class_to_number: dict =None,
+        past_frames:int=0,
+        future_frames:int=0,
+        frame_gap:int=1,
+        shuffle:bool=False,
+        mode:str="fit",
+        featurize:Callable=features_identity,
    ):
        self.batch_size = batch_size
        self.featurize = featurize
@@ -205,7 +207,7 @@ def __init__(
    def __len__(self):
        return len(self.indexes) // self.batch_size

-    def augment_fn(self, x):
+    def augment_fn(self, x: np.ndarray):
        # Rotate
        angle = (np.random.rand() - 0.5) * (np.pi * 2)
        c, s = np.cos(angle), np.sin(angle)
@@ -217,7 +219,7 @@ def augment_fn(self, x):
        x = x + shift
        return x

-    def __getitem__(self, index):
+    def __getitem__(self, index: int):
        bs = self.batch_size
        indexes = self.indexes[index * bs : (index + 1) * bs]
        X = np.empty((bs, *self.dim), self.X_dtype)
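Taken together, the annotated constructor reads roughly like this at a call site. A sketch only: the exact pose_dict schema is not visible in this diff, so the keys and shapes below are assumptions following the (frame, mouse ID, body part, x/y) convention noted above.

import numpy as np
from ethome.features.cnn1d import MABe_Generator, features_distances

gen = MABe_Generator(
    pose_dict={"video_0": {"keypoints": np.zeros((100, 2, 7, 2))}},  # assumed schema
    batch_size=32,
    dim=(28,),           # flat feature dimension per sample (illustrative)
    use_conv=False,
    num_classes=4,
    mode="fit",
    featurize=features_distances,
)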
55 changes: 28 additions & 27 deletions ethome/features/dl_features.py
@@ -5,6 +5,7 @@
import os
from copy import deepcopy

+from typing import Callable, List
from ethome.features.cnn1d import build_baseline_model
from ethome.features.cnn1d import MABe_Generator, features_identity
from .cnn1d import *
@@ -55,7 +56,7 @@
}


-def seed_everything(seed=2012):
+def seed_everything(seed:int=2012):
    np.random.seed(seed)
    os.environ["PYTHONHASHSEED"] = str(seed)

@@ -69,19 +70,19 @@ class Trainer(object):
    def __init__(
        self,
        *,
-        feature_dim,
-        num_classes,
-        test_data=None,
-        class_to_number=None,
-        past_frames=0,
-        future_frames=0,
-        frame_gap=1,
-        use_conv=False,
-        build_model=build_baseline_model,
+        feature_dim: list,
+        num_classes: int,
+        test_data:np.ndarray=None,
+        class_to_number:dict=None,
+        past_frames:int=0,
+        future_frames:int=0,
+        frame_gap:int=1,
+        use_conv:bool=False,
+        build_model:Callable=build_baseline_model,
        Generator=MABe_Generator,
-        use_callbacks=False,
-        learning_decay_freq=10,
-        featurizer=features_identity,
+        use_callbacks:bool=False,
+        learning_decay_freq:int=10,
+        featurizer:Callable=features_identity,
    ):
        flat_dim = np.prod(feature_dim)
        if use_conv:
@@ -129,7 +130,7 @@ def _set_model(self, model):
"""Set an external, provide initialized and compiled keras model"""
self.model = model

-    def inference(self, model_params, class_weight=None, n_folds=5):
+    def inference(self, model_params: dict, class_weight:dict=None, n_folds:int=5):
        kwargs = {}
        if class_weight is not None:
            if type(class_weight) is dict:
@@ -187,7 +188,7 @@ def get_test_prediction_probabilities(self):
        return all_test_preds


-def normalize_data(orig_pose_dictionary):
+def normalize_data(orig_pose_dictionary:dict):
    for key in orig_pose_dictionary:
        X = orig_pose_dictionary[key]["keypoints"]
        X = X.transpose((0, 1, 3, 2))  # last axis is x, y coordinates
@@ -199,16 +200,16 @@


def run_task(
-    vocabulary,
-    test_data,
-    config_name,
-    build_model,
-    skip_test_prediction=False,
-    seed=2021,
+    vocabulary:dict,
+    test_data:np.ndarray,
+    config_name:str,
+    build_model:Callable,
+    skip_test_prediction:bool=False,
+    seed:int=2021,
    Generator=MABe_Generator,
-    use_callbacks=False,
-    params=None,
-    use_conv=True,
+    use_callbacks:bool=False,
+    params:dict=None,
+    use_conv:bool=True,
):
    if params is None:
        if config_name is None:
@@ -278,13 +279,13 @@ def run_task(
    return all_test_probs


-def lrs(epoch, lr, freq=10):
+def lrs(epoch:int, lr:float, freq:int=10):
    if (epoch % freq) == 0 and epoch > 0:
        lr /= 3
    return lr
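lrs cuts the learning rate by a factor of 3 every freq epochs. Presumably it is attached through a Keras callback when use_callbacks is enabled; a sketch of that wiring (assumed, not shown in this diff):

from functools import partial
import tensorflow as tf
from ethome.features.dl_features import lrs

# The schedule callable receives (epoch, lr); freq is pinned with partial.
scheduler = tf.keras.callbacks.LearningRateScheduler(partial(lrs, freq=10))
# model.fit(..., callbacks=[scheduler])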


-def convert_to_mars_format(df, colnames, animal_setup):
+def convert_to_mars_format(df:pd.DataFrame, colnames:List[str], animal_setup:dict):
    n_animals = len(animal_setup["mouse_ids"])
    n_body_parts = len(animal_setup["bodypart_ids"])
    pose_dict = {}
@@ -300,7 +301,7 @@


# Basically, undo the change above
-def convert_to_pandas_df(data, colnames=None):
+def convert_to_pandas_df(data, colnames:List[str]=None):
    dfs = []
    for vid in data:
        df = pd.DataFrame(data[vid], columns=colnames)
11 changes: 6 additions & 5 deletions ethome/features/features.py
@@ -2,7 +2,8 @@
"""

import warnings
-
+import pandas as pd
+from typing import Callable, List
from ethome.features.dl_features import compute_dl_probability_features
from ethome.features.mars_features import (
    compute_mars_features,
@@ -54,11 +55,11 @@ class Features: # pragma: no cover
    def __init__(self):
        raise NotImplementedError

-    def transform(self, df):
+    def transform(self, df: pd.DataFrame):
        raise NotImplementedError


-def feature_class_maker(name, compute_function, required_columns=[]):
+def feature_class_maker(name:str, compute_function:Callable, required_columns:List[str]=[]):
    def __init__(self, required_columns=None, **kwargs):
        """Feature creation object. This houses the feature creation function and the columns that are required to compute the features. Performs some checks on data to make sure has these columns.
@@ -71,10 +72,10 @@ def __init__(self, required_columns=None, **kwargs):
        self.required_columns = required_columns
        self.kwargs = kwargs

-    def fit(self, edf, **kwargs): # pragma: no cover
+    def fit(self, edf:pd.DataFrame, **kwargs): # pragma: no cover
        return

-    def transform(self, edf, **kwargs):
+    def transform(self, edf:pd.DataFrame, **kwargs):
        """Make the features. This is called internally by the dataset object when running `add_features`.

        Args:
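feature_class_maker is a class factory: it wraps a compute function in a Features subclass that the dataset object invokes via add_features. A hypothetical use, assuming the compute function receives the DataFrame and the required columns (its exact expected signature is not shown in this diff):

from ethome.features.features import feature_class_maker

def my_speed_features(df, required_columns, **kwargs):  # assumed signature
    return df[required_columns].diff().fillna(0)

SpeedFeatures = feature_class_maker(
    "SpeedFeatures", my_speed_features, required_columns=["x", "y"]
)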
2 changes: 1 addition & 1 deletion ethome/features/mars_features.py
@@ -36,7 +36,7 @@ def wrapper(*args, **kwargs):
    window_sizes = [1, 5, 10]
    for ws in window_sizes:
        data = np.dstack(
-            [np.array(df[added_cols].shift(p)) for p in range(-ws, ws + 1)]
+            [np.array(df[added_cols].shift(p).bfill()) for p in range(-ws, ws + 1)]
        )
        min_data = pd.DataFrame(
            np.min(data, axis=2),
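This hunk is a behavior fix rather than a type hint: df[added_cols].shift(p) leaves NaNs at the edges of each window, and without backfilling them the np.min/np.max reductions over the stacked windows propagate NaN. A minimal illustration with made-up data:

import pandas as pd

s = pd.Series([1.0, 2.0, 3.0])
print(s.shift(1))          # [NaN, 1.0, 2.0] -- NaN at the leading edge
print(s.shift(1).bfill())  # [1.0, 1.0, 2.0] -- the NaN is backfilled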
2 changes: 1 addition & 1 deletion ethome/interpolation.py
@@ -3,7 +3,7 @@
import pandas as pd
import numpy as np


def interpolate_lowconf_points(
    edf: pd.DataFrame,
    conf_threshold: float = 0.9,
(The diff for the remaining four changed files is not shown here.)
