ruff formatter fixes (#12)
benlansdell committed Feb 2, 2024
1 parent: d4cd002; commit: 420abcb
Showing 13 changed files with 264 additions and 117 deletions.
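Most of the hunks below are the same mechanical change: ruff's formatter, like Black, puts a space on each side of "=" whenever the parameter carries a type annotation, and normalizes "name:type" annotations to "name: type". A minimal sketch of the rule, using a hypothetical helper rather than code from ethome:

# Before (the style being removed in this commit):
#   def smooth(x, window: int=5, center:bool =False):
# After "ruff format" (the style being added):
def smooth(x, window: int = 5, center: bool = False):
    """Hypothetical helper; only the signature formatting matters here."""
    return x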
ethome/config.py: 2 changes (1 addition & 1 deletion)
@@ -2,7 +2,7 @@

# TODO
# Add support for the user to change these.

global_config = {
"make_movie__y_offset": 60,
"make_movie__y_inc": 30,
ethome/features/cnn1d.py: 31 changes (15 additions & 16 deletions)
@@ -8,12 +8,12 @@

def build_baseline_model(
input_dim: tuple,
layer_channels: tuple =(512, 256),
dropout_rate: float =0.0,
learning_rate: float =1e-3,
conv_size: int =5,
num_classes: int=4,
class_weight:tuple = None,
layer_channels: tuple = (512, 256),
dropout_rate: float = 0.0,
learning_rate: float = 1e-3,
conv_size: int = 5,
num_classes: int = 4,
class_weight: tuple = None,
):
if not check_keras():
raise RuntimeError(
@@ -49,7 +49,7 @@ def add_conv_bn_activate(model, out_dim, activation="relu", conv_size=3, drop=0.
return model


def make_df(pts, colnames: List[str] =None): # pragma: no cover
def make_df(pts, colnames: List[str] = None): # pragma: no cover
df = []
for idx in range(len(pts)):
data = pts[idx].flatten()
@@ -61,7 +61,6 @@ def make_df(pts, colnames: List[str] =None): # pragma: no cover


def features_identity(inputs: np.ndarray): # pragma: no cover

return inputs, inputs.shape[1:]


@@ -149,14 +148,14 @@ def __init__(
dim: tuple,
use_conv: bool,
num_classes: int,
augment: bool =False,
class_to_number: dict =None,
past_frames:int=0,
future_frames:int=0,
frame_gap:int=1,
shuffle:bool=False,
mode:str="fit",
featurize:Callable=features_identity,
augment: bool = False,
class_to_number: dict = None,
past_frames: int = 0,
future_frames: int = 0,
frame_gap: int = 1,
shuffle: bool = False,
mode: str = "fit",
featurize: Callable = features_identity,
):
self.batch_size = batch_size
self.featurize = featurize
ethome/features/dl_features.py: 52 changes (27 additions & 25 deletions)
@@ -56,7 +56,7 @@
}


def seed_everything(seed:int=2012):
def seed_everything(seed: int = 2012):
np.random.seed(seed)
os.environ["PYTHONHASHSEED"] = str(seed)

@@ -72,17 +72,17 @@ def __init__(
*,
feature_dim: list,
num_classes: int,
test_data:np.ndarray=None,
class_to_number:dict=None,
past_frames:int=0,
future_frames:int=0,
frame_gap:int=1,
use_conv:bool=False,
build_model:Callable=build_baseline_model,
test_data: np.ndarray = None,
class_to_number: dict = None,
past_frames: int = 0,
future_frames: int = 0,
frame_gap: int = 1,
use_conv: bool = False,
build_model: Callable = build_baseline_model,
Generator=MABe_Generator,
use_callbacks:bool=False,
learning_decay_freq:int=10,
featurizer:Callable=features_identity,
use_callbacks: bool = False,
learning_decay_freq: int = 10,
featurizer: Callable = features_identity,
):
flat_dim = np.prod(feature_dim)
if use_conv:
@@ -130,7 +130,9 @@ def _set_model(self, model):
"""Set an external, provide initialized and compiled keras model"""
self.model = model

def inference(self, model_params: dict, class_weight:dict=None, n_folds:int=5):
def inference(
self, model_params: dict, class_weight: dict = None, n_folds: int = 5
):
kwargs = {}
if class_weight is not None:
if type(class_weight) is dict:
@@ -188,7 +190,7 @@ def get_test_prediction_probabilities(self):
return all_test_preds


def normalize_data(orig_pose_dictionary:dict):
def normalize_data(orig_pose_dictionary: dict):
for key in orig_pose_dictionary:
X = orig_pose_dictionary[key]["keypoints"]
X = X.transpose((0, 1, 3, 2)) # last axis is x, y coordinates
@@ -200,16 +202,16 @@ def normalize_data(orig_pose_dictionary:dict):


def run_task(
vocabulary:dict,
test_data:np.ndarray,
config_name:str,
build_model:Callable,
skip_test_prediction:bool=False,
seed:int=2021,
vocabulary: dict,
test_data: np.ndarray,
config_name: str,
build_model: Callable,
skip_test_prediction: bool = False,
seed: int = 2021,
Generator=MABe_Generator,
use_callbacks:bool=False,
params:dict=None,
use_conv:bool=True,
use_callbacks: bool = False,
params: dict = None,
use_conv: bool = True,
):
if params is None:
if config_name is None:
@@ -279,13 +281,13 @@ def run_task(
return all_test_probs


def lrs(epoch:int, lr:float, freq:int=10):
def lrs(epoch: int, lr: float, freq: int = 10):
if (epoch % freq) == 0 and epoch > 0:
lr /= 3
return lr


def convert_to_mars_format(df:pd.DataFrame, colnames:List[str], animal_setup:dict):
def convert_to_mars_format(df: pd.DataFrame, colnames: List[str], animal_setup: dict):
n_animals = len(animal_setup["mouse_ids"])
n_body_parts = len(animal_setup["bodypart_ids"])
pose_dict = {}
@@ -301,7 +303,7 @@ def convert_to_mars_format(df:pd.DataFrame, colnames:List[str], animal_setup:dic


# Basically, undo the change above
def convert_to_pandas_df(data, colnames:List[str]=None):
def convert_to_pandas_df(data, colnames: List[str] = None):
dfs = []
for vid in data:
df = pd.DataFrame(data[vid], columns=colnames)
ethome/features/features.py: 8 changes (5 additions & 3 deletions)
@@ -59,7 +59,9 @@ def transform(self, df: pd.DataFrame):
raise NotImplementedError


def feature_class_maker(name:str, compute_function:Callable, required_columns:List[str]=[]):
def feature_class_maker(
name: str, compute_function: Callable, required_columns: List[str] = []
):
def __init__(self, required_columns=None, **kwargs):
"""Feature creation object. This houses the feature creation function and the columns that are required to compute the features. Performs some checks on data to make sure has these columns.
@@ -72,10 +74,10 @@ def __init__(self, required_columns=None, **kwargs):
self.required_columns = required_columns
self.kwargs = kwargs

def fit(self, edf:pd.DataFrame, **kwargs): # pragma: no cover
def fit(self, edf: pd.DataFrame, **kwargs): # pragma: no cover
return

def transform(self, edf:pd.DataFrame, **kwargs):
def transform(self, edf: pd.DataFrame, **kwargs):
"""Make the features. This is called internally by the dataset object when running `add_features`.
Args:
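For orientation, the feature_class_maker hunk above is a factory: given a name, a compute function, and the columns that function needs, it returns a class exposing the fit/transform methods shown in the diff. A rough usage sketch; the compute function, column names, and feature definition are invented for illustration and should be checked against the ethome documentation:

from typing import List

import pandas as pd
from ethome.features.features import feature_class_maker

def compute_nose_speed(df: pd.DataFrame, raw_col_names: List[str], **kwargs) -> pd.DataFrame:
    # Invented feature: frame-to-frame displacement of a hypothetical nose keypoint.
    return pd.DataFrame({"nose_speed": df[raw_col_names].diff().pow(2).sum(axis=1).pow(0.5)})

# The factory returns a class; instances follow the fit/transform pattern shown above.
NoseSpeed = feature_class_maker("NoseSpeed", compute_nose_speed, ["nose_x", "nose_y"])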
ethome/features/generic_features.py: 24 changes (18 additions & 6 deletions)
@@ -4,7 +4,7 @@
import numpy as np


def _diff_within_group(df, sort_key: str, diff_col:str, **kwargs):
def _diff_within_group(df, sort_key: str, diff_col: str, **kwargs):
return df.groupby(sort_key)[diff_col].transform(lambda x: x.diff(**kwargs).bfill())


@@ -45,7 +45,7 @@ def compute_centerofmass_interanimal_distances(


def compute_centerofmass_interanimal_speed(
df: pd.DataFrame, raw_col_names: list, n_shifts:int=5, **kwargs
df: pd.DataFrame, raw_col_names: list, n_shifts: int = 5, **kwargs
) -> pd.DataFrame:
"""Speeds between all animals' centroids"""
animal_setup = df.pose.animal_setup
@@ -123,7 +123,11 @@ def compute_centerofmass(


def compute_centerofmass_velocity(
df: pd.DataFrame, raw_col_names: list, n_shifts:int=5, bodyparts: list = [], **kwargs
df: pd.DataFrame,
raw_col_names: list,
n_shifts: int = 5,
bodyparts: list = [],
**kwargs,
) -> pd.DataFrame:
"""Velocity of all animals' centroids"""
animal_setup = df.pose.animal_setup
@@ -161,7 +165,11 @@ def compute_centerofmass_velocity(


def compute_part_velocity(
df: pd.DataFrame, raw_col_names: list, n_shifts:int=5, bodyparts: list = [], **kwargs
df: pd.DataFrame,
raw_col_names: list,
n_shifts: int = 5,
bodyparts: list = [],
**kwargs,
) -> pd.DataFrame:
"""Velocity of all animals' bodyparts"""
animal_setup = df.pose.animal_setup
@@ -198,7 +206,11 @@ def compute_part_velocity(


def compute_part_speed(
df: pd.DataFrame, raw_col_names: list, n_shifts:int=5, bodyparts: list = [], **kwargs
df: pd.DataFrame,
raw_col_names: list,
n_shifts: int = 5,
bodyparts: list = [],
**kwargs,
) -> pd.DataFrame:
"""Speed of all animals' bodyparts"""
animal_setup = df.pose.animal_setup
@@ -235,7 +247,7 @@ def compute_part_speed(


def compute_speed_features(
df: pd.DataFrame, raw_col_names: list, n_shifts:int=5, **kwargs
df: pd.DataFrame, raw_col_names: list, n_shifts: int = 5, **kwargs
) -> pd.DataFrame:
"""Speeds between all body parts pairs (within and between animals)"""
animal_setup = df.pose.animal_setup
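Since every hunk shown here is formatting only, the change should be reproducible by re-running the formatter against the package. A sketch, assuming a ruff release recent enough to ship the "ruff format" command is installed in the environment:

import subprocess

# Reformat the package in place, then report anything the linter still flags.
subprocess.run(["ruff", "format", "ethome"], check=True)
subprocess.run(["ruff", "check", "ethome"], check=False)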