Commit cf6c044: clean repo
nathanhubens committed Dec 10, 2024
1 parent bf55550
Showing 166 changed files with 202 additions and 26,568 deletions.
225 changes: 110 additions & 115 deletions fasterai/_modidx.py

Large diffs are not rendered by default.

38 changes: 19 additions & 19 deletions fasterai/core/criteria.py
@@ -1,12 +1,12 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/core/00b_core.criteria.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/core/criteria.ipynb.
 
 # %% auto 0
 __all__ = ['random', 'large_final', 'squared_final', 'small_final', 'large_init', 'small_init', 'large_init_large_final',
            'small_init_small_final', 'magnitude_increase', 'movement', 'updating_magnitude_increase',
            'updating_movement', 'movmag', 'updating_movmag', 'criterias', 'Criteria', 'available_criterias',
            'grad_crit']
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 2
+# %% ../../nbs/core/criteria.ipynb 2
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
@@ -15,7 +15,7 @@
 from .granularity import *
 from typing import Callable
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 6
+# %% ../../nbs/core/criteria.ipynb 6
 class Criteria():
     def __init__(self, f:Callable, reducer:str='mean', normalizer:str=None, needs_init:bool=False, needs_update:bool=False, output_f:Callable=None, return_init=False):
         store_attr()
@@ -81,54 +81,54 @@ def update_weights(self, m):
        if self.needs_update:
            m._old_weights = m.weight.data.clone() # The current value becomes the old one for the next iteration
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 9
+# %% ../../nbs/core/criteria.ipynb 9
 random = Criteria(torch.randn_like)
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 12
+# %% ../../nbs/core/criteria.ipynb 12
 large_final = Criteria(torch.abs)
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 15
+# %% ../../nbs/core/criteria.ipynb 15
 squared_final = Criteria(torch.square)
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 18
+# %% ../../nbs/core/criteria.ipynb 18
 small_final = Criteria(compose(torch.abs, torch.neg))
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 21
+# %% ../../nbs/core/criteria.ipynb 21
 large_init = Criteria(torch.abs, needs_init=True, return_init=True)
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 24
+# %% ../../nbs/core/criteria.ipynb 24
 small_init = Criteria(compose(torch.abs, torch.neg), needs_init=True, return_init=True)
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 27
+# %% ../../nbs/core/criteria.ipynb 27
 large_init_large_final = Criteria(torch.abs, needs_init=True, output_f=torch.min)
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 30
+# %% ../../nbs/core/criteria.ipynb 30
 small_init_small_final = Criteria(torch.abs, needs_init=True, output_f=lambda x,y: torch.neg(torch.max(x,y)))
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 33
+# %% ../../nbs/core/criteria.ipynb 33
 magnitude_increase = Criteria(torch.abs, needs_init=True, output_f= torch.sub)
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 36
+# %% ../../nbs/core/criteria.ipynb 36
 movement = Criteria(noop, needs_init=True, output_f= lambda x,y: torch.abs(torch.sub(x,y)))
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 41
+# %% ../../nbs/core/criteria.ipynb 41
 updating_magnitude_increase = Criteria(torch.abs, needs_update=True, output_f= lambda x,y: torch.sub(x,y))
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 44
+# %% ../../nbs/core/criteria.ipynb 44
 updating_movement = Criteria(noop, needs_update=True, output_f= lambda x,y: torch.abs(torch.sub(x,y)))
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 47
+# %% ../../nbs/core/criteria.ipynb 47
 movmag = Criteria(noop, needs_init=True, output_f=lambda x,y: torch.abs(torch.mul(x, torch.sub(x,y))))
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 50
+# %% ../../nbs/core/criteria.ipynb 50
 updating_movmag = Criteria(noop, needs_update=True, output_f=lambda x,y: torch.abs(torch.mul(x, torch.sub(x,y))))
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 52
+# %% ../../nbs/core/criteria.ipynb 52
 criterias = ('random', 'large_final', 'small_final', 'squared_final', 'small_init', 'small_final', 'large_init_large_final', 'small_init_small_final', 'magnitude_increase', 'movement', 'updating_magnitude_increase', 'updating_movement', 'updating_movmag')
 def available_criterias():
     print(criterias)
 
-# %% ../../nbs/core/00b_core.criteria.ipynb 73
+# %% ../../nbs/core/criteria.ipynb 73
 def grad_crit(m, g):
     if g in granularities[m.__class__.__name__]:
         dim = granularities[m.__class__.__name__][g]
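
The hunks above only rename notebook paths; the criteria themselves are unchanged. For intuition, each Criteria wraps a tensor function f and optionally combines current and initial weights through output_f, so the scores it assigns can be reproduced with the raw ops shown above. A minimal sketch, assuming only PyTorch (tensor names are illustrative):

import torch

w_init  = torch.randn(16, 3, 3, 3)                 # weights at initialization
w_final = w_init + 0.1 * torch.randn_like(w_init)  # weights after some training

large_final_score        = torch.abs(w_final)                                # f=torch.abs
magnitude_increase_score = torch.sub(torch.abs(w_final), torch.abs(w_init))  # output_f=torch.sub
movement_score           = torch.abs(torch.sub(w_final, w_init))             # how far each weight travelled
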
6 changes: 3 additions & 3 deletions fasterai/core/granularity.py
@@ -1,16 +1,16 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/core/00a_core.granularity.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/core/granularity.ipynb.
 
 # %% auto 0
 __all__ = ['Granularities']
 
-# %% ../../nbs/core/00a_core.granularity.ipynb 2
+# %% ../../nbs/core/granularity.ipynb 2
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
 from fastcore.basics import *
 from fastcore.imports import *
 
-# %% ../../nbs/core/00a_core.granularity.ipynb 5
+# %% ../../nbs/core/granularity.ipynb 5
 class Granularities:
     _granularities_Conv2d = {'weight':0, 'shared_weight':1, 'channel':2, 'column':3, 'row':4, 'kernel':(3,4), 'filter':(2,3,4), 'shared_channel':(1,2), 'shared_column': (1,3), 'shared_row': (1,4), 'vertical_slice': (2,3), 'horizontal_slice': (2,4), 'shared_vertical_slice': (1,2,3), 'shared_horizontal_slice': (1,2,4), 'shared_kernel': (1,3,4), 'layer':(1,2,3,4)}
     _granularities_ConvT2d = {'weight':0, 'shared_weight':2, 'channel':1, 'column':3, 'row':4, 'kernel':(3,4), 'filter':(1,3,4), 'shared_channel':(1,2), 'shared_column': (2,3), 'shared_row': (2,4), 'vertical_slice': (1,3), 'horizontal_slice': (1,4), 'shared_vertical_slice': (1,2,3), 'shared_horizontal_slice': (1,2,4), 'shared_kernel': (2,3,4), 'layer':(1,2,3,4)}
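
These tables map granularity names to the weight-tensor dimensions a reducer collapses. As a plain-PyTorch illustration of the idea, using the standard Conv2d weight layout (out, in, kh, kw) — note the indices in the tables above follow fasterai's own internal layout, so they are offset from this sketch:

import torch

w = torch.randn(16, 3, 3, 3)                 # Conv2d weight: (out, in, kh, kw)
weight_scores = w.abs()                      # 'weight': one score per parameter
filter_scores = w.abs().mean(dim=(1, 2, 3))  # 'filter': one score per output filter
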
20 changes: 10 additions & 10 deletions fasterai/core/schedule.py
@@ -1,17 +1,17 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/core/00c_core.schedules.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/core/schedules.ipynb.
 
 # %% auto 0
 __all__ = ['one_shot', 'iterative', 'agp', 'one_cycle', 'cos', 'lin', 'dsd', 'schedules', 'Schedule', 'sched_oneshot',
            'sched_iterative', 'sched_agp', 'sched_onecycle', 'sched_dsd', 'available_schedules']
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 2
+# %% ../../nbs/core/schedules.ipynb 2
 import numpy as np
 import matplotlib.pyplot as plt
 from fastcore.basics import *
 from fastai.callback.schedule import *
 import math
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 4
+# %% ../../nbs/core/schedules.ipynb 4
 class Schedule():
     "Base class to create schedules"
     def __init__(self, sched_func, start_pct=0., end_pct=None, start_sparsity=0.):
@@ -44,35 +44,35 @@ def plot(self, end_sparsity):
    def reset(self):
        self.current_sparsity, self.previous_sparsity = map(listify, [self.start_sparsity, self.start_sparsity])
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 9
+# %% ../../nbs/core/schedules.ipynb 9
 def sched_oneshot(start, end, pos): return end
 
 one_shot = Schedule(sched_oneshot, start_pct=0.5)
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 16
+# %% ../../nbs/core/schedules.ipynb 16
 def sched_iterative(start, end, pos, n_steps=3):
     "Perform iterative pruning, and pruning in `n_steps` steps"
     return start + ((end-start)/n_steps)*(np.ceil((pos)*n_steps))
 
 iterative = Schedule(sched_iterative, start_pct=0.2)
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 27
+# %% ../../nbs/core/schedules.ipynb 27
 def sched_agp(start, end, pos): return end + (start - end) * (1 - pos)**3
 
 agp = Schedule(sched_agp, start_pct=0.2)
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 32
+# %% ../../nbs/core/schedules.ipynb 32
 def sched_onecycle(start, end, pos, α=14, β=6):
     out = (1+np.exp(-α+β)) / (1 + (np.exp((-α*pos)+β)))
     return start + (end-start)*out
 
 one_cycle = Schedule(sched_onecycle)
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 38
+# %% ../../nbs/core/schedules.ipynb 38
 cos = Schedule(sched_cos)
 lin = Schedule(sched_lin)
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 42
+# %% ../../nbs/core/schedules.ipynb 42
 def sched_dsd(start, end, pos):
     if pos<0.5:
         return start + (1 + math.cos(math.pi*(1-pos*2))) * (end-start) / 2
@@ -81,7 +81,7 @@ def sched_dsd(start, end, pos):
 
 dsd = Schedule(sched_dsd)
 
-# %% ../../nbs/core/00c_core.schedules.ipynb 46
+# %% ../../nbs/core/schedules.ipynb 46
 schedules = ('one_shot', 'iterative', 'agp', 'one_cycle', 'cos', 'lin', 'dsd')
 def available_schedules():
     print(schedules)
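
These schedules are plain functions mapping a relative training position pos ∈ [0, 1] to a target sparsity between start and end. A quick standalone check of the AGP curve defined above, assuming only NumPy:

import numpy as np

def sched_agp(start, end, pos): return end + (start - end) * (1 - pos)**3

for pos in np.linspace(0, 1, 5):
    print(f"pos={pos:.2f} -> sparsity={sched_agp(0., 50., pos):.1f}%")
# pos=0.00 -> sparsity=0.0%    pruning is front-loaded: most of the
# pos=0.25 -> sparsity=28.9%   sparsity is reached early, leaving the
# pos=0.50 -> sparsity=43.8%   rest of training to recover accuracy
# pos=0.75 -> sparsity=49.2%   at a nearly constant sparsity level.
# pos=1.00 -> sparsity=50.0%
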
10 changes: 5 additions & 5 deletions fasterai/distill/distillation_callback.py
@@ -1,10 +1,10 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/distill/04_distill.knowledge_distillation.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/distill/distillation_callback.ipynb.
 
 # %% auto 0
 __all__ = ['KnowledgeDistillationCallback', 'get_model_layers', 'get_module_by_name', 'SoftTarget', 'Logits', 'Mutual',
            'Attention', 'ActivationBoundaries', 'FitNet', 'Similarity']
 
-# %% ../../nbs/distill/04_distill.knowledge_distillation.ipynb 18
+# %% ../../nbs/distill/distillation_callback.ipynb 18
 from fastai.vision.all import *
 
 import torch
@@ -14,7 +14,7 @@
 from functools import reduce
 from typing import Union
 
-# %% ../../nbs/distill/04_distill.knowledge_distillation.ipynb 20
+# %% ../../nbs/distill/distillation_callback.ipynb 20
 class KnowledgeDistillationCallback(Callback):
     def __init__(self, teacher, loss, activations_student=None, activations_teacher=None, weight=0.5):
         self.stored_activation_student, self.stored_activation_teacher = {}, {}
@@ -59,7 +59,7 @@ def after_fit(self):
        self.remove_hooks(self.handles_t)
        self.remove_hooks(self.handles_st)
 
-# %% ../../nbs/distill/04_distill.knowledge_distillation.ipynb 22
+# %% ../../nbs/distill/distillation_callback.ipynb 22
 def get_model_layers(model, getLayerRepr=False):
     layers = OrderedDict() if getLayerRepr else []
     def get_layers(net, prefix=[]):
@@ -83,7 +83,7 @@ def get_module_by_name(module: Union[torch.Tensor, nn.Module],
    names = access_string.split(sep='.')
    return reduce(getattr, names, module)
 
-# %% ../../nbs/distill/04_distill.knowledge_distillation.ipynb 24
+# %% ../../nbs/distill/distillation_callback.ipynb 24
 def SoftTarget(pred, teacher_pred, T=5, **kwargs):
     return nn.KLDivLoss(reduction='batchmean')(F.log_softmax(pred/T, dim=1), F.softmax(teacher_pred/T, dim=1)) * (T*T)
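
SoftTarget above is the classic Hinton-style distillation loss: the KL divergence between temperature-softened student and teacher distributions, scaled by T² to keep gradient magnitudes comparable across temperatures. Since it is self-contained in the diff, it can be sanity-checked in isolation:

import torch
import torch.nn as nn
import torch.nn.functional as F

def SoftTarget(pred, teacher_pred, T=5, **kwargs):
    return nn.KLDivLoss(reduction='batchmean')(F.log_softmax(pred/T, dim=1),
                                               F.softmax(teacher_pred/T, dim=1)) * (T*T)

student_logits = torch.randn(8, 10)   # batch of 8, 10 classes
teacher_logits = torch.randn(8, 10)
print(SoftTarget(student_logits, teacher_logits))  # positive scalar loss
print(SoftTarget(teacher_logits, teacher_logits))  # ~0 when student matches teacher
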
6 changes: 3 additions & 3 deletions fasterai/misc/bn_folding.py
@@ -1,15 +1,15 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/misc/06a_misc.bn_folding.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/misc/bn_folding.ipynb.
 
 # %% auto 0
 __all__ = ['BN_Folder']
 
-# %% ../../nbs/misc/06a_misc.bn_folding.ipynb 3
+# %% ../../nbs/misc/bn_folding.ipynb 3
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
 import copy
 
-# %% ../../nbs/misc/06a_misc.bn_folding.ipynb 12
+# %% ../../nbs/misc/bn_folding.ipynb 12
 class BN_Folder():
     def __init__(self):
         super().__init__()
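
BN_Folder's implementation is collapsed above. For reference, the standard conv-BN folding identity such a tool applies is sketched below in plain PyTorch; function and variable names are illustrative, not fasterai's API:

import torch

def fold_conv_bn(conv_w, conv_b, bn_mean, bn_var, gamma, beta, eps=1e-5):
    # y = gamma * (conv(x) - mean) / sqrt(var + eps) + beta collapses into
    # a single conv with rescaled weights and a shifted bias.
    scale = gamma / torch.sqrt(bn_var + eps)        # one factor per output channel
    folded_w = conv_w * scale.reshape(-1, 1, 1, 1)
    folded_b = (conv_b - bn_mean) * scale + beta    # use zeros if the conv has no bias
    return folded_w, folded_b
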
6 changes: 3 additions & 3 deletions fasterai/misc/cpu_optimizer.py
@@ -1,14 +1,14 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/misc/06c_misc.cpu_optimizer.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/misc/cpu_optimizer.ipynb.
 
 # %% auto 0
 __all__ = ['accelerate_model_for_cpu']
 
-# %% ../../nbs/misc/06c_misc.cpu_optimizer.ipynb 3
+# %% ../../nbs/misc/cpu_optimizer.ipynb 3
 import torch
 import torch.nn as nn
 from torch.utils.mobile_optimizer import optimize_for_mobile
 
-# %% ../../nbs/misc/06c_misc.cpu_optimizer.ipynb 4
+# %% ../../nbs/misc/cpu_optimizer.ipynb 4
 def accelerate_model_for_cpu(model: nn.Module, example_input: torch.Tensor):
     model.eval()
     example_input = example_input.to(memory_format=torch.channels_last)
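
The function body is truncated above. Given its imports and first lines, the usual pattern is to trace the model and pass it through optimize_for_mobile, whose fusions also benefit plain CPU inference. A hedged sketch of that flow, which may differ from the elided code:

import torch
import torch.nn as nn
from torch.utils.mobile_optimizer import optimize_for_mobile

model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.ReLU()).eval()
example = torch.randn(1, 3, 224, 224).to(memory_format=torch.channels_last)
traced = torch.jit.trace(model, example)   # freeze the graph
fast_model = optimize_for_mobile(traced)   # fuse ops, prepack weights for CPU
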
6 changes: 3 additions & 3 deletions fasterai/misc/fc_decomposer.py
@@ -1,15 +1,15 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/misc/06b_misc.fc_decomposer.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/misc/fc_decomposer.ipynb.
 
 # %% auto 0
 __all__ = ['FC_Decomposer']
 
-# %% ../../nbs/misc/06b_misc.fc_decomposer.ipynb 5
+# %% ../../nbs/misc/fc_decomposer.ipynb 5
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
 import copy
 
-# %% ../../nbs/misc/06b_misc.fc_decomposer.ipynb 6
+# %% ../../nbs/misc/fc_decomposer.ipynb 6
 class FC_Decomposer:
 
     def __init__(self):
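
FC_Decomposer's body is likewise collapsed; the name suggests factorizing fully-connected layers. As a reference point only, a truncated-SVD decomposition of one nn.Linear into two thinner ones looks like this (illustrative names, not fasterai's API):

import torch
import torch.nn as nn

def decompose_linear(layer: nn.Linear, rank: int) -> nn.Sequential:
    # W (out, in) ~ U[:, :rank] @ diag(S[:rank]) @ Vh[:rank]
    U, S, Vh = torch.linalg.svd(layer.weight.data, full_matrices=False)
    first  = nn.Linear(layer.in_features, rank, bias=False)
    second = nn.Linear(rank, layer.out_features, bias=layer.bias is not None)
    first.weight.data  = (torch.diag(S[:rank]) @ Vh[:rank]).contiguous()  # (rank, in)
    second.weight.data = U[:, :rank].contiguous()                         # (out, rank)
    if layer.bias is not None:
        second.bias.data = layer.bias.data.clone()
    return nn.Sequential(first, second)
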
6 changes: 3 additions & 3 deletions fasterai/prune/prune_callback.py
@@ -1,9 +1,9 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/prune/03b_prune.prune_callback.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/prune/prune_callback.ipynb.
 
 # %% auto 0
 __all__ = ['PruneCallback']
 
-# %% ../../nbs/prune/03b_prune.prune_callback.ipynb 3
+# %% ../../nbs/prune/prune_callback.ipynb 3
 from fastai.vision.all import *
 from fastai.callback.all import *
 from .pruner import *
@@ -14,7 +14,7 @@
 import torch.nn as nn
 import torch.nn.functional as F
 
-# %% ../../nbs/prune/03b_prune.prune_callback.ipynb 4
+# %% ../../nbs/prune/prune_callback.ipynb 4
 class PruneCallback(Callback):
     def __init__(self, sparsity:int, context:str, criteria:Callable, schedule:Callable, model:nn.Module=None, round_to:int=None, layer_type:nn.Module=nn.Conv2d):
         store_attr()
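
The callback's signature above wires together a criteria and a schedule from fasterai.core. A hypothetical fastai training run, with keyword usage inferred from the __init__ shown above and assuming an existing dls DataLoaders:

from fastai.vision.all import *

learn = vision_learner(dls, resnet18, metrics=accuracy)
cb = PruneCallback(sparsity=50, context='local',
                   criteria=large_final, schedule=one_cycle)
learn.fit_one_cycle(5, cbs=cb)  # prunes Conv2d layers toward 50% sparsity over training
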
6 changes: 3 additions & 3 deletions fasterai/prune/pruner.py
@@ -1,9 +1,9 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/prune/03_prune.pruner.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/prune/pruner.ipynb.
 
 # %% auto 0
 __all__ = ['Pruner']
 
-# %% ../../nbs/prune/03_prune.pruner.ipynb 2
+# %% ../../nbs/prune/pruner.ipynb 2
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
@@ -19,7 +19,7 @@
 from ..core.criteria import *
 from fastai.vision.all import *
 
-# %% ../../nbs/prune/03_prune.pruner.ipynb 4
+# %% ../../nbs/prune/pruner.ipynb 4
 class Pruner():
     def __init__(self, model, context, criteria, layer_type=[nn.Conv2d, nn.Linear, nn.LSTM], example_inputs=torch.randn(1,3,224,224), ignored_layers=None):
         store_attr()
6 changes: 3 additions & 3 deletions fasterai/quantize/quantize_callback.py
@@ -1,9 +1,9 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/quantize/06b_quantize.quantize_callback.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/quantize/quantize_callback.ipynb.
 
 # %% auto 0
 __all__ = ['QuantizeCallback']
 
-# %% ../../nbs/quantize/06b_quantize.quantize_callback.ipynb 3
+# %% ../../nbs/quantize/quantize_callback.ipynb 3
 import copy
 import torch
 import torch.nn as nn
@@ -15,7 +15,7 @@
 import torch.ao.quantization.quantize_fx as quantize_fx
 from torch.ao.quantization.quantize_fx import convert_fx, prepare_fx
 
-# %% ../../nbs/quantize/06b_quantize.quantize_callback.ipynb 5
+# %% ../../nbs/quantize/quantize_callback.ipynb 5
 class QuantizeCallback(Callback):
     def __init__(self, qconfig_mapping=None, backend='x86'):
         self.qconfig_mapping = qconfig_mapping or get_default_qat_qconfig_mapping(backend)
6 changes: 3 additions & 3 deletions fasterai/quantize/quantizer.py
@@ -1,9 +1,9 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/quantize/06_quantize.quantizer.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/quantize/quantizer.ipynb.
 
 # %% auto 0
 __all__ = ['Quantizer']
 
-# %% ../../nbs/quantize/06_quantize.quantizer.ipynb 2
+# %% ../../nbs/quantize/quantizer.ipynb 2
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
@@ -12,7 +12,7 @@
 import torch.ao.quantization.quantize_fx as quantize_fx
 from torch.ao.quantization.quantize_fx import convert_fx, prepare_fx
 
-# %% ../../nbs/quantize/06_quantize.quantizer.ipynb 4
+# %% ../../nbs/quantize/quantizer.ipynb 4
 class Quantizer():
     def __init__(self, backend="x86"):
         self.qconfig = get_default_qconfig_mapping(backend)
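
Quantizer's methods are collapsed, but its imports pin down the standard FX-graph quantization flow. A minimal post-training static quantization sketch using only the imported APIs (the class may wrap these steps differently):

import torch
import torch.nn as nn
from torch.ao.quantization import get_default_qconfig_mapping
from torch.ao.quantization.quantize_fx import prepare_fx, convert_fx

model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.ReLU()).eval()
example_inputs = (torch.randn(1, 3, 224, 224),)
qconfig_mapping = get_default_qconfig_mapping("x86")

prepared = prepare_fx(model, qconfig_mapping, example_inputs)  # insert observers
prepared(*example_inputs)                                      # calibrate on sample data
quantized = convert_fx(prepared)                               # produce the int8 model
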
6 changes: 3 additions & 3 deletions fasterai/regularize/regularize_callback.py
@@ -1,9 +1,9 @@
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/regularize/05_regularize.regularizer.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/regularize/regularize_callback.ipynb.
 
 # %% auto 0
 __all__ = ['RegularizeCallback']
 
-# %% ../../nbs/regularize/05_regularize.regularizer.ipynb 3
+# %% ../../nbs/regularize/regularize_callback.ipynb 3
 from fastai.callback.all import *
 from fastcore.basics import store_attr
 from ..core.criteria import *
@@ -13,7 +13,7 @@
 import torch.nn as nn
 import torch.nn.functional as F
 
-# %% ../../nbs/regularize/05_regularize.regularizer.ipynb 4
+# %% ../../nbs/regularize/regularize_callback.ipynb 4
 class RegularizeCallback(Callback):
     "Callback to apply grouped weight decay"
     def __init__(self, g, wd=0.01, layer_type=nn.Conv2d):
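
RegularizeCallback is documented as "grouped weight decay": instead of penalizing individual weights, each group (for instance, a conv filter) is penalized by its norm, which pushes entire groups toward zero and makes structured pruning easier. A standalone sketch of that penalty; the callback's exact reduction and granularity handling are elided above:

import torch
import torch.nn as nn

def grouped_wd_penalty(model: nn.Module, wd: float = 0.01) -> torch.Tensor:
    penalty = torch.tensor(0.)
    for m in model.modules():
        if isinstance(m, nn.Conv2d):
            # one L2 norm per output filter, summed: a group lasso over filters
            penalty = penalty + m.weight.norm(p=2, dim=(1, 2, 3)).sum()
    return wd * penalty
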