Skip to content

Commit

Permalink
123
Browse files Browse the repository at this point in the history
  • Loading branch information
Isaac Zi committed Nov 17, 2022
1 parent bf402a1 commit d2643f5
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 3 deletions.
2 changes: 1 addition & 1 deletion pytorch_mlp_framework/arg_extractor.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def get_args():
# Fragment of get_args(): command-line options for the experiment runner.
# NOTE(review): the scraped diff contained both the pre- and post-commit
# '--num_epochs' lines; registering the same flag twice raises
# argparse.ArgumentError, so only the post-commit line (default=10) is kept.
parser.add_argument('--num_filters', nargs="?", type=int, default=16,
                    help='Number of convolutional filters per convolutional layer in the network (excluding '
                         'dimensionality reduction layers)')
parser.add_argument('--num_epochs', nargs="?", type=int, default=10, help='Total number of epochs for model training')
parser.add_argument('--num_classes', nargs="?", type=int, default=100, help='Number of classes in the dataset')
parser.add_argument('--experiment_name', nargs="?", type=str, default="exp_1",
                    help='Experiment name - to be used for building the experiment folder')
Expand Down
6 changes: 4 additions & 2 deletions pytorch_mlp_framework/experiment_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,8 +154,10 @@ def plot_grad_flow(self, named_parameters):
Complete the code in the block below to collect absolute mean of the gradients for each layer in all_grads with the layer names in layers.
"""
########################################


# Walk the model's parameters and record, for every trainable weight
# tensor (bias terms are skipped), the mean absolute gradient alongside
# its layer name — used later for the gradient-flow plot.
for layer_name, param in named_parameters:
    if not param.requires_grad or "bias" in layer_name:
        continue  # skip frozen parameters and bias vectors
    layers.append(layer_name)
    all_grads.append(param.grad.abs().mean())
########################################


Expand Down

0 comments on commit d2643f5

Please sign in to comment.