Commit

weilinquan update
weilinquan committed Dec 23, 2022
1 parent 0b71998 commit 9994040
Showing 9 changed files with 17 additions and 3 deletions.
Binary file modified __pycache__/Tensor.cpython-39.pyc
Binary file modified nn/__pycache__/Linear.cpython-39.pyc
Binary file modified nn/__pycache__/Lossfunction.cpython-39.pyc
Binary file modified nn/__pycache__/Module.cpython-39.pyc
Binary file modified nn/__pycache__/Parameter.cpython-39.pyc
Binary file modified nn/function/__pycache__/relu.cpython-39.pyc
Binary file modified nn/optimizer/__pycache__/optimizer.cpython-39.pyc
6 changes: 3 additions & 3 deletions nn/optimizer/optimizer.py
@@ -61,13 +61,13 @@ def __call__(self, params):
  self.n += 1

  for i in range(len(params['parameter'])):
-     self.m = self.beta1 * self.m + (1 - self.beta1) * params['gradient'][i]
-     self.v = self.beta2 * self.v + (1 - self.beta2) * np.square(params['gradient'][i])
+     self.m[i] = self.beta1 * self.m[i] + (1 - self.beta1) * params['gradient'][i]
+     self.v[i] = self.beta2 * self.v[i] + (1 - self.beta2) * np.square(params['gradient'][i])

      alpha = self.lr * np.sqrt(1 - np.power(self.beta2, self.n))
      alpha = alpha / (1 - np.power(self.beta1, self.n))

-     params['parameter'][i].data -= alpha * self.m / (np.sqrt(self.v) + self.eps)
+     params['parameter'][i].data -= alpha * self.m[i] / (np.sqrt(self.v[i]) + self.eps)
  """
  self.m = self.beta1 * self.m + (1 - self.beta1) * grads
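The change above switches Adam's moment estimates from a single shared value to one estimate per parameter, so tensors of different shapes no longer overwrite each other's statistics. Below is a minimal, self-contained sketch of that per-parameter update; the function name adam_step and the list-based m/v state are illustrative assumptions, not this repository's API.

import numpy as np

def adam_step(params, grads, m, v, n, lr=1e-3, beta1=0.9, beta2=0.999, eps=1e-8):
    # params, grads, m, v are parallel lists of NumPy arrays; n is the step count (>= 1).
    for i in range(len(params)):
        # Keep a separate first/second moment per parameter instead of one shared m/v.
        m[i] = beta1 * m[i] + (1 - beta1) * grads[i]
        v[i] = beta2 * v[i] + (1 - beta2) * np.square(grads[i])
        # Bias correction folded into the step size, as in the diff above.
        alpha = lr * np.sqrt(1 - np.power(beta2, n)) / (1 - np.power(beta1, n))
        params[i] -= alpha * m[i] / (np.sqrt(v[i]) + eps)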
14 changes: 14 additions & 0 deletions tests/test_minist.py
@@ -0,0 +1,14 @@
+ import sys
+ sys.path.append("..")
+ from nn.function.relu import ReLu
+ import nn
+ from nn.Linear import Linear
+ from Tensor import Tensor
+ from nn.Lossfunction import MSELoss
+ from nn.optimizer.optimizer import SGD
+ import numpy
+ from matplotlib import pyplot
+
+ class model(nn.Module.Module):
+     def __init__(self):
+         self.conv1 =
