Commit 743d6d8 (parent f2a0652): 8 changed files with 100 additions and 51 deletions.
@@ -1,101 +1,106 @@
from vis import *
import math
import numpy as np


class Scalar:

    def __init__(self, data, _children=(), _op='', label=''):
        self.data = data
        self.grad = 0.0
        self._backward = lambda: None
        self._prev = set(_children)
        self._op = _op
        self.label = label

    def __repr__(self):
        return f"Scalar(data={self.data})"

    def __add__(self, other):
        other = other if isinstance(other, Scalar) else Scalar(other)
        out = Scalar(self.data + other.data, (self, other), '+')

        def _backward():
            # d(out)/d(self) = d(out)/d(other) = 1, so the upstream gradient passes through
            self.grad += out.grad
            other.grad += out.grad

        out._backward = _backward
        return out

    def __mul__(self, other):
        other = other if isinstance(other, Scalar) else Scalar(other)
        out = Scalar(self.data * other.data, (self, other), '*')

        def _backward():
            # product rule: each operand's gradient is the other operand's value
            self.grad += other.data * out.grad
            other.grad += self.data * out.grad

        out._backward = _backward
        return out

    def __rmul__(self, other):
        return self * other

    def exp(self):
        out = Scalar(math.exp(self.data), (self,), 'exp')

        def _backward():
            # d/dx e^x = e^x, which is out.data
            self.grad += out.data * out.grad

        out._backward = _backward
        return out

    def __pow__(self, other):
        assert isinstance(other, (int, float)), 'only supporting int/float powers'
        out = Scalar(self.data ** other, (self,), f'**{other}')

        def _backward():
            self.grad += other * (self.data ** (other - 1)) * out.grad

        out._backward = _backward
        return out

    def __truediv__(self, other):
        return self * other ** -1

    def __neg__(self):
        return self * -1

    def __sub__(self, other):
        return self + (-other)

    def __radd__(self, other):  # other + self
        return self + other

    def tanh(self):
        n = self.data
        ot = (np.exp(2 * n) - 1) / (np.exp(2 * n) + 1)

        out = Scalar(ot, (self,), 'tanh')

        def _backward():
            # d/dx tanh(x) = 1 - tanh(x)**2
            self.grad += (1 - ot ** 2) * out.grad

        out._backward = _backward
        return out

    def backward(self):
        # topological order of all nodes in the graph, children before parents
        topo = []
        visited = set()

        def build_topo(v):
            if v not in visited:
                visited.add(v)
                for child in v._prev:
                    build_topo(child)
                topo.append(v)

        build_topo(self)

        # go one variable at a time and apply the chain rule to get its gradient
        self.grad = 1.0
        for v in reversed(topo):
            v._backward()
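
A quick sanity check of the autograd logic above; this usage sketch is illustrative only (the names a, b, c are not from the repository):

a = Scalar(2.0, label='a')
b = Scalar(-3.0, label='b')
c = (a * b + 1.0).tanh()    # forward pass builds the expression graph

c.backward()                # seeds c.grad = 1.0, then applies the chain rule in topological order

print(c.data)    # tanh(-5.0) ≈ -0.9999
print(a.grad)    # (1 - tanh(-5.0)**2) * b.data ≈ -0.0005
print(b.grad)    # (1 - tanh(-5.0)**2) * a.data ≈  0.0004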
@@ -0,0 +1,45 @@
from ffnn import *
import random
import matplotlib.pyplot as plt


# a small network: 3 inputs and layer sizes [4, 3, 1]
f = FFNN(3, [4, 3, 1])

# random toy dataset: 10 samples of 3 features each, with binary labels
test_data = [[random.uniform(-1, 1) for i in range(3)] for _ in range(10)]
labels = [random.choice([0, 1]) for i in range(len(test_data))]

pred = [f(data) for data in test_data]

# implementing Stochastic Gradient Descent

epochs = 10
eta = 0.01

losses = []

for i in range(epochs):

    print('--------------- Epoch', i, '---------------')

    # Forward propagation
    pred = [f(data) for data in test_data]
    loss = sum((y_pred - y_true) ** 2 for y_pred, y_true in zip(pred, labels))
    losses.append(loss.data)

    print('Loss', i, loss)

    # Zero the gradients before backpropagation
    for parameter in f.parameters():
        parameter.grad = 0.0

    # Backward propagation
    loss.backward()

    # Updating parameters
    for parameter in f.parameters():
        parameter.data += -eta * parameter.grad


plt.plot([i for i in range(epochs)], losses)
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()
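
ffnn.py itself is not shown in this diff, so the exact FFNN implementation is unknown; the training loop above only requires that the model be callable on a list of inputs and expose a parameters() list of Scalar objects. A hypothetical micrograd-style skeleton with just that surface, purely to illustrate the assumed interface:

import random
# assumes the Scalar class from the first file in this commit is in scope

class Neuron:
    def __init__(self, nin):
        self.w = [Scalar(random.uniform(-1, 1)) for _ in range(nin)]
        self.b = Scalar(0.0)
    def __call__(self, x):
        # weighted sum of inputs plus bias, squashed through tanh
        return sum((wi * xi for wi, xi in zip(self.w, x)), self.b).tanh()
    def parameters(self):
        return self.w + [self.b]

class Layer:
    def __init__(self, nin, nout):
        self.neurons = [Neuron(nin) for _ in range(nout)]
    def __call__(self, x):
        outs = [n(x) for n in self.neurons]
        return outs[0] if len(outs) == 1 else outs
    def parameters(self):
        return [p for n in self.neurons for p in n.parameters()]

class FFNN:
    def __init__(self, nin, layer_sizes):
        sizes = [nin] + layer_sizes
        self.layers = [Layer(sizes[i], sizes[i + 1]) for i in range(len(layer_sizes))]
    def __call__(self, x):
        for layer in self.layers:
            x = layer(x)
        return x
    def parameters(self):
        return [p for layer in self.layers for p in layer.parameters()]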