Commit

Committing changes from local system
shreyanshhub committed Mar 2, 2024
1 parent f2a0652 commit 743d6d8
Showing 8 changed files with 100 additions and 51 deletions.
Binary file added __pycache__/ffnn.cpython-39.pyc
Binary file added __pycache__/layer.cpython-39.pyc
Binary file added __pycache__/neuron.cpython-39.pyc
Binary file added __pycache__/scalar.cpython-39.pyc
Binary file added __pycache__/vis.cpython-39.pyc
21 changes: 10 additions & 11 deletions neuron.py
@@ -4,17 +4,16 @@

class Neuron:

    def __init__(self,nin):
        self.w = [Scalar(random.uniform(-1,1)) for i in range(nin)]
-       self.b = random.uniform(-1,1)
+       self.b = Scalar(random.uniform(-1,1))

    def __call__(self,x):
-       out = sum(w_i*x_i for w_i,x_i in zip(self.w,x))
-       activation = (out+self.b).tanh()
-       return activation
+       activation = sum((wj*xj for wj,xj in zip(self.w,x)),self.b)
+       output = activation.tanh()
+       return output

    def parameters(self):
        return self.w + [self.b]

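The switch to a Scalar-valued bias is what makes the parameter updates in test.py below possible, since only Scalar objects carry a .grad field. A minimal usage sketch, assuming scalar.py and neuron.py from this commit are importable; the input values and step size are illustrative only:

from neuron import Neuron

n = Neuron(3)                      # 3 inputs -> 3 weights and 1 bias, all Scalars
out = n([1.0, -0.5, 2.0])          # forward pass returns a Scalar in (-1, 1)
out.backward()                     # fills in .grad on every parameter

for p in n.parameters():           # 4 parameters: w[0], w[1], w[2], b
    p.data += -0.01 * p.grad       # one gradient-descent step
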
85 changes: 45 additions & 40 deletions scalar.py
@@ -1,101 +1,106 @@
from vis import *
import math   # assumed import: exp() below calls math.exp, which this hunk does not show being imported
import numpy as np

class Scalar:

    def __init__(self,data,_children=(),_op='',label=''):
        self.data = data
        self.grad = 0.0
        self._backward = lambda: None
        self._prev = set(_children)
        self._op = _op
        self.label = label

    def __repr__(self):
        return f"Scalar(data={self.data})"

    def __add__(self,other):
        other = other if isinstance(other,Scalar) else Scalar(other)
        out = Scalar(self.data+other.data,(self,other),'+')

        def _backward():
-           self.grad += 1.0*out.grad
-           other.grad += 1.0*out.grad
+           self.grad += out.grad
+           other.grad += out.grad

        out._backward = _backward

        return out

    def __mul__(self,other):
        other = other if isinstance(other,Scalar) else Scalar(other)
        out = Scalar(self.data*other.data,(self,other),'*')

        def _backward():
            self.grad += other.data*out.grad
            other.grad += self.data*out.grad

        out._backward = _backward
        return out

    def __rmul__(self,other):
        return self*other

    def exp(self):
        out = Scalar(math.exp(self.data),(self,),'exp')

        def _backward():
            self.grad += out.data*out.grad   # d(e^x)/dx = e^x, which is out.data
        out._backward = _backward

        return out

    def __pow__(self,other):
        assert isinstance(other,(int,float)),'support float/int'
        out = Scalar(self.data**other,(self,),f'**{other}')

        def _backward():
            self.grad += other*(self.data**(other-1))*out.grad
        out._backward = _backward

        return out

    def __truediv__(self,other):
        return self*other**-1

    def __neg__(self):
        return self*-1

    def __sub__(self,other):
        return self + (-other)

    def __radd__(self, other): # other + self
        return self + other

    def tanh(self):
        n = self.data
-       ot = (math.exp(2*n)-1)/(math.exp(2*n)+1)
+       ot = (np.exp(2*n)-1)/(np.exp(2*n)+1)

        out = Scalar(ot,(self,),'tanh')

        def _backward():
            self.grad += (1 - ot**2)*out.grad
        out._backward = _backward
        return out

    def backward(self):

        topo = []
        visited = set()
        def build_topo(v):
            if v not in visited:
                visited.add(v)
                for child in v._prev:
                    build_topo(child)
                topo.append(v)
        build_topo(self)

-       self.grad = 1.0
-       for node in reversed(topo):
-           node._backward()
+       # go one variable at a time and apply the chain rule to get its gradient
+       self.grad = 1
+       for v in reversed(topo):
+           v._backward()
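To sanity-check backward() end to end, a hand-computable expression is useful. A minimal sketch, assuming scalar.py above is importable; for c = a*b + a the expected gradients are dc/da = b + 1 and dc/db = a:

from scalar import Scalar

a = Scalar(2.0, label='a')
b = Scalar(-3.0, label='b')
c = a*b + a                  # c = a*b + a

c.backward()                 # topological sort, then chain rule from c outwards

print(a.grad)                # -2.0, i.e. b.data + 1
print(b.grad)                #  2.0, i.e. a.data
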
45 changes: 45 additions & 0 deletions test.py
@@ -0,0 +1,45 @@
from ffnn import *
import matplotlib.pyplot as plt


f = FFNN(3,[4,3,1])

test_data = [[random.uniform(-1,1) for i in range(3)] for _ in range(10)]
labels = [random.choice([0,1]) for i in range(len(test_data))]

pred = [f(data) for data in test_data]

# implementing Stochastic Gradient Descent

epochs = 10
eta = 0.01

losses = []

for i in range(epochs):

    print("---------------Epoch -------------------",i)

    # Forward propagation
    pred = [f(data) for data in test_data]
    loss = sum((y_pred-y_true)**2 for y_pred,y_true in zip(pred,labels))
    losses.append(loss.data)

    print('Loss ',i,loss)

    for parameter in f.parameters():
        parameter.grad = 0.0

    # Backward propagation
    loss.backward()

    # Updating parameters
    for parameter in f.parameters():
        parameter.data += -eta * parameter.grad


plt.plot([i for i in range(epochs)],losses)
plt.xlabel('epoch')
plt.ylabel('loss')

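As committed, test.py builds the loss curve but never renders it. A small follow-up sketch, reusing f, test_data, labels and plt from the script above; the 0.5 decision threshold is an illustrative choice, not part of the commit:

plt.show()   # actually display the loss curve when running as a script

# threshold the tanh outputs against the 0/1 labels and report accuracy
final_pred = [f(data) for data in test_data]
predicted_classes = [1 if p.data > 0.5 else 0 for p in final_pred]
accuracy = sum(int(p == y) for p, y in zip(predicted_classes, labels)) / len(labels)
print('training accuracy:', accuracy)
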