Commit

add linear & module & tensor & tests
weilinquan committed Oct 9, 2022
1 parent eea62a3 commit 2551842
Showing 15 changed files with 423 additions and 0 deletions.
11 changes: 11 additions & 0 deletions .idea/Manualtorch.iml

4 changes: 4 additions & 0 deletions .idea/misc.xml

8 changes: 8 additions & 0 deletions .idea/modules.xml

10 changes: 10 additions & 0 deletions .idea/vcs.xml

296 changes: 296 additions & 0 deletions .idea/workspace.xml

20 changes: 20 additions & 0 deletions Tensor.py
@@ -0,0 +1,20 @@
import numpy


class Tensor:
    """Minimal wrapper around a numpy array with a few overloaded operators."""

    def __init__(self, data, type):
        # The second argument is passed to numpy.array as the dtype.
        self.data = numpy.array(data, dtype=type)

    def __add__(self, other):
        # Element-wise addition; unwrap Tensor operands so numpy sees a raw array.
        return self.data + (other.data if isinstance(other, Tensor) else other)

    def __sub__(self, other):
        # Element-wise subtraction.
        return self.data - (other.data if isinstance(other, Tensor) else other)

    def __mul__(self, other):
        # Tensor * Tensor is matrix multiplication; anything else is
        # element-wise multiplication with the underlying array.
        if isinstance(other, Tensor):
            return numpy.matmul(self.data, other.data)
        return self.data * other

    def reshape(self, row, col):
        # Reshape the underlying array in place to (row, col).
        self.data = self.data.reshape(row, col)
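
A rough usage sketch, assuming the module is imported from the repository root as Tensor.py and behaves as defined above:

    from Tensor import Tensor

    # Two 2x2 tensors; the second argument is passed to numpy.array as dtype.
    a = Tensor([[1, 2], [3, 4]], float)
    b = Tensor([[5, 6], [7, 8]], float)

    print(a + 1)       # element-wise add with a scalar, returns a numpy array
    print(a - b)       # element-wise subtract; the Tensor operand is unwrapped
    print(a * b)       # Tensor * Tensor is a matrix product via numpy.matmul
    a.reshape(4, 1)    # reshapes a.data in place to shape (4, 1)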
Binary file added __pycache__/Tensor.cpython-39.pyc
Binary file not shown.
23 changes: 23 additions & 0 deletions nn/Linear.py
@@ -0,0 +1,23 @@
from .Module import Module
import numpy
from .Parameter import Parameter


class Linear(Module):
    """Fully connected layer computing y = x @ W for a weight matrix W of shape (row, col)."""

    def __init__(self, row, col):
        self.parameter = Parameter((row, col))
        self.inputs = []
        self.data = None

    def forward(self, x):
        # Remember the input for the backward pass, then apply the weights.
        self.inputs.append(x)
        self.data = numpy.matmul(x.data, self.parameter.data)
        return self

    def __call__(self, x):
        return self.forward(x)

    def backward(self, grad):
        # Weight gradient: dL/dW = x.T @ dL/dy.
        self.parameter.gradient = numpy.matmul(self.inputs[0].data.T, grad)
        # Propagate dL/dx = dL/dy @ W.T to an upstream module, if there is one.
        if isinstance(self.inputs[0], Module):
            self.inputs[0].backward(numpy.matmul(grad, self.parameter.data.T))
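
A minimal sketch of exercising the layer, assuming Parameter((row, col)) allocates a numpy weight array of that shape in .data along with a .gradient slot (nn/Module.py and nn/Parameter.py are part of this commit but are not rendered above):

    import numpy
    from Tensor import Tensor
    from nn.Linear import Linear

    x = Tensor([[1.0, 2.0, 3.0]], float)  # batch of one sample with three features
    layer = Linear(3, 2)                  # weight matrix of shape (3, 2)

    out = layer(x)                        # forward pass: out.data has shape (1, 2)
    grad = numpy.ones_like(out.data)      # stand-in upstream gradient dL/dy
    layer.backward(grad)                  # fills layer.parameter.gradient, shape (3, 2)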

0 comments on commit 2551842
