-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathtensor.py
More file actions
100 lines (70 loc) · 2.61 KB
/
tensor.py
File metadata and controls
100 lines (70 loc) · 2.61 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
## Engine.py
import numpy as np
class Tensor:
    """A minimal reverse-mode autodiff tensor backed by a NumPy float32 array.

    Each operation records its parents (``_prev``) and a closure
    (``_backward``) that accumulates gradients into them. Calling
    :meth:`backward` on a result topologically sorts the graph and
    propagates gradients from the output back to every leaf.
    """

    def __init__(self, data, _children=(), _op=""):
        """Wrap *data* (any array-like) as a float32 tensor.

        _children: parent tensors that produced this node (graph edges).
        _op: human-readable operation label, kept only for graph debugging.
        """
        self.data = np.array(data, dtype=np.float32)
        self.grad = np.zeros_like(self.data)
        self._backward = lambda: None  # overwritten by the op that creates this node
        self._prev = set(_children)
        self._op = _op  # for graph debugging

    def __repr__(self):
        return f"Tensor(data={self.data}, grad={self.grad})"

    def __add__(self, other):
        """Elementwise add; d(out)/d(self) = d(out)/d(other) = 1."""
        other = other if isinstance(other, Tensor) else Tensor(other)
        out = Tensor(self.data + other.data, _children=(self, other), _op="+")

        def _backward():
            self.grad += 1.0 * out.grad
            other.grad += 1.0 * out.grad
        out._backward = _backward
        return out

    def __neg__(self):
        """Elementwise negation; d(out)/d(self) = -1."""
        out = Tensor(-self.data, (self,), _op='neg')

        def _backward():
            self.grad += -1 * out.grad
        out._backward = _backward
        return out

    def __sub__(self, other):
        """Subtraction expressed as addition of the negation."""
        return self + (-other)

    def __mul__(self, other):
        """Elementwise product; each parent's gradient is scaled by the other's data."""
        other = other if isinstance(other, Tensor) else Tensor(other)
        out = Tensor(self.data * other.data, _children=(self, other), _op="*")

        def _backward():
            self.grad += other.data * out.grad
            other.grad += self.data * out.grad
        out._backward = _backward
        return out

    def __matmul__(self, other):
        """Matrix product. d(out)/d(self) = g @ other.T; d(out)/d(other) = self.T @ g.

        FIX: coerce *other* to Tensor, consistent with __add__/__mul__
        (the original raised AttributeError for raw array-likes).
        """
        other = other if isinstance(other, Tensor) else Tensor(other)
        out = Tensor(self.data @ other.data, (self, other), '@')

        def _backward():
            self.grad += out.grad @ other.data.T
            other.grad += self.data.T @ out.grad
        out._backward = _backward
        return out

    def sum(self):
        """Reduce to a scalar; the gradient broadcasts ones back to the input shape."""
        out = Tensor(self.data.sum(), (self,), 'sum')

        def _backward():
            grad = np.ones_like(self.data) * out.grad
            self.grad += grad
        out._backward = _backward
        return out

    def __pow__(self, power):
        """Elementwise power for a scalar exponent; d(out)/d(self) = p * x**(p-1)."""
        out = Tensor(self.data ** power, (self,), f'**{power}')

        def _backward():
            self.grad += (power * (self.data ** (power - 1))) * out.grad
        out._backward = _backward
        return out

    def backward(self):
        """Run reverse-mode autodiff from this node.

        Seeds this node's gradient with ones, then visits every node in
        reverse topological order, letting each op's closure accumulate
        gradients into its parents.
        """
        topo = []
        visited = set()

        def build_topo(v):
            if v not in visited:
                visited.add(v)
                for child in v._prev:
                    build_topo(child)
                topo.append(v)
        build_topo(self)

        self.grad = np.ones_like(self.data)
        for node in reversed(topo):
            node._backward()

    def zero_grad(self):
        """Reset the accumulated gradient.

        FIX: reset to a zeros array of the data's shape/dtype (as __init__
        does) rather than the scalar int 0, which left ``grad`` with an
        inconsistent type until the next accumulation.
        """
        self.grad = np.zeros_like(self.data)