Piyushmryaa committed on
Commit
180d138
1 Parent(s): 9f0f035
Files changed (1)
  1. mygrad.py +239 -0
mygrad.py ADDED
@@ -0,0 +1,239 @@
+ import math
+ import numpy as np
+ import matplotlib.pyplot as plt
+ import random
+ from typing import Any
+
+ K = 1  # steepness constant used by the sigmoid activation
+
+ def sigmoid(x):
+     return 1/(1 + np.exp(-K*x))
+
+ def modsigmoid(x):
+     return 2/(1 + math.exp(abs(x)))
+
+
+ class Value:
+
+     def __init__(self, data, _children=(), _op='', label=''):
+         self.data = data
+         self.grad = 0.0  # derivative of the final output with respect to this node
+         self._prev = set(_children)
+         self._backward = lambda: None
+         self._op = _op
+         self.label = label
+
+     def __repr__(self):
+         return f'Value(data={self.data})'
+
+     def __add__(self, other):
+         other = other if isinstance(other, Value) else Value(other)
+         out = Value(self.data + other.data, (self, other), '+')
+
+         def _backward():
+             # _backward closes over self, other and out, so when it runs later
+             # during backprop it still refers to the nodes created in this call
+             self.grad += 1.0*out.grad
+             other.grad += 1.0*out.grad
+         out._backward = _backward
+
+         return out
+
+     def __mul__(self, other):
+         other = other if isinstance(other, Value) else Value(other)
+         out = Value(self.data*other.data, (self, other), '*')
+
+         def _backward():
+             self.grad += other.data*out.grad
+             other.grad += self.data*out.grad
+         out._backward = _backward
+         return out
+
+     def __pow__(self, other):
+         assert isinstance(other, (int, float))
+
+         out = Value(self.data**other, (self,), f'**{other}')
+
+         def _backward():
+             self.grad += other*(self.data**(other - 1))*out.grad
+         out._backward = _backward
+         return out
+
+     def __rmul__(self, other):  # other * self
+         return self*other
+
+     def __truediv__(self, other):
+         return self*other**-1
+
+     def __neg__(self):
+         return self*-1
+
+     def __sub__(self, other):
+         return self + (-other)
+
+     def __radd__(self, other):
+         return self + other
+
+     def tanh(self):
+         x = self.data
+         t = (math.exp(2*x) - 1)/(math.exp(2*x) + 1)
+         out = Value(t, (self,), 'tanh')
+
+         def _backward():
+             self.grad += (1 - t**2)*out.grad
+         out._backward = _backward
+         return out
+
+     def sin(self):
+         x = self.data
+         out = Value(math.sin(x), (self,), 'sin')
+
+         def _backward():
+             self.grad += math.cos(x)*out.grad
+         out._backward = _backward
+         return out
+
+     def cos(self):
+         x = self.data
+         out = Value(math.cos(x), (self,), 'cos')
+
+         def _backward():
+             self.grad += -math.sin(x)*out.grad
+         out._backward = _backward
+         return out
+
+     def tan(self):
+         x = self.data
+         out = Value(math.tan(x), (self,), 'tan')
+
+         def _backward():
+             self.grad += (1/math.cos(x)**2)*out.grad
+         out._backward = _backward
+         return out
+
+     def cot(self):
+         x = self.data
+         out = Value(1/math.tan(x), (self,), 'cot')  # math has no cot; use 1/tan
+
+         def _backward():
+             self.grad += -(1/math.sin(x)**2)*out.grad
+         out._backward = _backward
+         return out
+
+     def sinh(self):
+         x = self.data
+         out = Value(math.sinh(x), (self,), 'sinh')
+
+         def _backward():
+             self.grad += math.cosh(x)*out.grad
+         out._backward = _backward
+         return out
+
+     def cosh(self):
+         x = self.data
+         out = Value(math.cosh(x), (self,), 'cosh')
+
+         def _backward():
+             self.grad += math.sinh(x)*out.grad
+         out._backward = _backward
+         return out
+
+     def exp(self):
+         x = self.data
+         out = Value(math.exp(x), (self,), 'exp')
+
+         def _backward():
+             self.grad += out.data*out.grad
+         out._backward = _backward
+         return out
+
+     def reLu(self):
+         x = self.data
+         out = Value(max(0, x), (self,), 'reLu')
+
+         def _backward():
+             if x > 0:
+                 self.grad += out.grad
+             else:
+                 self.grad += 0
+         out._backward = _backward
+         return out
+
+     def sigmoid(self):
+         x = self.data
+         s = sigmoid(x)
+         out = Value(s, (self,), 'sigmoid')
+
+         def _backward():
+             self.grad += K*s*(1 - s)*out.grad
+         out._backward = _backward
+         return out
+
+     def log(self):
+         x = self.data
+         out = Value(math.log(x), (self,), 'log')
+
+         def _backward():
+             self.grad += (1/x)*out.grad
+         out._backward = _backward
+         return out
+
+     def modsigmoid(self):
+         x = self.data
+         s = modsigmoid(x)
+         out = Value(s, (self,), 'modsigmoid')
+
+         def _backward():
+             # d/dx of 2/(1 + exp(|x|)) = -sign(x) * 2*exp(|x|) / (1 + exp(|x|))**2
+             e = math.exp(abs(x))
+             if x >= 0:
+                 self.grad += -(2*e/(1 + e)**2)*out.grad
+             else:
+                 self.grad += (2*e/(1 + e)**2)*out.grad
+
+         out._backward = _backward
+         return out
+
+
+     def sinc(self):
+         x = self.data
+         if x == 0:
+             print('error: 0 is not a valid input')
+             return
+         out = Value(math.sin(x)/x, (self,), 'sinc')
+
+         def _backward():
+             # d/dx of sin(x)/x = (x*cos(x) - sin(x)) / x**2
+             self.grad += ((x*math.cos(x) - math.sin(x))/(x**2))*out.grad
+         out._backward = _backward
+         return out
+
+     def backward(self):
+         # build a topological order of the graph, then backprop from this node
+         topo = []
+         visited = set()
+
+         def build_topo(v):
+             if v not in visited:
+                 visited.add(v)
+                 for child in v._prev:
+                     build_topo(child)
+                 topo.append(v)
+         build_topo(self)
+
+         self.grad = 1.0
+         for node in reversed(topo):
+             node._backward()
+
+
+ class Neuron:
+     def __init__(self, nin, activation='sigmoid'):
+         self.w = [Value(random.uniform(-2, 2)) for _ in range(nin)]
+         self.b = Value(random.uniform(-2, 2))
+         self.activation = activation
+
+     def parameters(self):
+         return self.w + [self.b]
+
+     def __call__(self, x):  # Neuron()(x)
+         act = sum((xi*wi for xi, wi in zip(x, self.w)), self.b)
+         if self.activation == 'sigmoid':
+             return act.sigmoid()
+         if self.activation == 'reLu':
+             return act.reLu()
+         if self.activation == 'modsigmoid':
+             return act.modsigmoid()
+         if self.activation == '':
+             return act
+         if self.activation == 'threshold':
+             return Value(1) if act.data > 0 else Value(0)
+         raise ValueError(f'unknown activation: {self.activation}')
+
+
+ class Layer:
+     def __init__(self, nin, nout, activation='sigmoid'):
+         self.neurons = [Neuron(nin, activation=activation) for _ in range(nout)]
+
+     def parameters(self):
+         return [p for neuron in self.neurons for p in neuron.parameters()]
+
+     def __call__(self, x):
+         outs = [n(x) for n in self.neurons]
+         return outs[0] if len(outs) == 1 else outs
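
A minimal usage sketch (not part of the commit) showing how the classes above are intended to be used. It assumes the file is saved as mygrad.py on the import path; the expected gradient values follow directly from the Value rules defined above.

from mygrad import Value, Layer

# scalar autograd check: out = a*b + a, so d(out)/da = b + 1 and d(out)/db = a
a = Value(2.0, label='a')
b = Value(-3.0, label='b')
out = a*b + a
out.backward()
print(a.grad, b.grad)   # -2.0, 2.0

# a single dense layer of 3 sigmoid neurons on a 2-dimensional input
layer = Layer(2, 3, activation='sigmoid')
ys = layer([Value(0.5), Value(-1.0)])
loss = sum((y - 1.0)**2 for y in ys)   # toy squared error against a target of 1
loss.backward()
print([p.grad for p in layer.parameters()])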