Piyushmryaa committed on
Commit
c92f2e7
1 Parent(s): c8374e8
README.md CHANGED
@@ -1,13 +1,11 @@
- ---
- title: CS772 ASSIGNMENT1
- emoji: 📚
- colorFrom: blue
- colorTo: gray
- sdk: streamlit
- sdk_version: 1.31.1
- app_file: app.py
- pinned: false
- license: mit
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # CS772 Assignment 1: Implementation of Backpropagation and Training a Palindrome Network
+ 1. Implement backpropagation yourself (using any existing tool/platform is not allowed).
+ 2. Design a suitable architecture for the palindrome problem.
+ 3. Train a feedforward network to solve the 10-bit palindrome problem (inputs are bit strings of 0s and 1s); there are 1024 input strings, each labeled 1 if the string is a palindrome and 0 if it is not.
+ 4. Train and test using 4-fold cross-validation.
+ 5. Measure precision.
+ 6. Find out what the hidden-layer neurons are doing (very important).
+
+ ### Insights
+ 1. XOR-ing bits at opposite ends of the input might work, but since XOR is not linearly separable it cannot be computed by a single neuron; see the sketch below.
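
A minimal sketch of that mirrored-pair idea (the helper name `is_palindrome_by_xor` is illustrative, not a file in this repo):

```python
# A 10-bit string is a palindrome iff every mirrored pair of bits XORs to 0.
def is_palindrome_by_xor(bits):
    return all((bits[i] ^ bits[-1 - i]) == 0 for i in range(len(bits) // 2))

# XOR itself is not linearly separable: no single weights-plus-threshold neuron
# reproduces its truth table (0,0)->0, (0,1)->1, (1,0)->1, (1,1)->0,
# so the mirrored-pair comparison needs more than one neuron per pair.
print(is_palindrome_by_xor([1, 0, 1, 1, 0, 0, 1, 1, 0, 1]))  # True
```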
ReLuOneNeuronArchi.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
app.py ADDED
@@ -0,0 +1,51 @@
+ import streamlit as st
+ from mygrad import Layer, Value
+ import pickle
+
+ # Forward pass: one ReLU hidden neuron over the 10 input bits, then a sigmoid
+ # output neuron over the hidden activation plus the raw bits (skip connection)
+ def predict(x):
+     x1 = hiddenLayer1(x)
+     final = outputLayer([x1] + x)
+     return final.data
+
+ # Load the pickled weights and biases into a fresh two-layer network
+ def loadModel():
+     with open('parameters/neuron1weightsbias_fn_reLu.pckl', 'rb') as file:
+         neuron1weightsbias = pickle.load(file)
+     with open('parameters/outputneuronweightsbias2.pckl', 'rb') as file:
+         outputneuronweightsbias = pickle.load(file)
+     hiddenLayer1_ = Layer(10, 1, 'reLu')
+     outputLayer_ = Layer(11, 1, 'sigmoid')
+
+     hiddenLayer1_.neurons[0].w = [Value(i) for i in neuron1weightsbias[:-1]]
+     hiddenLayer1_.neurons[0].b = Value(neuron1weightsbias[-1])
+
+     outputLayer_.neurons[0].w = [Value(i) for i in outputneuronweightsbias[:-1]]
+     outputLayer_.neurons[0].b = Value(outputneuronweightsbias[-1])
+     return hiddenLayer1_, outputLayer_
+
+ hiddenLayer1, outputLayer = loadModel()
+
+ st.title("Neural Network Prediction")
+
+ st.header("Input")
+ inputs = st.text_input("Input a 10-digit binary number")
+ bits = []
+ flag = 0
+ if len(inputs) != 10:
+     st.write("Error: input is not 10 bits long")
+     flag = 1
+ for i in inputs:
+     if i != '0' and i != '1':
+         st.write("Please input a binary number only")
+         flag = 1
+     else:
+         bits.append(int(i))
+
+ # Prediction
+ if st.button("Predict"):
+     if flag:
+         st.stop()
+     result = predict(bits)
+     st.success(f"The prediction is: {result}")
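
Locally the UI launches with `streamlit run app.py`, provided the `parameters/` pickles produced during training are present (they are referenced by `loadModel` but not added in this commit). The wiring in `predict` can also be exercised standalone; a minimal sketch with freshly initialized, untrained layers (so the score itself is meaningless):

```python
from mygrad import Layer, Value

hidden = Layer(10, 1, 'reLu')      # 10 bits -> 1 ReLU hidden neuron
output = Layer(11, 1, 'sigmoid')   # hidden activation + 10 raw bits -> score

bits = [Value(float(c)) for c in '0110110110']
h = hidden(bits)                   # single neuron, so a single Value
score = output([h] + bits)         # skip connection: raw bits feed the output too
print(score.data)                  # in (0, 1); > 0.5 would read as "palindrome"
```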
data.pckl ADDED
Binary file (18.4 kB)
 
mygrad.py ADDED
@@ -0,0 +1,262 @@
+ import math
+ import random
+
+ def f(x):  # sample quadratic from early experiments (unused by the app)
+     return 3*x**2 + 2*x + 4
+
+ def sigmoid(x):
+     return 1/(1 + math.exp(-x))
+
+ def modsigmoid(x):
+     return 2/(1 + math.exp(abs(x)))
+
+
+ class Value:
+
+     def __init__(self, data, _children=(), _op='', label=''):
+         self.data = data
+         self.grad = 0.0  # derivative of the final output with respect to this node
+         self._prev = set(_children)
+         self._backward = lambda: None
+         self._op = _op
+         self.label = label
+
+     def __repr__(self):
+         return f'Value(data={self.data})'
+
+     def __add__(self, other):
+         other = other if isinstance(other, Value) else Value(other)
+         out = Value(self.data + other.data, (self, other), '+')
+
+         def _backward():
+             # out, self, and other are captured by this closure, so when the
+             # graph is walked later, out is this node and self/other its children
+             self.grad += 1.0*out.grad
+             other.grad += 1.0*out.grad
+         out._backward = _backward
+
+         return out
+
+     def __mul__(self, other):
+         other = other if isinstance(other, Value) else Value(other)
+         out = Value(self.data*other.data, (self, other), '*')
+         def _backward():
+             self.grad += other.data*out.grad
+             other.grad += self.data*out.grad
+         out._backward = _backward
+         return out
+
+     def __pow__(self, other):
+         assert isinstance(other, (int, float))
+
+         out = Value(self.data**other, (self,), f'**{other}')
+
+         def _backward():
+             self.grad += other*(self.data**(other - 1))*out.grad
+         out._backward = _backward
+         return out
+
+     def __rmul__(self, other):  # other*self
+         return self*other
+
+     def __truediv__(self, other):
+         return self*other**-1
+
+     def __neg__(self):
+         return self*-1
+
+     def __sub__(self, other):
+         return self + (-other)
+
+     def __radd__(self, other):
+         return self + other
+
+     def tanh(self):
+         x = self.data
+         t = (math.exp(2*x) - 1)/(math.exp(2*x) + 1)
+         out = Value(t, (self,), 'tanh')
+         def _backward():
+             self.grad += (1 - t**2)*out.grad
+         out._backward = _backward
+         return out
+
+     def sin(self):
+         x = self.data
+         out = Value(math.sin(x), (self,), 'sin')
+         def _backward():
+             self.grad += math.cos(x)*out.grad
+         out._backward = _backward
+         return out
+
+     def cos(self):
+         x = self.data
+         out = Value(math.cos(x), (self,), 'cos')
+         def _backward():
+             self.grad += -math.sin(x)*out.grad
+         out._backward = _backward
+         return out
+
+     def tan(self):
+         x = self.data
+         out = Value(math.tan(x), (self,), 'tan')
+         def _backward():
+             self.grad += (1/math.cos(x)**2)*out.grad
+         out._backward = _backward
+         return out
+
+     def cot(self):
+         x = self.data
+         out = Value(1/math.tan(x), (self,), 'cot')  # math has no cot; use 1/tan
+         def _backward():
+             self.grad += -(1/math.sin(x)**2)*out.grad
+         out._backward = _backward
+         return out
+
+     def sinh(self):
+         x = self.data
+         out = Value(math.sinh(x), (self,), 'sinh')
+         def _backward():
+             self.grad += math.cosh(x)*out.grad
+         out._backward = _backward
+         return out
+
+     def cosh(self):
+         x = self.data
+         out = Value(math.cosh(x), (self,), 'cosh')
+         def _backward():
+             self.grad += math.sinh(x)*out.grad
+         out._backward = _backward
+         return out
+
+     def exp(self):
+         x = self.data
+         out = Value(math.exp(x), (self,), 'exp')
+
+         def _backward():
+             self.grad += out.data*out.grad
+         out._backward = _backward
+         return out
+
+     def reLu(self):
+         x = self.data
+         out = Value(max(0, x), (self,), 'reLu')
+         def _backward():
+             if x > 0:
+                 self.grad += out.grad
+         out._backward = _backward
+         return out
+
+     def sigmoid(self):
+         x = self.data
+         s = sigmoid(x)
+         out = Value(s, (self,), 'sigmoid')
+
+         def _backward():
+             self.grad += s*(1 - s)*out.grad
+         out._backward = _backward
+         return out
+
+     def log(self):
+         x = self.data
+         out = Value(math.log(x), (self,), 'log')
+
+         def _backward():
+             self.grad += (1/x)*out.grad
+         out._backward = _backward
+         return out
+
+     def modsigmoid(self):
+         # modsigmoid(x) = 2/(1 + e^{|x|}); its derivative is
+         # -sign(x)*s*(1 - s/2) where s = modsigmoid(x)
+         x = self.data
+         s = modsigmoid(x)
+         out = Value(s, (self,), 'modsigmoid')
+
+         def _backward():
+             sign = 1 if x >= 0 else -1
+             self.grad += -sign*s*(1 - s/2)*out.grad
+         out._backward = _backward
+         return out
+
+     def sinc(self):
+         x = self.data
+         if x == 0:
+             print('error: 0 is not a valid input')
+             return
+         out = Value(math.sin(x)/x, (self,), 'sinc')
+         def _backward():
+             # d/dx sin(x)/x = (x*cos(x) - sin(x))/x**2
+             self.grad += ((x*math.cos(x) - math.sin(x))/(x**2))*out.grad
+         out._backward = _backward
+         return out
+
+     def backward(self):
+         # topological order over the graph, then one chain-rule pass in reverse
+         topo = []
+         visited = set()
+         def build_topo(v):
+             if v not in visited:
+                 visited.add(v)
+                 for child in v._prev:
+                     build_topo(child)
+                 topo.append(v)
+         build_topo(self)
+         self.grad = 1.0
+         for node in reversed(topo):
+             node._backward()
+
+
+ class Neuron:
+     def __init__(self, nin, activation='sigmoid'):
+         self.w = [Value(random.uniform(-2, 2)) for _ in range(nin)]
+         self.b = Value(random.uniform(-2, 2))
+         self.activation = activation
+
+     def parameters(self):
+         return self.w + [self.b]
+
+     def __call__(self, x):  # Neuron()(x)
+         act = sum((xi*wi for xi, wi in zip(x, self.w)), self.b)
+         if self.activation == 'sigmoid':
+             return act.sigmoid()
+         if self.activation == 'reLu':
+             return act.reLu()
+         if self.activation == 'modsigmoid':
+             return act.modsigmoid()
+         return act  # '' means no activation (linear neuron)
+
+
+ class Layer:
+     def __init__(self, nin, nout, activation='sigmoid'):
+         self.neurons = [Neuron(nin, activation=activation) for _ in range(nout)]
+
+     def parameters(self):
+         return [p for neuron in self.neurons for p in neuron.parameters()]
+
+     def __call__(self, x):
+         outs = [n(x) for n in self.neurons]
+         return outs[0] if len(outs) == 1 else outs
+
+
+ class MLP:
+     def __init__(self, nin, nouts):
+         sz = [nin] + nouts
+         self.layers = [Layer(sz[i], sz[i+1]) for i in range(len(nouts))]
+
+     def __call__(self, x):
+         for layer in self.layers:
+             x = layer(x)
+         return x
+
+     def parameters(self):
+         return [p for layer in self.layers for p in layer.parameters()]
+
+
+ def fit(n, X, Y, epochs, learning_rate):
+     for k in range(epochs):
+         ypred = [n(x) for x in X]
+         loss = sum((yout - ygt)**2 for ygt, yout in zip(Y, ypred))
+         # reset all grads to zero before the new backward pass
+         for node in n.parameters():
+             node.grad = 0
+         # deposit gradients in every node of the graph
+         loss.backward()
+         # gradient descent step: move each parameter against its gradient
+         for node in n.parameters():
+             node.data -= learning_rate*node.grad
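
A minimal usage sketch for the module above — training a tiny `MLP` on two-bit XOR with `fit` (the hyperparameters are illustrative, and convergence from a random init is not guaranteed):

```python
from mygrad import MLP, fit

# Two-bit XOR: the smallest task that needs a hidden layer
X = [[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
Y = [0.0, 1.0, 1.0, 0.0]

net = MLP(2, [3, 1])   # 2 inputs -> 3 sigmoid hidden neurons -> 1 sigmoid output
fit(net, X, Y, epochs=2000, learning_rate=0.5)
print([round(net(x).data, 2) for x in X])  # should approach [0, 1, 1, 0]
```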
preparedata.ipynb ADDED
@@ -0,0 +1,161 @@
+ {
+  "cells": [
+   {
+    "cell_type": "code",
+    "execution_count": 1,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "def checkPalindrome(number):\n",
+     "    number = str(number)\n",
+     "    l = 0\n",
+     "    r = len(number) - 1\n",
+     "    while l < r:\n",
+     "        if number[l] != number[r]:\n",
+     "            return False\n",
+     "        l, r = l + 1, r - 1\n",
+     "    return True\n",
+     "\n",
+     "def generate_non_palindromic_numbers(count):\n",
+     "    non_palindromic_numbers = []\n",
+     "    i = 0\n",
+     "    while count > 0:\n",
+     "        binary_str = bin(i)[2:]  # convert the index to binary\n",
+     "        candidate = binary_str.zfill(10)  # pad to 10 digits\n",
+     "        if not checkPalindrome(candidate):\n",
+     "            non_palindromic_numbers.append(str(candidate))\n",
+     "            count -= 1\n",
+     "        i += 1\n",
+     "    return non_palindromic_numbers\n",
+     "\n",
+     "def generate_palindromic_numbers(count):\n",
+     "    palindromic_numbers = []\n",
+     "    i = 0\n",
+     "    while count > 0:\n",
+     "        binary_str = bin(i)[2:]  # convert the index to binary\n",
+     "        candidate = binary_str.zfill(10)  # pad to 10 digits\n",
+     "        if len(candidate) > 10:\n",
+     "            print('all 10 digits palindromic numbers exhausted at', len(palindromic_numbers))\n",
+     "            return palindromic_numbers\n",
+     "        if checkPalindrome(candidate):\n",
+     "            palindromic_numbers.append(str(candidate))\n",
+     "            count -= 1\n",
+     "        i += 1\n",
+     "    return palindromic_numbers\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 5,
+    "metadata": {},
+    "outputs": [
+     {
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
+       "all 10 digits palindromic numbers exhausted at 8\n"
+      ]
+     }
+    ],
+    "source": [
+     "data = generate_non_palindromic_numbers(512) + generate_palindromic_numbers(512)*16"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 6,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "label = []\n",
+     "for number in data:\n",
+     "    if checkPalindrome(number):\n",
+     "        label.append(1)\n",
+     "    else:\n",
+     "        label.append(0)"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 7,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "import pandas as pd\n",
+     "df = pd.DataFrame({'number': data, 'label': label})"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 8,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "import pickle\n",
+     "with open('data6dig.pckl', 'wb') as file:\n",
+     "    pickle.dump(df, file)"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 12,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "df.to_csv('data.csv')"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 13,
+    "metadata": {},
+    "outputs": [
+     {
+      "data": {
+       "text/plain": [
+        "(512, 512)"
+       ]
+      },
+      "execution_count": 13,
+      "metadata": {},
+      "output_type": "execute_result"
+     }
+    ],
+    "source": [
+     "label.count(0), label.count(1)"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": []
+   }
+  ],
+  "metadata": {
+   "kernelspec": {
+    "display_name": "Python 3",
+    "language": "python",
+    "name": "python3"
+   },
+   "language_info": {
+    "codemirror_mode": {
+     "name": "ipython",
+     "version": 3
+    },
+    "file_extension": ".py",
+    "mimetype": "text/x-python",
+    "name": "python",
+    "nbconvert_exporter": "python",
+    "pygments_lexer": "ipython3",
+    "version": "3.11.4"
+   }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 2
+ }
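
For reference, the full 10-bit dataset described in the assignment can be enumerated directly; a minimal sketch (not a cell in this notebook) that also shows why the palindrome list above is repeated 16 times — palindromes are heavily outnumbered:

```python
def check_palindrome(s):
    return s == s[::-1]

data = [bin(i)[2:].zfill(10) for i in range(1024)]    # all 1024 10-bit strings
labels = [1 if check_palindrome(s) else 0 for s in data]
print(sum(labels), len(labels) - sum(labels))          # 32 palindromes vs 992 non-palindromes
```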
usethistocheckaccuracywotraingin.ipynb ADDED
@@ -0,0 +1,148 @@
+ {
+  "cells": [
+   {
+    "cell_type": "code",
+    "execution_count": 7,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "from mygrad import Layer\n",
+     "from mygrad import Value\n",
+     "import pickle\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 8,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "def predict(x):\n",
+     "    x1 = hiddenLayer1(x)\n",
+     "    final = outputLayer([x1] + x)\n",
+     "    return final"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 9,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "from sklearn.metrics import accuracy_score, precision_score, f1_score, recall_score\n",
+     "def getAccuracy(X, Y):\n",
+     "    predicted = [1 if predict(x).data > 0.5 else 0 for x in X]\n",
+     "    return accuracy_score(Y, predicted)\n",
+     "def getPrecision(X, Y):\n",
+     "    predicted = [1 if predict(x).data > 0.5 else 0 for x in X]\n",
+     "    return precision_score(Y, predicted)\n",
+     "def getf1(X, Y):\n",
+     "    predicted = [1 if predict(x).data > 0.5 else 0 for x in X]\n",
+     "    return f1_score(Y, predicted)\n",
+     "def getRecall(X, Y):\n",
+     "    predicted = [1 if predict(x).data > 0.5 else 0 for x in X]\n",
+     "    return recall_score(Y, predicted)\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 10,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "# Load model\n",
+     "\n",
+     "def loadModel():\n",
+     "    with open('parameters/neuron1weightsbias_fn_reLu.pckl', 'rb') as file:\n",
+     "        neuron1weightsbias = pickle.load(file)\n",
+     "    with open('parameters/outputneuronweightsbias_fn_reLu.pckl', 'rb') as file:\n",
+     "        outputneuronweightsbias = pickle.load(file)\n",
+     "    hiddenLayer1_ = Layer(10, 1, 'reLu')\n",
+     "    outputLayer_ = Layer(11, 1, 'sigmoid')\n",
+     "\n",
+     "    hiddenLayer1_.neurons[0].w = [Value(i) for i in neuron1weightsbias[:-1]]\n",
+     "    hiddenLayer1_.neurons[0].b = Value(neuron1weightsbias[-1])\n",
+     "\n",
+     "    outputLayer_.neurons[0].w = [Value(i) for i in outputneuronweightsbias[:-1]]\n",
+     "    outputLayer_.neurons[0].b = Value(outputneuronweightsbias[-1])\n",
+     "    return hiddenLayer1_, outputLayer_, neuron1weightsbias, outputneuronweightsbias"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 11,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "hiddenLayer1, outputLayer, neuron1weightsbias, outputneuronweightsbias = loadModel()"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 12,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "import pickle\n",
+     "with open('data.pckl', 'rb') as file:\n",
+     "    data = pickle.load(file)\n",
+     "from sklearn.utils import shuffle\n",
+     "data = shuffle(data)\n",
+     "X = [list(number) for number in data['number']]\n",
+     "Y = [label for label in data['label']]\n",
+     "for ix, row in enumerate(X):\n",
+     "    X[ix] = [Value(float(item)) for item in row]\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 13,
+    "metadata": {},
+    "outputs": [
+     {
+      "data": {
+       "text/plain": [
+        "0.9609375"
+       ]
+      },
+      "execution_count": 13,
+      "metadata": {},
+      "output_type": "execute_result"
+     }
+    ],
+    "source": [
+     "getAccuracy(X, Y)\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": []
+   }
+  ],
+  "metadata": {
+   "kernelspec": {
+    "display_name": "Python 3",
+    "language": "python",
+    "name": "python3"
+   },
+   "language_info": {
+    "codemirror_mode": {
+     "name": "ipython",
+     "version": 3
+    },
+    "file_extension": ".py",
+    "mimetype": "text/x-python",
+    "name": "python",
+    "nbconvert_exporter": "python",
+    "pygments_lexer": "ipython3",
+    "version": "3.11.4"
+   }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 2
+ }
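
Since the assignment asks for precision specifically, a short worked sketch of what `precision_score` returns, on hypothetical labels:

```python
# precision = TP / (TP + FP): of everything predicted as "palindrome" (1),
# the fraction that really was one
y_true = [1, 0, 1, 1, 0, 0]
y_pred = [1, 1, 1, 0, 0, 0]
tp = sum(1 for t, p in zip(y_true, y_pred) if t == 1 and p == 1)
fp = sum(1 for t, p in zip(y_true, y_pred) if t == 0 and p == 1)
print(tp / (tp + fp))  # 0.666..., matching precision_score(y_true, y_pred)
```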