Sreeja123 committed on
Commit
34e4387
1 Parent(s): 52e296b
Files changed (1)
  1. Memristor.py +188 -0
Memristor.py ADDED
@@ -0,0 +1,188 @@
+ import pandas as pd
+ import numpy as np
+ import matplotlib.pyplot as plt
+ from sklearn import preprocessing
+
+
+ class memristor_models():
+     def __init__(self, Roff, Ron, Rint, Amplitude, freq, time_duration, sample_rate, p, j, model):
+         # Roff/Ron bound the memristance, Rint is the initial resistance,
+         # Amplitude/freq/time_duration/sample_rate define the sinusoidal drive,
+         # p and j are window-function parameters, and model selects the window
+         # ('Joglekar', 'Prodromakis', 'Biolek' or 'Zha').
+         self.initial_Roff = Roff
+         self.initial_Ron = Ron
+         self.Rint = Rint
+         self.Amplitude = Amplitude
+         self.freq = freq
+         self.time_duration = time_duration
+         self.sample_rate = sample_rate
+         self.p = p
+         self.j = j
+         self.model = model
+
+     # window functions: suppress the state derivative near the device
+     # boundaries x = 0 and x = 1
+     def jog(self, x1, p):
+         # Joglekar window: f(x) = 1 - (2x - 1)^(2p)
+         f_x = 1 - (((2 * x1) - 1) ** (2 * p))
+         return f_x
+
+     def Prodro(self, x1, p, j):
+         # Prodromakis window: f(x) = j * (1 - ((x - 0.5)^2 + 0.75)^p)
+         f_x = j * (1 - (((x1 - 0.5) ** 2) + 0.75) ** p)
+         return f_x
+
+     def biolek(self, x1, p, i):
+         # Biolek window: the step term depends on the sign of the current i
+         if i < 0:
+             i = 0
+         else:
+             i = 1
+         f_x = 1 - ((x1 - i) ** p)
+         return f_x
+
+     def zha(self, x1, p, j, i):
+         # Zha window: f(x) = j * (1 - (0.25 * (x - step)^2 + 0.75)^p)
+         if i < 0:
+             i = 0
+         else:
+             i = 1
+         f_x = j * (1 - (0.25 * (x1 - i) ** 2 + 0.75) ** p)
+         return f_x
+
+     def ideal_model(self):
+         # Drive the device with a sinusoidal voltage and integrate the state
+         # variable x with an explicit Euler step (linear ion-drift model):
+         # dx/dt = (uv * Ron / D^2) * i(t) * f(x)
+         start_time = 0
+         time = np.arange(start_time, self.time_duration, 1 / self.sample_rate)
+         sinewave = self.Amplitude * np.sin(2 * np.pi * self.freq * time + 0)
+         v_mem = sinewave
+         D = 10 * pow(10, -9)     # device thickness (m)
+         uv = 10 * pow(10, -15)   # dopant mobility (m^2 s^-1 V^-1)
+         delta_R = self.initial_Roff - self.initial_Ron
+         x = (self.initial_Roff - self.Rint) / delta_R
+         x_t = []
+         i_mem = []
+         x_t.append(x)
+         R_mem = []
+         G = []
+         f1 = []
+         r_val = (self.initial_Ron * x_t[0]) + (self.initial_Roff * (1 - x_t[0]))
+         R_mem.append(r_val)
+         k = (uv * self.initial_Ron) / (D ** 2)
+         i_mem.append(0)
+         for i in range(1, len(v_mem)):
+             i_val = v_mem[i] / R_mem[i - 1]
+             i_mem.append(i_val)
+
+             if self.model == 'Joglekar':
+                 f = self.jog(x_t[i - 1], self.p)
+             elif self.model == 'Prodromakis':
+                 f = self.Prodro(x_t[i - 1], self.p, self.j)
+             elif self.model == 'Biolek':
+                 f = self.biolek(x_t[i - 1], self.p, i_val)
+             elif self.model == 'Zha':
+                 f = self.zha(x_t[i - 1], self.p, self.j, i_val)
+             else:
+                 raise ValueError("model must be 'Joglekar', 'Prodromakis', 'Biolek' or 'Zha'")
+             f1.append(f)
+             dx_dt = k * i_mem[i - 1] * f
+             dx = dx_dt * (time[i] - time[i - 1])   # forward-Euler step, dt > 0
+             x = dx + x_t[i - 1]
+             x_t.append(x)
+             r_temp = (self.initial_Ron * x) + (self.initial_Roff * (1 - x))
+             # clamp the memristance to the physical range [Ron, Roff]
+             if r_temp < self.initial_Ron:
+                 r_temp = self.initial_Ron
+             if r_temp > self.initial_Roff:
+                 r_temp = self.initial_Roff
+             R_mem.append(r_temp)
+             G.append(1 / r_temp)
+
+         self.Roff = max(R_mem)
+         self.Ron = min(R_mem)
+
+         return v_mem, i_mem, G, x_t, time, f1
+
+     def plot(self):
+         # plot the pinched i-v hysteresis loop of the simulated device
+         v, curr, G, x, t, f = self.ideal_model()
+         plt.plot(v, curr)
+         plt.ylabel('i')
+         plt.xlabel('v')
+         plt.show()
+
+     def neural_weight(self, neural_weight, X_max, X_min):
+         # Map neural-network weights with magnitudes in [X_min, X_max] to the
+         # conductance range [1/Roff, 1/Ron], preserving each weight's sign
+         # (requires self.Roff / self.Ron from a prior ideal_model() run).
+         self.weights = neural_weight   # stored under a separate name so this method is not shadowed
+
+         new_min = (1 / self.Roff)
+         new_max = (1 / self.Ron)
+
+         self.mapped_values = []
+         idx = 0
+         for item in self.weights:
+             self.mapped_values.append([])
+             for x in item:
+                 scaled_x = ((np.abs(x) - X_min) / (X_max - X_min)) * (new_max - new_min) + new_min
+                 if x < 0:
+                     scaled_x = scaled_x * -1
+                 self.mapped_values[idx].append(scaled_x)
+             idx += 1
+
+     def variability(self, partition, variability_percentage_Ron, variability_percentage_Roff):
+         # Discretise the conductance range into `partition` levels and shift the
+         # end levels by the given percentages to emulate device variability.
+         v, curr, G, x, t, f = self.ideal_model()
+         self.partition = partition
+         self.variability_percentage_Ron = variability_percentage_Ron
+         self.variability_percentage_Roff = variability_percentage_Roff
+
+         # partitioning: evenly spaced conductance levels between 1/Roff and 1/Ron
+         self.l2 = []
+         self.l2.append(1 / self.Roff)
+         step = ((1 / self.Ron) - (1 / self.Roff)) / (self.partition - 1)
+         for i in range(1, self.partition):
+             self.l2.append(self.l2[0] + (step * i))
+
+         # adding variability to the list
+         new_Goff = (1 / self.Roff) + (((1 / self.Roff) * self.variability_percentage_Roff) / 100)
+         self.l2.append(new_Goff)
+
+         new_Gon = (1 / self.Ron) + (((1 / self.Ron) * self.variability_percentage_Ron) / 100)
+         self.l2.append(new_Gon)
+
+         # keep only the levels inside the shifted [new_Goff, new_Gon] range
+         temp = [val for val in self.l2 if (val <= new_Gon and val >= new_Goff)]
+         self.l2 = temp
+
+         self.l2.sort()
+
+     def new_weights(self):
+         # Quantise each mapped conductance to the closest available level in
+         # self.l2, restore its sign and scale it by a factor of 1000.
+         self.new_values = []
+         idx = 0
+
+         def closest(lst, K):
+             return lst[min(range(len(lst)), key=lambda i: abs(lst[i] - K))]
+
+         for values in self.mapped_values:
+             self.new_values.append([])
+             for value in values:
+                 close_val = closest(self.l2, abs(value))
+                 if value < 0:
+                     close_val = close_val * -1000
+                 else:
+                     close_val = close_val * 1000
+                 self.new_values[idx].append(close_val)
+             idx += 1
+
+         return self.new_values
+
+     def Relative_Error(self):
+         # Mean relative error (in %) between the quantised weights produced with
+         # the most recent variability() settings and the quantised weights
+         # produced without variability.
+         Weights_with_var = self.new_weights()
+         self.variability(self.partition, 0, 0)
+         Weights_without_var = self.new_weights()
+
+         error = []
+         for i, j in zip(Weights_without_var, Weights_with_var):
+             l = np.abs(np.array(i) - np.array(j)) / np.abs(np.array(i))
+             error.append(l)
+
+         error = np.concatenate(error)
+         return np.sum(error) * 100 / error.size
+
+
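
Example usage (a minimal sketch; the device parameters, weight matrix, X_max/X_min range, partition count and variability percentages below are illustrative assumptions, not part of this commit):

    # illustrative parameters only: a 100 ohm / 16 kohm device driven by a 1 V, 1 Hz sine,
    # using the Joglekar window with p = 10
    device = memristor_models(Roff=16000, Ron=100, Rint=11000, Amplitude=1,
                              freq=1, time_duration=2, sample_rate=10000,
                              p=10, j=1, model='Joglekar')
    device.plot()                                    # pinched i-v hysteresis loop

    # map a small hypothetical weight matrix onto the conductance range,
    # quantise it to 32 levels with 5 % end-point variability and report the error
    weights = [[0.42, -0.17, 0.08], [-0.33, 0.25, 0.11]]
    device.neural_weight(weights, X_max=0.5, X_min=0.0)
    device.variability(partition=32, variability_percentage_Ron=5, variability_percentage_Roff=5)
    print(device.new_weights())
    print(device.Relative_Error())                   # mean relative error in %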