import tensor as ts
import nn


def zeroGrad(tensors):
	"""Clear the accumulated gradient on every tensor in *tensors*, in place.

	Each element is expected to expose a ``zeroGrad()`` method; the call
	mutates the tensor and returns nothing.
	"""
	for item in tensors:
		item.zeroGrad()

class optimizer:
	"""Base optimizer: applies one gradient step to each trainable variable.

	``self.vars`` aliases the framework's shared ``nn.variable`` collection
	(a module-level, mutable reference), so variables registered after
	construction are still seen by :meth:`step`.

	NOTE(review): :meth:`step` walks ``self.vars`` but :meth:`zeroGrad`
	clears ``nn.tensors`` — presumably ``tensors`` is the superset of all
	graph tensors; confirm the asymmetry is intended.
	"""

	def __init__(self, learning_rate):
		# Keep a live reference to the framework's trainable variables.
		self.vars = nn.variable
		self.lr = learning_rate

	def step(self):
		"""Apply one update of size ``self.lr`` to every trainable variable."""
		for var in self.vars:
			var.step(self.lr)

	def zeroGrad(self):
		"""Reset the gradients of every tensor tracked by the ``nn`` module."""
		zeroGrad(nn.tensors)


class Adam(optimizer):
	"""Optimizer that tags every trainable variable to use the Adam update.

	The actual update rule lives in each variable's ``step``; this class
	only selects it by setting ``way = 'Adam'``.
	"""

	def __init__(self, learning_rate):
		super().__init__(learning_rate)
		for var in self.vars:
			var.way = 'Adam'


class Momentum(optimizer):
	"""Optimizer that tags every trainable variable to use momentum updates.

	The update rule itself is implemented by each variable's ``step``; this
	class only selects it by setting ``way = 'Momentum'``.
	"""

	def __init__(self, learning_rate):
		super().__init__(learning_rate)
		for var in self.vars:
			var.way = 'Momentum'

class RMSProp(optimizer):
	"""Optimizer that tags every trainable variable to use the RMSProp update.

	The update rule itself is implemented by each variable's ``step``; this
	class only selects it by setting ``way = 'RMSProp'``.
	"""

	def __init__(self, learning_rate):
		super().__init__(learning_rate)
		for var in self.vars:
			var.way = 'RMSProp'

class AdaGrad(optimizer):
	"""Optimizer that tags every trainable variable to use the AdaGrad update.

	The update rule itself is implemented by each variable's ``step``; this
	class only selects it by setting ``way = 'AdaGrad'``.
	"""

	def __init__(self, learning_rate):
		super().__init__(learning_rate)
		for var in self.vars:
			var.way = 'AdaGrad'

if __name__=='__main__':
	# No standalone behavior: this module only defines optimizer classes
	# and is meant to be imported, not run directly.
	pass





















