#coding:utf8
import numpy as np
import pandas
import sklearn.cross_validation

class neuralNetwork:
    """Single-layer complex-valued neural network (MVN-style classifier).

    One complex weight vector maps the (bias-augmented) real inputs to a
    single point in the complex plane; the *angle* of that point decides
    the predicted class.  The unit circle is divided into
    ``categories * periodicity`` sectors, and sector ``s`` belongs to
    class ``s mod categories`` (interleaved sectors per class).
    """

    def __init__(self, inputs, cats, periods):
        # Number of real-valued input features; a constant bias input of
        # 1.0 is appended internally, hence "+ 1" on the weight length.
        self.inputs = inputs
        # Complex weights with independent Gaussian real/imaginary parts.
        # NOTE(review): pow(1.0, -0.5) == 1.0, so the std-dev is just 1.
        self.w = np.random.normal(0.0, pow(1.0, -0.5), (self.inputs + 1))
        self.w = np.array(self.w, ndmin=2, dtype='complex128')
        self.w += 1j * np.random.normal(0.0, pow(1.0, -0.5), (self.inputs + 1))
        self.categories = cats      # number of output classes (e.g. 3)
        self.periodicity = periods  # sectors assigned to each class

    def z_to_class(self, z):
        """Map a complex activation ``z`` to an integer class index."""
        # Normalise the phase into [0, 2*pi).
        angles = np.mod(np.angle(z) + 2 * np.pi, 2 * np.pi)
        # Index of the sector (out of categories * periodicity) hit.
        p = int(np.floor(self.categories * self.periodicity * angles / (2 * np.pi)))
        # Interleaving: sector p belongs to class p mod categories.
        return np.mod(p, self.categories)

    def class_to_angles(self, c):
        """Return the target angles for class ``c`` — one per period.

        Each angle is the centre of one of the ``periodicity`` sectors
        owned by the class.
        """
        return ((c + 0.5 + self.categories * np.arange(self.periodicity))
                / (self.categories * self.periodicity) * 2 * np.pi)

    def query(self, inputs_list):
        """Predict the class for one sample (list of real features)."""
        # Append the bias input WITHOUT mutating the caller's list
        # (the original appended in place, growing the argument).
        inputs = np.array(inputs_list + [1.0], ndmin=2, dtype='complex128').T
        z = np.dot(self.w, inputs)
        return self.z_to_class(z)

    def train(self, inputs_list, target):
        """One training step: move the output angle toward ``target``."""
        # Append the bias input WITHOUT mutating the caller's list.
        inputs = np.array(inputs_list + [1.0], ndmin=2, dtype='complex128').T

        # Weighted sum of inputs -> a single complex activation.
        z = np.dot(self.w, inputs)[0]

        # Desired points on the unit circle for this class (one per
        # period); correct toward the nearest equivalent target.
        desired_angles = self.class_to_angles(target)
        errors = np.exp(1j * desired_angles) - z
        e = errors[np.argmin(np.abs(errors))]

        # MVN learning rule, normalised by the number of weights.
        self.w += (e * np.conj(inputs.T)) / (self.inputs + 1)

# ----- driver: train the complex-valued network on the iris data -----

number_of_inputs = 4
categories = 3
periods = 1

n = neuralNetwork(number_of_inputs, categories, periods)

df = pandas.read_csv('complex_valued_neuralnetwork/iris_dataset/iris.csv')

# Scale the four length columns into a small range.   # 150 * 5
df[['PW', 'PL', 'SW', 'SL']] = df[['PW', 'PL', 'SW', 'SL']].astype(np.float64) * 0.01

# Shuffle and split 3/4 train, 1/4 test.
# NOTE(review): sklearn.cross_validation was removed in scikit-learn
# 0.20, so the split is done with a plain numpy permutation instead of
# sklearn.cross_validation.train_test_split(df, train_size=0.75).
shuffled = df.iloc[np.random.permutation(len(df))]
split_at = int(len(shuffled) * 0.75)
iris_train = shuffled.iloc[:split_at]
iris_test = shuffled.iloc[split_at:]

# number of full passes over the training data
epochs = 10
for epoch in range(epochs):
    for idx, row in iris_train.iterrows():
        values = row.tolist()
        species = values[0]   # class label (first column)
        lengths = values[1:]  # the four scaled features
        n.train(lengths, species)

print(n.w)

scorecard = []

for idx, row in iris_test.iterrows():
    values = row.tolist()
    correct_species = int(values[0])
    lengths = values[1:]
    answer = n.query(lengths)
    # 1 for a correct prediction, 0 otherwise.
    scorecard.append(1 if answer == correct_species else 0)

scorecard_array = np.asarray(scorecard)
print("performance = ", float(scorecard_array.sum()) / scorecard_array.size)
