# -*- coding: utf-8 -*-
"""
Created on Wed Jun  7 13:23:42 2017

@author: xuanlei
"""
import math
import pandas as pd
import numpy as np
import random
import time
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
class softmax():
    """Multinomial logistic regression (softmax) classifier trained with SGD.

    Weights are stored one row per class in ``self.w``; a constant 1.0 is
    appended to every feature vector so the last weight column acts as the
    bias term.
    """
    def __init__(self):
        self.learning_rate = 0.01   # SGD step size
        self.max_iteration = 1000   # number of SGD steps
        self.reg_weight = 0.01      # L2 regularization strength (lambda)

    def cal_exp(self, x, l):
        """Return exp(w_l . x), the unnormalized score of class ``l``."""
        fx = np.dot(self.w[l], x)
        return np.exp(fx)

    def cal_probs(self, x, j):
        """Return the softmax probability of class ``j`` for sample ``x``.

        The raw scores have their maximum subtracted before exponentiation
        (softmax is shift-invariant), so ``np.exp`` cannot overflow on
        large feature values — the original exp/sum-exp form produced
        inf/nan on raw pixel-scale inputs.
        """
        scores = np.dot(self.w, x)
        scores = scores - np.max(scores)   # numerical stability
        exp_scores = np.exp(scores)
        return exp_scores[j] / np.sum(exp_scores)

    def cal_partial_derivative(self, x, y, j):
        """Gradient of the regularized NLL w.r.t. ``w_j`` for one sample.

        d/dw_j [ -log p(y|x) + 0.5*lambda*||w_j||^2 ]
            = -x * (1{y==j} - p_j) + lambda * w_j

        The original code added ``0.5*lambda*w_j**2`` — the penalty value,
        not its derivative.
        """
        first = int(y == j)                # indicator 1{y == j}
        second = self.cal_probs(x, j)
        return -x * (first - second) + self.reg_weight * self.w[j]

    def train(self, features, labels):
        """Fit the model by stochastic gradient descent.

        features : 2-D array-like, one sample per row.
        labels   : 2-D column array of integer class ids 0..k-1.
        """
        self.k = len(set(i[0] for i in labels))
        # one weight row per class; +1 column for the bias input
        self.w = np.zeros((self.k, features.shape[1] + 1))
        for it in range(self.max_iteration):   # 'it', not 'time': avoid shadowing the time module
            print('loop : %d' % it)
            index = random.randint(0, len(labels) - 1)
            # append the constant bias input 1.0 to the sampled feature vector
            x = np.append(np.asarray(features[index], dtype=float), 1.0)
            y = int(np.ravel(labels[index])[0])   # scalar class id (avoids deprecated ndarray->int)
            d_w = [self.cal_partial_derivative(x, y, j) for j in range(self.k)]
            for j in range(self.k):
                self.w[j] -= self.learning_rate * d_w[j]
            # Negative log-likelihood of the TRUE class. The original summed
            # the probabilities of all classes, which is always 1, so the
            # printed loss was constantly -log(1) = 0.
            loss = -np.log(self.cal_probs(x, y))
            print(loss)

    def predict_(self, x):
        """Return the argmax class index for one (column) sample ``x``."""
        scores = np.dot(self.w, x)
        # scores is k-by-1; the flat argmax position IS the class index
        return int(np.argmax(scores))

    def predict(self, features):
        """Return a list with one predicted class id per row of ``features``."""
        labels = []
        for feature in features:
            x = list(feature)
            x.append(1)                     # bias input
            x = np.transpose(np.matrix(x))  # column vector, as predict_ expects
            labels.append(self.predict_(x))
        return labels
if __name__ == '__main__':
    # Script driver: load the CSV, split, train the softmax model, report accuracy.
    print('start read data')
    time_1 = time.time()
    # NOTE(review): hard-coded absolute Windows path — parameterize if this is reused
    raw_data = pd.read_csv(r"C:\Users\www\Desktop\learn_code\data_for_test\train.csv", header=0, encoding='utf-8')
    data = raw_data.values
    imgs = data[:, 1:]      # every column after the first is a pixel feature
    labels = data[:, :1]    # first column is the class label (kept 2-D for train())
    train_features, test_features, train_labels, test_labels = train_test_split(
        imgs, labels, test_size=0.33, random_state=23323)
    time_2 = time.time()
    print("read data cost :" + str(time_2 - time_1) + " second")
    print("start training")
    softm = softmax()
    softm.train(train_features, train_labels)
    time_3 = time.time()
    print('training cost ' + str(time_3 - time_2) + ' second')

    print('Start predicting')
    test_predict = softm.predict(test_features)
    time_4 = time.time()
    print('predicting cost ' + str(time_4 - time_3) + ' second')

    score = accuracy_score(test_labels, test_predict)
    # fixed typo in the user-facing report string ("accruacy socre")
    print("The accuracy score is " + str(score))