# coding:utf-8
# Author : hiicy redldw
# Date : 2019/04/11
''
'''
1. Process train.csv
    one-hot encode the labels
    binarize the features
2. kNN algorithm
'''


import pandas as pd
import csv
import numpy as np
from sklearn.preprocessing import OneHotEncoder,MinMaxScaler
def normalize(data):
    """Binarize *data* in place: every non-zero entry becomes 1.

    Parameters
    ----------
    data : numpy.ndarray or pandas.DataFrame
        2-D numeric array (pixel intensities).

    Returns
    -------
    The same object, mutated so every entry is 0 or 1.
    """
    # Vectorized boolean-mask assignment replaces the original O(m*n)
    # Python double loop; behavior (mutate in place, return the object)
    # is unchanged.
    data[data != 0] = 1
    return data
def loadDataSet():
    """Load train.csv and return (binarized features, labels).

    Returns
    -------
    tuple
        ``(features, labels)`` — features is a 2-D float array of 0/1
        pixel values, labels is a 1-D array of digit classes.
    """
    fpath = r"F:\Resources\Dataset\digit_re\train.csv"
    data = pd.read_csv(fpath, header=0, dtype=np.float64)
    # Column 0 holds the digit label; the remaining columns are pixels.
    # BUG FIX: `DataFrame.ix` was removed in pandas 1.0 — use positional
    # `.iloc` instead.
    labels = data.iloc[:, 0].values
    # NOTE(review): the original sliced columns `1:len(data)//2`, but
    # len(data) is the ROW count (far larger than the column count), so
    # the slice effectively took every pixel column anyway; that effective
    # behavior is kept and made explicit here.
    features = data.iloc[:, 1:].values
    return normalize(features), labels

def classify(inx, dataSet, labels, k):
    """Classify *inx* by majority vote of its k nearest neighbours.

    Parameters
    ----------
    inx : numpy.ndarray
        1-D feature vector to classify.
    dataSet : numpy.ndarray
        2-D array, one training sample per row.
    labels : sequence
        Class label for each row of ``dataSet``.
    k : int
        Number of neighbours that vote.

    Returns
    -------
    The majority label among the k nearest neighbours (on a tie, the
    label that reached the top count first wins).
    """
    # Broadcasting computes (inx - row) for every row at once.
    # BUG FIX: the original did `diffMat -= diffMat - dataSet`, which
    # collapses to `diffMat = dataSet` — the query point was ignored.
    diffMat = inx - dataSet
    distances = np.sqrt(np.power(diffMat, 2).sum(axis=1))
    # BUG FIX: nearest neighbours are the SMALLEST distances, so sort
    # ascending; the original reversed argsort and voted with the k
    # farthest points.
    sortedDistIndices = np.argsort(distances)
    classCount = {}
    for i in range(k):
        voteilabel = labels[sortedDistIndices[i]]
        classCount[voteilabel] = classCount.get(voteilabel, 0) + 1
    # max over insertion order reproduces the original strictly-greater
    # comparison loop's "first to reach the top count wins" tie-break.
    return max(classCount, key=classCount.get)
def runTest():
    """Classify every row of test.csv with 10-NN and write result.csv.

    Output format is the Kaggle digit-recognizer submission CSV:
    header ``ImageId,Label`` followed by one 1-indexed row per sample.
    """
    testfile = r'F:\Resources\Dataset\digit_re\test.csv'
    TestData = pd.read_csv(testfile, header=0, dtype=np.float64)
    dataSet, labels = loadDataSet()
    # `with` guarantees the result file is flushed and closed even if
    # classification raises (the original never closed it).
    with open(r"F:\Resources\Dataset\digit_re\result.csv", 'w',
              encoding='utf-8', newline='') as fout:
        writer = csv.writer(fout)
        writer.writerow(["ImageId", "Label"])
        for linenu in range(len(TestData)):
            lineData = TestData.iloc[linenu].values
            # BUG FIX: test samples must get the same preprocessing as
            # training samples — binarize before classifying (the
            # original compared raw 0-255 pixels against 0/1 features).
            lineData[lineData != 0] = 1
            classes = classify(lineData, dataSet, labels, 10)
            # Kaggle ImageId is 1-indexed.
            writer.writerow([linenu + 1, int(classes)])


if __name__ == "__main__":
    runTest()


######### others code ###############

