#include <iostream>
#include <vector>
#include <sstream>
#include <fstream>
#include <cmath>
#include <cstdlib>

#include<random>
#include <unistd.h>
#include <fcntl.h>
#include <sys/mman.h>
#include <string.h>

using namespace std;

// Allocates an uninitialized heap array of n elements of type T.
// Ownership passes to the caller; release with freeArray().
template<typename T>
T* mallocArray(int n) {
	return static_cast<T*>(malloc(sizeof(T) * n));
}
// Allocates a row x col matrix as a single contiguous payload block plus
// a table of row pointers, so arr[i][j] indexing works while the data
// stays cache-friendly. Release with freeArray2D().
template<typename T>
T** mallocArray2D(int row, int col) {
	T** rows = static_cast<T**>(malloc(sizeof(T*) * row));
	rows[0] = static_cast<T*>(malloc(sizeof(T) * row * col));
	for (int r = 1; r < row; r++) {
		rows[r] = rows[0] + r * col;   // same layout as successive +col hops
	}
	return rows;
}
// free() counterpart of mallocArray(); a NULL argument is a no-op.
template<typename T>
void freeArray(T* arr) {
	if (arr == NULL) return;
	free(arr);
}
// free() counterpart of mallocArray2D(); a NULL argument is a no-op.
template<typename T>
void freeArray2D(T** arr) {
	if (arr == NULL) return;
	free(arr[0]);   // contiguous payload block
	free(arr);      // row-pointer table
}

// Mini-batch logistic-regression classifier. Memory-maps a fixed-width
// CSV training file, fits weights with gradient descent, then classifies
// a test file and writes one 0/1 label per line to the output file.
class MyLR {
public:
	MyLR(string trainFile, string testFile, string predictOutFile);
	~MyLR();
	void train();    // run maxIterTimes mini-batch gradient steps
	void predict();  // load test data, classify, write predictOutFile
private:
	void loadTrainData();   // mmap + hand-rolled parse of trainFile
	void init();            // load training data, initialize weights
	void compute();         // one gradient step over the current batch
	void loadTestData();    // mmap + hand-rolled parse of testFile
	void storePredict();    // dump predictLabels to predictOutFile

private:
	float** trainDatas = NULL;    // [trainSimpleNum][featuresNum]
	float** testDatas = NULL;     // [testSimpleNum][featuresNum]
	int* trainLabels = NULL;      // 0/1 label per training sample
	int* predictLabels = NULL;    // 0/1 prediction per test sample
	float* weights = NULL;        // model weights, one per feature
	int* batches = NULL;          // sample indices of the current batch
	float* calcVal = NULL;        // cached sigmoid output per batch slot
	string trainFile;
	string testFile;
	string predictOutFile;
private:
	int trainSimpleNum = 0;       // number of training samples (rows)
	int testSimpleNum = 0;        // number of test samples (rows)
	int featuresNum = 1000;       // features per sample (fixed file format)
	const int batch_size = 128;
	const float wtInitV = 0.1;    // initial value for every weight
	const float stepSize = 0.05;  // learning rate
	const int maxIterTimes = 1600;        // total mini-batch updates
	const float predictTrueThresh = 0.5;  // probability cutoff for class 1
};

// Stores the three file paths and immediately loads the training data and
// initial weights via init().
MyLR::MyLR(string trainF, string testF, string predictOutF)
	: trainFile(trainF), testFile(testF), predictOutFile(predictOutF) {
	init();
}

// Releases every owned buffer. All helpers tolerate NULL, so a partially
// initialized object (e.g. predict() never ran) is destroyed safely.
MyLR::~MyLR() {
	freeArray(calcVal);
	freeArray(batches);
	freeArray(weights);
	freeArray(predictLabels);
	freeArray2D(testDatas);
	freeArray(trainLabels);
	freeArray2D(trainDatas);
}

// Parses the memory-mapped training file into trainDatas / trainLabels.
// NOTE(review): no error checks on open/lseek/mmap — a missing or empty
// file is undefined behavior here; presumably acceptable only in the
// fixed contest environment this was written for.
void MyLR::loadTrainData(){
    int fd = open(trainFile.c_str(), O_RDONLY);
    int file_size = lseek(fd, 0, SEEK_END);
    char* p_map =(char*)mmap(NULL, file_size, PROT_READ, MAP_PRIVATE, fd, 0);
    close(fd);
    int i, j;
    float val = 0;
	int sign = 1;
	float step = 0.1;
    // First pass: count lines. The stride of 6001 assumes every line is
    // exactly 6002 bytes — presumably 1000 features of 6 chars ("d.ddd,")
    // plus a 2-char "label\n" tail — TODO confirm against the data format;
    // a negative feature (extra '-') would break this stride.
    char* p = p_map + 6001;//magic number 6001
    while(true){
        if(*p == '\n'){
            trainSimpleNum++;
            if(*(p+1) =='\0') break;
            else p += 6001;//magic number 6001;
        }
        else p++;
    }
    trainDatas = mallocArray2D<float>(trainSimpleNum, featuresNum);
    trainLabels = mallocArray<int>(trainSimpleNum);
	p = p_map;
    // Second pass: hand-rolled float parser — optional sign, integer part
    // up to '.', fractional digits up to the ',' separator.
    for (i = 0; i < trainSimpleNum; i++) {
		for (j = 0; j < featuresNum; j++) {
			val = 0;
			sign = 1;
			if (*p == '-') { sign = -1, p++; }
			while (*p != '.')
			{
				val *= 10;
				val += (*p) - '0';
				p++;
			}
            p++;  // skip '.'
			step = 0.1;
            while (*p !=',') {
                val += step * ((*p) - '0');
                step /= 10;
                p++;
            }
			trainDatas[i][j] = sign * val;
			p++;  // skip ','
		}
		trainLabels[i] = *(p++)-'0';  // single-digit 0/1 label
		p++;  // skip '\n'
	}
    munmap(p_map, file_size);
}

// Loads the training set, then starts every weight at the same small
// positive constant (wtInitV).
void MyLR::init() {
	loadTrainData();
	weights = mallocArray<float>(featuresNum);
	for (int k = 0; k < featuresNum; k++) {
		weights[k] = wtInitV;
	}
}

// One mini-batch gradient-descent step.
// Forward pass: cache sigmoid(w . x) for each sample in the batch.
// Backward pass: average logistic-loss gradient over the batch, applied
// to the weights with the fixed learning rate stepSize.
void MyLR::compute(){
    for (int b = 0; b < batch_size; b++) {
        const float* row = trainDatas[batches[b]];
        float dot = 0.0;
        for (int f = 0; f < featuresNum; f++) {
            dot += row[f] * weights[f];
        }
        calcVal[b] = 1 / (1 + exp(-1 * dot));   // logistic activation
    }
    for (int f = 0; f < featuresNum; f++) {
        float grad = 0.0;
        for (int b = 0; b < batch_size; b++) {
            int s = batches[b];
            grad += (calcVal[b] - trainLabels[s]) * trainDatas[s][f];
        }
        weights[f] -= stepSize * (grad / batch_size);
    }
}

// Runs maxIterTimes mini-batch gradient steps. Each batch takes
// batch_size consecutive sample indices, wrapping around the training
// set; compute() performs the actual forward/backward pass.
void MyLR::train() {
	batches = mallocArray<int>(batch_size);
	calcVal = mallocArray<float>(batch_size);
	for (int i = 0; i < maxIterTimes; i++) {
		for (int j = 0; j < batch_size; j++) {
			// Bug fix: the original stored the *previous* loop's index
			// (batches[j] = n before updating n), which duplicated sample 0
			// in the first batch and shifted every batch by one. Assign the
			// intended index directly.
			batches[j] = (i * batch_size + j) % trainSimpleNum;
		}
		compute();
	}
}

void MyLR::predict() {
    int i, j;
	float predictVal;
	loadTestData();
	for (i = 0; i < testSimpleNum; i++) {
        predictVal = 0.0;
        for(j=0; j<featuresNum; j++){
            predictVal += testDatas[i][j] * weights[j];
        }
		predictLabels[i] = predictVal >= predictTrueThresh ? 1 : 0;
	}
	storePredict();
}

// Parses the memory-mapped test file into testDatas and allocates
// predictLabels. Same hand-rolled parser as loadTrainData(), but test
// lines carry no label, so the last feature ends at '\n' instead of ','.
// NOTE(review): no error checks on open/lseek/mmap — see loadTrainData().
void MyLR::loadTestData() {
    int fd = open(testFile.c_str(), O_RDONLY);
    int file_size = lseek(fd, 0, SEEK_END);
    char* p_map =(char*)mmap(NULL, file_size, PROT_READ, MAP_PRIVATE, fd, 0);
    close(fd);
    int i, j;
    float val = 0;
	int sign = 1;
	float step = 0.1;
    // First pass: count lines. The stride of 5999 assumes every line is
    // exactly 6000 bytes — presumably 1000 features of 6 chars each, the
    // last terminated by '\n' instead of ',' — TODO confirm against the
    // data format; a negative feature would break this stride.
    char* p = p_map + 5999;//magic number 5999
    while(true){
        if(*p == '\n'){
            testSimpleNum++;
            if(*(p+1) =='\0') break;
            else p += 5999;//magic number 5999
        }
        else p++;
    }
    testDatas = mallocArray2D<float>(testSimpleNum, featuresNum);
    predictLabels = mallocArray<int>(testSimpleNum);
	p = p_map;
    // Second pass: parse each feature — optional sign, integer part up to
    // '.', fractional digits up to ',' (or '\n' for the line's last one).
    for (i = 0; i < testSimpleNum; i++) {
		for (j = 0; j < featuresNum; j++) {
			val = 0;
			sign = 1;
			if (*p == '-') { sign = -1, p++; }
			while (*p != '.')
			{
				val *= 10;
				val += (*p) - '0';
				p++;
			}
            p++;  // skip '.'
			step = 0.1;
            while (*p !=',' && *p !='\n') {
                val += step * ((*p) - '0');
                step /= 10;
                p++;
            }
			testDatas[i][j] = sign * val;
			p++;  // skip ',' (or the trailing '\n')
		}
	}
    munmap(p_map, file_size);
}

void MyLR::storePredict() {
    ofstream fout(predictOutFile);
    char *ch = mallocArray<char>(2*testSimpleNum);
    for(int i=0; i<testSimpleNum; i++){
        ch[2*i] = '0' + predictLabels[i];
        ch[2*i+1] = '\n';
    }
    fout.write(ch, 2*testSimpleNum);
    fout.close();
    freeArray(ch);
}

// Entry point: train on the hard-coded training file, then classify the
// test file and write 0/1 predictions to the result file. Paths are fixed
// for the contest environment; command-line arguments are ignored.
int main(int argc, char* argv[]){
	(void)argc;  // unused — paths are hard-coded below
	(void)argv;
	string trainFile = "/data/train_data.txt";
	string testFile = "/data/test_data.txt";
	string predictFile = "/projects/student/result.txt";
	// Removed dead code: the original declared an unused local
	// answerFile ("/projects/student/answer.txt") that nothing read.

	MyLR logist(trainFile, testFile, predictFile);
	logist.train();
	logist.predict();
	return 0;
}