#include <iostream>
#include <vector>
#include <sstream>
#include <fstream>
#include <cmath>
#include <cstdlib>

#include <time.h>
#include<random>

using namespace std;

template<typename T>
T* mallocArray(int n) {
	// Allocate an uninitialized C-style array of n elements of T.
	// Caller releases it with freeArray() (plain free()).
	return (T*)malloc(sizeof(T) * n);
}
template<typename T>
T** mallocArray2D(int row, int col) {
	// Allocate a row x col matrix as ONE contiguous payload block plus a
	// row-pointer index: element (i, j) is arr[i][j], and the whole data
	// can also be walked linearly starting at arr[0].
	// Caller releases it with freeArray2D().
	T** rows = (T**)malloc(sizeof(T*) * row);
	T* payload = (T*)malloc(sizeof(T) * row * col);
	for (int i = 0; i < row; i++) {
		rows[i] = payload + i * col;
	}
	return rows;
}
template<typename T>
void freeArray(T* arr) {
	// Release a 1-D array obtained from mallocArray(); free(NULL) is a no-op.
	free(arr);
}
template<typename T>
void freeArray2D(T** arr) {
	// Release both pieces created by mallocArray2D(): the contiguous
	// payload (anchored at arr[0]) and the row-pointer index itself.
	T* payload = arr[0];
	free(payload);
	free(arr);
}

// Binary logistic-regression classifier over CSV files, with all buffers
// managed via raw malloc/free (freed in the destructor).
class MyLR {
public:
	// trainFile/testFile: input CSVs; predictOutFile: one 0/1 label per line.
	MyLR(string trainFile, string testFile, string predictOutFile);
	~MyLR();
	void train();		// full-batch gradient descent, fixed iteration count
	void predict();		// load test data, threshold sigmoids, write results
private:
	bool loadTrainData();	// parse training CSV: feature columns + trailing label
	void initParam();	// allocate weights[] and fill with wtInitV
	bool init();		// loadTrainData() then initParam()
	void forward(bool is_train);	// sigmoid(w.x) per sample, train or test set
	float lossCal();	// mean squared error over the training set
	void gradientSlope();	// batch gradient into gradients[]
	bool loadTestData();	// parse test CSV: feature columns only, no label
	int storePredict();	// write predictLabels[] to predictOutFile
	
private:
	// Buffers are allocated on demand (load/train) and released in ~MyLR().
	float** trainDatas = NULL;	// [trainSimpleNum][featuresNum]
	float** testDatas = NULL;	// [testSimpleNum][featuresNum]
	int* trainLabels = NULL;	// 0/1 label per training sample
	int *predictLabels = NULL;	// thresholded prediction per test sample
	float* trainSigmoids = NULL;	// forward() output per training sample
	float* testSigmoids = NULL;	// forward() output per test sample
	float* weights = NULL;		// one weight per feature (no bias term)
	float* gradients = NULL;	// scratch gradient buffer (allocated in train())
	string trainFile;
	string testFile;
	string predictOutFile;
	string weightParamFile = "modelweight.txt";	// declared but unused in this file
private:
	int trainSimpleNum = 0;		// training sample (row) count
	int testSimpleNum = 0;		// test sample (row) count
	int featuresNum = 0;		// feature column count (re-derived by each loader)
	const float wtInitV = 1.0;	// initial value for every weight
	const float stepSize = 0.05;	// gradient-descent learning rate
	const int maxIterTimes = 2500;	// fixed iteration budget (no convergence test)
	const float predictTrueThresh = 0.5; 	// sigmoid cutoff for predicting class 1
	const int train_show_step = 10;	// declared but unused in this file
};

// Record the data-file paths, then immediately load the training set
// and initialize the model weights via init().
MyLR::MyLR(string trainF, string testF, string predictOutF)
{
	this->predictOutFile = predictOutF;
	this->testFile = testF;
	this->trainFile = trainF;
	init();
}

// Release every buffer that was actually allocated. Members still NULL
// (e.g. the test-side buffers when predict() was never called) are skipped;
// the 2-D arrays need the guard because freeArray2D dereferences arr[0].
MyLR::~MyLR() {
	if (trainDatas) freeArray2D<float>(trainDatas);
	if (testDatas) freeArray2D<float>(testDatas);
	if (trainLabels) freeArray<int>(trainLabels);
	if (predictLabels) freeArray<int>(predictLabels);
	if (trainSigmoids) freeArray<float>(trainSigmoids);
	if (testSigmoids) freeArray<float>(testSigmoids);
	if (weights) freeArray<float>(weights);
	if (gradients) freeArray<float>(gradients);
}

// Read the whole training CSV into memory and parse it by hand.
// Each line: featuresNum comma-separated floats, then a single-digit
// 0/1 label as the last field, terminated by '\n'.
bool MyLR::loadTrainData()
{
	ifstream infile(trainFile, ios::binary);
	if (!infile) {
		exit(0);
	}
	unsigned int size = static_cast<unsigned int>(infile.seekg(0, std::ios::end).tellg());
	// BUGFIX: allocate one extra byte and null-terminate. The scan below is
	// terminator-driven (while (*p != '\0')); the original buffer held exactly
	// `size` bytes with no '\0', so the scan read past the heap allocation.
	char *buf = new char[size + 1];
	infile.seekg(0, ios::beg).read(&buf[0], size);
	infile.close();
	buf[size] = '\0';
	// Pass 1: commas in the first line give the feature count (the label
	// after the last comma is not a feature); each '\n' counts one row.
	// NOTE: a final line without a trailing '\n' is dropped.
	char* p = buf;
	featuresNum = 0;
	trainSimpleNum = 0;
	while (*p != '\0') 
	{
		if ((trainSimpleNum == 0) && (*p == ',')) {
			featuresNum++;
		}
		else if(*p == '\n') {
			trainSimpleNum++;
		}
		p++;
	}
	trainLabels = mallocArray<int>(trainSimpleNum);
	trainSigmoids = mallocArray<float>(trainSimpleNum);
	trainDatas = mallocArray2D<float>(trainSimpleNum, featuresNum);
	// Pass 2: hand-rolled decimal parser (optional '-', integer part,
	// optional fractional part) — avoids strtof/locale overhead.
	float tmp = 0;
	float sign = 1.0;
	float step = 0.1;
	p = buf;
	for (int i = 0; i < trainSimpleNum; i++) {
		for (int j = 0; j < featuresNum; j++) {
			if (*p == '-') { sign = -1.0, p++; }
			while (*p != ',' && *p != '.')
			{
				tmp *= 10;
				tmp += (*p) - '0';
				p++;
			}
			if (*p == '.')
			{
				step = 0.1;
				p++;
				while (*p !=',') {
					tmp += step * ((*p) - '0');
					step /= 10;
					p++;
				}
			}
			trainDatas[i][j] = sign * tmp;
			tmp = 0;
			sign = 1.0;
			p++;	// skip the ',' delimiter
		}
		trainLabels[i] = *(p++)-'0';	// single-digit label
		p++;	// skip the '\n'
	}
	delete[] buf;
	return true;
}

// Allocate the weight vector and start every weight at the constant wtInitV.
void MyLR::initParam() {
	weights = mallocArray<float>(featuresNum);
	int j = 0;
	while (j < featuresNum) {
		weights[j++] = wtInitV;
	}
}

// Load the training data first, then size/initialize the weights —
// featuresNum is only known after loading.
bool MyLR::init() {
	if (!loadTrainData()) {
		return false;
	}
	initParam();
	return true;
}

void MyLR::forward(bool is_train) {
	if (is_train) {
		float mulSum = 0.0L;
		for (int i = 0; i < trainSimpleNum; i++) {
			mulSum = 0.0L;
			for (int j = 0; j < featuresNum; j++) {
				mulSum += trainDatas[i][j] * weights[j];
			}
			trainSigmoids[i] = 1 / (1 + exp(-1 * mulSum));
		}
	}
	else {
		float mulSum = 0.0L;
		for (int i = 0; i < testSimpleNum; i++) {
			mulSum = 0.0L;
			for (int j = 0; j < featuresNum; j++) {
				mulSum += testDatas[i][j] * weights[j];
			}
			testSigmoids[i] = 1 / (1 + exp(-1 * mulSum));
		}
	}
	
}

// Mean squared error between labels and predicted probabilities over the
// training set. (Plain squared loss, not the log-loss usually paired with
// logistic regression — kept as the original's design choice.)
// pow(x, 2) replaced by x * x: pow is a general transcendental routine and
// promoted the float operands to double for no benefit.
float MyLR::lossCal() {
	float lossV = 0.0f;
	for (int i = 0; i < trainSimpleNum; i++) {
		const float diff = trainLabels[i] - trainSigmoids[i];
		lossV += diff * diff;
	}
	return lossV / trainSimpleNum;
}

// Batch gradient: gradients[j] = mean over all samples of
// (sigmoid_i - label_i) * x_ij, written into the scratch buffer.
void MyLR::gradientSlope() {
	for (int j = 0; j < featuresNum; j++) {
		float acc = 0.0;
		for (int i = 0; i < trainSimpleNum; i++) {
			acc += (trainSigmoids[i] - trainLabels[i]) * trainDatas[i][j];
		}
		gradients[j] = acc / trainSimpleNum;
	}
}

// Full-batch gradient descent for a fixed number of iterations.
// The gradient buffer is allocated once and guarded so that a second
// train() call reuses it instead of leaking the first allocation.
void MyLR::train() {
	if (gradients == NULL) {
		gradients = mallocArray<float>(featuresNum);
	}
	for (int iter = 0; iter < maxIterTimes; iter++) {
		forward(true);		// refresh trainSigmoids with current weights
		gradientSlope();	// batch gradient into gradients[]
		for (int j = 0; j < featuresNum; j++) {
			weights[j] -= stepSize * gradients[j];
		}
	}
}

void MyLR::predict() {
	int predictVal;

	loadTestData();
	forward(false);
	for (int i = 0; i < testSimpleNum; i++) {
		predictVal = testSigmoids[i] >= predictTrueThresh ? 1 : 0;
		predictLabels[i] = predictVal;
	}
	storePredict();
}

// Read the test CSV (feature columns only, no trailing label) into testDatas,
// and allocate the prediction-side buffers.
bool MyLR::loadTestData() {
	ifstream infile(testFile, ios::binary);
	if (!infile) {
		exit(0);
	}
	unsigned int size = static_cast<unsigned int>(infile.seekg(0, std::ios::end).tellg());
	// BUGFIX: allocate one extra byte and null-terminate. The scan below is
	// terminator-driven (while (*p != '\0')); the original buffer held exactly
	// `size` bytes with no '\0', so the scan read past the heap allocation.
	char* buf = new char[size + 1];
	infile.seekg(0, ios::beg).read(&buf[0], size);
	infile.close();
	buf[size] = '\0';
	char* p = buf;
	featuresNum = 0;
	testSimpleNum = 0;
	while (*p != '\0')
	{
		if ((testSimpleNum == 0) && (*p == ',')) {
			featuresNum++;
		}
		else if (*p == '\n') {
			testSimpleNum++;
		}
		p++;
	}
	// Test rows carry no label, so column count = commas + 1.
	featuresNum++;
	testSigmoids = mallocArray<float>(testSimpleNum);
	predictLabels = mallocArray<int>(testSimpleNum);
	testDatas = mallocArray2D<float>(testSimpleNum, featuresNum);
	
	// Hand-rolled float parser; the last column of a row ends at '\n', not ','.
	float tmp = 0;
	float sign = 1.0;
	float step = 0.1;
	p = buf;
	for (int i = 0; i < testSimpleNum; i++) {
		for (int j = 0; j < featuresNum; j++) {
			if (*p == '-') { sign = -1.0, p++; }
			while (*p != ',' && *p != '.' && *p != '\n')
			{
				tmp *= 10;
				tmp += (*p) - '0';
				p++;
			}
			if (*p == '.')
			{
				step = 0.1;
				p++;
				while (*p != ',' && *p != '\n') {
					tmp += step * ((*p) - '0');
					step /= 10;
					p++;
				}
			}
			testDatas[i][j] = sign * tmp;
			tmp = 0;
			sign = 1.0;
			p++;	// skip ',' (or the row's terminating '\n')
		}
	}
	delete[] buf;
	return true;
}

// Write one predicted label per line to predictOutFile. Returns 0 on
// success; exits the process if the file cannot be opened.
int MyLR::storePredict() {
	ofstream fout(predictOutFile);
	if (!fout.is_open()) {
		exit(0);
	}
	int idx = 0;
	while (idx < testSimpleNum) {
		fout << predictLabels[idx++] << endl;
	}
	fout.close();
	return 0;
}

int main(int argc, char* argv[])
{
	string trainFile = "/data/train_data.txt";
	string testFile = "/data/test_data.txt";
	string predictFile = "/projects/student/result.txt";
	string answerFile = "/projects/student/answer.txt";

	MyLR logist(trainFile, testFile, predictFile);
	logist.train();

	logist.predict();

	return 0;
}