#!/usr/bin/env python3

"""
Logistic Regression

v = sigmoid(z)
z = w0*x0 + w1*x1 + ... + wn*xn
Given a set of data (X, v), where X is (x0, x1, ..., xn)
Find the (w0, w1, ..., wn) that best fit the data.

The method used to find the best W here is called gradient ascent.
"""

import math
import random
from matplotlib import pyplot as plt
import numpy as np

def sigmoid(x):
    """Logistic function 1/(1+e^-x), computed in a numerically stable way.

    The naive form overflows in math.exp(-x) for large negative x
    (roughly x < -709); branching on the sign keeps the exponent
    non-positive so exp() never overflows.
    """
    if x >= 0:
        return 1/(1+math.exp(-x))
    ex = math.exp(x)
    return ex/(1+ex)

# this is the key function
def gradient_ascent(dataMatIn, classLables, f=sigmoid):
    """
    Batch gradient ascent for logistic regression.

    gradient ascent using sigmoid(z)*2-1 as f
    this is because lables take -1 or 1, instead of 0 or 1
    if lables take 0 or 1, we could use sigmoid as f directly

    dataMatIn: list of m samples, each a list of n features
    classLables: list of m labels
    f: scalar link function, applied element-wise to X*weights
    returns: (n, 1) array of fitted weights

    NOTE: rewritten without np.mat -- np.matrix is deprecated, and the
    previous np.apply_along_axis call handed length-1 arrays to the
    scalar function f (math.exp rejects size-1 arrays on modern NumPy).
    """
    X = np.asarray(dataMatIn, dtype=float)    # m x n design matrix
    y = np.asarray(classLables, dtype=float)  # m labels
    alpha = 0.001    # learning rate
    maxCycles = 500  # fixed number of full-batch updates
    m, n = X.shape
    weights = np.ones(n)
    apply_f = np.vectorize(f)  # element-wise application of the scalar f
    for _ in range(maxCycles):
        h = apply_f(X.dot(weights))   # predictions, length m
        error = y - h                 # per-sample ascent direction
        weights = weights + alpha * X.T.dot(error)
    # keep the original (n, 1) column-vector return shape
    return weights.reshape(n, 1)

# this is function that is suitable for large data
def stochastic_gradient_ascent(dataMatIn, classLables, f=sigmoid, weights_init=None):
    """
    Stochastic gradient ascent: one weight update per training sample,
    visiting each of the m samples exactly once, in random order.

    dataMatIn: list of lists (m samples of n features)
    classLables: list of m labels
    f: scalar function applied to the sample/weights dot product
    weights_init: initial weights (length-n array); defaults to all ones
    returns: length-n numpy array of weights
    """
    dataArray = np.array(dataMatIn)
    m, n = np.shape(dataArray)
    alpha = 0.01
    if weights_init is not None:
        weights = weights_init
    else:
        weights = np.ones(n)
    # indices of the samples not yet visited in this pass
    indexArray = list(range(m))
    for _ in range(m):
        pick = int(random.uniform(0, len(indexArray)))
        # BUG FIX: the original indexed dataArray/classLables with `pick`
        # itself -- a position in the *shrinking* indexArray rather than
        # the sample index stored there -- so low-numbered samples were
        # revisited and high-numbered ones could be skipped entirely.
        # Look up the real sample index first.
        sample = indexArray[pick]
        h = f(sum(dataArray[sample] * weights))
        error = classLables[sample] - h
        weights = weights + alpha * error * dataArray[sample]
        del indexArray[pick]
    return weights


if __name__ == '__main__':

    def plot_xy(yFunc, xSamples):
        """
        Draw a simple two-dimensional x-y curve and show it.

        yFunc: one-argument function giving y for each x
        xSamples: iterable of x values to evaluate and plot
        """
        plt.plot(xSamples, [yFunc(sample) for sample in xSamples])
        plt.show()

    def load_testdata():
        """Read testdata/testSet.txt into ([1, x1, x2] rows, label list)."""
        dataMat, lableMat = [], []
        with open('testdata/testSet.txt', 'r') as f:
            for line in f:
                fields = line.strip().split()
                # prepend the constant bias feature x0 = 1
                dataMat.append([1, float(fields[0]), float(fields[1])])
                lableMat.append(int(fields[2]))
        return dataMat, lableMat
            
    def plot_data_lables(datas, lables, classifier_line=None):
        """
        Scatter-plot labelled 2-D points (label 1 red, label -1 blue),
        optionally with a dashed classifier line, and show the figure.

        datas: list of [1, x1, x2]
        lables: list of 1 or -1
        classifier_line: optional list of [1, x1, x2] boundary points
        """
        if len(datas) != len(lables):
            raise Exception("datas and lables do not have the same number of elements")

        reds, blues = [], []
        for point, tag in zip(datas, lables):
            if tag == 1:
                reds.append(point)
            elif tag == -1:
                blues.append(point)
            else:
                raise Exception("unexpected value of lable %s" % tag)

        plt.plot([p[1] for p in reds], [p[2] for p in reds], 'ro',
                 [p[1] for p in blues], [p[2] for p in blues], 'bo')
        if classifier_line:
            plt.plot([p[1] for p in classifier_line],
                     [p[2] for p in classifier_line], 'r--')
        plt.show()

    def get_classifier_line(weights, datas):
        """
        Sample the decision boundary X*weights = 0 as [1, x1, x2] points.

        weights: array exposing three values (w0, w1, w2) via .flat
        datas: list of [x0, x1, x2]; used only to pick the x1 plotting
               range, extended by 1 on each side
        returns: list of [1, x1, x2] with x1 stepped by 0.1
        """
        x1_values = [data[1] for data in datas]
        x1_min = min(x1_values) - 1
        x1_max = max(x1_values) + 1
        # removed leftover debug print(w0, w1, w2) that polluted stdout
        w0, w1, w2 = list(weights.flat)
        ret_line = []
        for x1 in np.arange(x1_min, x1_max, 0.1):
            # solve 1*w0 + x1*w1 + x2*w2 = 0 for x2
            x2 = (-w0 - x1*w1) / w2
            ret_line.append([1, x1, x2])
        return ret_line

    #plot_xy(sigmoid, np.arange(-20, 20, 0.1))
    datas, lables = load_testdata()
    #print(datas, lables)
    #weights = gradient_ascent(datas, lables, lambda x: sigmoid(x)*2 -1) # a 3*1 vector
    # Fit with stochastic gradient ascent; labels here are -1/1 (see module
    # docstring), so the link function is rescaled to sigmoid(x)*2 - 1,
    # whose range is (-1, 1).
    weights = None
    # 15 passes over the data set, feeding each pass's weights into the next
    for i in range(0, 15):
        weights = stochastic_gradient_ascent(datas, lables, f=lambda x: sigmoid(x) * 2 -1, weights_init=weights)
    # turn the fitted weights into boundary points and overlay them on the data
    classfier_line = get_classifier_line(weights, datas)
    plot_data_lables(datas, lables, classfier_line)
