#! -*- coding:utf-8 -*-

import math
import numpy as np
from numpy import *
import matplotlib.pyplot as plt
import sys, os, time, uuid, re, codecs
import urllib2
import chardet
from bs4 import BeautifulSoup
from pymongo import MongoClient
import cPickle

# Module-level scratch list; rebound by the script section near the bottom
# of the file (data = getPrices('RU60t')).
data = []


def gethtml(url):
    # headers = {'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'}
    # req = urllib2.Request(url=url, headers=headers)
    # data = urllib2.urlopen(req).read()
    content = urllib2.urlopen(url).read()
    typeEncode = sys.getfilesystemencoding()  ##系统默认编码
    print chardet.detect(content)
    infoencode = chardet.detect(content).get('encoding', 'utf-8')  ##通过第3方模块来自动提取网页的编码
    html = content  # content.decode(infoencode, 'ignore').encode("utf-8")
    # content =urllib2.urlopen(url).read().decode("gb2312").encode("utf-8") #decode("UTF-8").encode(systype) #data #.decode("UTF-8") #.encode(systype)
    # print html
    return html


# Download data and save it locally
def download(pz):
    url = "http://stock2.finance.sina.com.cn/futures/api/json.php/IndexService.getInnerFuturesDailyKLine?symbol="+pz
    # http://stock2.finance.sina.com.cn/futures/api/json.php/IndexService.getInnerFuturesDailyKLine?symbol=M0
    # 一般截止到前一日(json格式):日期,开盘，最高，最低，收盘。成交量
    datastr = gethtml(url)
    data_list = list(datastr)
    datalenth = len(data_list)
    print "len:", datalenth
    write_file = open('./data/'+pz+'.pkl', 'wb')
    cPickle.dump(datastr, write_file, -1)
    write_file.close()
    return

def get60minutes():
    url = "http://stock2.finance.sina.com.cn/futures/api/json.php/IndexService.getInnerFuturesMiniKLine60m?symbol=RU0"
    # http://stock2.finance.sina.com.cn/futures/api/json.php/IndexService.getInnerFuturesDailyKLine?symbol=M0
    # 一般截止到前一日(json格式):日期,开盘，最高，最低，收盘。成交量
    datastr = gethtml(url)
    data_list = list(datastr)
    datalenth = len(data_list)
    print "len:", datalenth
    write_file = open('./data/' + 'RU60t' + '.pkl', 'wb')
    cPickle.dump(datastr, write_file, -1)
    write_file.close()

def getRU():
    """Return the cached daily K-line rows for RU0.

    Convenience wrapper: the body was a verbatim copy of getData('RU0'),
    so it now delegates to keep the two in sync.
    """
    return getData('RU0')

def getData(pz):
    """Load the cached K-line rows for symbol *pz* from ./data/<pz>.pkl.

    Returns the parsed list of rows (date, open, high, low, close, volume)
    as written by download() / get60minutes().
    """
    read_file = open('./data/'+pz+'.pkl', 'rb')
    try:
        raw = cPickle.load(read_file)
    finally:
        # BUG FIX: the file handle leaked if load() raised.
        read_file.close()
    # WARNING: eval() on downloaded text can execute arbitrary code --
    # prefer json.loads once the feed format is confirmed to be JSON.
    return eval(raw)


def showAll():
    """Plot the last ~60 open prices for each configured symbol on one axis.

    Blocks until the matplotlib window is closed.
    """
    kinds = ['RU0']  # ,'CU0','AU0']
    plt.style.use('ggplot')
    plt.title(u'show report')
    plt.ylabel('Price Value')
    plt.xlabel('Time')
    for kind in kinds:
        data = getData(kind)
        # AU0 quotes are rescaled x100 so all series share one axis.
        if kind == 'AU0':
            opens = [float(inner[2]) * 100 for inner in data]
        else:
            opens = [float(inner[2]) for inner in data]
        opens = np.array(opens)
        # (unused locals `mat` and `closes` removed -- only the commented-out
        #  experiments referenced them)
        plt.plot(opens[-60:-1])
    plt.tight_layout()
    plt.show()


def showRU():
    """Plot the last ~200 daily closes for RU0 from the local cache.

    NOTE(review): columns are read at indexes 2..6 here, while getPrices()
    reads 1..4 -- the row layout is assumed to have an extra leading field;
    confirm against the cached data.  Blocks on plt.show().
    """
    data = getRU()
    mat = np.matrix(data)
    #print data[:,2]
    opens=[float(inner[2]) for inner in data]
    hh = [float(inner[3]) for inner in data] # daily high
    ll = [float(inner[4]) for inner in data]
    closes = [float(inner[5]) for inner in data] # daily close
    vols = [float(inner[6]) for inner in data] # volume
    opens = np.array(opens)
    kv=[]
    closes = np.array(closes)
    # kv: day-over-day close deltas -- diagnostic only (printed, not plotted)
    for i in range(len(closes)-2):
        kv.append(closes[i+1] -closes[i])
    print "len of kv:", len(kv)
    print "type", type(mat[2, 2]),type(closes[2])
    # data = [word for word in data]
    # print "data2 shape:",np.shape(data)
    '''
        plt.style.use('ggplot')
        ax1 = plt.subplot2grid((2, 2), (0, 0))
        ax1.set_title('Accuracy')
        ax1.set_ylabel('Validation Accuracy')
        ax1.set_xlabel('Epochs')
        ax2 = plt.subplot2grid((2, 2), (1, 0))
        ax2.set_title('Loss')
        ax2.set_ylabel('Validation Loss')
        ax2.set_xlabel('Epochs')
        ax3 = plt.subplot2grid((2, 2), (0, 1), rowspan=2)
        ax3.plot(data[:,2])
        ax3.plot(data[:, 3])
    '''
    plt.style.use('ggplot')
    plt.title(u'RU')
    plt.ylabel('Price Value')
    plt.xlabel('Time')
    plt.plot(closes[-200:-1])
    #plt.plot(closes-opens)
    plt.tight_layout()
    plt.show()


#showRU()
'''
download("J0")
download("RB0")
download("CU0")
download("AU0")
'''


# Trading direction decided by the trend of the 5/10/20/30-day moving averages:
# - Bullish (long) alignment: if the close stays above the averages for the
#   next 5 sessions and the high shows a 5% price spread, buy.
# - When the trend flattens, close the position.
# - Opposite trend: sell short; the exit condition is the same.
def prepareData():
    data = getRU()
    mat = np.matrix(data)
    #print data[:,2]
    opens=[float(inner[2]) for inner in data]
    hh = [float(inner[3]) for inner in data] #最高价
    ll = [float(inner[4]) for inner in data]
    closes = [float(inner[5]) for inner in data] #收盘价
    vols = [float(inner[6]) for inner in data] #成交量
    opens = np.array(opens)
    kv=[]
    closes = np.array(closes)
    for i in range(600):
        kv.append(closes[i+1] -closes[i])
    print "len of kv:", len(kv)
    print "type", type(mat[2, 2]),type(closes[2])

# Fetch the cached daily K-line data for a symbol: date, open, high, low, close, volume
def getPrices(kind):
    """Return cached rows for *kind* as [date, open, high, low, close].

    Reads ./data/<kind>.pkl written by download()/get60minutes() and
    converts the numeric fields to float.  NOTE: the source rows also
    carry volume (index 5), which is dropped here.
    """
    read_file = open('./data/'+kind+'.pkl', 'rb')
    try:
        raw = cPickle.load(read_file)
    finally:
        # BUG FIX: the file handle leaked if load() raised.
        read_file.close()
    # WARNING: eval() on downloaded text can execute arbitrary code --
    # prefer json.loads once the feed format is confirmed to be JSON.
    rows = eval(raw)
    return [[r[0], float(r[1]), float(r[2]), float(r[3]), float(r[4])]
            for r in rows]

# 5/10/20/40-day moving averages
# 5/10/20/40-period moving average helper
def getMA(mat, window, index):
    """Return the *window*-period moving average of the close (column 4)
    ending at row *index* of *mat*.

    When fewer than *window* rows precede *index*, the missing slots are
    padded with the close at *index* (the padding the original set up).

    NOTE: the second parameter was named ``len`` (shadowing the builtin);
    renamed -- all in-file callers pass it positionally.
    """
    # BUG FIX: the original filled positions [window-index, window) with
    # mat[index-i][4], where index-i goes negative for index < window-1 and
    # silently wraps to the END of the list.  Fill only the valid history.
    # (The per-call debug `print` of the whole window is also removed.)
    data = [mat[index][4]] * window
    for i in range(min(window, index + 1)):
        data[i] = mat[index - i][4]
    return np.mean(data)


def showFigure(data, count):
    """Plot closes with their 5/8/14/20-period moving averages.

    data  -- rows shaped like getPrices() output (close at index 4)
    count -- number of rows (the caller passes len(data)); the last 10
             rows are skipped, matching the original behavior.

    NOTE: the second parameter was named ``len`` (shadowing the builtin);
    renamed -- the in-file caller passes it positionally.  The per-call
    debug `print ma5` is removed.  Blocks on plt.show().
    """
    cls, ma5, ma8, ma14, ma20 = [], [], [], [], []
    for i in range(count - 10):
        cls.append(data[i][4])
        ma5.append(getMA(data, 5, i))
        ma8.append(getMA(data, 8, i))
        ma14.append(getMA(data, 14, i))
        ma20.append(getMA(data, 20, i))
    plt.style.use('ggplot')
    plt.title(u'RU')
    plt.ylabel('Price Value')
    plt.xlabel('Time')
    end = count - 10
    plt.plot(cls[:end], 'r')
    plt.plot(ma5[:end], 'b', label='ma5')
    # BUG FIX: this series is the 8-period MA but was labelled 'ma10'.
    plt.plot(ma8[:end], 'y', label='ma8')
    plt.plot(ma14[:end], 'g', label='ma14')
    plt.plot(ma20[:end], 'k', label='ma20')
    plt.tight_layout()
    plt.show()

#get60minutes()
#showAll()
# data =getPrices('RU0')

#data =getPrices('RU0')
#showFigure(data,len(data))

if __name__ == '__main__':
    # Plot the cached 60-minute RU series with its moving averages.
    # Guarded so importing this module no longer triggers plotting.
    data = getPrices('RU60t')
    data.reverse()  # presumably the feed is newest-first; plot oldest-first -- TODO confirm
    showFigure(data, len(data))


from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD, RMSprop
from keras.layers.recurrent import LSTM
from keras.utils import np_utils
from keras.datasets import mnist


def l2_normalizer(vec):
    """Return a new list: *vec* scaled to unit Euclidean (L2) norm.

    An empty *vec* yields []; an all-zero *vec* raises ZeroDivisionError
    (both unchanged from the original behavior).
    """
    # Hoist the invariant norm out of the loop: the original recomputed
    # math.sqrt(denom) once per element.
    norm = math.sqrt(sum(el ** 2 for el in vec))
    return [el / norm for el in vec]


def getdata():
    """Placeholder: build and return the (X_train, y_train) training pair.

    NOTE(review): X_train and y_train are never assigned in this body, so
    calling this function raises NameError.  The array expressions below
    are scratch experiments whose results are discarded (`a` is overwritten,
    `b` is unused, the zeros array is dropped).  Needs a real implementation
    before main() can run.
    """
    # tf_vector = [tf(word, doc) for word in vocabulary]
    # doc_term_matrix_l2 = []
    # for vec in doc_term_matrix:
    #    doc_term_matrix_l2.append(l2_normalizer(vec))
    # print np.matrix(doc_term_matrix)
    a = array([20, 30, 40, 50])
    b = linspace(0, pi, 3)
    a = arange(15).reshape(3, 5)
    np.zeros((3, 4))
    # label = np_utils.to_categorical(label, numClass)
    return (X_train, y_train)


def getmodel():
    """Build and compile a single-LSTM classifier (Keras 1.x style API).

    Architecture: LSTM(128) over (maxlen=40, dims=4) input windows, a 4-way
    Dense layer with softmax, compiled with RMSprop(lr=0.01) and categorical
    cross-entropy.  Returns the compiled model.
    """
    print "build model..."
    # build the model: a single LSTM
    print('Build model...')
    maxlen = 40  # time steps per input window
    dims = 4     # features per time step
    model = Sequential()
    model.add(LSTM(128, input_shape=(maxlen, dims)))
    model.add(Dense(4))
    # model.add(LSTM(64))
    # model.add(Dense(4))
    model.add(Activation('softmax'))
    optimizer = RMSprop(lr=0.01)
    model.compile(loss='categorical_crossentropy', optimizer=optimizer)
    '''
    for mode, result in zip(modes, results):
        ax1.plot(result[0].epoch, result[0].history['val_acc'], label=mode)
        ax2.plot(result[0].epoch, result[0].history['val_loss'], label=mode)
    # not used anyway
    # for a single-input model with 2 classes (binary):
    model = Sequential()
    model.add(Dense(1, input_dim=784, activation='sigmoid'))
    model.compile(optimizer='rmsprop',
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    '''
    return model


def showmodel(model):
    """Render the model's architecture diagram to model.png.

    Uses the Keras 1.x visualize_util module (requires pydot/graphviz);
    imported locally so the rest of the script works without it.
    """
    from keras.utils.visualize_util import plot
    plot(model, to_file='model.png')


def main():
    """Train the LSTM from getmodel() on data from getdata().

    NOTE(review): this cannot run as written -- getdata() returns undefined
    names, and X_test / y_test passed to validation_data are never defined
    in this function.  Both must be supplied before training works.
    """
    model = getmodel()
    batch_size = 128
    epochs = 30
    # (X_train, y_train), (X_test, y_test) = imdb.load_data(nb_words=max_features)
    (X_train, y_train) = getdata()
    start_time = time.time()
    history = model.fit(X_train, y_train,
                        batch_size=batch_size,
                        nb_epoch=epochs,  # Keras 1.x keyword (epochs= in 2.x)
                        validation_data=(X_test, y_test))
    average_time_per_epoch = (time.time() - start_time) / epochs
    '''
    # generate dummy data
    data = np.random.random((1000, 784))
    labels = np.random.randint(2, size=(1000, 1))

    # train the model, iterating on the data in batches
    # of 32 samples
    model.fit(data, labels, nb_epoch=10, batch_size=32)
    score = model.evaluate(data, labels, batch_size=32)
    print "score:", score
    # print model.get_weights()
    print model.predict(data[:1])
    '''
    print "end model..."
