import torch
from torch import nn
import torch.utils.data as Data
import params
import log
import context
import math
import numpy as np
import random
import talib
import io
import pymongo
import sys

def gaussian(x, mu, sig):
    """Unnormalised Gaussian: exp(-(x - mu)^2 / (2 * sig^2)).

    Operates element-wise on numpy arrays as well as on scalars.
    """
    z = (x - mu) / sig
    return np.exp(-0.5 * z * z)
def AddDI(datalist):
    """Attach a smoothed weighted price column "DI" to every bar, in place.

    The weighted price (H + L + 2*C) / 4 is smoothed with a 21-tap Gaussian
    kernel (sigma = 3, normalised to unit sum) using same-length convolution,
    so edge values are computed against an implicitly zero-padded series.
    """
    closes = np.array([bar["Close"] for bar in datalist])
    highs = np.array([bar["High"] for bar in datalist])
    lows = np.array([bar["Low"] for bar in datalist])
    weighted = (highs + lows + closes + closes) / 4

    taps = 21
    kernel = gaussian(np.linspace(-taps / 2, taps / 2, taps), 0, 3)
    kernel /= np.sum(kernel)
    smoothed = np.convolve(weighted, kernel, "same")
    for bar, value in zip(datalist, smoothed):
        bar["DI"] = value

def LoadDb(dbname,lastn=None):
    """Load OHLCV candles from MongoDB collection *dbname* of the `earecod` db.

    Documents are read in ascending ``_id`` order; the document with
    ``_id == 0`` is skipped (presumably a metadata/sentinel record — confirm
    against the writer).  When *lastn* is given, only the last *lastn*
    documents are returned.  The smoothed "DI" column is added in place via
    AddDI() before returning.

    Returns a list of dicts with keys Date/Open/High/Low/Close/Volume (+ DI).
    """
    # NOTE(review): hard-coded LAN address — consider moving to params/config.
    client = pymongo.MongoClient('mongodb://192.168.31.145:27017/')
    db=client.earecod[dbname]
    datalist=[]
    res=db.find().sort("_id",1)
    if lastn is not None:
        # Skip everything except the trailing *lastn* documents.
        # NOTE(review): Collection.count() is deprecated and removed in
        # pymongo>=4 (use count_documents({})) — confirm the driver version.
        res.skip(db.count() - lastn);
    for one in res:
        if(one["_id"]==0):
            continue
        datalist.append({
            "Date":one["_id"],
            "Open":one["o"],
            "High":one["h"],
            "Low":one["l"],
            "Close":one["c"],
            "Volume":float(one["v"]),
        })
    AddDI(datalist)
    log.msg("data count:{}".format(len(datalist)))
    return datalist

def GetMAStack(datalist):
    """Build the (channels, bars) indicator matrix for *datalist*.

    Requires at least 60 bars (enough history for the longest indicator).
    """
    if len(datalist) < 60:
        raise Exception("datalist len >=60 !")
    closes = np.array([bar["Close"] for bar in datalist])
    highs = np.array([bar["High"] for bar in datalist])
    lows = np.array([bar["Low"] for bar in datalist])
    return GetIndexStack(highs, lows, closes, None)
def GetIndexStack(Hi, Lo, Cl, Vol):
    """Assemble a (channels, bars) feature matrix from price arrays.

    Close/High/Low plus a family of EMAs are normalised against the longest
    EMA (params.malist[-1]); SAR is normalised against EMA(5); the
    directional-movement family is appended unnormalised.  *Vol* is accepted
    for interface symmetry but currently unused.  Leading entries are NaN
    during each indicator's warm-up period.
    """
    baseline = talib.EMA(Cl, params.malist[-1])
    features = [Cl, Hi, Lo]
    for period in params.malist[:-1]:
        features.append(talib.EMA(Cl, period))
    # Express price-like channels relative to the slow EMA baseline.
    features = [(series - baseline) / baseline for series in features]

    ma5 = talib.EMA(Cl, 5)
    features.append((talib.SAR(Hi, Lo, 0.02, 0.2) - ma5) / ma5)
    features += [
        talib.ADX(Hi, Lo, Cl),
        talib.MINUS_DI(Hi, Lo, Cl),
        talib.PLUS_DI(Hi, Lo, Cl),
        talib.DX(Hi, Lo, Cl),
        talib.PLUS_DM(Hi, Lo),
        talib.MINUS_DM(Hi, Lo),
        talib.CCI(Hi, Lo, Cl),
    ]
    return np.stack(features)

class RNN(nn.Module):
    """Bidirectional single-layer LSTM classifier.

    Consumes (batch, time, inputsize) tensors and emits a 3-way softmax
    distribution computed from the output at the last time step.
    """

    def __init__(self, inputsize):
        super(RNN, self).__init__()
        self.rnn = nn.LSTM(
            input_size=inputsize,
            hidden_size=params.LSTM_HIDEN_SIZE,
            num_layers=1,
            batch_first=True,
            bidirectional=True,
        )
        # Classification head; input width is doubled because the LSTM
        # is bidirectional (forward + backward hidden states).
        self.out = nn.Sequential(
            nn.Linear(params.LSTM_HIDEN_SIZE * 2, 5),
            nn.BatchNorm1d(5),
            nn.Sigmoid(),
            nn.Linear(5, 3),
            nn.Softmax(1),
        )

    def forward(self, x):
        """Run the LSTM (zero initial state) and classify the final step."""
        sequence_out, _ = self.rnn(x, None)
        last_step = sequence_out[:, -1, :]
        return self.out(last_step)

LR = 0.001

def fetchTranlist(datalist, nosign=0):
    """Build (sample, label) training pairs from labelled bars.

    Each sample is a window of params.LSTM_TIME_STEPS consecutive feature
    rows (from GetMAStack); the label is the bar's "tran_mark" when present.
    Unlabelled bars are included as class 0 with probability *nosign*.
    Bars gain an "acode" entry (their feature row) as a side effect.

    Returns (srclist, reslist) — parallel lists of windows and int labels.
    """
    srclist = []
    reslist = []
    feature_rows = GetMAStack(datalist).T  # shape: (bars, channels)
    # Keep only bars whose feature row is fully defined (drops the
    # indicator warm-up NaNs); attach the row to the bar in place.
    valid = []
    for bar, row in zip(datalist, feature_rows):
        # Fixed idiom: was `np.any(np.isnan(...)) == False` (PEP 8 E712).
        if not np.any(np.isnan(row)):
            bar["acode"] = row
            valid.append(bar)

    # Removed a dead `"acode" not in bar` check: every entry of `valid`
    # has "acode" by construction above.
    for i in range(params.LSTM_TIME_STEPS - 1, len(valid)):
        bar = valid[i]
        window = [valid[j]["acode"]
                  for j in range(i - params.LSTM_TIME_STEPS + 1, i + 1)]
        if "tran_mark" in bar:
            srclist.append(window)
            reslist.append(bar["tran_mark"])
        elif random.random() <= nosign:
            # Sub-sample unlabelled bars as the "no signal" class.
            srclist.append(window)
            reslist.append(0)
    return srclist, reslist
def findTranPoint(datalist, maxpoint=6, span=60, mindis=0):
    """Label candidate turning points on the smoothed price column "DI".

    Slides a *span*-bar window over the data; in each window, the *maxpoint*
    lowest-DI bars get ``tran_mark = 1`` and the *maxpoint* highest get
    ``tran_mark = 2`` — but only if the bar falls in the first fifth of the
    window (the trailing 4/5 is excluded so labels are confirmed by enough
    future bars).  Windows whose DI range is below *mindis* are skipped.

    Returns the usable bars with the unconfirmed tail trimmed off.
    """
    # Clear any labels left over from a previous run.
    for bar in datalist:
        bar.pop("tran_mark", None)

    # Keep the leading run of bars that carry a DI value, skipping NaNs;
    # stop at the first bar with no DI at all.
    usable = []
    for bar in datalist:
        if "DI" not in bar:
            break
        if not math.isnan(bar["DI"]):
            usable.append(bar)

    tail = int(span / 5 * 4)  # trailing part of each window excluded from labelling
    for start in range(len(usable) - span):
        window = usable[start:start + span]
        eligible = window[:-tail]
        ranked = sorted(window, key=lambda b: b["DI"])
        if ranked[-1]["DI"] - ranked[0]["DI"] < mindis:
            continue
        for bar in ranked[:maxpoint]:
            if bar in eligible:
                bar["tran_mark"] = 1
        for bar in ranked[-maxpoint:]:
            if bar in eligible:
                bar["tran_mark"] = 2
    return usable[:-tail]
def splitTrainTest(inputs, outputs, trainpick=0.7):
    """Randomly partition parallel (inputs, outputs) into train/test sets.

    *trainpick* is the probability of a sample landing in the training set.
    A negative value means "reserve about |trainpick| samples for testing":
    it is converted to 1 + trainpick/len(inputs).

    Returns (train_input, train_output, test_input, test_output).
    """
    if trainpick < 0:
        trainpick = 1 + (trainpick / len(inputs))
    train_in, train_out = [], []
    test_in, test_out = [], []
    for idx, sample in enumerate(inputs):
        if random.random() < trainpick:
            train_in.append(sample)
            train_out.append(outputs[idx])
        else:
            test_in.append(sample)
            test_out.append(outputs[idx])
    return train_in, train_out, test_in, test_out
use_gpu = False #torch.cuda.is_available()
def main(datalists):
    """Train the RNN classifier on labelled bars and persist the best model.

    Labels turning points, builds windowed samples, trains with
    class-weighted NLL loss, and saves the best snapshot (lowest held-out
    loss) via context.savedata() after 100 consecutive non-improving epochs.
    """
    # Trim the tail reserved for out-of-sample testing, then cap the
    # training span to the most recent Train_Count bars.
    if params.Test_Reserved>0:
        datalists=datalists[:-params.Test_Reserved]
    if params.Train_Count>0:
        datalists=datalists[-params.Train_Count:]

    # Label turning points (1 per window extreme, 90-bar windows) and build
    # samples; nosign=1 keeps every unlabelled bar as a class-0 sample.
    datalists=findTranPoint(datalists[10:-10],1,90)
    srclist,reslist=fetchTranlist(datalists,1)

    # Inverse-frequency class weights, normalised to sum to 1.
    # NOTE(review): divides by each class count — raises ZeroDivisionError
    # if any of the three classes is absent from reslist.
    classcount=[0,0,0]
    for one in reslist:
        classcount[one]+=1
    classcount_g=classcount[0]*classcount[1]*classcount[2]
    for i in range(len(classcount)):
        classcount[i]=classcount_g/classcount[i]
    classcount_s=sum(classcount)
    for i in range(len(classcount)):
        classcount[i]=classcount[i]/classcount_s
    log.msg("weight:{}".format(classcount))
    classcount=torch.FloatTensor(classcount)
    if use_gpu:
        classcount=classcount.cuda()

    # Negative trainpick: reserve ~2000 samples for the held-out set.
    train_input,train_output,test_input,test_output=splitTrainTest(srclist,reslist,-2000)
    #x= torch.stack(train_input).detach()
    x=torch.from_numpy(np.array(train_input)).float()
    y=torch.LongTensor(train_output)
    #tx=torch.stack(test_input).detach()
    tx=torch.from_numpy(np.array(test_input)).float()
    ty=torch.LongTensor(test_output)
    # Drop the Python-side copies so the tensors are the only owners.
    train_input=train_output=test_input=test_output=None
    srclist=reslist=None
    datalists=None
    log.msg('train shape:{},{}'.format(x.shape,y.shape))

    _rnn = RNN(x.shape[2])
    if use_gpu:
        _rnn.cuda()
    optimizer = torch.optim.Adam(_rnn.parameters(), lr=0.0005)
    loss_func = nn.NLLLoss(weight=classcount)

    # training and testing
    epoch=0  # counts consecutive epochs without held-out improvement

    b_tx=tx
    b_ty=ty
    if use_gpu:
        b_tx=b_tx.cuda()
        b_ty=b_ty.cuda()

    torch_dataset = Data.TensorDataset(x,y)

    # Put the dataset into a DataLoader.
    loader = Data.DataLoader(
        dataset=torch_dataset,
        batch_size=10000,
        shuffle=True,
        num_workers=0,
    )

    savedFile=None
    oldloss=1e5
    while True:
        _rnn.train()
        for step, (b_x, b_y) in enumerate(loader):
            if use_gpu:
                b_x=b_x.cuda()
                b_y=b_y.cuda()
            output = _rnn(b_x)
            # Model emits Softmax probabilities; log() + NLLLoss together
            # form cross-entropy.  NOTE(review): log(softmax(...)) is less
            # numerically stable than LogSoftmax — consider if loss NaNs.
            output=torch.log(output)
            loss = loss_func(output, b_y)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

        # Evaluate on the full held-out set each epoch.
        _rnn.eval()
        output2=_rnn(b_tx)
        output2=torch.log(output2)
        loss = loss_func(output2, b_ty)
        tres=loss.item()
        if oldloss>tres:
            # New best: snapshot the whole model to an in-memory buffer.
            # Moved to CPU first so the snapshot loads without CUDA.
            oldloss=tres
            epoch=0
            _rnn.cpu()
            savedFile=io.BytesIO()
            torch.save(_rnn, savedFile)
            if use_gpu:
                _rnn.cuda()
        else:
            epoch+=1
        log.msg('Epoch:{},Loss={}'.format(epoch,tres))
        if epoch>100:
            # NOTE(review): assumes at least one eval loss beat the initial
            # 1e5 before 100 stale epochs, else savedFile is still None.
            context.savedata(params.RNNmodename,savedFile.getbuffer())
            log.res("save to {}".format(params.RNNmodename))
            return

if __name__ == "__main__":
    # CLI: <docindex> <collection-name> <candle-count>
    docindex=sys.argv[1]  # NOTE(review): parsed but never used below
    data_time=sys.argv[2]
    count=int(sys.argv[3])
    datalists=LoadDb(data_time,count)
    #autocode_torch.main(datalists)
    main(datalists)