import requests
import os
from datetime import datetime
import json
import gzip
import io
import numpy as np
import sqlite3
from bson import json_util
import params
import talib
# HTTP/HTTPS proxy configuration for outbound requests.
# NOTE(review): defined but never passed to any requests.get() call in this
# file — confirm whether it should be wired into readDataWS (proxies=proxies).
proxies = {
    "http": "http://192.168.47.1:8580",
    "https": "http://192.168.47.1:8580",
}
def readDataWS(endtime=None, starttime=None):
    """Fetch up to 200 one-minute kline bars for params.Currency from the feed.

    Args:
        endtime: Unix timestamp upper bound; defaults to "now" at call time.
        starttime: optional Unix timestamp lower bound (added as &st=).

    Returns:
        A list of {"Date", "Open", "High", "Low", "Close"} dicts in
        ascending Date order, or None when the HTTP status is not 200.
    """
    # BUG FIX: the original default `endtime=datetime.now().timestamp()` was
    # evaluated ONCE at import time, freezing "now" for the process lifetime.
    if endtime is None:
        endtime = datetime.now().timestamp()
    url="http://ds.jctytech.com/stock.php?u=test&symbol=FX{}&type=kline&et={}&line=min,1&sort=Date desc&num=200".format(params.Currency,endtime)
    if starttime:
        url+="&st="+str(starttime)
    res = requests.get(url)
    if res.status_code != 200:
        return None
    data = res.json()
    # The API returns newest-first ("sort=Date desc"); reverse to ascending.
    return [
        {
            "Date": line["Date"],
            "Open": float(line["Open"]),
            "High": float(line["High"]),
            "Low": float(line["Low"]),
            "Close": float(line["Close"]),
        }
        for line in reversed(data)
    ]


def LoadAllData():
    """Synchronize the local SQLite 1-minute history table with the remote feed.

    Two phases:
      1. If the table already has >=2 rows, fill the gap between the
         second-newest stored timestamp and "now".
      2. Page backwards from the oldest stored timestamp until the feed
         returns nothing, extending history into the past.

    Table name is params.Currency + "1M"; rows are REPLACEd so re-fetching
    overlapping ranges is idempotent.
    """
    conn = sqlite3.connect('data/history.db')
    tablename = params.Currency + "1M"
    # BUG FIX: conn.close() now runs even if a phase raises (was leaked before).
    try:
        cursor = conn.cursor()
        cursor.execute('create table if not exists {} (dt timestamp primary key, open float,high float ,low float ,close float )'.format(tablename))
        conn.commit()
        cursor.execute('select dt from {} order by dt desc limit 2'.format(tablename))
        newest_two = cursor.fetchall()
        if len(newest_two) > 1:
            # Phase 1: fetch everything newer than the second-newest stored bar.
            first = newest_two[1]
            last2 = int(datetime.now().timestamp())
            while last2 > first[0]:
                datalist = readDataWS(last2, first[0])
                if not datalist:
                    break
                datalist = [(int(one["Date"]), float(one["Open"]), float(one["High"]), float(one["Low"]), float(one["Close"])) for one in datalist]
                cursor.executemany('REPLACE INTO {} VALUES (?,?,?,?,?)'.format(tablename), datalist)
                conn.commit()
                prev = last2
                last2 = datalist[-1][0]
                print("new", len(datalist))
                # BUG FIX: if the feed keeps returning the same newest bar the
                # cursor never advances — bail out instead of looping forever.
                if last2 == prev:
                    break
        try:
            # Phase 2: page backwards from the oldest stored timestamp.
            while True:
                cursor.execute('select dt from {} order by dt limit 1'.format(tablename))
                last = cursor.fetchone()
                if last is None:
                    # Empty table: start paging back from "now".
                    last = [datetime.now().timestamp()]
                datalist = readDataWS(last[0])
                if not datalist:
                    break
                datalist = [(int(one["Date"]), float(one["Open"]), float(one["High"]), float(one["Low"]), float(one["Close"])) for one in datalist]
                print("old", len(datalist))
                cursor.executemany('REPLACE INTO {} VALUES (?,?,?,?,?)'.format(tablename), datalist)
                conn.commit()
        except Exception as e:
            # Best-effort backfill: log and fall through to close the connection.
            print(e)
    finally:
        conn.close()

def gaussian(x, mu, sig):
    """Unnormalized Gaussian (peak value 1) centered at mu with std-dev sig.

    Works elementwise on numpy arrays as well as scalars.
    """
    z = (x - mu) / sig
    return np.exp(-0.5 * z * z)
def AddDI(datalist):
    """Add a Gaussian-smoothed price index "DI" to each bar dict, in place.

    DI is the weighted typical price (High + Low + 2*Close) / 4, smoothed
    with a normalized 21-point Gaussian kernel (sigma=3, zero-padded at the
    edges via np.convolve mode="same").

    Args:
        datalist: list of dicts with "High", "Low", "Close"; mutated in place.
    """
    # BUG FIX: np.convolve raises on empty input — make empty a no-op.
    if not datalist:
        return
    cl = np.array([one["Close"] for one in datalist])
    hi = np.array([one["High"] for one in datalist])
    lo = np.array([one["Low"] for one in datalist])
    DI = (hi + lo + cl + cl) / 4
    pr = 21
    x = np.linspace(-pr / 2, pr / 2, pr)
    # Gaussian kernel (mu=0, sigma=3), normalized to sum to 1 so flat input
    # stays flat away from the edges.
    kernel = np.exp(-np.power(x, 2.0) / (2 * np.power(3.0, 2.0)))
    kernel /= np.sum(kernel)
    ma = np.convolve(DI, kernel, "same")
    for row, smoothed in zip(datalist, ma):
        row["DI"] = smoothed

def savedata(data, dbfile):
    """Serialize data to JSON and write it gzip-compressed to dbfile.

    Uses bson.json_util.default so BSON-specific types survive the round
    trip with loaddate(). Any existing file is removed first.
    """
    try:
        os.remove(dbfile)
    except OSError:
        # File may not exist yet (or be unremovable); the open below will
        # surface any real problem. Was a bare `except:` before.
        pass
    # gzip.open in text mode replaces the manual GzipFile + TextIOWrapper pair.
    with gzip.open(dbfile, "wt", encoding="utf-8") as f:
        json.dump(data, f, default=json_util.default)

def loaddate(dbfile):
    """Load the gzip-compressed JSON written by savedata().

    Uses bson.json_util.object_hook to restore BSON-specific types.
    """
    # BUG FIX: removed the unreachable second `return json.load(f)` that
    # followed the first return in the original.
    with gzip.open(dbfile, "rt", encoding="utf-8") as f:
        return json.load(f, object_hook=json_util.object_hook)
def fetchdata():
    """Download the latest bars, smooth them, and persist them to disk."""
    bars = readDataWS()
    AddDI(bars)
    print(len(bars))
    savedata(bars, "data/" + params.Coinnamezip)
def loadcsv(dbfile):
    """Load bars from a CSV file and attach the smoothed DI column.

    Expected columns: date, time, open, high, low, close, volume.

    Returns:
        list of bar dicts with "Date" ("date time"), OHLC floats, "Volume",
        and "DI" added by AddDI.
    """
    datalist = []
    with open(dbfile) as f:
        # Iterate the file lazily instead of readlines()-ing it all.
        for raw in f:
            raw = raw.strip()
            if not raw:
                # BUG FIX: a blank line ("\n") passed the old truthiness check,
                # stripped to "", and then d[1] raised IndexError.
                continue
            d = raw.split(",")
            datalist.append({
                "Date": "{} {}".format(d[0], d[1]),
                "Open": float(d[2]),
                "High": float(d[3]),
                "Low": float(d[4]),
                "Close": float(d[5]),
                "Volume": float(d[6]),
            })
    AddDI(datalist)
    print(len(datalist))
    return datalist

def LoadFxDb(dbname, lastn=None):
    """Load bars from the MongoDB "FX" database collection dbname.

    Args:
        dbname: collection name inside the FX database.
        lastn: if given, only the last n documents are read.

    Returns:
        list of bar dicts ("Date" formatted '%m-%d %H:%M', OHLC values)
        with "DI" added by AddDI.
    """
    import pymongo
    client = pymongo.MongoClient('mongodb://192.168.31.244:27017/')
    collection = client.FX[dbname]
    cursor = collection.find().sort("_id", 1)
    if lastn is not None:
        cursor.skip(collection.count() - lastn)
    bars = []
    for doc in cursor:
        # _id is the bar's Unix timestamp; 0 marks a placeholder document.
        if doc["_id"] == 0:
            continue
        bars.append({
            "Date": datetime.fromtimestamp(doc["_id"]).strftime('%m-%d %H:%M'),
            "Open": doc["open"],
            "High": doc["high"],
            "Low": doc["low"],
            "Close": doc["close"],
        })
    AddDI(bars)
    print(len(bars))
    return bars
def ZipBar(datalists, m):
    """Aggregate 1-minute bars into m-minute bars.

    Starts at the first bar whose Date (Unix seconds) is aligned to the
    m-minute period, then merges each run of bars spanning less than
    m*60 seconds into one bar: first Open, max High, min Low, last Close,
    summed Volume. Input bars are not mutated (merged bars are copies).

    Returns:
        list of merged bar dicts.
    """
    sec = m * 60
    # Find the first period-aligned bar; fall back to index 0 if none align.
    startindex = 0
    for idx, bar in enumerate(datalists):
        if bar["Date"] % sec == 0:
            startindex = idx
            break
    merged = []
    current = None
    for bar in datalists[startindex:]:
        # Close out the current bar once the next one falls outside its window.
        if current is not None and bar["Date"] - current["Date"] >= sec:
            merged.append(current)
            current = None
        if current is None:
            current = bar.copy()
            continue
        current["High"] = max(current["High"], bar["High"])
        current["Low"] = min(current["Low"], bar["Low"])
        current["Close"] = bar["Close"]
        current["Volume"] += bar["Volume"]
    if current is not None:
        merged.append(current)
    return merged
if __name__ == "__main__":
    # BUG FIX: `LoadDb` is not defined anywhere in this module, so running the
    # script raised NameError. LoadFxDb is the only loader taking a db name,
    # and "GOLD1" matches its dbname argument — TODO confirm intended entry point.
    LoadFxDb("GOLD1")
    #print(loadcsv("data/GOLD1.csv"))