import datetime
import json
import queue
import time
from pytdx.hq import TdxHq_API
import threading
import dao.mongo
import security as gsl
import pandas as pd
from retry import retry
from pymongo.errors import AutoReconnect

data_queue = queue.Queue(100)

market_map = {'sz': 0, 'sh': 1}
finish = False
is_exit = False
queueLock = threading.Lock()
totalCount = 0
finished = 0

def unix():
    t = time.time()
    return int(round(t * 1000))


def save_history_transaction_data(api, market, code, date):
    limit = 2000
    offset = 0
    frames = []
    while True:
        data = api.get_history_transaction_data(market, code, offset, limit, date)
        if len(data) == 0:
            break
        offset += limit
        for d in data:
            d['date'] = date
        df = api.to_df(data)
        frames.insert(0, df)
        if len(data) < limit:
            break
    if frames:
        result = pd.concat(frames)
        result.reset_index(inplace=True)
        collection = 'data_%d_%s' % (market, code)
        data = {'coll': collection, 'value': json.loads(result.T.to_json()).values()}
        data_queue.put(data)


def save_all_history_transaction_data(api, start, end):
    begin_date = datetime.date(2022, 1, 1)
    end_date = datetime.date(2022, 12, 6)
    global finished
    for index in range(start, end):
        stock = list[index]
        market = market_map[stock['jys']]
        code = stock['dm']
        ti1 = unix()
        for i in range((end_date - begin_date).days + 1):
            day = begin_date + datetime.timedelta(days=i)
            date = int(str(day).replace('-', ''))
            save_history_transaction_data(api, market, code, date)
        ti2 = unix()
        queueLock.acquire()
        finished += 1
        queueLock.release()
        print('进度: [%d / %d],code: %s 耗时: %d ms' % (finished, totalCount, code, ti2 - ti1))


@retry(AutoReconnect, tries=4, delay=1)
def save_data_into_mongo():
    db = dao.mongo.get_history_transaction_data_db()
    while not finish or not data_queue.empty():
        data = data_queue.get(block=True)
        if data:
            collection = db[data['coll']]
            collection.insert_many(data['value'])
    print('finish save_data_into_mongo...')
    global is_exit
    is_exit = True


class GetDataThread(threading.Thread):
    def __init__(self, api, left, right):
        threading.Thread.__init__(self)
        self.api = api
        self.left = left
        self.right = right

    def run(self) -> None:
        save_all_history_transaction_data(self.api, self.left, self.right)


class SaveMongoThread(threading.Thread):
    def run(self) -> None:
        save_data_into_mongo()


if __name__ == '__main__':
    list = gsl.load_stock_list()
    totalCount = len(list)
    limit = 1000
    ips = ['110.41.147.114', '8.129.13.54', '120.24.149.49', '8.129.174.169', '110.41.154.219']
    threads = []
    apis = []
    for i in range(len(ips)):
        ip = ips[i]
        api = TdxHq_API(raise_exception=False)
        api.connect(ip, 7709)
        apis.append(api)
        start = i * limit
        end = start + limit
        if i == len(ips)-1:
            end = totalCount
        t = GetDataThread(api, start, end)
        threads.append(t)
    th = SaveMongoThread()
    for t in threads:
        t.start()
    th.start()
    for t in threads:
        t.join()
    finish = True
    th.join()
    for api in apis:
        api.disconnect()

