# coding: utf-8
from multiprocessing import Process
import shutil
import pandas as pd
import pytz
from datetime import datetime
import pymongo
from time import time
import os
import logging
import timeUtils_4from_mongo as timeUtils

"""
把历史行情数据导入到mongodb
"""
logger = logging.basicConfig(level=logging.INFO)
t0 = time()
tz = pytz.timezone(pytz.country_timezones('cn')[0])
path = "/home/hanyu/tonglian/oneMinData/futures/allfutures_2007-01-01_to_2015-11-17/"
path2 = "/home/hanyu/tonglian/oneMinData/futures/allfutures_2007-01-01_to_2015-11-17/"
pathWin = "D:\\wks_python\\allfutures_data\\OMD_2016-12-01_to_2017-01-07\\"  # allfutures_2015-11-17_to_2015-12-2
pwd = os.getcwd()
filelist = os.listdir(pathWin)
column_names = {'ticker': 'symbol', 'closePrice': 'close', 'openPrice': 'open', 'highPrice': 'high', 'lowPrice': 'low',
                'totalVolume': 'volume'}

client = pymongo.MongoClient()  # '121.40.212.219', 27017
db = client.future_data
# fi = open(pwd + '/log_min_data.txt', 'a')


# Shared OHLCV aggregation spec for the groupby-based intervals (30/60/120
# minute and daily bars); the original repeated this dict four times.
_BAR_AGG = {'open': 'first', 'close': 'last', 'high': 'max', 'low': 'min',
            'volume': 'sum', 'totalValue': 'sum', 'openInterest': 'last'}


def _store(collection, frame, symbol, tag, min_rows=2):
    """Tag *frame* with its symbol and bulk-insert it into *collection*.

    Frames with fewer than *min_rows* rows are only logged as null, mirroring
    the original per-interval thresholds (daily bars used min_rows=1, every
    other interval required more than one row).
    """
    frame['symbol'] = symbol
    if len(frame.index) >= min_rows:
        collection.insert_many(frame.reset_index().to_dict(orient='records'))
    else:
        logging.info("-------" + tag + " done-------  " + symbol + "--- is null ---")
    logging.info("-------" + tag + " done-------")


def _resample_bars(source, rule, label, oi_label):
    """Downsample OHLCV bars from *source* (DatetimeIndex) to *rule*.

    Buckets are right-closed throughout; *label* selects which bucket edge
    becomes the index, *oi_label* does the same for openInterest only (the
    original labelled openInterest 'left' at 15 minutes while every other
    column used 'right' -- kept as-is; NOTE(review): confirm that asymmetry
    is intentional).  Uses the modern Resampler methods; the original
    `resample(..., how=...)` keyword was removed from pandas long ago.
    """
    bars = pd.DataFrame([])
    bars['open'] = source['open'].resample(rule, closed='right', label=label).first().dropna()
    bars['close'] = source['close'].resample(rule, closed='right', label=label).last().dropna()
    bars['high'] = source['high'].resample(rule, closed='right', label=label).max().dropna()
    bars['low'] = source['low'].resample(rule, closed='right', label=label).min().dropna()
    # min_count=1 keeps empty buckets NaN (old how='sum' semantics) so that
    # dropna() discards them instead of the modern default of 0.
    bars['totalValue'] = source['totalValue'].resample(rule, closed='right', label=label).sum(min_count=1).dropna()
    bars['volume'] = source['volume'].resample(rule, closed='right', label=label).sum(min_count=1).dropna()
    bars['openInterest'] = source['openInterest'].resample(rule, closed='right', label=oi_label).last().dropna()
    return bars


def resample_data(filelist):
    """Load each per-symbol minute CSV from `pathWin`, clean it, and write
    1/5/15/30/60/120-minute and daily bars into the MongoDB collections of
    the module-level `db`.
    """
    for filename in filelist:
        logging.info(filename)
        df = pd.read_csv(pathWin + filename)
        symbol = filename.split(".")[0]
        if len(df.index) <= 1:
            # effectively empty file -- just report the symbol, as before
            print(symbol)
            continue

        # Drop sentinel rows and obviously bad prices.
        df = df[df['barTime'] != '99:99']
        df.rename(columns=column_names, inplace=True)
        df = df[df['close'] != 0]
        df = df[df['low'] > 5]  # NOTE(review): assumes prices <= 5 are bad ticks -- confirm
        df['dateTime'] = df['dataDate'] + ' ' + df['barTime'] + ":00"
        # Module-level `tz` is the same China zone the original re-created here.
        df['dateTime'] = df['dateTime'].apply(
            lambda x: tz.localize(datetime.strptime(x, '%Y-%m-%d %H:%M:%S')))
        _store(db.one_min_data, df, symbol, 'one min')

        # Plain-column copies for the session-aware (groupby) intervals; the
        # time-based resamples below need dateTime as the index instead.
        df_m30 = df.copy()
        df_m60 = df.copy()
        df_m120 = df.copy()
        df_day = df.copy()
        indexed = df.set_index('dateTime')

        # ---- 5 and 15 minute bars via time-based resampling ----
        df_m5 = _resample_bars(indexed, '5min', 'left', 'left')
        _store(db.min_5_data, df_m5, symbol, '5 min')
        df_m15 = _resample_bars(df_m5, '15min', 'right', 'left')
        _store(db.min_15_data, df_m15, symbol, '15 min')

        # ---- 30/60/120 minute and daily bars via trading-session bucketing ----
        df_m30['dateTime'] = timeUtils.Minute30New(df_m30['dateTime'], symbol)
        _store(db.min_30_data, df_m30.groupby('dateTime').agg(_BAR_AGG), symbol, '30 min')
        df_m60['dateTime'] = timeUtils.Minute60New(df_m60['dateTime'], symbol)
        _store(db.min_60_data, df_m60.groupby('dateTime').agg(_BAR_AGG), symbol, '60 min')
        df_m120['dateTime'] = timeUtils.Minute120New(df_m120['dateTime'], symbol)
        _store(db.min_120_data, df_m120.groupby('dateTime').agg(_BAR_AGG), symbol, '120 min')
        df_day['dateTime'] = timeUtils.Day(df_day['dateTime'])
        _store(db.day, df_day.groupby('dateTime').agg(_BAR_AGG), symbol, 'day', min_rows=1)

# gtm = df['open'].resample('dateTime', how='first')

# if the first row of pChange is NaN, then we ignore the first row
#     d = tz.localize(datetime(2015, 9, 30, 0, 0))
#     if math.isnan(df.pChange.iloc[0]) and df.dateTime.iloc[0] == d:
#         if df[1:].empty:
#             pass
#         else:
#             db.original_prices.insert_many(df[1:].to_dict(orient='records'))
#     else:
#         db.original_prices.insert_many(df.to_dict(orient='records'))
#     fi.write(filename + '\n')
#
# fi.close()
# print('finished in %.2fs' % (time() - t0))
def launch_multi_thread(filelist):
    """Split *filelist* into up to 30 roughly equal chunks and wrap each chunk
    in a multiprocessing.Process that runs resample_data on it.

    Returns the list of NOT-yet-started Process objects; the caller is
    responsible for start()/join().  (Despite the name, these are processes,
    not threads.)
    """
    workers = []
    size = 30
    # The original `len = len(filelist)` shadowed the builtin and raised
    # UnboundLocalError, and `filelist / size` was a TypeError; compute the
    # chunk length with integer division instead (// keeps it an int on
    # Python 3).
    total = len(filelist)
    gap = total // size + 1
    for i in range(size):
        chunk = filelist[gap * i:gap * (i + 1)]
        # Slicing past the end already clamps, so trailing over-long slices
        # are safe; skip empty tails rather than spawning idle processes.
        if not chunk:
            continue
        workers.append(Process(target=resample_data, args=(chunk,)))
    return workers


if __name__ == '__main__':
    # Logging is already configured at module import time.  The original
    # __main__ was pasted from a different script: it referenced undefined
    # names (platform, socket, ts, ISOTIMEFORMAT, get_all_stock_codes,
    # get_redis_count), passed four arguments to the one-argument
    # launch_multi_thread, and joined only the last worker.  Drive this
    # module's own import pipeline instead.
    logging.info("start importing %d files", len(filelist))
    workers = launch_multi_thread(filelist)
    for worker in workers:
        worker.start()
    # Join every worker, not just the last one, so the elapsed time below
    # covers the whole import.
    for worker in workers:
        worker.join()
    logging.info('finished in %.2fs' % (time() - t0))
