# coding: utf-8
import pandas as pd
import pytz
from datetime import datetime
import pymongo
from time import time
import os
import logging
import timeUtils_4from_mongo as timeUtils

"""
把历史行情数据导入到mongodb
"""
logger = logging.basicConfig(level=logging.INFO)
t0 = time()
tz = pytz.timezone(pytz.country_timezones('cn')[0])
path = "/home/hanyu/tonglian/oneMinData/futures/allfutures_2007-01-01_to_2015-11-17/"
path2 = "/home/hanyu/tonglian/oneMinData/futures/allfutures_2007-01-01_to_2015-11-17/"
pathWin = "D:\\wks_python\\allfutures_data\\OMD_2017-06-29_to_2017-06-30\\"  # allfutures_2015-11-17_to_2015-12-2
pwd = os.getcwd()
filelist = os.listdir(pathWin)
column_names = {'ticker': 'symbol', 'closePrice': 'close', 'openPrice': 'open', 'highPrice': 'high', 'lowPrice': 'low',
                'totalVolume': 'volume'}

client = pymongo.MongoClient('121.40.212.219', 27017)  # '121.40.212.219', 27017
db = client.future_data


# fi = open(pwd + '/log_min_data.txt', 'a')
def handle29_00():
    # 将历史数据文件的11.19与11.30合并为11.29、14.59与15.00合并为14.59
    df29 = pd.DataFrame([])
    dftmp = df.between_time('11:29', '11:30')
    df29['open'] = dftmp['open'].resample('2T', how='first', closed='right', label='left', loffset='1T').dropna()
    df29['high'] = dftmp['high'].resample('2T', how='max', closed='right', label='left', loffset='1T').dropna()
    df29['low'] = dftmp['low'].resample('2T', how='min', closed='right', label='left', loffset='1T').dropna()
    df29['close'] = dftmp['close'].resample('2T', how='last', closed='right', label='left', loffset='1T').dropna()
    df29['totalValue'] = dftmp['totalValue'].resample('2T', how='sum', closed='right', label='left',
                                                      loffset='1T').dropna()
    df29['volume'] = dftmp['volume'].resample('2T', how='sum', closed='right', label='left', loffset='1T').dropna()
    df29['openInterest'] = dftmp['openInterest'].resample('2T', how='last', closed='right', label='left',
                                                          loffset='1T').dropna()
    df29['symbol'] = symbol

    idx = df.between_time('11:30', '11:30').index
    df.drop(idx, inplace='True')

    idx = df.between_time('11:29', '11:29').index
    df.loc[idx] = df29
    # ------------

    df15 = pd.DataFrame([])
    dftmp = df.between_time('14:59', '15:00')
    df15['open'] = dftmp['open'].resample('2T', how='first', closed='right', label='left', loffset='1T').dropna()
    df15['high'] = dftmp['high'].resample('2T', how='max', closed='right', label='left', loffset='1T').dropna()
    df15['low'] = dftmp['low'].resample('2T', how='min', closed='right', label='left', loffset='1T').dropna()
    df15['close'] = dftmp['close'].resample('2T', how='last', closed='right', label='left', loffset='1T').dropna()
    df15['totalValue'] = dftmp['totalValue'].resample('2T', how='sum', closed='right', label='left',
                                                      loffset='1T').dropna()
    df15['volume'] = dftmp['volume'].resample('2T', how='sum', closed='right', label='left', loffset='1T').dropna()
    df15['openInterest'] = dftmp['openInterest'].resample('2T', how='last', closed='right', label='left',
                                                          loffset='1T').dropna()
    df15['symbol'] = symbol

    idx = df.between_time('15:00', '15:00').index
    df.drop(idx, inplace='True')

    idx = df.between_time('14:59', '14:59').index
    df.loc[idx] = df15

    return df


# Main loop: one CSV per futures contract.  The raw one-minute bars are
# cleaned and inserted into MongoDB, then aggregated into 5/15/30/60/120
# minute (and daily) bars, each written to its own collection.
for filename in filelist:
    # if filename != 'al1610.csv':
    #     continue
    logging.info(filename)
    # fi.write(filename + '\n')
    df = pd.read_csv(pathWin + filename)
    # Contract symbol is the file name without extension, e.g. 'al1610.csv' -> 'al1610'.
    symbol = filename.split(".")[0]
    # alpha = filter(str.isalpha, symbol)
    # num = filter(str.isdigit, symbol)
    # if len(num) < 4:
    #     if int(num[0]) > 7:
    #         num = '0' + num
    #     elif int(num[0]) < 7:
    #         num = '1' + num
    # symbol = alpha + num

    # NOTE(review): files with 0 or 1 rows are skipped entirely (only the
    # symbol is printed at the bottom) -- confirm single-row files are junk.
    if len(df.index) > 1:
        # '99:99' appears to be a sentinel for an invalid bar time -- TODO confirm.
        df = df[df['barTime'] != '99:99']

        df.rename(columns=column_names, inplace=True)
        # Drop bars with a zero close; 'low > 5' presumably filters bad
        # near-zero ticks -- verify the threshold suits every contract.
        df = df[df['close'] != 0]
        df = df[df['low'] > 5]
        # Build a timezone-aware (China) timestamp from date + bar time.
        df['dateTime'] = df['dataDate'] + ' ' + df['barTime'] + ":00"
        tz = pytz.timezone(pytz.country_timezones('cn')[0])
        df['dateTime'] = df['dateTime'].apply(lambda x: tz.localize(datetime.strptime(x, '%Y-%m-%d %H:%M:%S')))
        df['symbol'] = symbol
        if len(df.index) > 1:
            db.one_min_data.insert_many(df.reset_index().to_dict(orient='records'))
        else:
            logging.info("-------one min done-------  " + symbol + "--- is null ---")
        logging.info("-------one min done-------")
        # Copies taken BEFORE 'dateTime' becomes the index: the 30/60/120
        # minute and daily paths below group on the 'dateTime' *column*
        # (via timeUtils bucket labels) rather than resampling on the index.
        df_m30 = df.copy()
        df_m60 = df.copy()
        df_m120 = df.copy()
        df_day = df.copy()
        df = df.set_index('dateTime')

        if df.empty:
            continue
        # Merge the 11:30 bar into 11:29 and the 15:00 bar into 14:59
        # (handle29_00 reads the module-level df/symbol).
        df = handle29_00()
        # ---------------write in 5 minute data----------------
        # earlier variant: resample('5min', how='last', closed='right', label='left').dropna()
        # NOTE(review): resample(how=...) is the legacy pandas API, removed
        # in modern pandas -- this script targets old pandas.
        df_m5 = pd.DataFrame([])
        df_m5['open'] = df['open'].resample('5T', how='first').dropna()
        df_m5['close'] = df['close'].resample('5T', how='last').dropna()
        df_m5['high'] = df['high'].resample('5T', how='max').dropna()
        df_m5['low'] = df['low'].resample('5T', how='min').dropna()
        # df_m5['dateTime'] = df['dateTime'].resample('5min', how='last', closed='right', label='right').dropna()
        df_m5['totalValue'] = df['totalValue'].resample('5T', how='sum').dropna()
        df_m5['volume'] = df['volume'].resample('5T', how='sum').dropna()
        df_m5['openInterest'] = df['openInterest'].resample('5T', how='last').dropna()
        df_m5['symbol'] = symbol
        if len(df_m5.index) > 1:
            db.min_5_data.insert_many(df_m5.reset_index().to_dict(orient='records'))
        else:
            logging.info("-------5 min done-------  " + symbol + "--- is null ---")
        logging.info("-------5 min done-------")
        # ---------------write in 15 minute data----------------
        # 15-minute bars are derived from the already-aggregated 5-minute
        # frame, not from the raw minute data.
        df_m15 = pd.DataFrame([])
        df_m15['open'] = df_m5['open'].resample('15T', how='first', label='left').dropna()
        df_m15['close'] = df_m5['close'].resample('15T', how='last', label='left').dropna()
        df_m15['high'] = df_m5['high'].resample('15T', how='max', label='left').dropna()
        df_m15['low'] = df_m5['low'].resample('15T', how='min', label='left').dropna()
        df_m15['totalValue'] = df_m5['totalValue'].resample('15T', how='sum', label='left').dropna()
        df_m15['volume'] = df_m5['volume'].resample('15T', how='sum', label='left').dropna()
        df_m15['openInterest'] = df_m5['openInterest'].resample('15T', how='last',
                                                                label='left').dropna()
        df_m15['symbol'] = symbol
        if len(df_m15.index) > 1:
            db.min_15_data.insert_many(df_m15.reset_index().to_dict(orient='records'))
        else:
            logging.info("-------15 min done-------  " + symbol + "--- is null ---")
        logging.info("-------15 min done-------")
        # continue
        # ---------------write in 30 minute data----------------
        # timeUtils.Minute30New presumably maps each timestamp to its
        # 30-minute bucket label (semantics live in timeUtils_4from_mongo --
        # not visible here); aggregation is then a plain groupby.
        df_m30['dateTime'] = timeUtils.Minute30New(df_m30['dateTime'], symbol)
        df_m30['symbol'] = symbol
        grouped30 = df_m30.groupby('dateTime')
        functions = {'open': 'first', 'close': 'last', 'high': 'max', 'low': 'min', 'volume': 'sum',
                     'totalValue': 'sum',
                     'openInterest': 'last'}
        rsDF30 = grouped30.agg(functions)
        rsDF30['symbol'] = symbol
        if len(rsDF30.index) > 1:
            db.min_30_data.insert_many(rsDF30.reset_index().to_dict(orient='records'))
        else:
            logging.info("-------30 min done-------  " + symbol + "--- is null ---")
        logging.info("-------30 min done-------")
        # ---------------write in 60 minute data----------------
        df_m60['dateTime'] = timeUtils.Minute60New(df_m60['dateTime'], symbol)
        df_m60['symbol'] = symbol
        grouped = df_m60.groupby('dateTime')
        functions = {'open': 'first', 'close': 'last', 'high': 'max', 'low': 'min', 'volume': 'sum',
                     'totalValue': 'sum',
                     'openInterest': 'last'}
        rsDF60 = grouped.agg(functions)
        rsDF60['symbol'] = symbol
        if len(rsDF60.index) > 1:
            db.min_60_data.insert_many(rsDF60.reset_index().to_dict(orient='records'))
        else:
            logging.info("-------60 min done-------  " + symbol + "--- is null ---")
        logging.info("-------60 min done-------")
        # ---------------write in 120 minute data----------------
        df_m120['dateTime'] = timeUtils.Minute120New(df_m120['dateTime'], symbol)
        df_m120['symbol'] = symbol
        grouped120 = df_m120.groupby('dateTime')
        functions = {'open': 'first', 'close': 'last', 'high': 'max', 'low': 'min', 'volume': 'sum',
                     'totalValue': 'sum',
                     'openInterest': 'last'}
        rsDF120 = grouped120.agg(functions)
        rsDF120['symbol'] = symbol
        if len(rsDF120.index) > 1:
            db.min_120_data.insert_many(rsDF120.reset_index().to_dict(orient='records'))
        else:
            logging.info("-------120 min done-------  " + symbol + "--- is null ---")
        logging.info("-------120 min done-------")
        # ---------------write in day data----------------
        # Daily bars are computed but the insert is commented out, so they
        # are never written -- only the "day done" message is logged.
        df_day['dateTime'] = timeUtils.Day(df_day['dateTime'])
        df_day['symbol'] = symbol
        grouped_day = df_day.groupby('dateTime')
        functions = {'open': 'first', 'close': 'last', 'high': 'max', 'low': 'min', 'volume': 'sum',
                     'totalValue': 'sum',
                     'openInterest': 'last'}
        rsDF_day = grouped_day.agg(functions)
        rsDF_day['symbol'] = symbol
        # if len(rsDF_day.index) >= 1:
        #     db.day.insert_many(rsDF_day.reset_index().to_dict(orient='records'))
        # else:
        #     logging.info("-------day done-------  " + symbol + "--- is null ---")
        logging.info("-------day done-------")
    else:
        # File had at most one row -- skipped; record which contract it was.
        print(symbol)


# gtm = df['open'].resample('dateTime', how='first')

# if the first row of pChange is NaN, then we ignore the first row
#     d = tz.localize(datetime(2015, 9, 30, 0, 0))
#     if math.isnan(df.pChange.iloc[0]) and df.dateTime.iloc[0] == d:
#         if df[1:].empty:
#             pass
#         else:
#             db.original_prices.insert_many(df[1:].to_dict(orient='records'))
#     else:
#         db.original_prices.insert_many(df.to_dict(orient='records'))
#     fi.write(filename + '\n')
#
# fi.close()
# print('finished in %.2fs' % (time() - t0))
