import queue
import time
import talib as ta
import pandas as pd
import backtrader as bt
from datetime import datetime
from collections import namedtuple
from multiprocessing import Process, Pipe, Queue
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
from pyLibs.Gadfly import StockStore, StockIndicator, BackTraderIndicator as bti


class StockCommissionScheme(bt.CommInfoBase):
    '''
    Chinese A-share commission scheme:
    1. Commission is a percentage of traded value.
    2. Every trade has a minimum commission (e.g. 5 CNY; some brokers waive it).
    3. Selling additionally incurs stamp duty.
    4. A transfer fee (platFee) is charged in proportion to traded value.
    '''
    params = (
        ('stampduty', 0.001),  # stamp duty rate (charged on sells only)
        ('commission', 0.00012),  # commission rate
        ('stocklike', True),  # stock-like asset, no margin involved
        ('commtype', bt.CommInfoBase.COMM_PERC),  # percentage-based commission
        ('minCommission', 5),  # minimum commission per trade
        ('platFee', 0.00002),  # transfer fee rate
    )

    def _getcommission(self, size, price, pseudoexec):
        """Return the total trading cost for a fill of `size` shares at `price`.

        Buys pay commission (floored at minCommission) plus transfer fee;
        sells additionally pay stamp duty. A zero size costs nothing.
        """
        turnover = abs(size) * price
        if size > 0:  # buy: no stamp duty, minimum commission applies
            # Fix: platFee is a rate (0.00002 of turnover), not a flat amount;
            # the original added it as a constant 0.00002 CNY per trade.
            return max(turnover * self.p.commission, self.p.minCommission) + turnover * self.p.platFee
        elif size < 0:  # sell: stamp duty applies on top of commission + transfer fee
            return (max(turnover * self.p.commission, self.p.minCommission)
                    + turnover * self.p.stampduty + turnover * self.p.platFee)
        else:
            return 0  # guard against the corner case of size == 0

def _cerebroProcess(cerebro):
    ''' conf = conf.Scheduler.store
    for key, d in history.items():
        Stock = namedtuple('Stock', ['stock_code', 'cycle', 'info'])
        stock = Stock(d.info.stock_code, d.cycle, d.info)
        cerebro.adddata(StockLiveData(stock=stock, pipe=pipe, dataname=d.data, name=d.info.tdx_code,
                                      timeframe=d.cycle.timeframe, compression=d.cycle.compression,
                                      fromdate=conf.fromdate, todate=conf.todate))'''
    print('sdgvvsdgsdg')
    cerebro.run()


class Scheduler:
    """Drives live data collection.

    Builds live data feeds from TDX history, hands them to a backtrader
    Cerebro running in a child process, and periodically fetches fresh bars
    via an APScheduler cron job.

    Stock shapes accepted by the store layer:
        dict: {'stock_code': ..., 'cycle': ..., 'bars': ..., 'fromdate': ..., 'todate': ...}
        tuple: ('stock_code', 'cycle', 'bars', 'fromdate', 'todate')
    """

    def __init__(self, config):
        self._state = None
        self.stocks = list()
        # Fix: the original code referenced an undefined global `data_queue`
        # (NameError); the per-feed queues are now held on the instance.
        self.data_queue = dict()
        self.conf = config
        self.tdxStore = StockStore.TdxStore(config)
        job = {
            'default': {'coalesce': False, 'max_instances': 3},
            'store': {'default': MemoryJobStore()},
            # Fallback executor so an unrecognized pool type cannot raise
            # KeyError when the scheduler is built below.
            'executor': {'default': ThreadPoolExecutor()},
        }
        pool = config.Scheduler.pool.capitalize()
        if pool == 'Thread':  # thread-pool executor
            job['executor'] = {'default': ThreadPoolExecutor(config.Scheduler.pool_num)}
        elif pool == 'Process':  # process-pool executor
            job['executor'] = {'default': ProcessPoolExecutor(config.Scheduler.pool_num)}
        # Removed: the original also assigned a bare executor to self.scheduler
        # here, which was unconditionally overwritten just below.
        if config.Scheduler.executor_type.capitalize() == 'Background':
            self.scheduler = BackgroundScheduler(executors=job['executor'],
                                                 job_defaults=job['default'],
                                                 jobstores=job['store'])
        else:
            self.scheduler = BlockingScheduler(executors=job['executor'],
                                               job_defaults=job['default'],
                                               jobstores=job['store'])

    def start(self, cerebro, codes):
        """Create one live feed per stock, attach them to `cerebro`, and
        launch the Cerebro engine in a child process.

        @param cerebro: backtrader Cerebro instance (strategies already added)
        @param codes: iterable of stock codes to subscribe to
        """
        (pipe_c, pipe_s) = Pipe()
        # Fix: _schedulerProcess reads self.pipe but the original never set it
        # (pipe_s was created and dropped). The scheduler keeps its end here.
        self.pipe = pipe_s
        self.codes = codes
        conf = self.conf.Scheduler.store
        history = self.tdxStore.liveResampleDatas(codes, conf.cycle, conf.bars)
        Stock = namedtuple('Stock', ['stock_code', 'cycle', 'info'])
        for key, d in history.items():
            self.data_queue[key] = queue.Queue()
            stock = Stock(d.info.stock_code, d.cycle, d.info)
            self.stocks.append(stock)
            # Fix: this call was dedented out of the loop in the original, so
            # only the last stock was ever added to cerebro (the commented-out
            # reference implementation had it inside the loop).
            # TODO(review): StockLiveData.__init__ requires a `queue` argument
            # that the original never passed; confirm the construction contract
            # and that `key` matches the tdx_code used in job_executes().
            cerebro.adddata(StockLiveData(queue=self.data_queue[key], stock=stock, pipe=pipe_c,
                                          dataname=d.data, name=d.info.tdx_code,
                                          timeframe=d.cycle.timeframe, compression=d.cycle.compression,
                                          fromdate=conf.fromdate, todate=conf.todate))
        cerebro_process = Process(target=_cerebroProcess, args=(cerebro,))  # Cerebro child process
        cerebro_process.start()

    def _schedulerProcess(self):
        """Register the intraday cron jobs, then start the scheduler once the
        Cerebro side reports its historical data has been loaded.
        """
        self.scheduler.add_listener(self.listener_execute, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
        # A-share trading hours: fire at 09:45, then 10/13/14 at :00/:30/:45,
        # and 11 at :00/:15/:30, Monday through Friday.
        self.scheduler.add_job(self.job_executes, 'cron', day_of_week='mon-fri', hour='9',
                               minute='45', second='3')
        self.scheduler.add_job(self.job_executes, 'cron', day_of_week='mon-fri', hour='10, 13, 14',
                               minute='00, 30, 45', second='3')
        self.scheduler.add_job(self.job_executes, 'cron', day_of_week='mon-fri', hour='11',
                               minute='00, 15, 30', second='3')
        while True:
            # Wait for the feed side to signal that history replay finished.
            if self.pipe.recv() == 'historicalDataLoadSuccess':
                self.scheduler.start()
                return False
            time.sleep(1)

    def job_executes(self):
        """Cron job body: pull the latest bars for every tracked stock and
        push them onto the matching feed queue.

        NOTE(review): queues are consumed by feeds living in the Cerebro child
        process; queue.Queue does not cross process boundaries — confirm the
        intended IPC mechanism.
        """
        for s in self.stocks:
            live = self.tdxStore.getStockBarsFromApi(s.cycle.category, s.stock_code, 0, 2, True)
            # TODO(review): confirm this key (tdx_code) matches the key used
            # when the queues were created in start().
            self.data_queue[s.info.tdx_code].put(live)

    def listener_execute(self, event):
        """APScheduler listener: report whether a job executed or errored."""
        if event.exception:
            print('The job did not run')
        else:
            print('The job is run！')

class StockPandasData(bt.feeds.PandasData):
    """Standard PandasData feed extended with a turnover-ratio line."""
    lines = ('ratio',)  # extra line to expose on the feed
    params = (
        ('nullvalue', 0.0),
        ('ratio', -1),  # -1: auto-locate the 'ratio' column by name in the dataframe
    )


class SinglePandasData(bt.feeds.PandasData):
    """PandasData feed exposing pre-computed indicator columns as extra lines.

    NOTE(review): 'open'/'high'/'low'/'close'/'volume' are redeclared here even
    though the base feed already defines those lines — confirm intentional.
    """
    # Extra lines to expose. Fix: 'ratio' was listed twice in the original
    # lines tuple and params; the duplicates have been removed.
    lines = ('ratio', 'open', 'high', 'low', 'close', 'volume', 'volume_ma', 'ema_s', 'ema_m', 'ema_l',
             'upper', 'middle', 'lower', 'bb', 'width', 'dif', 'dea', 'macd', 'atr_loss', 'platform_upper',
             'platform_lower', 'stop_surplus', )
    params = (
        ('nullvalue', 0.0),
        ('ratio', -1),  # -1: auto-locate each column by name in the dataframe
        ('open', -1),
        ('high', -1),
        ('low', -1),
        ('close', -1),
        ('volume', -1),
        ('volume_ma', -1),
        ('ema_s', -1),
        ('ema_m', -1),
        ('ema_l', -1),
        ('upper', -1),
        ('middle', -1),
        ('lower', -1),
        ('bb', -1),
        ('width', -1),
        ('dif', -1),
        ('dea', -1),
        ('macd', -1),
        ('atr_loss', -1),
        ('platform_upper', -1),
        ('platform_lower', -1),
        ('stop_surplus', -1),
    )


class DoublePandasData(bt.feeds.PandasData):
    """PandasData feed carrying trend-cycle indicator columns plus TA-Lib
    candlestick-pattern columns as extra lines.
    """
    # Extra lines to expose. Fix: 'CDLINVERTEDHAMMER' appeared twice in the
    # original lines tuple and params; the duplicates have been removed.
    lines = ('ratio', 'open_trend', 'high_trend', 'low_trend', 'close_trend', 'volume_trend', 'ratio_trend',
             'volume_ma_trend', 'ema_s_trend', 'ema_m_trend', 'ema_l_trend', 'upper_trend', 'middle_trend',
             'lower_trend', 'bb_trend', 'width_trend', 'dif_trend', 'dea_trend', 'macd_trend', 'atr_loss_trend',
             'platform_upper_trend', 'platform_lower_trend', 'stop_surplus_trend', 'CDLINVERTEDHAMMER',
             'CDLSHOOTINGSTAR', 'CDLDARKCLOUDCOVER', 'CDLEVENINGSTAR', 'CDLHANGINGMAN', 'CDLHAMMER',
             'CDLMORNINGDOJISTAR', 'CDLUNIQUE3RIVER', 'CDL3BLACKCROWS')
    params = (
        ('nullvalue', 0.0),
        ('ratio', -1),  # -1: auto-locate each column by name in the dataframe
        ('open_trend', -1),
        ('high_trend', -1),
        ('low_trend', -1),
        ('close_trend', -1),
        ('volume_trend', -1),
        ('ratio_trend', -1),
        ('volume_ma_trend', -1),
        ('ema_s_trend', -1),
        ('ema_m_trend', -1),
        ('ema_l_trend', -1),
        ('upper_trend', -1),
        ('middle_trend', -1),
        ('lower_trend', -1),
        ('bb_trend', -1),
        ('width_trend', -1),
        ('dif_trend', -1),
        ('dea_trend', -1),
        ('macd_trend', -1),
        ('atr_loss_trend', -1),
        ('platform_upper_trend', -1),
        ('platform_lower_trend', -1),
        ('stop_surplus_trend', -1),
        ('CDLINVERTEDHAMMER', -1),
        ('CDLSHOOTINGSTAR', -1),
        ('CDLDARKCLOUDCOVER', -1),
        ('CDLEVENINGSTAR', -1),
        ('CDLHANGINGMAN', -1),
        ('CDLHAMMER', -1),
        ('CDLMORNINGDOJISTAR', -1),
        ('CDLUNIQUE3RIVER', -1),
        ('CDL3BLACKCROWS', -1),
    )


class StockLiveData(StockPandasData):
    """Live data feed: replays historical bars (pushed onto a queue by
    start()) and then keeps consuming fresh bars from the same queue.

    NOTE(review): Scheduler.start() constructs this class with keyword params
    only (stock=, dataname=, ...) and never supplies the positional ``queue``
    argument required by __init__ — confirm the intended construction path.
    """
    params = (
        ('historical', False),  # only historical download
        ('backfill_start', False),  # do backfilling at the start
        ('debug', False)
    )

    # Feed lifecycle states; __init__ starts directly in _ST_LIVE.
    _ST_START, _ST_LIVE, _ST_HISTORE, _ST_OVER = range(4)

    def __init__(self, queue, pipe):
        super(StockLiveData, self).__init__()
        self._state = self._ST_LIVE  # begin in live mode immediately
        self.histore_back_state = False
        self.pipe = pipe  # connection back to the scheduler process
        self.queue = queue  # bar queue: one single-row DataFrame per bar

    def start(self):
        """Replay params.dataname row by row onto the queue as single-row
        DataFrames so _load() feeds them to the lines before live data.
        """
        super(StockLiveData, self).start()
        try:
            print('%s：开始构造【%s】历史数据' %
                  (datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3], self.params.name))
            histore_dp = self.params.dataname
            # 'serial' numbers the rows so _load() can tell when history ends.
            histore_dp = histore_dp.assign(serial=range(1, len(histore_dp) + 1))
            # NOTE(review): dropna() is not in-place and its result is
            # discarded here — confirm whether reassignment was intended.
            histore_dp.dropna()
            for row in histore_dp.itertuples(index=True, name='Stock'):
                rows = row._asdict()
                rows.pop('Index')
                # Re-wrap the row as a one-row DataFrame indexed by its datetime.
                histore = pd.DataFrame(rows, index=[0])
                histore.index = pd.to_datetime(histore.datetime)
                self.queue.put(histore)
        except Exception as e:
            print(e)
        finally:
            # NOTE(review): returning from 'finally' swallows any exception
            # raised above and forces the return value to True — confirm intent.
            return True

    def _histore_back(self):
        """Copy one pending historical bar into the data lines.

        NOTE(review): ``self.histore`` is never assigned in this class (only
        ``self.queue`` is) — reaching this would raise AttributeError.
        NOTE(review): the 'finally: return True' overrides both the
        'return False' (empty frame) and 'return None' (queue.Empty) paths,
        so callers always see True — confirm intent.
        """
        try:
            histore = self.histore.get()
            if histore.empty:
                return False
            else:
                # Bar timestamp: parse the 'datetime' column when present,
                # otherwise stamp with the current wall-clock time.
                if 'datetime' in histore.keys():
                    dtime = datetime.strptime(histore['datetime'].values[0], '%Y-%m-%d %H:%M')
                else:
                    dtime = datetime.now()
                self.lines.datetime[0] = bt.date2num(dtime)
                # Copy whichever OHLCV/ratio columns the frame provides.
                if 'open' in histore.keys():
                    self.lines.open[0] = histore['open'].values[0]
                if 'high' in histore.keys():
                    self.lines.high[0] = histore['high'].values[0]
                if 'low' in histore.keys():
                    self.lines.low[0] = histore['low'].values[0]
                if 'close' in histore.keys():
                    self.lines.close[0] = histore['close'].values[0]
                if 'ratio' in histore.keys():
                    self.lines.ratio[0] = histore['ratio'].values[0]
                if 'volume' in histore.keys():
                    self.lines.volume[0] = histore['volume'].values[0]
                if 'openinterest' in histore.keys():
                    self.lines.openinterest[0] = histore['openinterest'].values[0]
                else:
                    self.lines.openinterest[0] = 0
                self.histore_back_state = False
        except queue.Empty:
            return None  # no data in the queue
        except Exception as e:
            print(e)
        finally:
            return True

    def _load(self):
        """backtrader hook: populate the lines with the next bar.

        Returns True when a bar was loaded, False when the feed is over,
        None when no data is currently available.
        """
        if self._state == self._ST_OVER:
            return False
        while True:
            if self._state == self._ST_LIVE:
                try:
                    # NOTE(review): a queue.Queue instance is always truthy, so
                    # this only guards against self.queue being None; .get()
                    # below blocks until a bar arrives — confirm intent.
                    if not self.queue:
                        return None
                    data = self.queue.get()
                    if 'datetime' in data.keys():
                        dtime = datetime.strptime(data['datetime'].values[0], '%Y-%m-%d %H:%M')
                    else:
                        dtime = datetime.now()
                    self.lines.datetime[0] = bt.date2num(dtime)
                    # Copy whichever OHLCV/ratio columns the frame provides.
                    if 'open' in data.keys():
                        self.lines.open[0] = data['open'].values[0]
                    if 'high' in data.keys():
                        self.lines.high[0] = data['high'].values[0]
                    if 'low' in data.keys():
                        self.lines.low[0] = data['low'].values[0]
                    if 'close' in data.keys():
                        self.lines.close[0] = data['close'].values[0]
                    if 'ratio' in data.keys():
                        self.lines.ratio[0] = data['ratio'].values[0]
                    if 'volume' in data.keys():
                        self.lines.volume[0] = data['volume'].values[0]
                    if 'openinterest' in data.keys():
                        self.lines.openinterest[0] = data['openinterest'].values[0]
                    if 'serial' in data.keys():
                        # Last replayed history row reached: notify the
                        # scheduler over the pipe.
                        if data['serial'].values[0] == len(self.params.dataname):
                            self.pipe.send('【' + self.params.name + '】的历史数据已经加载成功！')
                finally:
                    # NOTE(review): returning from 'finally' swallows any
                    # exception raised in the try body — confirm intent.
                    return True
            elif self._state == self._ST_HISTORE:
                if not self._histore_back():
                    return True
                else:
                    self.pipe.send('【' + self.params.name + '】的历史数据已经加载成功！')
                    # Wait for the scheduler to acknowledge before going live.
                    if self.pipe.recv() == 'Histore Load Over':
                        self._state = self._ST_LIVE
                        self.put_notification(self.LIVE)
                    return True
            else:
                return True

    def haslivedata(self):
        # Live data is available whenever the feed is in the live state.
        return self._state == self._ST_LIVE

    def islive(self):
        # Tell backtrader this is a live feed (disables preloading/runonce).
        return True


class TradeListAnalyzer(bt.Analyzer):
    """
    Trade-list analyzer (one record per closed trade, including MFE/MAE).
    Adapted from:
    https://community.backtrader.com/topic/1274/closed-trade-list-including-mfe-mae-analyzer/2

    NOTE(review): relies on `trade.history`, which is only populated when the
    strategy is run with `tradehistory=True` — confirm the runner sets it.
    """

    def __init__(self):
        self.trades = []  # accumulated per-trade dict records
        self.cum_profit = 0.0  # running sum of realized pnl (incl. commission)

    def get_analysis(self) -> tuple:
        """Return the analysis data.

        @return: (trade-list DataFrame, per-stock trade-date dict)
        """
        trade_list_df = pd.DataFrame(self.trades)
        return trade_list_df, self._get_trade_date(trade_list_df)

    def _get_trade_date(self, trade_list_df):
        """Map each stock to its buy/sell dates.

        @param trade_list_df: DataFrame built from self.trades
        @return: dict keyed by stock name, value ([buy dates], [sell dates])
        """
        trade_dict = dict()
        if not trade_list_df.empty:
            # Group by stock to collect its buy and sell dates.
            grouped = trade_list_df.groupby('股票')
            for name, group in grouped:
                buy_date_list = list(group['买入日期'])
                sell_date_list = list(group['卖出日期'])
                if trade_dict.get(name) is None:
                    trade_dict[name] = (buy_date_list, sell_date_list)
                else:
                    # Defensive: extend if a stock somehow appears twice.
                    trade_dict[name][0].extend(buy_date_list)
                    trade_dict[name][1].extend(sell_date_list)
        return trade_dict

    def notify_trade(self, trade):
        """Record a closed trade as a flat dict of Chinese-labelled metrics."""
        if not trade.isclosed:
            return

        total_value = self.strategy.broker.getvalue()
        history = trade.history
        last = history[-1]

        # Fix: renamed `dir` (shadowed the builtin) to `direction`.
        direction = 'long' if history[0].event.size > 0 else 'short'

        pricein = last.status.price
        priceout = last.event.price
        datein = bt.num2date(history[0].status.dt)
        dateout = bt.num2date(last.status.dt)
        if trade.data._timeframe >= bt.TimeFrame.Days:
            datein = datein.date()
            dateout = dateout.date()

        pcntchange = 100 * priceout / pricein - 100
        pnl = last.status.pnlcomm
        pnlpcnt = 100 * pnl / total_value
        barlen = last.status.barlen
        # Fix: removed the unused `pbar = pnl / barlen`, which raised
        # ZeroDivisionError for same-bar trades (barlen == 0) and fed only a
        # commented-out output field.
        self.cum_profit += pnl

        # Largest position size/value reached during the trade.
        size = value = 0.0
        for record in history:
            if abs(size) < abs(record.status.size):
                size = record.status.size
                value = record.status.value

        # MFE/MAE: best/worst excursion relative to the entry price over the
        # bars the trade was open.
        highest_in_trade = max(trade.data.high.get(ago=0, size=barlen + 1))
        lowest_in_trade = min(trade.data.low.get(ago=0, size=barlen + 1))
        hp = 100 * (highest_in_trade - pricein) / pricein
        lp = 100 * (lowest_in_trade - pricein) / pricein
        if direction == 'long':
            mfe, mae = hp, lp
        else:  # short: excursions are mirrored
            mfe, mae = -lp, -hp

        self.trades.append(
            {'订单': trade.ref,
             '股票': trade.data._name,
             '买入日期': datein,
             '买价': round(pricein, 2),
             '卖出日期': dateout,
             '卖价': round(priceout, 2),
             '收益率%': round(pcntchange, 2),
             '利润': round(pnl, 2),
             '利润总资产比%': round(pnlpcnt, 2),
             '股数': size,
             '股本': round(value, 2),
             '仓位比%': round(value / total_value * 100, 2),
             '累计收益': round(self.cum_profit, 2),
             '持股天数': barlen,  # measured in bars (days on daily data)
             '最大利润%': round(mfe, 2),
             '最大亏损%': round(mae, 2)})


class WatchListAnalyzer(bt.Analyzer):
    """Analyzer that simply exposes the owning strategy's watch list."""

    def get_analysis(self):
        # Requires the strategy to maintain a `watchList` attribute.
        return self.strategy.watchList
