# -*- coding: utf-8 -*-
'''
@datetime : 2022/6/9 18:35
@author   : zhangxp
@file     : tasks.py
'''
from .celery import app

import sys
import os
import re
import six
import time
import socket
from celery import group
from rqalpha import run_file, run_code

import talib
from dateutil.parser import parse

from copy import deepcopy, copy
from pytz import timezone
from sqlalchemy import DATETIME, DATE, VARCHAR, create_engine
import numpy as np
import pandas as pd

from concurrent.futures import ThreadPoolExecutor,wait,ALL_COMPLETED

# MySQL host used for result persistence; overridable via the environment.
NEXTT_MYSQL_DB_HOST = os.getenv('NEXTT_MYSQL_DB_HOST', "192.168.12.110")

class DB_CONF:
    """MySQL connection settings for backtest-result persistence.

    Every value can be overridden through a ``NEXTT_MYSQL_DB_*``
    environment variable, falling back to the previous hard-coded
    defaults (so existing deployments are unaffected).

    SECURITY NOTE: the default credentials are checked into source
    control — supply real credentials via the environment in production.
    """
    host = NEXTT_MYSQL_DB_HOST
    port = os.getenv('NEXTT_MYSQL_DB_PORT', '3309')
    user = os.getenv('NEXTT_MYSQL_DB_USER', "admin")
    password = os.getenv('NEXTT_MYSQL_DB_PASSWORD', "admin")
    database = os.getenv('NEXTT_MYSQL_DB_DATABASE', "nextt")

##########################################通用任务##################################################
def method_caller(mod, method_name, *args):
    """
    Resolve ``method_name`` from either an importable module path or a
    string of Python source code, then call it with ``args``.

    Heuristic: a short string (< 64 chars), or a dotted string up to 256
    chars, is treated as a module path; anything else is treated as
    source code that defines the callable.

    :param mod: dotted module path, or Python source code as a string
    :param method_name: name of the callable to look up
    :param args: positional arguments forwarded to the callable
    :return: whatever the resolved callable returns
    :raises NameError: if the callable cannot be found
    """
    if len(mod) < 64 or (len(mod) <= 256 and len(mod.split('.')) > 1):
        # Module-path branch: import and fetch the attribute.
        obj = __import__(mod, fromlist=True)
        if not hasattr(obj, method_name):
            # Fix: the original passed two separate args to NameError,
            # which rendered the message as a tuple.
            raise NameError("There is no attr '%s' in %s.py" % (method_name, mod))
        return getattr(obj, method_name)(*args)
    # Source-code branch: compile and execute in a copy of our globals.
    # SECURITY: this executes caller-supplied code — only safe for
    # trusted task submitters.
    exec_code = compile(mod, 'method_caller.py', 'exec')
    exec_scope = dict(method_caller.__globals__)
    exec(exec_code, exec_scope)  # plain exec; six.exec_ only matters on Py2
    if method_name not in exec_scope:
        raise NameError("There is no attr '%s' in %s.py" % (method_name, mod))
    return exec_scope[method_name](*args)


@app.task
def task_cpu(mod, method_name, method_param=None):
    """
    Generic CPU-bound Celery task: resolve and run a callable through
    ``method_caller``.

    :param mod: dotted module path or source-code string (see method_caller)
    :param method_name: name of the callable to run
    :param method_param: iterable of positional args; defaults to no args
    :return: the callable's result; DataFrames are converted to a
        column->list dict so the result is Celery-serializable
    """
    # Fix: the original unpacked ``*method_param`` directly, which raised
    # TypeError when the default ``None`` was used.
    result = method_caller(mod, method_name, *(method_param or ()))
    if isinstance(result, pd.DataFrame):
        return result.to_dict('list')
    return result

###########################################回测专用#################################################
@app.task(bind=True)
def run_file_task(self, strat_file, cfg):
    """
    File-based backtest task, executed on a distributed worker node.
    Normally not invoked directly by users.

    :param strat_file: strategy file path (relative, so worker nodes can
        locate the file)
    :param cfg: base backtest configuration, already merged with any
        custom extra parameters
    :return: the 'sys_analyser' summary dict, or None if the run produced
        no result
    """
    print('Executing task id {0.id}, args: {0.args!r} kwargs: {0.kwargs!r}'.format(
            self.request))
    result = run_file(strat_file, cfg)
    if result is None:
        return result
    # Persist the full result set before returning just the summary.
    dump_to_sql(self.request, result)
    return result['sys_analyser']['summary']


@app.task(bind=True)
def run_code_task(self, strat_code, cfg):
    """
    Source-code backtest task, executed on a distributed worker node.
    Normally not invoked directly by users.

    :param strat_code: strategy source code as a string — sidesteps the
        problem of worker nodes needing access to a strategy file
    :param cfg: base backtest configuration, already merged with any
        custom extra parameters
    :return: the 'sys_analyser' summary dict, or None if the run produced
        no result
    """
    print('Executing task id {0.id}, args: {0.args!r} kwargs: {0.kwargs!r}'.format(
            self.request))
    result = run_code(strat_code, cfg)
    if result is None:
        return result
    # Persist the full result set before returning just the summary.
    dump_to_sql(self.request, result)
    return result['sys_analyser']['summary']


def dump_to_sql(request, result):
    """
    Persist one backtest's results to MySQL.

    Each entry of ``result['sys_analyser']`` is appended to a
    ``tb_<key>`` table; the strategy summary additionally feeds one row
    into ``tb_batch`` keyed by the batch's root_id.

    :param request: Celery task request supplying id/root_id/host metadata
    :param result: rqalpha result dict containing a 'sys_analyser' section
    """
    engine = create_engine(
        'mysql+pymysql://%s:%s@%s:%s/%s?charset=utf8'
        % (DB_CONF.user, DB_CONF.password, DB_CONF.host, DB_CONF.port, DB_CONF.database))
    batch_info = {}
    for key, val in result['sys_analyser'].items():
        val['id'] = request.id
        if isinstance(val, dict):
            # Only the strategy summary dict gets persisted; skip others.
            if 'strategy_name' not in val:
                continue
            batch_info = {'root_id': [request.root_id],
                          'strategy_name': [val['strategy_name']],
                          'strategy_file': [val['strategy_file']]}
            # Enrich the summary row with task/host provenance.
            val['pid'] = os.getpid()
            val['root_id'] = request.root_id
            val['group_id'] = request.id if request.group is None else request.group
            val['origin'], val['hostname'] = request.origin, request.hostname
            val['params'] = str(request.args[1]['params']) if 'params' in request.args[1] else None
            val = pd.DataFrame.from_dict([val])
            # inf values break MySQL numeric columns — store NULL instead.
            val = val.replace([np.inf, -np.inf], np.nan)
            val.set_index(['root_id', 'group_id', 'id'], inplace=True)
        else:
            # Result DataFrames (trades, portfolio, ...): re-key by task id.
            if key == 'trades':
                val.reset_index(drop=True, inplace=True)
            else:
                val.reset_index(inplace=True)
            val.set_index('id', inplace=True)
        val.to_sql('tb_' + key, engine, if_exists='append',
                   dtype={"datetime": DATETIME, "date": DATE, 'trading_datetime': DATETIME,
                          'pid': VARCHAR(100), 'root_id': VARCHAR(100),
                          'id': VARCHAR(256), 'group_id': VARCHAR(256)})
    # Fix: the original unconditionally built/indexed the batch frame and
    # raised KeyError when no summary dict was present.
    if batch_info:
        b = pd.DataFrame.from_dict(batch_info)
        b.set_index(['root_id'], inplace=True)
        try:
            b.to_sql('tb_batch', engine, if_exists='append', dtype={'root_id': VARCHAR(100)})
        except Exception:
            # Fix: narrowed from a bare except. Several tasks of one batch
            # write the same root_id row; duplicate-write errors are expected
            # and deliberately ignored.
            pass
    engine.dispose()


def get_ppath(path):
    """
    Turn ``path`` into a Python-relative path.

    Resolves the path, then strips the current working directory or the
    first matching ``sys.path`` entry (prefix match is case-insensitive,
    the returned remainder keeps its original casing). A Windows
    drive-letter path that could not be relativized is rejected.

    :param path: path to relativize
    :return: relative path with forward slashes
    :raises ValueError: for an absolute drive-letter path with no match
    """
    resolved = os.path.realpath(path)
    lowered = resolved.lower()
    # Candidate roots: the working directory first, then the import path.
    roots = [os.getcwd()] + list(sys.path[1:])
    for root in roots:
        if lowered.startswith(root.lower()):
            # Keep the original casing of the remainder.
            return resolved[len(root) + 1:].replace('\\', '/')
    if re.match("(c|d|e|f|C|D|E|F):.", path) is not None:
        raise ValueError("full path error, relative path is expected!")
    return path


def get_param(func, cfg, path):
    """
    Build the (module, method, cfg) triple needed to dispatch ``func``
    through a distributed task.

    :param func: the callable to dispatch
    :param cfg: configuration, passed through unchanged
    :param path: file path used to derive the module name when ``func``
        was defined in ``__main__``
    :return: tuple of (module_path, method_name, cfg)
    """
    method = func.__name__
    if func.__module__ == '__main__':
        ppath = get_ppath(path)
        # Fix: the original used replace('.py', ''), which also corrupted
        # paths containing '.py' mid-string; strip only a trailing suffix.
        if ppath.endswith('.py'):
            ppath = ppath[:-3]
        mod = ppath.replace('/', '.')
    else:
        mod = func.__module__
    return (mod, method, cfg)


# def batch_file_task(strat_file, batch_params, config):
#     """
#     批处理回测任务
#     """
#     # 强制不画图
#     if "mod" in config and "sys_analyser" in config["mod"]:
#       config["mod"]["sys_analyser"]["plot"] = False
    
#     result_list = []
#     for param in batch_params:
#       # 循环内必须复制对象
#       cfg = deepcopy(config)
#       for k,v in param.items():
#         # 策列参数传递
#         if k in cfg['base']:
#           cfg['base'][k] = v
#         else:
#           if 'params' not in cfg:
#             cfg['params'] = {}
#           cfg['params'][k] = v
#       # 分布式异步，文件模式
#       result = run_file_task.apply_async((get_ppath(strat_file), cfg), retry=False)
#       result_list.append(result)
#     ret_list = []
#     for result in result_list:
#       res = result.get()
#       date_done = result.date_done.replace(tzinfo=timezone('UTC')).astimezone(timezone('Asia/Shanghai')).strftime("%Y-%m-%d %H:%M:%S%Z")
#       ret_list.append({'id':result.id, 'datetime':date_done, 'status':result.status, 'result':res})
#     return ret_list


def group_file_task(strat_file, group_params, config, root_id=None):
    """
    Run one file-based backtest per parameter set as a distributed group.

    :param strat_file: strategy file path (relative, so worker nodes can
        locate the file)
    :param group_params: list of param dicts; each one is layered on top
        of ``config`` and executed on some worker node
    :param config: base backtest configuration
    :param root_id: id identifying one whole distributed batch; generated
        from timestamp + hostname when omitted
    :return: dict with the group id, overall status, and per-task results
    :example:

    .. code-block:: python

        batch_params = [{"universe":"IF1606", "start_date":"2016-01-01", "end_date":"2016-06-15"}, 
                        {"universe":"IF1607", "start_date":"2016-05-23", "end_date":"2016-07-15"}, 
                        {"universe":"IF1608", "start_date":"2016-06-20", "end_date":"2016-08-19"}]
        ret = group_file_task(__file__, batch_params, __config__)

    """
    # Plotting cannot work on headless worker nodes — force it off.
    if "mod" in config and "sys_analyser" in config["mod"]:
        config["mod"]["sys_analyser"]["plot"] = False
    # One root id uniquely identifies the whole batch of backtests.
    if root_id is None:
        root_id = '%d@%s' % (time.time(), socket.gethostname())

    task_list = []
    for param in group_params:
        # Each task needs its own copy of the configuration.
        cfg = deepcopy(config)
        for k, v in param.items():
            # Known base keys override config['base']; everything else
            # becomes a custom strategy parameter.
            if k in cfg['base']:
                cfg['base'][k] = v
            else:
                cfg.setdefault('params', {})[k] = v
                if k == 'benchmark':
                    # Fix: guard against configs missing mod/sys_analyser,
                    # which previously raised KeyError here.
                    cfg.setdefault('mod', {}).setdefault('sys_analyser', {})[k] = v
        # Distributed async execution, file mode.
        task_list.append(run_file_task.s(get_ppath(strat_file), cfg))
    # Execute the whole set as one Celery group and wait for completion.
    job = group(task_list)
    result = job.apply_async(root_id=root_id)
    result.join()
    return {'group_id': result.id, 'status': result.successful(), 'result': result.get()}


# def batch_code_task(strat_code, batch_params, config):
#     """
#     批处理回测任务
#     """
#     # 强制不画图
#     if "mod" in config and "sys_analyser" in config["mod"]:
#       config["mod"]["sys_analyser"]["plot"] = False
    
#     result_list = []
#     for param in batch_params:
#       # 循环内必须复制对象
#       cfg = deepcopy(config)
#       for k,v in param.items():
#         # 策列参数传递
#         if k in cfg['base']:
#           cfg['base'][k] = v
#         else:
#           if 'params' not in cfg:
#             cfg['params'] = {}
#           cfg['params'][k] = v
#       # 分布式异步，文件模式
#       result = run_code_task.apply_async((strat_code, cfg), retry=False)
#       result_list.append(result)
#     ret_list = []
#     for result in result_list:
#       res = result.get()
#       date_done = result.date_done.replace(tzinfo=timezone('UTC')).astimezone(timezone('Asia/Shanghai')).strftime("%Y-%m-%d %H:%M:%S%Z")
#       ret_list.append({'id':result.id, 'datetime':date_done, 'status':result.status, 'result':res})
#     return ret_list


def group_code_task(strat_code, group_params, config, root_id=None):
    """
    Run one source-code backtest per parameter set as a distributed group.

    :param strat_code: strategy source code as a string — sidesteps the
        problem of worker nodes needing access to a strategy file
    :param group_params: list of param dicts; each one is layered on top
        of ``config`` and executed on some worker node
    :param config: base backtest configuration
    :param root_id: id identifying one whole distributed batch; generated
        from timestamp + hostname when omitted
    :return: dict with the group id, overall status, and per-task results
    :example:

    .. code-block:: python

        strat_code = '''
        import talib
        from rqalpha.apis import *

        # 在这个方法中编写任何的初始化逻辑
        def init(context):
            ...

        # 你选择的期货数据更新将会触发此段逻辑
        def handle_bar(context, bar_dict):
            ...
        '''
        batch_params = [{"universe":"IF1606", "start_date":"2016-01-01", "end_date":"2016-06-15"}, 
                        {"universe":"IF1607", "start_date":"2016-05-23", "end_date":"2016-07-15"}, 
                        {"universe":"IF1608", "start_date":"2016-06-20", "end_date":"2016-08-19"}]
        ret = group_code_task(strat_code, batch_params, __config__)

    """
    # Plotting cannot work on headless worker nodes — force it off.
    if "mod" in config and "sys_analyser" in config["mod"]:
        config["mod"]["sys_analyser"]["plot"] = False
    # One root id uniquely identifies the whole batch of backtests.
    if root_id is None:
        root_id = '%d@%s' % (time.time(), socket.gethostname())

    task_list = []
    for param in group_params:
        # Each task needs its own copy of the configuration.
        cfg = deepcopy(config)
        for k, v in param.items():
            # Known base keys override config['base']; everything else
            # becomes a custom strategy parameter.
            if k in cfg['base']:
                cfg['base'][k] = v
            else:
                cfg.setdefault('params', {})[k] = v
                if k == 'benchmark':
                    # Fix: guard against configs missing mod/sys_analyser,
                    # which previously raised KeyError here.
                    cfg.setdefault('mod', {}).setdefault('sys_analyser', {})[k] = v
        # Distributed async execution, source-code mode.
        task_list.append(run_code_task.s(strat_code, cfg))
    # Execute the whole set as one Celery group and wait for completion.
    job = group(task_list)
    result = job.apply_async(root_id=root_id)
    result.join()
    return {'group_id': result.id, 'status': result.successful(), 'result': result.get()}


def exec_group_file_tasks(batch_tasks, concurrency=1):
    """
    Execute a batch of grouped backtest tasks asynchronously; each entry
    of the batch carries one group of parameter sets.

    :param batch_tasks: list of (strat_file, group_params, config) tuples
    :param concurrency: number of threads driving the groups in parallel
    :return: dict with the shared root_id and one result per group
    :example:

    .. code-block:: python

        # 按品种分批
        batch_tasks = []
        for p in batch_list:
          group_params = []
          for y in param_list:
            group_params.append(y.copy())
          batch_tasks.append((__file__, group_params, __config__))
        #
        return exec_group_file_tasks(batch_tasks, len(batch_tasks))

    """
    # One root id ties every group of this batch together.
    root_id = '%d@%s' % (time.time(), socket.gethostname())
    with ThreadPoolExecutor(concurrency) as pool:
        # Submit every group; group_file_task itself blocks until its
        # Celery group finishes (relevant for subprocess use on Windows).
        futures = [pool.submit(group_file_task, strat_file, params, cfg, root_id)
                   for strat_file, params, cfg in batch_tasks]
        wait(futures, return_when=ALL_COMPLETED)
        results = [f.result() for f in futures]
    return {'root_id': root_id, 'results': results}

  
def year_split_file_tasks(strat_file, param_list, config, concurrency=None):
    """
    Split each backtest into per-calendar-year groups and run them.

    :param strat_file: strategy file path (relative, so worker nodes can
        locate the file)
    :param param_list: one param dict per group; each group is split by
        year according to config['base'] start_date/end_date
    :param config: base configuration; its start/end dates drive the split
    :param concurrency: thread pool size; defaults to one thread per group
    :return: result dict from :func:`exec_group_file_tasks`
    :example:

    .. code-block:: python

        code_list = ['SN','CY','SM','RM','RU','LH','CF','AG','BC','JM','RS','PM','IF','I','SC','A','WH','MA','T','TF','CJ','FB','BU','SS','CS','EG','LU','V','AP','UR',\
            'NR','L','J','HC','SF','RR','FU','SP','TS','PP','CU','WR','BB','PB','C','P','JR','IH','JD','ZN','LR','PG','M','EB','PK','IC','ZC','AL','FG','OI',\
            'RI','SA','B','Y','RB','SR','AU','TA','NI','PF']
        param_list = [{"universe": '%s88' % c} for c in code_list]

        ret = year_split_file_tasks(__file__, param_list, __config__)

    """
    batch_tasks = []
    # The configured date range is loop-invariant; read it once.
    start = config['base']["start_date"]
    end = config['base']["end_date"]
    for p in param_list:
        group_params = []
        # NOTE(review): years before 2005 are never emitted — presumably no
        # earlier data exists; confirm before widening the range.
        for y in range(2005, 2999):
            # Single-year range: cannot be split, run as configured.
            if start[0:4] == end[0:4]:
                group_params.append(p.copy())
                break
            year = str(y)
            if start[0:4] > year:
                continue  # before the configured range
            if end[0:4] < year:
                break  # past the configured range
            # Clamp this year's slice to the configured date range.
            # (Fix: removed an unreachable branch that re-checked the
            # single-year case already handled above, and an unused
            # ``last_year`` local.)
            if start[0:4] == year:
                p.update({"start_date": start, "end_date": "%s-12-31" % y})
            elif end[0:4] == year:
                p.update({"start_date": "%s-01-01" % y, "end_date": end})
            else:
                p.update({"start_date": "%s-01-01" % y, "end_date": "%s-12-31" % y})
            group_params.append(p.copy())
        # One batch entry per instrument/param set.
        print(group_params)  # debug trace of the generated split
        batch_tasks.append((strat_file, group_params, config))
    if concurrency is None:
        concurrency = len(batch_tasks)
    return exec_group_file_tasks(batch_tasks, concurrency)


    
def group_params_cpu_task(func, params_list):
    """
    Fan a callable out over a list of parameter sets as a distributed
    Celery group and wait for all results.

    :param func: the callable to run, or a (method_name, module) tuple
        naming it explicitly
    :param params_list: one iterable of positional args per task
    :return: dict with the group id, overall status, and per-task results
    """
    if isinstance(func, tuple):
        # Explicit (method_name, module) pair.
        method_name, mod = func[0], func[1]
    else:
        mod = func.__module__
        method_name = func.__name__
    # One distributed task per parameter set.
    tasks = [task_cpu.s(mod, method_name, params) for params in params_list]
    # Execute as one Celery group and block until every task finishes.
    job = group(tasks)
    result = job.apply_async()
    result.join()
    return {'group_id': result.id, 'status': result.successful(), 'result': result.get()}