# coding: utf-8
import json
import math

import pandas as pd
import urllib3
from bs4 import BeautifulSoup

from exts import scheduler, db, log
from server.apps.fund.models import FundBasic
from server.apps.trade_day.views import TradeDay


class Spider(object):
    """Scraper for Eastmoney fund data: historical net worth and basic fund info."""

    header = {
        "Referer": "http://fundf10.eastmoney.com/",
        "User-Agent": """Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)
                                 Chrome/91.0.4472.101 Safari/537.36 Edg/91.0.864.48"""
    }

    @staticmethod
    def compute_r_unit_worth(unit_worth, factor, kind, unit_worth_l):
        """
        Compute the adjusted (restored) unit-net-worth growth ratio versus the
        previous trading day.

        :param unit_worth: unit net worth of the current day
        :param factor: adjustment factor as a string; "" means no dividend/split event
        :param kind: "0": dividend reinvestment; "106": fund split
        :param unit_worth_l: unit net worth of the previous trading day
        :return: the adjusted growth ratio, or None when *kind* is unrecognized
                 (the caller's fillna(1) masks such gaps)
        """
        if factor == "":
            return unit_worth / unit_worth_l
        factor = float(factor)
        if kind == "0":
            return (unit_worth + factor) / unit_worth_l
        if kind == "106":
            return (unit_worth * factor) / unit_worth_l
        # Any other event kind falls through and implicitly returns None.

    @classmethod
    def get_fund_worth(cls):
        """
        Refresh fund net-worth history and persist it to the database.
        Scheduled job: Mon-Fri, 19:00-23:00, every 3 minutes.

        :return: None
        """
        with scheduler.app.app_context():
            latest_day = TradeDay.get_last_trade_day()
            funds = FundBasic.query.with_entities(FundBasic.fund).filter(FundBasic.worth_date < latest_day).all()
        http = urllib3.PoolManager()

        for tp_fund in funds:
            fund = tp_fund[0]
            page = 0
            total_page = 1
            page_size = 2000
            df_save = pd.DataFrame()
            res = None
            # BUGFIX: `<` instead of `!=` — with `!=`, a fund whose TotalCount
            # is 0 drives total_page to 0 while page is already 1, so the loop
            # never terminates.
            while page < total_page:
                page += 1
                url = "http://api.fund.eastmoney.com/f10/lsjz?fundCode={fund}&pageIndex={page}&pageSize={page_size}". \
                    format(fund=fund, page=page, page_size=page_size)
                http_res = http.request(method="GET", url=url, headers=cls.header)
                if http_res.status == 200:
                    res = json.loads(http_res.data.decode())
                    dt = res["Data"]['LSJZList']
                    # BUGFIX: DataFrame.append was removed in pandas 2.0;
                    # pd.concat is the supported equivalent.
                    df_save = pd.concat([df_save, pd.DataFrame(dt)])
                    total_page = math.ceil(res["TotalCount"] / page_size)

            # BUGFIX: if every request failed, res is still None and the
            # original code crashed on res["TotalCount"]; an empty frame would
            # likewise break the column transforms below.
            if res is None or df_save.empty:
                continue

            total_count = res["TotalCount"]
            df_save.drop_duplicates(subset=["FSRQ"], inplace=True)
            # Pages were inconsistent (duplicates/missing rows): skip this fund
            # rather than persist a broken series.
            if df_save.shape[0] != total_count:
                continue

            df_save.rename(columns={"DWJZ": "unit_worth", "FSRQ": "worth_date"}, inplace=True)
            df_save.sort_values(by=["worth_date"], inplace=True)
            df_save["unit_worth"] = df_save["unit_worth"].astype("float")
            # Previous trading day's unit worth, input to the adjusted ratio.
            df_save["unit_worth_l"] = df_save["unit_worth"].shift(1)
            df_save["rate"] = df_save.apply(lambda x: Spider.compute_r_unit_worth(
                x["unit_worth"], x["FHFCZ"], x["FHFCBZ"], x["unit_worth_l"]), axis=1)
            # Cumulative product of the daily ratios reconstructs the adjusted
            # (restored) net-worth series.
            df_save["cumulative_worth"] = df_save["rate"].cumprod()
            df_save["fund"] = fund
            df_save = df_save[["fund", "worth_date", "unit_worth", "cumulative_worth"]]
            df_save.fillna(value=1, inplace=True)
            new_worth = df_save.iloc[-1:].to_dict(orient="records")
            dt_save = df_save.to_dict(orient="records")

            basic_sql = """update fund_basic set worth_date = :worth_date, unit_worth = :unit_worth
                            where fund = :fund;"""
            daily_sql = """insert ignore into fund_worth (fund, worth_date, unit_worth, cumulative_worth) 
                            values (:fund, :worth_date, :unit_worth, :cumulative_worth);"""

            with scheduler.app.app_context():
                try:
                    # NOTE(review): raw SQL strings require sqlalchemy.text()
                    # under SQLAlchemy 2.x — confirm the version pinned by exts.
                    db.session.execute(basic_sql, new_worth[0])
                    db.session.execute(daily_sql, dt_save)
                    db.session.commit()
                except Exception as e:
                    db.session.rollback()
                    log.logger.error(str(e))

    @classmethod
    def scrapy_fund(cls, fund):
        """
        Scrape basic info (name, type, inception date, company, bank) for one fund.

        :param fund: fund code string
        :return: dict with keys fund / name / ttype / est_date / company / bank
        :raises Exception: when the request fails or the page carries no data
        """
        url = "http://fundf10.eastmoney.com/jbgk_{fund}.html".format(fund=fund)
        http = urllib3.PoolManager()
        http_res = http.request(method="GET", url=url, headers=cls.header)
        if http_res.status != 200:
            raise Exception("{fund}获取信息失败".format(fund=fund))
        html = http_res.data.decode()
        # BUGFIX: name the parser explicitly; relying on bs4's auto-detection
        # emits GuessedAtParserWarning and may vary between environments.
        soup = BeautifulSoup(html, "html.parser")
        table = soup.find_all("table", class_="info w790")[0]
        tds = table.find_all("td")
        # (removed dead code: a loop that counted tds into an unused variable)
        est_date = tds[5].get_text()[:10]
        if est_date == "---":
            raise Exception("没有找到基金{fund}".format(fund=fund))
        # Normalize "YYYY年MM月DD" to ISO-style "YYYY-MM-DD".
        est_date = est_date.replace("年", "-").replace("月", "-")
        return {
            "fund": fund,
            "name": tds[1].get_text(),
            "ttype": tds[3].get_text(),
            "est_date": est_date,
            "company": tds[8].get_text(),
            "bank": tds[9].get_text()
        }
