# -*- coding: utf-8 -*-
import datetime
import hashlib
import json
import re
import time

import requests
import scrapy
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta

from apps.listed_company.listed_company.items import ListedCompanyItem, NetListedCompanyYearReportItem, NetListedCompanyValuationDataDetailsItem
from loguru import logger

from components.config import NET_ROBOT_MYSQL_CONFIG
from utils.db.mysqldb import MysqlDB
from utils.time_tools import get_random_date_list
from utils.tools import urlencode, urldecode
import oss2


class EastmoneyValuationDataSpider(scrapy.Spider):
    """Crawl daily valuation metrics (PE/PB/PS/PCF/PEG, market caps, share
    counts) for every stock listed in ``net_listed_company_info`` from the
    Eastmoney data-center JSONP API, yielding one
    ``NetListedCompanyValuationDataDetailsItem`` per trading day.
    """

    # NOTE(review): site is normally written "东方财富网"; left unchanged in
    # case downstream storage matches on this exact string — confirm.
    listed_exchange = '东方财务网'
    name = 'eastmoney_valuation_data'
    headers = {
        "Accept": "*/*",
        "Accept-Language": "zh,zh-TW;q=0.9,en-US;q=0.8,en;q=0.7,zh-CN;q=0.6",
        "Connection": "keep-alive",
        "Referer": "https://data.eastmoney.com/gzfx/detail/000001.html",
        "Sec-Fetch-Dest": "script",
        "Sec-Fetch-Mode": "no-cors",
        "Sec-Fetch-Site": "same-site",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36",
        "sec-ch-ua": "\"Chromium\";v=\"128\", \"Not;A=Brand\";v=\"24\", \"Google Chrome\";v=\"128\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Windows\""
    }
    # Session cookies captured from a browser; may expire — refresh if the
    # API starts rejecting requests.
    cookies = {
        "qgqp_b_id": "051664ad2cfc312195e31e79f6ac5499",
        "st_si": "91967039857127",
        "st_asi": "delete",
        "st_pvi": "42930591730043",
        "st_sp": "2024-08-29%2010%3A17%3A22",
        "st_inirUrl": "https%3A%2F%2Fwww.eastmoney.com%2F",
        "st_sn": "6",
        "st_psi": "20240829102750987-113300303064-7299492779",
        "JSESSIONID": "D934D4740962576C39D05D74A3998223"
    }
    # SECURITY: hard-coded Aliyun OSS AccessKey ID/secret committed to source
    # control. Rotate this key pair and load credentials from environment or
    # config (like NET_ROBOT_MYSQL_CONFIG below) instead of literals.
    bucket = oss2.Bucket(
        oss2.Auth("LTAI5tF8aeb3S3ypTRK9kxgt", "1pQGu6dS1NuEFh0pO5O0WjVGHJOWF6"),
        "oss-cn-hangzhou.aliyuncs.com",
        "wfq-gov-file",
    )
    to_db = MysqlDB(
        ip=NET_ROBOT_MYSQL_CONFIG["MYSQL_IP"],
        port=NET_ROBOT_MYSQL_CONFIG['MYSQL_PORT'],
        db=NET_ROBOT_MYSQL_CONFIG['MYSQL_DB'],
        user_name=NET_ROBOT_MYSQL_CONFIG['MYSQL_USER_NAME'],
        user_pass=NET_ROBOT_MYSQL_CONFIG['MYSQL_USER_PASS'],
    )

    @staticmethod
    def _non_negative(value):
        """Return *value*, or None when it is missing or negative.

        The API uses negative numbers for not-applicable ratios and may
        return null; the original ``value < 0`` test raised TypeError on
        None, so both cases are normalized to None here.
        """
        return None if value is None or value < 0 else value

    def start_requests(self):
        """Issue one valuation-detail request per stock found in MySQL."""
        url = "https://datacenter-web.eastmoney.com/api/data/v1/get"
        sql = "SELECT stock_code, stock_abb FROM `net_robot`.`net_listed_company_info`"
        stock_code_list = self.to_db.find(sql)
        for stock_code, stock_abb in stock_code_list:
            params = {
                "callback": "jQuery1123020061776159912847_1724898563220",
                "reportName": "RPT_VALUEANALYSIS_DET",
                "columns": "ALL",
                "quoteColumns": "",
                "pageNumber": "1",
                "pageSize": "5000",
                "sortColumns": "TRADE_DATE",
                "sortTypes": "1",
                "source": "WEB",
                "client": "WEB",
                "filter": f"(SECURITY_CODE=\"{stock_code}\")",
                # cache-buster timestamp in milliseconds, mimicking jQuery
                "_": str(int(time.time() * 1000)),
            }
            yield scrapy.Request(url + "?" + urlencode(params), callback=self.parse_list, cookies=self.cookies, headers=self.headers, meta={'params': params, 'stock_abb': stock_abb, 'stock_code': stock_code})

    def parse_list(self, response, **kwargs):
        """Unwrap the JSONP payload and yield one item per trading day.

        Raises if more than one page (5000 rows) would be needed, so the
        gap is noticed instead of silently truncated.
        """
        stock_code = response.meta.get("stock_code")
        stock_abb = response.meta.get("stock_abb")
        # Check the "empty result" marker BEFORE parsing: when it is present
        # the JSON carries a null result and indexing into it would fail.
        if '返回数据为空' in response.text:
            logger.info(f"{stock_code} 总量: 返回数据为空")
            return
        # Strip the jQuery JSONP callback wrapper to get the raw JSON text;
        # tolerate non-JSONP responses (error pages, throttling) gracefully.
        match = re.search(r"jQuery[\d_]+\((.*)\)", response.text)
        if match is None:
            logger.warning(f"{stock_code} 响应不是预期的JSONP: {response.text[:100]}")
            return
        data = json.loads(match.group(1))
        result = data.get('result') or {}
        lines = result.get('data') or []
        count = result.get('count', 0)
        logger.info(f"{stock_code} 总量: {count}")
        if count > 5000:
            raise Exception(f'需要分页 {stock_code} {count}')
        for line in lines:
            item = NetListedCompanyValuationDataDetailsItem(**{
                'stock_code': stock_code,
                'stock_abb': stock_abb,
                # TRADE_DATE comes back as "YYYY-MM-DD HH:MM:SS"; keep the date
                'time': line['TRADE_DATE'].split()[0],
                'daily_closing_price': line['CLOSE_PRICE'],
                'daily_increase_or_decrease': line['CHANGE_RATE'],

                'total_market_value': line['TOTAL_MARKET_CAP'],
                'circulation_market_value': line['NOTLIMITED_MARKETCAP_A'],
                'total_share_capital': line['TOTAL_SHARES'],
                'out_share_capital': line['FREE_SHARES_A'],
                # negative/null ratios mean "not applicable" — store NULL
                'pettm': self._non_negative(line['PE_TTM']),
                'static_pe': self._non_negative(line['PE_LAR']),
                'pb': line['PB_MRQ'],
                'peg': self._non_negative(line['PEG_CAR']),
                'pcf': line['PCF_OCF_TTM'],
                'ps': line['PS_TTM'],
            })
            yield item


if __name__ == "__main__":
    # Allow launching this spider directly: equivalent to running
    # `scrapy crawl eastmoney_valuation_data` from the project root.
    from scrapy import cmdline

    cmdline.execute(["scrapy", "crawl", "eastmoney_valuation_data"])
