#  -*- coding: utf-8 -*-

"""
普量学院量化投资课程系列案例源码包
普量学院版权所有
仅用于教学目的，严禁转发和用于盈利目的，违者必究
©Plouto-Quants All Rights Reserved

普量学院助教微信：niuxiaomi3
"""

"""
抓取财报数据，主要关注EPS、公告日期、报告期
使用最新的东方财富API接口
"""

import json
import urllib3
import urllib.parse
import time
import random
from datetime import datetime

from pymongo import UpdateOne

from database import DB_CONN
from stock_util import get_all_codes, safe_float_conversion

# NOTE(review): module-level default User-Agent; the request path builds its own
# User-Agent pool inside fetch_single_page, so this constant appears unused in
# the visible code — confirm no external caller imports it before removing.
user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'


def generate_callback():
    """
    Generate a random jQuery-style JSONP callback name.

    The name has the shape ``jQuery<16 random digits>_<millisecond timestamp>``,
    matching the callback parameter expected by the JSONP endpoint.

    :return: callback name string
    """
    # `random` is already imported at module level; the former function-local
    # re-import was redundant and has been removed.
    random_num = random.randint(1000000000000000, 9999999999999999)
    timestamp = int(datetime.now().timestamp() * 1000)
    return f"jQuery{random_num}_{timestamp}"


# Pool of desktop-browser User-Agent strings; one is chosen at random per
# request. Hoisted to module level so the tuple is not rebuilt on every call.
_USER_AGENTS = (
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/120.0',
)


def _build_report_url(base_url, code, page_number, page_size, callback):
    """Build the full JSONP request URL for one page of report data."""
    params = {
        'callback': callback,
        'sortColumns': 'REPORTDATE',
        'sortTypes': '-1',
        'pageSize': str(page_size),
        'pageNumber': str(page_number),
        'columns': 'ALL',
        'filter': f'(SECURITY_CODE="{code}")',
        'reportName': 'RPT_LICO_FN_CPD'
    }
    query_string = '&'.join(f'{k}={urllib.parse.quote(str(v))}' for k, v in params.items())
    return f'{base_url}?{query_string}'


def _strip_jsonp(callback, text):
    """
    Extract the raw JSON payload from a JSONP-wrapped response body.

    If the body does not start with the expected callback name it is assumed
    to already be plain JSON and is returned unchanged.
    """
    if not text.startswith(callback):
        return text
    start = text.find('(') + 1
    end = text.rfind(')')
    # Fix: on a truncated body with no closing parenthesis, rfind() returns -1
    # which would silently drop the final character; fall back to end-of-string
    # so json.loads reports the real parse error instead.
    if end == -1:
        end = len(text)
    return text[start:end]


def fetch_single_page(conn_pool, base_url, code, page_number, page_size=50):
    """
    Fetch one page of financial-report data for a single stock.

    :param conn_pool: urllib3-style connection pool used for the GET request
    :param base_url: API base URL
    :param code: stock code
    :param page_number: page number (1-based)
    :param page_size: rows per page
    :return: tuple (success, data, total_pages, message)
    """
    try:
        callback = generate_callback()
        full_url = _build_report_url(base_url, code, page_number, page_size, callback)

        headers = {
            'User-Agent': random.choice(_USER_AGENTS),
            'Referer': 'https://data.eastmoney.com/',
            'Accept': '*/*',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Accept-Encoding': 'gzip, deflate, br',
            'Connection': 'keep-alive',
            'Cache-Control': 'no-cache',
            'Pragma': 'no-cache'
        }

        response = conn_pool.request('GET', full_url, headers=headers)
        if response.status != 200:
            return False, [], 0, f'HTTP {response.status}'

        response_text = response.data.decode('UTF-8', errors='ignore')
        if not response_text.strip():
            return False, [], 0, '空响应'

        try:
            result = json.loads(_strip_jsonp(callback, response_text))
        except json.JSONDecodeError as e:
            return False, [], 0, f'JSON解析错误: {e}'

        if not result.get('success', False):
            message = result.get('message', '未知错误')
            code_status = result.get('code', 0)
            # Code 9201 / "返回数据为空" means an empty result set (typically a
            # delisted stock) — treated as success with no rows, not an error.
            if code_status == 9201 or '返回数据为空' in message:
                return True, [], 0, '无数据'
            return False, [], 0, f'{message} (code: {code_status})'

        result_data = result.get('result')
        if not result_data:
            return True, [], 0, '结果为空'

        data = result_data.get('data', [])
        total_pages = result_data.get('pages', 1)
        return True, data, total_pages, 'OK'

    except Exception as e:
        # Network/decode failures are reported to the caller instead of raised
        # so the crawl loop can continue with other pages and stocks.
        return False, [], 0, f'请求异常: {e}'


def fetch_all_pages_for_stock(conn_pool, base_url, code):
    """
    Collect every page of financial-report records for one stock.

    :param conn_pool: connection pool passed through to the page fetcher
    :param base_url: API base URL
    :param code: stock code
    :return: list of all report records; [] when the stock has no data;
             None when a page-level fetch error occurred
    """
    reports = []
    page = 1

    while True:
        ok, rows, pages, msg = fetch_single_page(conn_pool, base_url, code, page, 50)

        # Any page-level failure aborts the whole stock.
        if not ok:
            print(f'股票 {code} 第{page}页: {msg}')
            return None

        if not rows:
            # An empty FIRST page means a delisted / data-less stock; an empty
            # later page is simply the natural end of pagination.
            return [] if page == 1 else reports

        reports.extend(rows)

        # Stop once the reported page count is reached.
        if page >= pages:
            return reports

        page += 1
        # Small random pause between pages to stay polite.
        time.sleep(random.uniform(0.2, 0.8))


def crawl_finance_report():
    """
    Crawl financial-report data (EPS, announcement date, report period) for
    every known stock via the EastMoney data-center API and persist the
    results through save_finance_data().
    """
    print("开始抓取财报数据...")

    # Fetch the full stock-code list first; nothing to do without it.
    try:
        codes = get_all_codes()
        if not codes:
            print("未获取到股票代码列表，请先运行basic_crawler.py获取基础数据")
            return
        print(f"获取到 {len(codes)} 只股票代码")
    except Exception as e:
        print(f"获取股票代码列表时出错: {e}")
        return

    # Connection pool with explicit connect/read timeouts and retries.
    conn_pool = urllib3.PoolManager(
        timeout=urllib3.Timeout(connect=10, read=30),
        retries=urllib3.Retry(total=3, backoff_factor=1)
    )

    # EastMoney data-center API base URL
    base_url = 'https://datacenter-web.eastmoney.com/api/data/v1/get'

    success_count = 0
    error_count = 0
    skipped_count = 0  # stocks skipped (delisted or no report data)

    # Crawl every stock's financial reports.
    for i, code in enumerate(codes):
        try:
            all_reports = fetch_all_pages_for_stock(conn_pool, base_url, code)

            if all_reports is None:
                # Page-level API error; counted and skipped.
                error_count += 1
                continue
            elif len(all_reports) == 0:
                # Delisted stock or no report data.
                print(f'股票 {code}: 退市或无财报数据，跳过')
                skipped_count += 1
                continue
            else:
                save_finance_data(code, all_reports)
                success_count += 1
                print(f'股票 {code}: 成功获取 {len(all_reports)} 条财报数据')

            # Progress report every 10 stocks.
            if (i + 1) % 10 == 0:
                print(f'进度: {i+1}/{len(codes)}, 成功: {success_count}, 跳过: {skipped_count}, 错误: {error_count}')

            # Anti-crawler throttling: random base delay, longer pauses at
            # milestones. BUG FIX: the %100 check must come BEFORE %50 —
            # every multiple of 100 is also a multiple of 50, so the old
            # `elif %100` branch was unreachable and the long pause never ran.
            delay = random.uniform(1.0, 3.0)
            if (i + 1) % 100 == 0:
                # Extra-long pause every 100 stocks.
                delay = random.uniform(15.0, 30.0)
                print(f'已处理 {i+1} 只股票，长时间暂停 {delay:.1f} 秒防止被检测...')
            elif (i + 1) % 50 == 0:
                # Longer pause every 50 stocks.
                delay = random.uniform(5.0, 10.0)
                print(f'已处理 {i+1} 只股票，暂停 {delay:.1f} 秒...')

            time.sleep(delay)

        except Exception as e:
            print(f'抓取股票 {code} 财报数据时出错: {e}')
            error_count += 1
            continue

    print(f'\n财报数据抓取完成！')
    print(f'成功: {success_count} 只股票')
    print(f'跳过: {skipped_count} 只股票 (退市或无财报数据)')
    print(f'错误: {error_count} 只股票')
    print(f'总计: {len(codes)} 只股票')
    print(f'成功率: {success_count/(len(codes))*100:.1f}%')


def _first10(value):
    """Return the first 10 chars (YYYY-MM-DD) of a date string, or '' if falsy."""
    return value[:10] if value else ''


def save_finance_data(code, reports):
    """
    Persist one stock's financial-report records to MongoDB.

    Each record is upserted into the 'finance_report' collection keyed on
    (code, report_date).

    :param code: stock code
    :param reports: list of raw report dicts from the EastMoney API
    """
    update_requests = []

    for report in reports:
        try:
            # Build the document, using safe numeric conversion throughout.
            doc = {
                'code': code,
                'security_code': report.get('SECURITY_CODE', code),
                'security_name': report.get('SECURITY_NAME_ABBR', ''),
                'report_date': _first10(report.get('REPORTDATE')),
                'notice_date': _first10(report.get('NOTICE_DATE')),
                'update_date': _first10(report.get('UPDATE_DATE')),
                'basic_eps': safe_float_conversion(report.get('BASIC_EPS'), 0),  # earnings per share
                'deduct_basic_eps': safe_float_conversion(report.get('DEDUCT_BASIC_EPS'), 0),  # EPS excl. non-recurring items
                'total_operate_income': safe_float_conversion(report.get('TOTAL_OPERATE_INCOME'), 0),  # total operating income
                'parent_netprofit': safe_float_conversion(report.get('PARENT_NETPROFIT'), 0),  # net profit attributable to parent
                'weightavg_roe': safe_float_conversion(report.get('WEIGHTAVG_ROE'), 0),  # weighted average ROE
                'bps': safe_float_conversion(report.get('BPS'), 0),  # net assets per share
                'datatype': report.get('DATATYPE', ''),  # report type (annual, semi-annual, ...)
                'qdate': report.get('QDATE', ''),  # quarter
                'trade_market': report.get('TRADE_MARKET', ''),  # trading market
                'publishname': report.get('PUBLISHNAME', ''),  # industry
                'crawl_time': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            }

            # Alias fields kept for backward compatibility with the old schema.
            doc.update({
                'announced_date': doc['notice_date'],
                'eps': doc['basic_eps']
            })

            # NOTE(review): records whose REPORTDATE is missing all share the
            # key (code, '') and would overwrite each other — confirm the API
            # always supplies REPORTDATE.
            update_requests.append(
                UpdateOne(
                    {'code': code, 'report_date': doc['report_date']},
                    {'$set': doc},
                    upsert=True
                )
            )

        except Exception as e:
            print(f'处理股票 {code} 的财报记录时出错: {e}')
            continue

    # Single unordered bulk write per stock.
    if update_requests:
        try:
            update_result = DB_CONN['finance_report'].bulk_write(update_requests, ordered=False)
            print(f'股票 {code}: 财报数据，插入 {update_result.upserted_count} 条，更新 {update_result.modified_count} 条')
        except Exception as e:
            # Fixed: previously split across an awkward three-part f-string
            # concatenation; the emitted message is byte-identical.
            print(f'保存股票 {code} 财报数据时出错: {e}')


if __name__ == "__main__":
    # Script entry point: run the full financial-report crawl.
    crawl_finance_report()
