import json
from openai import OpenAI
import pymysql
from pymysql import OperationalError, Error
import logging
import time
from datetime import datetime
import re
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed
import queue
import random

# Logging configuration: write to a UTF-8 log file and the console; the
# thread name is included so multi-worker output can be attributed.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(threadName)s - %(message)s',
    handlers=[
        logging.FileHandler('company_analysis.log', encoding='utf-8'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# DeepSeek client (OpenAI-compatible endpoint).
# SECURITY NOTE(review): the API key is hard-coded here. It should be read
# from an environment variable or a secrets store, and this committed key
# should be rotated.
client = OpenAI(api_key="sk-88ef30402ce84ab28d48d5ff574f1045", base_url="https://api.deepseek.com")

# MySQL connection settings, consumed by every pymysql.connect(**DB_CONFIG).
DB_CONFIG = {
    "host": "localhost",
    "port": 3306,
    "user": "root",
    "password": "",  # replace with your password
    "database": "stock_db",
    "charset": "utf8mb4"
}

# Shared progress counters (guarded by `lock`) and the work queue that
# feeds the worker threads.
processed_count = 0
success_count = 0
failed_count = 0
lock = threading.Lock()
task_queue = queue.Queue()


def get_db_connection():
    """Open a fresh MySQL connection from DB_CONFIG.

    Returns:
        A live pymysql connection, or None when the connection attempt fails
        (the error is logged rather than raised).
    """
    try:
        return pymysql.connect(**DB_CONFIG)
    except Error as exc:
        logger.error(f"❌ 数据库连接失败: {exc}")
        return None


def create_company_detail_table():
    """Create the company_detail_analysis table if it does not already exist.

    Returns:
        bool: True when the table is present after the call (created or
        pre-existing), False when the connection or the DDL failed.
    """
    conn = get_db_connection()
    if not conn:
        return False

    try:
        with conn.cursor() as cursor:
            # One wide row per company: identity fields, industry
            # classification, qualitative analysis text columns, 5-year
            # financial series, and bookkeeping (batch_id, timestamps).
            create_table_sql = """
            CREATE TABLE IF NOT EXISTS company_detail_analysis (
                id INT AUTO_INCREMENT PRIMARY KEY,
                stock_code VARCHAR(20) NOT NULL,
                company_name VARCHAR(255) NOT NULL,
                establishment_date VARCHAR(100),
                manager VARCHAR(255),
                manager_introduction TEXT,
                registered_address TEXT,
                company_type VARCHAR(100),
                primary_industry VARCHAR(100),
                secondary_industry VARCHAR(100),
                tertiary_industry VARCHAR(100),
                business_scope TEXT,
                main_business TEXT,
                violation_time VARCHAR(100),
                violation_type VARCHAR(200),
                violation_description TEXT,
                listing_date VARCHAR(100),
                controlling_shareholder VARCHAR(255),
                business_development_analysis TEXT,
                financial_condition_analysis TEXT,
                profit_model TEXT,
                moat TEXT,
                customer_group TEXT,
                competitors TEXT,
                business_area TEXT,
                subsidiaries TEXT,
                core_products_services TEXT,
                related_upstream_downstream_listed_companies TEXT,
                main_factors_affecting_operation TEXT,
                former_names TEXT,
                csrc_warning_letters TEXT,
                major_shareholder_reduction TEXT,
                company_goodwill TEXT,
                goodwill_scale VARCHAR(100),
                asset_provision_information TEXT,
                position_in_subdivision_industry TEXT,
                debt_ratio_5years TEXT,
                revenue_5years TEXT,
                deducted_net_profit_5years TEXT,
                gross_profit_margin_5years TEXT,
                same_industry_market_value_ranking TEXT,
                dominant_products_services_in_market TEXT,
                company_highlights TEXT,
                company_risk_points TEXT,
                themes_concepts TEXT,
                what_company_does TEXT,
                major_customers TEXT,
                stock_price_surge_periods TEXT,
                performance_indicators_during_surge TEXT,
                is_cyclical VARCHAR(10),
                stock_price_characteristics TEXT,
                analysis_time DATETIME DEFAULT CURRENT_TIMESTAMP,
                batch_id INT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                INDEX idx_stock_code (stock_code),
                INDEX idx_primary_industry (primary_industry),
                INDEX idx_batch_id (batch_id)
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """
            cursor.execute(create_table_sql)
        conn.commit()
        logger.info("✅ 公司详细信息数据表创建/检查完成")
        return True
    except Exception as e:
        logger.error(f"❌ 创建数据表失败: {e}")
        return False
    finally:
        conn.close()


def get_stock_basic_info(limit=100, offset=0):
    """Fetch one page of not-yet-analysed stocks from a_stock_basic_info.

    Companies whose code already appears in company_detail_analysis are
    excluded, so repeated runs resume where the previous one stopped.

    Args:
        limit: maximum number of rows to return.
        offset: pagination offset; rows are ordered by a.id.

    Returns:
        A list of dicts with keys id/code/name on success, or None on any
        database error (errors are logged, not raised).
    """
    connection = None  # explicit init so `finally` never depends on locals()
    try:
        connection = pymysql.connect(**DB_CONFIG)
        with connection.cursor(pymysql.cursors.DictCursor) as cursor:
            # The NOT IN sub-select skips companies that already have an
            # analysis row in company_detail_analysis.
            sql = """
            SELECT a.id, a.code, a.name  
            FROM a_stock_basic_info a 
            WHERE a.code NOT IN (SELECT stock_code FROM company_detail_analysis)
            ORDER BY a.id 
            LIMIT %s OFFSET %s
            """
            cursor.execute(sql, (limit, offset))
            results = cursor.fetchall()

        logger.info(f"✅ 成功获取 {len(results)} 条股票记录 (offset: {offset})")
        return results

    except OperationalError as e:
        logger.error(f"❌ 数据库操作错误: {e}")
        return None
    except Exception as e:
        logger.error(f"❌ 查询失败: {e}")
        return None
    finally:
        if connection is not None and connection.open:
            connection.close()


def get_all_stock_codes(total_limit=5000):
    """Collect up to ``total_limit`` pending stocks by paging the database.

    Pages through get_stock_basic_info() in fixed-size batches and stops on
    the first empty/short page, on error, or once the limit is reached.

    Returns:
        A list of stock-row dicts, at most ``total_limit`` long.
    """
    page_size = 500
    collected = []
    offset = 0

    while len(collected) < total_limit:
        try:
            page = get_stock_basic_info(limit=page_size, offset=offset)
            if not page:
                break

            collected.extend(page)
            offset += page_size

            # A short page means the source table is exhausted.
            if len(page) < page_size:
                break

        except Exception as e:
            logger.error(f"❌ 获取股票代码失败: {e}")
            break

    logger.info(f"📊 总共获取 {len(collected)} 只股票")
    return collected[:total_limit]


def analyze_company_detail(stock_data, batch_id, retry_count=3):
    """Analyse one listed company with the DeepSeek chat API.

    Args:
        stock_data: dict with at least 'code' and 'name' keys.
        batch_id: logical batch identifier recorded with the result.
        retry_count: number of attempts before giving up.

    Returns:
        dict parsed from the model's JSON answer. On repeated failure a
        minimal dict containing only the identifying fields is returned so
        the caller can still persist a placeholder row.
    """
    stock_code = stock_data['code']
    company_name = stock_data['name']

    system_prompt = f"""
    你是一个专业的股票分析师，请详细分析以下上市公司，并以JSON格式返回分析结果。
    公司代码：{stock_code}，公司名称：{company_name}。

    请确保数据准确、客观，基于公开信息和行业常识进行分析。

    重要：请严格按照JSON格式返回，不要包含任何其他文本或说明。
    """

    user_prompt = f"""
    请详细分析股票代码：{stock_code}，公司名称：{company_name}，提供以下完整信息：

    请以JSON格式返回，包含以下字段：
    {{
        "stock_code": "股票代码",
        "company_name": "上市公司名称",
        "establishment_date": "成立日期",
        "manager": "管理者",
        "manager_introduction": "管理者介绍",
        "registered_address": "注册地址", 
        "company_type": "公司性质",
        "primary_industry": "一级行业",
        "secondary_industry": "二级行业",
        "tertiary_industry": "三级行业",
        "business_scope": "经营范围",
        "main_business": "主营业务",
        "violation_time": "违规时间",
        "violation_type": "违规性质", 
        "violation_description": "违规描述",
        "listing_date": "上市时间",
        "controlling_shareholder": "控股股东",
        "business_development_analysis": "业务发展分析",
        "financial_condition_analysis": "财务状况分析",
        "profit_model": "盈利模式",
        "moat": "护城河",
        "customer_group": "客户群体",
        "competitors": "竞争对手",
        "business_area": "业务区域",
        "subsidiaries": "子企业",
        "core_products_services": "核心产品或服务",
        "related_upstream_downstream_listed_companies": "关联上下游上市企业",
        "main_factors_affecting_operation": "经营状况主要影响因素",
        "former_names": "企业曾用名",
        "csrc_warning_letters": "被证监会发过警告函情况",
        "major_shareholder_reduction": "大股东减持情况",
        "company_goodwill": "公司商誉",
        "goodwill_scale": "商誉规模", 
        "asset_provision_information": "资产计提信息",
        "position_in_subdivision_industry": "细分行业中地位",
        "debt_ratio_5years": "近5年的负债率",
        "revenue_5years": "近5年的营业收入",
        "deducted_net_profit_5years": "近5年的扣费净利润", 
        "gross_profit_margin_5years": "近5年的毛利率",
        "same_industry_market_value_ranking": "同行业市值排名公司",
        "dominant_products_services_in_market": "市场中占主导地位的产品服务",
        "company_highlights": "公司亮点",
        "company_risk_points": "公司风险点",
        "themes_concepts": "题材/概念",
        "what_company_does": "公司干什么的",
        "major_customers": "主要客户",
        "stock_price_surge_periods": "股票大涨时间段",
        "performance_indicators_during_surge": "股票大涨时间段内业绩指标详细描述",
        "is_cyclical": "是否为周期股",
        "stock_price_characteristics": "此公司的股价变动特点"
    }}

    要求：
    1. 所有字段都必须提供
    2. 数值类数据请尽量提供具体数字和时间范围
    3. 分析要客观、专业
    4. 对于时间序列数据（如近5年数据），请按年份列出
    5. 请确保返回的是纯JSON格式，不要包含任何其他文本
    """

    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt}
    ]

    for attempt in range(retry_count):
        try:
            logger.info(
                f"🔄 线程 {threading.current_thread().name} 正在分析 {stock_code} {company_name} (批次: {batch_id})...")

            # Random jitter so concurrent workers do not hit the API at the
            # same instant. (The original comment promised a random delay but
            # slept a fixed 3 s; `random` was imported yet never used.)
            time.sleep(random.uniform(2, 5))

            response = client.chat.completions.create(
                model="deepseek-chat",
                messages=messages,
                response_format={'type': 'json_object'},
                timeout=200  # generous timeout for the long JSON answer
            )

            result_content = response.choices[0].message.content

            # Strip accidental Markdown code fences so the payload is pure JSON.
            result_content = result_content.strip()
            if result_content.startswith('```json'):
                result_content = result_content[7:]
            if result_content.endswith('```'):
                result_content = result_content[:-3]
            result_content = result_content.strip()

            result = json.loads(result_content)

            # Guarantee the identifying fields are present for the DB insert.
            result.setdefault('stock_code', stock_code)
            result.setdefault('company_name', company_name)

            logger.info(f"✅ {stock_code} {company_name} 分析完成")
            return result

        except json.JSONDecodeError as e:
            logger.error(f"❌ {stock_code} {company_name} JSON解析失败 (尝试 {attempt + 1}/{retry_count}): {e}")
            if attempt == retry_count - 1:
                logger.error(f"原始响应: {response.choices[0].message.content if 'response' in locals() else '无响应'}")
                # Fall back to a minimal record so the pipeline can proceed.
                return {
                    "stock_code": stock_code,
                    "company_name": company_name,
                    "analysis_time": datetime.now().isoformat(),
                    "batch_id": batch_id
                }
            time.sleep(3)  # back off before retrying

        except Exception as e:
            logger.error(f"❌ {stock_code} {company_name} AI分析失败 (尝试 {attempt + 1}/{retry_count}): {e}")
            if attempt == retry_count - 1:
                # Fall back to a minimal record so the pipeline can proceed.
                return {
                    "stock_code": stock_code,
                    "company_name": company_name,
                    "analysis_time": datetime.now().isoformat(),
                    "batch_id": batch_id
                }
            time.sleep(3)  # back off before retrying


def save_company_detail_to_database(company_data, batch_id):
    """Insert one analysed company into company_detail_analysis.

    Args:
        company_data: dict produced by analyze_company_detail(); missing or
            None-valued fields fall back to the placeholder "暂无信息".
        batch_id: batch identifier stored with the record.

    Returns:
        True when the row was inserted and committed, False otherwise.
    """
    conn = get_db_connection()
    if not conn:
        return False

    def safe_string(value, max_length=1000):
        """Coerce to str, drop suspicious SQL-looking payloads, cap length."""
        if value is None:
            return "暂无信息"
        str_value = str(value)
        # Guard against the LLM echoing SQL statements into a field.
        if any(sql_keyword in str_value.upper() for sql_keyword in
               ['CREATE TABLE', 'DROP TABLE', 'INSERT INTO', 'SELECT *']):
            logger.warning(f"⚠️ 检测到异常内容，已清理: {str_value[:100]}...")
            return "数据异常，已清理"
        # Truncate over-long values so they fit the column.
        if len(str_value) > max_length:
            return str_value[:max_length] + "..."
        return str_value

    # (field key, max length, default when the key is absent).
    # The order MUST mirror the column list in the INSERT statement below;
    # this table replaces 50 hand-duplicated safe_string(...) lines.
    field_specs = [
        ('stock_code', 20, ''),
        ('company_name', 255, ''),
        ('establishment_date', 100, '暂无信息'),
        ('manager', 255, '暂无信息'),
        ('manager_introduction', 4000, '暂无信息'),
        ('registered_address', 1000, '暂无信息'),
        ('company_type', 100, '暂无信息'),
        ('primary_industry', 100, '暂无信息'),
        ('secondary_industry', 100, '暂无信息'),
        ('tertiary_industry', 100, '暂无信息'),
        ('business_scope', 4000, '暂无信息'),
        ('main_business', 4000, '暂无信息'),
        ('violation_time', 100, '暂无信息'),
        ('violation_type', 200, '暂无信息'),
        ('violation_description', 4000, '暂无信息'),
        ('listing_date', 100, '暂无信息'),
        ('controlling_shareholder', 255, '暂无信息'),
        ('business_development_analysis', 4000, '暂无信息'),
        ('financial_condition_analysis', 4000, '暂无信息'),
        ('profit_model', 4000, '暂无信息'),
        ('moat', 4000, '暂无信息'),
        ('customer_group', 4000, '暂无信息'),
        ('competitors', 4000, '暂无信息'),
        ('business_area', 4000, '暂无信息'),
        ('subsidiaries', 4000, '暂无信息'),
        ('core_products_services', 4000, '暂无信息'),
        ('related_upstream_downstream_listed_companies', 4000, '暂无信息'),
        ('main_factors_affecting_operation', 4000, '暂无信息'),
        ('former_names', 4000, '暂无信息'),
        ('csrc_warning_letters', 4000, '暂无信息'),
        ('major_shareholder_reduction', 4000, '暂无信息'),
        ('company_goodwill', 4000, '暂无信息'),
        ('goodwill_scale', 100, '暂无信息'),
        ('asset_provision_information', 4000, '暂无信息'),
        ('position_in_subdivision_industry', 4000, '暂无信息'),
        ('debt_ratio_5years', 4000, '暂无信息'),
        ('revenue_5years', 4000, '暂无信息'),
        ('deducted_net_profit_5years', 4000, '暂无信息'),
        ('gross_profit_margin_5years', 4000, '暂无信息'),
        ('same_industry_market_value_ranking', 4000, '暂无信息'),
        ('dominant_products_services_in_market', 4000, '暂无信息'),
        ('company_highlights', 4000, '暂无信息'),
        ('company_risk_points', 4000, '暂无信息'),
        ('themes_concepts', 4000, '暂无信息'),
        ('what_company_does', 4000, '暂无信息'),
        ('major_customers', 4000, '暂无信息'),
        ('stock_price_surge_periods', 4000, '暂无信息'),
        ('performance_indicators_during_surge', 4000, '暂无信息'),
        ('is_cyclical', 10, '暂无信息'),
        ('stock_price_characteristics', 4000, '暂无信息'),
    ]

    try:
        with conn.cursor() as cursor:
            insert_sql = """
            INSERT INTO company_detail_analysis 
            (stock_code, company_name, establishment_date, manager, manager_introduction, 
             registered_address, company_type, primary_industry, secondary_industry, tertiary_industry,
             business_scope, main_business, violation_time, violation_type, violation_description,
             listing_date, controlling_shareholder, business_development_analysis, financial_condition_analysis,
             profit_model, moat, customer_group, competitors, business_area, subsidiaries,
             core_products_services, related_upstream_downstream_listed_companies, main_factors_affecting_operation,
             former_names, csrc_warning_letters, major_shareholder_reduction, company_goodwill, goodwill_scale,
             asset_provision_information, position_in_subdivision_industry, debt_ratio_5years, revenue_5years,
             deducted_net_profit_5years, gross_profit_margin_5years, same_industry_market_value_ranking,
             dominant_products_services_in_market, company_highlights, company_risk_points, themes_concepts,
             what_company_does, major_customers, stock_price_surge_periods, performance_indicators_during_surge,
             is_cyclical, stock_price_characteristics, batch_id)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
                    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
                    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s)
            """

            params = tuple(
                safe_string(company_data.get(key, default), max_len)
                for key, max_len, default in field_specs
            ) + (batch_id,)

            cursor.execute(insert_sql, params)
            conn.commit()
            logger.info(f"💾 {company_data.get('stock_code')} {company_data.get('company_name')} 数据保存成功")
            return True

    except Exception as e:
        logger.error(f"❌ 数据保存失败 {company_data.get('stock_code')}: {e}")
        conn.rollback()
        return False
    finally:
        conn.close()


def process_single_company(stock_data, batch_id):
    """Run the full pipeline for one stock: AI analysis, then DB persistence.

    Also updates the shared progress counters and emits a progress report
    every 10 processed companies.

    Returns:
        True when the pipeline ran without raising, False on exception
        (individual save failures still return True, matching the original
        contract; they are reflected in failed_count).
    """
    global processed_count, success_count, failed_count

    try:
        # AI analysis (falls back to a minimal placeholder dict on failure).
        analysis_result = analyze_company_detail(stock_data, batch_id)

        # Persist the result.
        saved = save_company_detail_to_database(analysis_result, batch_id)

        # Update all counters under ONE lock acquisition and take a consistent
        # snapshot for the report. (The original used several separate lock
        # sections and then read the counters unlocked, racing with other
        # worker threads.)
        with lock:
            processed_count += 1
            if saved:
                success_count += 1
            else:
                failed_count += 1
            done, ok, bad = processed_count, success_count, failed_count

        # Periodic progress report based on the consistent snapshot.
        if done % 10 == 0:
            logger.info(f"📊 进度报告: 已处理 {done} 家，成功 {ok} 家，失败 {bad} 家")

        return True

    except Exception as e:
        logger.error(f"❌ 处理 {stock_data.get('code')} 失败: {e}")
        with lock:
            processed_count += 1
            failed_count += 1
        return False


def worker(worker_id, batch_id):
    """Worker-thread loop: pull stocks from task_queue until a sentinel,
    a 30-second idle timeout, or queue exhaustion.

    Args:
        worker_id: human-readable thread number for log messages.
        batch_id: batch identifier passed through to the pipeline.
    """
    logger.info(f"👷 工作线程 {worker_id} 启动")

    while True:
        try:
            # Block for up to 30 s waiting for work.
            stock_data = task_queue.get(timeout=30)
        except queue.Empty:
            logger.info(f"🛑 工作线程 {worker_id} 超时退出")
            break

        if stock_data is None:  # shutdown sentinel
            break

        try:
            process_single_company(stock_data, batch_id)
        except Exception as e:
            logger.error(f"❌ 工作线程 {worker_id} 异常: {e}")
        finally:
            # Always acknowledge the task, even when processing raised —
            # the original skipped task_done() on exception, which would
            # leave task_queue.join() blocked forever.
            task_queue.task_done()

    logger.info(f"🏁 工作线程 {worker_id} 结束")


def batch_process_companies_multithread(total_limit=100, batch_size=20, num_workers=5):
    """Fan the pending companies out to a pool of worker threads.

    Args:
        total_limit: maximum number of companies to process in this run.
        batch_size: kept for interface compatibility; currently unused.
        num_workers: number of worker threads (tune to the API rate limit).
    """
    logger.info(f"🚀 开始多线程批量处理 {total_limit} 条数据，工作线程数: {num_workers}")

    # Make sure the destination table exists before any worker starts.
    if not create_company_detail_table():
        logger.error("❌ 数据表创建失败，程序退出")
        return

    # Fetch the pending stock rows.
    all_stocks = get_all_stock_codes(total_limit)
    if not all_stocks:
        logger.error("❌ 未获取到股票数据，程序退出")
        return

    # Enqueue every task.
    for stock in all_stocks:
        task_queue.put(stock)

    total_tasks = len(all_stocks)
    logger.info(f"📦 总任务数: {total_tasks}，已加入任务队列")

    threads = []
    batch_id = 1  # single logical batch for this run

    start_time = time.time()

    for i in range(num_workers):
        thread = threading.Thread(
            target=worker,
            args=(i + 1, batch_id),
            name=f"Worker-{i + 1}",
            daemon=True
        )
        thread.start()
        threads.append(thread)
        # Stagger start-up so the first API calls are not simultaneous.
        time.sleep(3)

    try:
        # Block until every queued task has been acknowledged.
        task_queue.join()
        logger.info("✅ 所有任务已完成")

        # Unblock each worker with one shutdown sentinel.
        for _ in range(num_workers):
            task_queue.put(None)

        # Give the workers a bounded time to wind down.
        for thread in threads:
            thread.join(timeout=20)

    except KeyboardInterrupt:
        logger.info("⏹️ 用户中断处理，正在清理...")
        # Drain the queue so nothing is left pending.
        while not task_queue.empty():
            try:
                task_queue.get_nowait()
                task_queue.task_done()
            except queue.Empty:
                break

    # Final statistics — the original computed end_time here but never
    # reported anything, leaving the function truncated.
    elapsed = time.time() - start_time
    with lock:
        logger.info(
            f"🎯 批处理统计: 任务 {total_tasks} 个，已处理 {processed_count} 家，"
            f"成功 {success_count} 家，失败 {failed_count} 家，耗时 {elapsed:.2f} 秒"
        )
def check_existing_data():
    """Return the set of stock codes already stored in company_detail_analysis.

    Returns:
        set[str] of analysed codes; an empty set when the database is
        unreachable or the query fails.
    """
    conn = get_db_connection()
    if conn is None:
        return set()

    try:
        with conn.cursor() as cursor:
            cursor.execute("SELECT DISTINCT stock_code FROM company_detail_analysis")
            existing_codes = set()
            for row in cursor.fetchall():
                existing_codes.add(row[0])
            logger.info(f"📊 数据库中已有 {len(existing_codes)} 家公司详细信息")
            return existing_codes
    except Exception as e:
        logger.error(f"❌ 检查现有数据失败: {e}")
        return set()
    finally:
        conn.close()


def main():
    """Entry point: report already-analysed companies, then run the
    multithreaded batch pipeline and log the total elapsed time."""
    start_time = time.time()

    try:
        # Companies already in the DB are skipped by the SQL NOT IN filter;
        # this call is informational only.
        existing_codes = check_existing_data()
        if existing_codes:
            logger.info(f"📝 已分析 {len(existing_codes)} 家公司，将跳过这些公司")

        # total_limit: how many companies to process this run;
        # batch_size: logical grouping size;
        # num_workers: thread count — tune to the API rate limit.
        batch_process_companies_multithread(
            total_limit=128,
            batch_size=10,
            num_workers=4
        )

        elapsed = time.time() - start_time
        logger.info(f"🎉 所有处理完成！总耗时: {elapsed:.2f} 秒 ({elapsed / 60:.2f} 分钟)")

    except KeyboardInterrupt:
        logger.info("⏹️ 用户中断处理")
    except Exception as e:
        logger.error(f"💥 程序执行失败: {e}")
    finally:
        logger.info("🏁 程序执行结束")


# Script entry point — run the pipeline only when executed directly.
if __name__ == '__main__':
    main()