import time,re
import pandas as pd
import redis
import pickle
import logging
import hashlib
import configparser
import datetime
from sqlalchemy import create_engine, DateTime, String
import pymysql
import traceback

# NOTE(review): captured at import time, so this always holds the empty
# "NoneType: None" placeholder -- it never contains the traceback of an
# exception raised later. Call traceback.format_exc() inside an except
# block instead of logging this value.
tb_info = traceback.format_exc()
# Let SQLAlchemy's plain "mysql://" URL resolve to the pure-Python pymysql driver.
pymysql.install_as_MySQLdb()


log_format = "%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s"
date_format = "%Y-%m-%d %H:%M:%S"  # timestamps precise to the second
logging.basicConfig(level=logging.DEBUG, format=log_format, datefmt=date_format)

# Initialize the config parser and read config.ini located next to this
# file, so the script works regardless of the current working directory.
config = configparser.ConfigParser()

import os
current_dir = os.path.dirname(os.path.abspath(__file__))
config.read(current_dir+'/config.ini')


# Redis connection settings ([Redis] section of config.ini).
redis_host = config.get('Redis', 'host')
# redis_host = "192.168.249.10"

redis_port = config.getint('Redis', 'port')
redis_db = config.getint('Redis', 'db')
redis_password = config.get('Redis', 'password')
r = redis.Redis(host=redis_host, port=redis_port, db=redis_db, password=redis_password)

# MySQL connection settings ([mysql] section of config.ini).
mysql_port = config.getint('mysql', 'port')
mysql_host = config.get('mysql', 'host')
mysql_db = config.get('mysql', 'db')
import urllib.parse
# URL-encode the password so special characters don't break the DSN.
mysql_password = urllib.parse.quote(config.get('mysql', 'password'))
mysql_user = config.get('mysql', 'user')
db_url = f'mysql://{mysql_user}:{mysql_password}@{mysql_host}:{mysql_port}/{mysql_db}'

# Pooled engine; recycle connections after 60s to dodge MySQL idle timeouts.
engine = create_engine(db_url,pool_size=20,max_overflow=20,pool_recycle=60)

def set_zt(row):
    """Return 1 if the row represents a limit-up (涨停) stock, else 0.

    ChiNext (codes starting '30') and STAR Market ('68') have a 20% daily
    limit, so change >= 19.9 counts as limit-up; main boards ('60', '00')
    have a 10% limit, so change >= 9.9 counts.

    Args:
        row: mapping with at least 'code' (str) and 'change' (numeric,
            percent change) keys, e.g. a DataFrame row via ``apply(axis=1)``.

    Returns:
        int: 1 for limit-up, 0 otherwise.
    """
    code = row['code']
    change = row['change']
    # 20%-limit boards: ChiNext (30xxxx) and STAR Market (68xxxx).
    if code.startswith(('30', '68')) and change >= 19.9:
        return 1
    # 10%-limit main boards: Shanghai (60xxxx) and Shenzhen (00xxxx).
    if change >= 9.9 and code.startswith(('60', '00')):
        return 1
    # Anything else (incl. Beijing exchange 8x/4x codes) is not counted.
    return 0

def get_brand_change(query_date):
    """Compute per-industry (同花顺行业) aggregate stats and persist them.

    Merges the pre-market stock list for ``query_date`` with real-time TDX
    quotes, then per industry computes: mean percent change, mean intraday
    change ("绝对涨幅"), stock count, limit-up count, and percentile ranks.
    The result is cached in Redis (8h TTL) and appended to MySQL.

    Args:
        query_date: date string used in the Redis key ``stock_panqian:<date>``.

    Returns:
        pandas.DataFrame with the aggregated rows, or None when the market
        is closed or the required Redis keys are not populated yet.
    """
    # Skip entirely outside trading hours (flag maintained by another process).
    if r.get("is_trade_time") == b"NO":
        return None

    # Guard against absent keys: r.get() returns None and pickle.loads(None)
    # would raise TypeError (previously an unhandled crash).
    fupan_raw = r.get(f"stock_panqian:{query_date}")
    tdx_raw = r.get("real_stock_info_tdx")
    if fupan_raw is None or tdx_raw is None:
        logging.warning("redis data missing for %s, skipping this cycle", query_date)
        return None

    # NOTE(review): pickle.loads assumes the Redis producers are trusted;
    # never point this at data an external party can write.
    stock_fupan_df = pickle.loads(fupan_raw)
    real_stock_info_tdx = pickle.loads(tdx_raw)

    # Keep only the leaf industry name, e.g. "医药-化学制药" -> "化学制药".
    stock_fupan_df['所属同花顺行业'] = stock_fupan_df['所属同花顺行业'].str.split('-')
    stock_fupan_df['所属同花顺行业'] = stock_fupan_df['所属同花顺行业'].apply(lambda x: x[-1] if x else None)

    stock_fupan_df_tdx = pd.merge(stock_fupan_df, real_stock_info_tdx, left_on='股票代码', right_on="code", how='inner')
    if stock_fupan_df_tdx.empty:
        # No overlap between the two sources; avoids len()==0 division below.
        logging.warning("merge produced no rows for %s", query_date)
        return None

    # Count limit-up stocks per industry.
    stock_fupan_df_tdx['zt'] = stock_fupan_df_tdx.apply(set_zt, axis=1)
    zt_count = stock_fupan_df_tdx.groupby('所属同花顺行业')['zt'].sum().reset_index()

    # Mean percent change per industry (vs. last close).
    average_change = stock_fupan_df_tdx.groupby('所属同花顺行业')['change'].mean().reset_index()
    average_change = average_change.sort_values(by='change', ascending=False)

    # Intraday move relative to last close: (price - open) / last_close.
    stock_fupan_df_tdx["绝对涨幅"] = round((stock_fupan_df_tdx["price"] - stock_fupan_df_tdx["open"])/stock_fupan_df_tdx["last_close"]*100,2)
    average_change_dl = stock_fupan_df_tdx.groupby('所属同花顺行业')['绝对涨幅'].mean().reset_index()
    average_change_dl = average_change_dl.sort_values(by='绝对涨幅', ascending=False)

    # Number of constituent stocks per industry.
    average_change_count = stock_fupan_df_tdx.groupby('所属同花顺行业')['绝对涨幅'].count().reset_index().rename(
        columns={'绝对涨幅': '行业个数'})

    # Assemble one row per industry; inner merges are safe since every
    # frame comes from the same groupby keys.
    rdf = pd.merge(average_change, average_change_dl, on='所属同花顺行业', how='inner')
    rdf = pd.merge(rdf, average_change_count, on='所属同花顺行业', how='inner')
    rdf = rdf.round(2)
    rdf = pd.merge(rdf, zt_count, on='所属同花顺行业', how='inner')

    rdf.columns = ["name", "avr_change", "avr_change_hs", "count", "zt_count"]
    rdf['timestamp'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    # Percentile rank (0..100] of each industry by mean change, ascending.
    rdf = rdf.sort_values(by='avr_change', ascending=True)
    rdf.reset_index(drop=True, inplace=True)
    rdf['avr_change_rank'] = rdf.index + 1
    rdf['avr_change_rank'] = round(rdf['avr_change_rank'] / len(rdf) * 100, 2)

    # Same percentile rank, by intraday change.
    rdf = rdf.sort_values(by='avr_change_hs', ascending=True)
    rdf.reset_index(drop=True, inplace=True)
    rdf['avr_change_hs_rank'] = rdf.index + 1
    rdf['avr_change_hs_rank'] = round(rdf['avr_change_hs_rank'] / len(rdf) * 100, 2)

    # Cache the latest snapshot for readers; expire after one trading day.
    r.set("real_market_brand_index_tdx", pickle.dumps(rdf))
    r.expire("real_market_brand_index_tdx", 3600 * 8)

    logging.info(f"插入行业数据{len(rdf)}条")
    rdf.to_sql("real_market_brand_index_tdx", engine, if_exists='append', index=False,
              dtype={'timestamp': DateTime(), 'code': String(length=8)})

    return rdf


if __name__ == "__main__":
    # NOTE(review): query_date is fixed at startup; a process left running
    # past midnight keeps reading the previous day's key -- confirm intended.
    query_date = datetime.datetime.now().strftime('%Y%m%d')
    while True:
        try:
            start_time = time.time()
            get_brand_change(query_date)
            # Aim for roughly one refresh per second regardless of run time.
            elapsed_time = time.time() - start_time
            time.sleep(max(1 - elapsed_time, 0))
        except Exception as e:
            # Bug fix: format the traceback *inside* the handler. The old
            # module-level tb_info was captured at import time and therefore
            # always logged the empty "NoneType: None" placeholder.
            logging.error("An error occurred: %s\nTraceback info:\n%s",
                          e, traceback.format_exc())
            time.sleep(1)
