import time,re
import pandas as pd
import redis
import pickle
import logging
import hashlib
import configparser
import datetime
from sqlalchemy import create_engine, DateTime, String
import pymysql


# Register pymysql as the MySQLdb driver so SQLAlchemy's "mysql://" URL works.
pymysql.install_as_MySQLdb()


log_format = "%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s"
date_format = "%Y-%m-%d %H:%M:%S"  # timestamps accurate to the second
logging.basicConfig(level=logging.DEBUG, format=log_format, datefmt=date_format)

# Initialise the configuration parser
config = configparser.ConfigParser()

import os
# Read config.ini sitting next to this script, regardless of the working directory.
current_dir = os.path.dirname(os.path.abspath(__file__))
config.read(current_dir+'/config.ini')


# Redis connection settings
redis_host = config.get('Redis', 'host')
# redis_host = "192.168.249.10"

redis_port = config.getint('Redis', 'port')
redis_db = config.getint('Redis', 'db')
redis_password = config.get('Redis', 'password')
r = redis.Redis(host=redis_host, port=redis_port, db=redis_db, password=redis_password)

# MySQL connection settings
mysql_port = config.getint('mysql', 'port')
mysql_host = config.get('mysql', 'host')
mysql_db = config.get('mysql', 'db')
import urllib.parse
# URL-quote the password so special characters cannot break the DSN.
mysql_password = urllib.parse.quote(config.get('mysql', 'password'))
mysql_user = config.get('mysql', 'user')
db_url = f'mysql://{mysql_user}:{mysql_password}@{mysql_host}:{mysql_port}/{mysql_db}'

# Pooled engine; pool_recycle=60 drops idle connections before MySQL's
# server-side timeout can leave stale handles in the pool.
engine = create_engine(db_url,pool_size=20,max_overflow=20,pool_recycle=60)


def calculate_md5(row):
    """Fingerprint a DataFrame row: MD5 hex digest of its stringified cells, concatenated in column order."""
    parts = [str(value) for value in row]
    return hashlib.md5(''.join(parts).encode('utf-8')).hexdigest()

def get_md5_summed_column(df):
    """Add fingerprint columns to *df* in place and return it.

    Columns added: 'md5' (per-row digest), 'shifted_1'/'shifted_2'
    (digest lagged by one and two rows), and 'summed' (the three
    concatenated, missing lags treated as empty strings).
    """
    digests = df.apply(calculate_md5, axis=1)
    lag_one = digests.shift(1)
    lag_two = digests.shift(2)
    df['md5'] = digests
    df['shifted_1'] = lag_one
    df['shifted_2'] = lag_two
    df['summed'] = digests + lag_one.fillna('') + lag_two.fillna('')
    return df

def get_new_entries(df_old, df_new):
    """Return the rows of *df_new* that come after the end of *df_old*.

    Both frames get fingerprint columns appended via get_md5_summed_column
    (they are mutated; callers pass copies). The last row of df_old is
    located inside df_new by its rolling 3-row fingerprint ('summed');
    everything after that position is considered new.

    Returns an empty DataFrame when df_old is empty or when no overlap
    between the two frames is found.
    """
    # Guard: an empty df_old has no anchor row; previously this path
    # raised IndexError from iloc[-1] and relied on the caller's broad
    # except to swallow it.
    if df_old.empty:
        return pd.DataFrame()

    df_old = get_md5_summed_column(df_old)
    df_new = get_md5_summed_column(df_new)

    last_summed_value = df_old.iloc[-1]['summed']
    matches = df_new.index[df_new['summed'] == last_summed_value]

    if matches.empty:
        return pd.DataFrame()

    # .loc slicing is label-based; df_new is freshly built with a
    # RangeIndex, so label + 1 is the row right after the match.
    df_new = df_new.loc[matches[0] + 1:]
    return df_new[['trade_time', 'stock_name', 'trade_type', 'remark']]

# Previous snapshot of stock-level entries; the loop diffs each new message
# against this to find entries it has not seen yet.
pre_text_value_df= pd.DataFrame()
# Previous snapshot of sector-level entries (same role, separate stream).
pre_bankuai_text_value_df= pd.DataFrame()


# def get_increase_item():
    
        
# Main consumer loop: pop raw ticker messages from Redis, parse them into
# records, diff against the previous snapshot, and fan new entries out to
# Redis queues and the MySQL table.
while True:
    try:
        # Block until the next raw message arrives on the queue.
        queue, message = r.brpop("dxjl_queue_ori")
        res = message.decode('gbk')
        res = res.replace(" ", "")
        # print(res)
        # res = res.replace("短线精灵","")

        # Each record: HH:MM:SS + name (1-30 chars, lazy) + event type
        # (fixed alternation) + remark, terminated by the next timestamp
        # or end of string (lookahead keeps the next record intact).
        pattern = re.compile(
        r'(\d{2}:\d{2}:\d{2})'
        r'(.{1,30}?)'
        r'(特大主动买|涨停价挂买|打开跌停板|特大被动买|特大被动卖|特大挂买|特大挂卖|封涨停板|封跌停板|强势封跌停|特大主动卖|笼子触涨停|急速拉升|强势封涨停|逼近涨停|打开涨停板|区间放量涨|有个股涨停|有个股跌停|急速上涨|主力急入)'
        r'(.+?)(?=\d{2}:\d{2}:\d{2}|$)')

        current_dict = pattern.findall(res)
        currnt_df = pd.DataFrame(current_dict, columns=['trade_time', 'stock_name', 'trade_type', 'remark'])


        # Normalize Unicode compatibility forms (e.g. full-width characters)
        # so later equality/containment checks behave consistently.
        currnt_df['trade_type'] = currnt_df['trade_type'].str.normalize("NFKD")
        currnt_df['stock_name'] = currnt_df['stock_name'].str.normalize("NFKD")
        currnt_df['trade_time'] = currnt_df['trade_time'].str.normalize("NFKD")
        currnt_df['remark'] = currnt_df['remark'].str.normalize("NFKD")

        # Forward limit-board events to the hot-stock queue as JSON rows.
        # NOTE(review): these are pushed before de-duplication, so repeats
        # across messages can reach 'huoyue_stock_queue' — confirm intended.
        df_huoyue = currnt_df[currnt_df["trade_type"].isin(['封涨停板', '封跌停板', '强势封涨停', '强势封跌停', '逼近涨停'])]
        for _, row in df_huoyue.iterrows():
            json_data = row.to_json()
            r.rpush('huoyue_stock_queue', json_data)
                

        if len(currnt_df)==0 or currnt_df.empty:
            continue

        # Classify the whole message as stock-level or sector-level based on
        # the event types it contains; each class keeps its own snapshot.
        trade_type = "个股"
        if currnt_df['trade_type'].isin(['有个股涨停', '有个股跌停', '急速上涨', '主力急入']).any():
            trade_type = "板块"

        # First message of each class seeds its snapshot, so the diff
        # below yields no "new" rows for it.
        if pre_text_value_df.empty and trade_type == "个股":
            pre_text_value_df = currnt_df.copy()
            
        if pre_bankuai_text_value_df.empty and trade_type == "板块":
            pre_bankuai_text_value_df = currnt_df.copy()
        
        
            
        # Diff the current message against the matching snapshot; copies are
        # passed because get_new_entries mutates its arguments.
        increase_df = pd.DataFrame()
        try:
            if trade_type == "个股":
                increase_df = get_new_entries(pre_text_value_df.copy(), currnt_df.copy())
                
            else:
                increase_df = get_new_entries(pre_bankuai_text_value_df.copy(), currnt_df.copy())
                
        except Exception as e:
            logging.warning(e)
        
        # if len(increase_df)==0 or increase_df.empty:
        #     continue
        
        # Filter out records whose name matches any excluded sector/concept keyword.
        if len(increase_df)>0:
            exclude_keywords = ["股权转让","安徽","传媒","密集调研","四川","共同富裕示范区","特色小镇","方舱医院","参股保险","租售同权","吉林","抗原检测","昨日ST首板股表现","ST板块","太赫兹","饰品","民营医院",
                                "恒大概念","俄乌冲突概念","其他电子","粤港澳大湾区","基础建设","宁德时代概念","天津自贸区","深圳","福建","重庆","浙江","江苏","北京","国企改革","高市盈率","高市净率","C2M概念","新冠检测",
                                "参股银行","壳资源" ,"专精特新","2021年送转填权概念","幽门螺杆菌概念","体外诊断","电机","工业4.0","软件开发","安防","统一大市场","长三角一体化","智慧城市","机床工具","军民融合",
                                "参股新三板","陕西","湖北","蚂蚁金服概念","海南","乘用车","贵州","创投","土地流转","快手概念","口罩","其他专用设备","啤酒","啤酒概念","消毒剂","专用设备","高端装备","元宇宙",
                                "电子身份证","互联网保险","其他黑色家电","黑色家电","商用载货车","江西","供应链金融","零售","汽车热管理","碳中和","影视院线","百度概念","大数据","大数据","重组蛋白","独角兽概念",
                                "昨日ST连板股表现","广东(除深圳)","广告营销","网络直播","物业管理","食品安全","超清视频","商业物业经营","燃气","计算机应用","智能物流","网络游戏","物联网","通用设备","边缘计算",
                                "智能制造","车联网","室外经济","露营经济","三季报预增","MiniLED","MicroLED概念","半导体及元件","分立器件","工业金属","第三代半导体","建筑节能","新疆振兴","石油加工","集成电路概念",
                                "非金属材料","医疗器械","家庭医生","智能交通","智慧政务","腾讯概念","腾讯概念","电力设备","新能源汽车","电子化学品","智能医疗","物流","工业互联网","阿里巴巴概念","社区团购",
                                "噪声防治","新能源发电","云计算","数据中心","国资云","光学光电子","节能照明","科创次新股","半导体材料","乡村振兴","供销社","拼多多概念","基金重仓股","自动化设备","互联网彩票","有线电视网络",
                                "数字乡村","其他金属新材料","金属新材料","印制电路板","胎压监测","智能座舱","新型工业化","通信终端及配件","广西","数据确权","电气自控设备","通信设备","其他交运设备","传感器","两轮车",
                                "国家大基金持股","天津","工程咨询服务","非汽车交运","通用航空","湖南","期货概念","数字孪生","IT服务","电子商务","包装印刷","包装","工控设备","光伏建筑一体化","云办公","东数西算(算力)",
                                "人民币贬值受益","PCB概念","新冠治疗","新冠特效药","肝炎概念","手机游戏","语音技术","煤炭开采","成飞概念","印刷","电子纸","摘帽","消费电子零部件及组装","消费电子","F5G概念","新材料概念",
                                "通信线缆及配套","雄安新区","美容护理","毛发医疗","ChatGPT概念","机器人","区块链","特斯拉","汽车电子","超级电容","农村电商","京津冀一体化","人脸识别","柔性屏","国产操作系统",
                                "标普道琼斯A股", "比亚迪概念","最近多板","沪深300", "减持新规(不可减持)","微盘股","大盘股","融资融券","中证500","上证380","'ST板块","社保新进","山东","同花顺漂亮100","证金持股",
                                "近期新高","MSCI概念","沪股通","百日新高","近期强势","综合","深股通","同花顺情绪指数","西藏","核污染防治","冷链物流","杭州亚运会","浦东","服装","网红经济","基因测序","健康中国"]  # add every keyword that should be excluded
            pattern = '|'.join(f'(?:{re.escape(keyword)})' for keyword in exclude_keywords)
            increase_df = increase_df[~increase_df['stock_name'].str.contains(pattern)]
        
        # Log what survived the filter and roll the snapshot forward so the
        # next message is diffed against this one.
        if trade_type == "个股":
            if len(increase_df)>0:
                logging.info(f"新增个股{len(increase_df)}条记录: {increase_df['stock_name'].values}")
            pre_text_value_df = currnt_df.copy()
        else:
            if len(increase_df)>0:
                logging.info(f"新增板块{len(increase_df)}条记录:{increase_df['stock_name'].values}")
            pre_bankuai_text_value_df = currnt_df.copy()
    
        
        if len(increase_df)==0 or increase_df.empty:
            continue
        
        

        
        
        # Stamp with the local wall-clock time of processing (not trade_time).
        increase_df['timestamp']  = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

        # Push "approaching limit-up" stock names to the alert channels.
        bjzj_df = increase_df[increase_df["trade_type"].isin(["逼近涨停","笼子触涨停"])]
        if len(bjzj_df)>0:
            stock_list = list(bjzj_df["stock_name"].values)
            for v in stock_list:
                r.lpush('bjzt_channel', v)
                r.lpush('bjzt_ani_channel', v)

        # Persist the new rows; a failed insert is logged but must not kill
        # the consumer loop.
        try:
            increase_df.to_sql("real_market_info_dxjl", engine, if_exists='append', index=False, dtype={'timestamp': DateTime()})
        except Exception as e:
            logging.warning(e)
        
        
    except redis.ConnectionError:
        # Handle connection errors: rebuild the client and retry shortly.
        logging.info("Connection lost. Reconnecting...")
        r = redis.Redis(host=redis_host, port=redis_port, db=redis_db, password=redis_password)
        time.sleep(1)
    except KeyboardInterrupt:
        # Handle user interrupt: exit the loop cleanly.
        logging.info("Interrupted by user")
        break