import time,re
import pandas as pd
import redis
import pickle
import logging
import pywencai
import akshare as ak
import pandas as pd
import configparser
import datetime
from sqlalchemy import create_engine
import pymysql
import os
import traceback
from send_email import sendMessage
from 个股行业与概念涨幅分析 import get_brand_and_concept_rank



pymysql.install_as_MySQLdb()


# --- Logging setup --------------------------------------------------------
log_format = "%(asctime)s - %(levelname)s - %(process)d - %(filename)s:%(lineno)d - %(message)s"
date_format = "%Y-%m-%d %H:%M:%S"  # second precision
logging.basicConfig(level=logging.DEBUG, format=log_format, datefmt=date_format)

pid = os.getpid()
query_date = datetime.datetime.now().strftime('%Y%m%d')

# Per-process log file; create the directory first so FileHandler cannot
# fail on a fresh checkout where log/shouban/ does not exist yet.
log_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'log/shouban')
os.makedirs(log_dir, exist_ok=True)
log_file_path = os.path.join(log_dir, f'{pid}.log')

# Attach a file handler to the root logger so all module logs are persisted.
file_handler = logging.FileHandler(log_file_path)
file_handler.setFormatter(logging.Formatter(log_format, date_format))
logging.getLogger().addHandler(file_handler)

# --- Configuration --------------------------------------------------------
config = configparser.ConfigParser()
current_dir = os.path.dirname(os.path.abspath(__file__))
config.read(current_dir + '/config.ini', encoding='utf-8')


# --- Redis connection -----------------------------------------------------
redis_host = config.get('Redis', 'host')
redis_port = config.getint('Redis', 'port')
redis_db = config.getint('Redis', 'db')
redis_password = config.get('Redis', 'password')
r = redis.Redis(host=redis_host, port=redis_port, db=redis_db, password=redis_password)

# --- MySQL (SQLAlchemy) connection ----------------------------------------
mysql_port = config.getint('mysql', 'port')
mysql_host = config.get('mysql', 'host')
mysql_db = config.get('mysql', 'db')
import urllib.parse
# URL-quote the password so special characters do not break the DSN.
mysql_password = urllib.parse.quote(config.get('mysql', 'password'))
mysql_user = config.get('mysql', 'user')
db_url = f'mysql://{mysql_user}:{mysql_password}@{mysql_host}:{mysql_port}/{mysql_db}'

# pool_recycle=60 drops idle connections before MySQL's wait_timeout kills them.
engine = create_engine(db_url, pool_size=20, max_overflow=20, pool_recycle=60)

def get_pre_trade_date_n(trade_date, n):
    """Return the n-th trading day strictly before *trade_date* as 'YYYYMMDD'.

    The trading calendar comes from akshare's Sina trade-date history; n=1
    is the most recent prior trading day.
    """
    calendar = ak.tool_trade_date_hist_sina()
    cutoff = pd.Timestamp(trade_date).date()
    earlier = calendar.loc[calendar["trade_date"] < cutoff, "trade_date"]
    earlier = earlier.sort_values(ascending=False)
    return earlier.iloc[n - 1].strftime("%Y%m%d")

def stock_rps(query_date):
    """Cache every stock's maximum 20-day RPS over the last 20 trading days.

    Queries the `stock_rps` table between the 20th prior trading day and
    *query_date* (both inclusive), then stores the pickled DataFrame
    (columns: 股票简称, rps) in Redis under the key "stock_rps".
    """
    pre_20_day = get_pre_trade_date_n(query_date, 20)
    # NOTE(review): dates are generated internally by this module, but the
    # interpolated SQL would be injectable if query_date ever came from
    # user input — consider a parameterized query.
    sql = f'''
        SELECT 股票简称, MAX(`rps20`) AS rps
        FROM stock_rps
        WHERE trade_date >= '{pre_20_day}' and trade_date<='{query_date}'
        GROUP BY 股票简称;
        '''
    df_rps = pd.read_sql(sql, engine)
    r.set("stock_rps", pickle.dumps(df_rps))

def _cache_query_result(sql, redis_key):
    """Run *sql* against the engine; pickle non-empty results into *redis_key*.

    Echoes the SQL to stdout (matching the original behavior) and returns
    the DataFrame so callers can reuse it; empty results leave the Redis
    key untouched so consumers keep seeing the last snapshot.
    """
    print(sql)
    df = pd.read_sql(sql, engine)
    if len(df) > 0:
        r.set(redis_key, pickle.dumps(df))
    return df


def refresh_tdx(query_time):
    """Refresh Redis snapshots of the TDX market tables for *query_time*.

    Caches per-stock quotes, concept-index rows and sector-index rows, and
    pushes each stock name from the dxjl table onto the 'bjzt_channel'
    Redis list for downstream consumers.
    """
    _cache_query_result(
        f'''
        select * from stock.real_market_info_tdx_h where `timestamp` ='{query_time}'
        ''',
        "real_stock_info_tdx",
    )

    _cache_query_result(
        f'''
        select * from stock.real_market_concept_index_tdx_h rmcith where `timestamp` ='{query_time}'
        ''',
        "real_market_concept_index_tdx",
    )

    _cache_query_result(
        f'''
        select * from stock.real_market_brand_index_tdx_h rmcith where `timestamp` ='{query_time}'
        ''',
        "real_market_brand_index_tdx",
    )

    # The dxjl table signals limit-up events: fan each stock name out to the
    # 'bjzt_channel' list instead of caching the whole frame.
    sql = f'''
        select * from stock.real_market_info_dxjl_h  where `timestamp` ='{query_time}'
        '''
    print(sql)
    bjzj_df = pd.read_sql(sql, engine)
    if len(bjzj_df) > 0:
        print(bjzj_df)
        for stock_name in list(bjzj_df["stock_name"].values):
            r.lpush('bjzt_channel', stock_name)


def get_time(start_time_str, end_time_str, step_seconds=1):
    """Return every timestamp between start and end (inclusive) at a fixed step.

    Args:
        start_time_str: range start, formatted "%Y-%m-%d %H:%M:%S".
        end_time_str: range end (inclusive), same format.
        step_seconds: spacing between consecutive timestamps; defaults to
            one second, matching the original hard-coded behavior.

    Returns:
        list[str]: formatted timestamps; empty when start is after end.
    """
    from datetime import datetime, timedelta

    current = datetime.strptime(start_time_str, "%Y-%m-%d %H:%M:%S")
    end = datetime.strptime(end_time_str, "%Y-%m-%d %H:%M:%S")
    step = timedelta(seconds=step_seconds)

    times_list = []
    while current <= end:
        times_list.append(current.strftime("%Y-%m-%d %H:%M:%S"))
        current += step
    return times_list

if __name__ == "__main__":
    start_time = "2024-04-26 09:34:45"
    end_time = "2024-04-26 09:35:45"

    datetime_obj = datetime.datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
    formatted_date = datetime_obj.strftime("%Y%m%d")


    stock_rps(formatted_date)

    time_list =get_time(start_time,end_time)
    for v in time_list:
        start_time = time.time()
        refresh_tdx(v)
        end_time = time.time()
        elapsed_time = end_time - start_time
        sleep_time = max(1 - elapsed_time, 0)
        time.sleep(sleep_time)



# r.publish('stock_buy', "002284")
