import time
from langchain.callbacks import StreamingStdOutCallbackHandler 
from utils.upload_about import set_files_msg
from utils.do_db import set_today_visit
class MyCallbackHandler(StreamingStdOutCallbackHandler):
    """Streaming callback handler that buffers LLM tokens for consumption
    via ``generate_token()`` and, when the run finishes, records per-user
    bookkeeping (file-message id via ``set_files_msg`` and daily visit
    count via ``set_today_visit``).
    """

    def __init__(self, isStream, is_file_msg, user_id, type_name):
        super().__init__()
        print('user_id=', user_id)
        self.isStream = isStream
        self.is_file_msg = is_file_msg
        self.user_id = user_id
        self.type_name = type_name
        # Per-instance state. These were previously CLASS-level attributes,
        # so the mutable `tokens` list (and the flags) were shared across
        # every handler instance — concurrent requests leaked tokens into
        # each other's streams.
        self.cache = True    # True until the first streamed token arrives
        self.end = False     # set once the run completes (or errors)
        self.tokens = []     # FIFO buffer drained by generate_token()

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        """Buffer each newly streamed token."""
        print(f'++ {token} ++\n')
        self.cache = False  # at least one token streamed -> not a cached reply
        self.tokens.append(token)

    def on_llm_end(self, response, **kwargs) -> None:
        """Finalize the stream and record per-user bookkeeping."""
        print('llm end = \n', response)
        # NOTE(review): assumes response.generations[0][0] carries a message
        # with an `.id` — confirm against the LLM wrapper in use.
        msg_id = response.generations[0][0].message.id
        if not self.tokens and self.cache:
            # Nothing was streamed: the reply came from cache, so expose the
            # full text as a single token for the consumer.
            self.tokens = [response.generations[0][0].text]
            time.sleep(0.5)
        self.end = True
        # Associate the file message with this response id and user.
        if msg_id:
            set_files_msg(self.is_file_msg, msg_id, self.user_id)
        name = self.type_name if self.type_name == 'goods_info' else 'ai_chat'
        # Count the visit only for fresh (non-cached) requests.
        if not self.cache:
            set_today_visit(self.user_id, name)

    def on_llm_error(self, error: Exception, **kwargs) -> None:
        """Surface the error as a token and terminate the stream."""
        print('====error===\n', error)
        # Default first: the original left `err` unbound when `error` was
        # falsy, crashing on the append below with a NameError. Also guard
        # `.response` — plain exceptions have no such attribute.
        err = str(error)
        response = getattr(error, 'response', None)
        if response is not None and getattr(response, 'status_code', None) == 429:
            err = 'err 429,请求过于频繁，请1s后重试'
        self.tokens.append(err)
        # Without this, generate_token() spun forever after an error because
        # `end` was only ever set in on_llm_end.
        self.end = True

    def generate_token(self):
        """Yield buffered tokens until the run ends, then drain the buffer."""
        while not self.end:
            if self.tokens:
                token = self.tokens.pop(0)
                print(f'-- {token} --')
                yield token
            else:
                time.sleep(0.01)  # yield the CPU instead of busy-spinning
        # Drain tokens appended just before `end` flipped True; the original
        # loop could exit with undelivered tokens still in the buffer.
        while self.tokens:
            token = self.tokens.pop(0)
            print(f'-- {token} --')
            yield token


