def llm_handler(client,llm='deepseek-v3:671b',dbmhandler = '',systemprompt = "You are the Administrator of a DevOps Assistant system for failure diagnosis. To solve each given issue, you should iteratively execute given tools for data analysis on telemetry files of target system. By analyzing the execution results,you should approximate the answer step-by-step."):
    """Build a cache-aware LLM-call closure bound to *client*.

    Args:
        client: OpenAI-compatible client exposing ``chat.completions.create``.
        llm: model identifier passed to the API.
        dbmhandler: optional dbm-like mapping (``.get`` returning bytes,
            item assignment) used as a persistent prompt->answer cache.
            The default ``''`` is only a placeholder; pass a real handle
            when calling the closure with ``usedbm=True``.
        systemprompt: system message prepended to every request.

    Returns:
        ``callllm(x, usedbm=False) -> str``: sends *x* as the user message
        and returns the completion text (with any ``</think>`` reasoning
        prefix stripped), consulting/populating the cache when ``usedbm``.
    """
    def callllm(x, usedbm=False):
        if usedbm:
            # Best-effort cache read: any failure (closed handle, bad key
            # type, decode error) is printed and we fall through to a live
            # API call instead of crashing.
            try:
                cached = dbmhandler.get(x, b'').decode()
                if cached:
                    # Cache hit: return directly; do NOT rewrite the entry
                    # (the original clobbered the stored bytes with str).
                    return cached
            except Exception as err:
                print(err, '\n\n\n')

        answer = client.chat.completions.create(
            model=llm,
            messages=[
                {"role": "system", "content": systemprompt},
                {"role": "user", "content": x},
            ],
            stream=False,
            # temperature=0 keeps output deterministic, which is what makes
            # caching the answer by prompt meaningful at all.
            temperature=0,
        ).choices[0].message.content

        # Reasoning models emit "<chain of thought></think>answer"; keep
        # only the text after the FIRST closing tag. maxsplit=1 so a
        # literal '</think>' inside the answer no longer truncates it.
        if '</think>' in answer:
            answer = answer.split('</think>', 1)[1]

        # Populate the cache only on a miss.
        if usedbm:
            dbmhandler[x] = answer

        return answer
    return callllm
