import json
import os
import time
from typing import Union

import openai
import yaml
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_openai import OpenAI
from transformers import AutoTokenizer


class ChatBot:
    """Chat wrapper around an OpenAI-compatible completion endpoint."""

    def __init__(self, model_name, url):
        self.model_name = model_name
        # The tokenizer is only used to render the chat template into a prompt string.
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.url = url
        self.model = self._set_model()
        # chat_history: list of (user, assistant) tuples for the UI.
        self.chat_history = []
        # history: flat list [system, user, assistant, user, assistant, ...].
        self.history = self._set_initial_history()

    def _set_model(self):
        # The endpoint only needs to be OpenAI-compatible, so the API key is a dummy value.
        return OpenAI(
            model_name=self.model_name,
            openai_api_base=self.url,
            openai_api_key="EMPTY",
            temperature=0.1,
            top_p=0.95,
            max_tokens=1024,
            max_retries=3,
        )

    def _set_initial_history(self):
        # The first history entry is always the system prompt.
        return [
            "You are an exceptionally intelligent coding assistant developed by DLI lab that "
            "consistently delivers accurate and reliable responses to user instructions. If "
            "somebody asks you who are you, answer as 'AI programming assistant based on DLI Lab'.\n\n"
        ]

    def set_model_input(self, input_text=None):
        """Convert the flat history into a list of role/content messages."""
        model_input = []
        if input_text is not None:
            self.history.append(input_text)
        # The first history entry is the system prompt.
        model_input.append({
            "role": "system",
            "content": self.history[0]
        })
        # Remaining entries alternate user / assistant turns.
        chat_history = self.history[1:]
        for i, turn in enumerate(chat_history):
            if i % 2 == 0:
                model_input.append({
                    "role": "user",
                    "content": turn
                })
            else:
                model_input.append({
                    "role": "assistant",
                    "content": turn
                })
        return model_input

    def chat(self, chat_history, input_text):
        """Append a user turn, query the model, and return the updated UI history."""
        self.chat_history = chat_history
        model_input_list = self.set_model_input(input_text)
        # Render the message list into a single prompt string using the model's chat template.
        model_input = self.tokenizer.apply_chat_template(
            model_input_list, tokenize=False, add_generation_prompt=True
        )
        response = self.model.invoke(model_input)
        if response is not None:
            self.history.append(response)
            self.chat_history = self.chat_history + [(input_text, response)]
        return self.chat_history

    def regenerate(self, chat_history, input_text):
        """Drop the last assistant reply and generate a new one for the same user turn."""
        self.chat_history = chat_history[:-1]
        # Keep the user's last message in the flat history and discard only the previous
        # reply, so the regenerated answer still sees the question it is responding to.
        self.history = self.history[:-1]
        model_input_list = self.set_model_input(None)
        model_input = self.tokenizer.apply_chat_template(
            model_input_list, tokenize=False, add_generation_prompt=True
        )
        response = self.model.invoke(model_input)
        if response is not None:
            self.history.append(response)
            self.chat_history = self.chat_history + [(input_text, response)]
        return self.chat_history

    def clear_chat(self):
        """Reset both histories to their initial state."""
        self.chat_history = []
        self.history = self._set_initial_history()
        return self.chat_history
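

# ---------------------------------------------------------------------------
# Example usage (illustrative sketch only). The model name and server URL
# below are placeholder assumptions, not part of this file's configuration;
# point them at whatever model your OpenAI-compatible endpoint (e.g. vLLM)
# is actually serving.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    bot = ChatBot(
        model_name="deepseek-ai/deepseek-coder-6.7b-instruct",  # placeholder model
        url="http://localhost:8000/v1",                         # placeholder endpoint
    )
    history = bot.chat([], "Write a Python function that reverses a string.")
    print(history[-1][1])                               # assistant's reply
    history = bot.regenerate(history, history[-1][0])   # retry the last turn
    bot.clear_chat()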