Isaak-Carter committed
Commit 17025ed
1 Parent(s): 57e0701

Update app.py

Files changed (1)
  1. app.py +33 -43
app.py CHANGED
@@ -1,20 +1,22 @@
 import os
-os.system('pip install dashscope')
+os.system('pip install --upgrade pip')
+os.system('pip install gradio transformers torch')
+
 import gradio as gr
-from http import HTTPStatus
-import dashscope
-from dashscope import Generation
-from dashscope.api_entities.dashscope_response import Role
 from typing import List, Optional, Tuple, Dict
-from urllib.error import HTTPError
-default_system = 'You are JOSIE which is an acronym for "Just an Outstandingly Smart Intelligent Entity", a private and super-intelligent AI assistant, created by Gökdeniz Gülmez.'
+from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer
+import torch
 
-YOUR_API_TOKEN = os.getenv('YOUR_API_TOKEN')
-dashscope.api_key = YOUR_API_TOKEN
+default_system = 'You are JOSIE which is an acronym for "Just an Outstandingly Smart Intelligent Entity", a private and super-intelligent AI assistant, created by Gökdeniz Gülmez.'
 
 History = List[Tuple[str, str]]
 Messages = List[Dict[str, str]]
 
+# Load model and tokenizer
+model_name = 'Isaak-Carter/J.O.S.I.E.3-Beta12-7B-slerp'
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+model = AutoModelForCausalLM.from_pretrained(model_name)
+
 def clear_session() -> History:
     return '', []
 
@@ -24,55 +26,43 @@ def modify_system_session(system: str) -> str:
     return system, system, []
 
 def history_to_messages(history: History, system: str) -> Messages:
-    messages = [{'role': Role.SYSTEM, 'content': system}]
+    messages = [{'role': 'system', 'content': system}]
     for h in history:
-        messages.append({'role': Role.USER, 'content': h[0]})
-        messages.append({'role': Role.ASSISTANT, 'content': h[1]})
+        messages.append({'role': 'user', 'content': h[0]})
+        messages.append({'role': 'assistant', 'content': h[1]})
     return messages
 
-
 def messages_to_history(messages: Messages) -> Tuple[str, History]:
-    assert messages[0]['role'] == Role.SYSTEM
+    assert messages[0]['role'] == 'system'
     system = messages[0]['content']
     history = []
    for q, r in zip(messages[1::2], messages[2::2]):
         history.append([q['content'], r['content']])
     return system, history
 
+def generate_response(messages: Messages) -> str:
+    prompt = "\n".join([f"{msg['role']}: {msg['content']}" for msg in messages])
+    inputs = tokenizer(prompt, return_tensors='pt')
+    outputs = model.generate(inputs['input_ids'], max_length=512, pad_token_id=tokenizer.eos_token_id)
+    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+    return response.split('assistant:')[-1].strip()
 
-def model_chat(query: Optional[str], history: Optional[History], system: str
-               ) -> Tuple[str, str, History]:
+def model_chat(query: Optional[str], history: Optional[History], system: str) -> Tuple[str, str, History]:
     if query is None:
         query = ''
     if history is None:
         history = []
     messages = history_to_messages(history, system)
-    messages.append({'role': Role.USER, 'content': query})
-    gen = Generation.call(
-        model='J.O.S.I.E.3-Beta12-7B-slerp',
-        messages=messages,
-        result_format='message',
-        stream=True
-    )
-    for response in gen:
-        if response.status_code == HTTPStatus.OK:
-            role = response.output.choices[0].message.role
-            response = response.output.choices[0].message.content
-            system, history = messages_to_history(messages + [{'role': role, 'content': response}])
-            yield '', history, system
-        else:
-            raise ValueError('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
-                response.request_id, response.status_code,
-                response.code, response.message
-            ))
-
+    messages.append({'role': 'user', 'content': query})
+    response = generate_response(messages)
+    messages.append({'role': 'assistant', 'content': response})
+    system, history = messages_to_history(messages)
+    return '', history, system
 
 with gr.Blocks() as demo:
     gr.Markdown("""<center><font size=8>J.O.S.I.E.3-Beta12 Preview👾</center>""")
 
     with gr.Row():
-        # with gr.Column(scale=3):
-        #     system_input = gr.Textbox(value=default_system, lines=1, label='System')
         with gr.Column(scale=1):
             modify_system = gr.Button("🛠️ Set system prompt and clear history", scale=2)
             system_state = gr.Textbox(value=default_system, visible=False)
@@ -81,18 +71,18 @@ with gr.Blocks() as demo:
 
     with gr.Row():
         clear_history = gr.Button("🧹 Clear history")
-        sumbit = gr.Button("🚀 Send")
+        submit = gr.Button("🚀 Send")
 
-    sumbit.click(model_chat,
+    submit.click(model_chat,
                  inputs=[textbox, chatbot, system_state],
-                 outputs=[textbox, chatbot, system_input],
-                 concurrency_limit = 5)
+                 outputs=[textbox, chatbot, system_state],
+                 concurrency_limit=5)
     clear_history.click(fn=clear_session,
                         inputs=[],
                         outputs=[textbox, chatbot])
     modify_system.click(fn=modify_system_session,
-                        inputs=[system_input],
-                        outputs=[system_state, system_input, chatbot])
+                        inputs=[system_state],
+                        outputs=[system_state, system_state, chatbot])
 
 demo.queue(api_open=False)
 demo.launch(max_threads=5)
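
Note: the new generate_response() builds the prompt by joining "role: content" strings and then splits the decoded output on 'assistant:', which is fragile. Assuming the tokenizer for Isaak-Carter/J.O.S.I.E.3-Beta12-7B-slerp ships a chat template (not something this diff confirms), a minimal sketch would render the message list with apply_chat_template and decode only the newly generated tokens:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = 'Isaak-Carter/J.O.S.I.E.3-Beta12-7B-slerp'
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def generate_response(messages):
    # Render [{'role': ..., 'content': ...}] with the model's own chat template
    # (assumes tokenizer.chat_template is set) and append the assistant turn prompt.
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors='pt'
    ).to(model.device)
    with torch.no_grad():
        output_ids = model.generate(
            input_ids,
            max_new_tokens=512,                  # bound the reply, not the total sequence length
            pad_token_id=tokenizer.eos_token_id,
        )
    # Decode only the tokens produced after the prompt, so no split on 'assistant:' is needed.
    new_tokens = output_ids[0][input_ids.shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True).strip()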
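
Note: the commit imports TextStreamer but never uses it, and the streamed replies of the old dashscope path (stream=True plus yield) are gone; model_chat now blocks until the full reply is generated. A sketch of restoring streaming with transformers' TextIteratorStreamer, reusing model, tokenizer and history_to_messages from app.py; the threading pattern below is the standard transformers streaming recipe, not code from this commit:

from threading import Thread
from transformers import TextIteratorStreamer

def model_chat_stream(query, history, system):
    # Same input normalisation as model_chat().
    query = query or ''
    history = history or []
    messages = history_to_messages(history, system)
    messages.append({'role': 'user', 'content': query})

    # Prompt built the same way the commit's generate_response() does.
    prompt = "\n".join(f"{m['role']}: {m['content']}" for m in messages)
    inputs = tokenizer(prompt, return_tensors='pt').to(model.device)

    # generate() runs in a background thread and pushes decoded text into the streamer.
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    Thread(target=model.generate,
           kwargs=dict(**inputs,
                       max_new_tokens=512,
                       pad_token_id=tokenizer.eos_token_id,
                       streamer=streamer)).start()

    partial = ''
    for chunk in streamer:                       # yields text pieces as they are generated
        partial += chunk
        yield '', history + [[query, partial]], system

Wired to submit.click() in place of model_chat, Gradio treats the generator as a streaming handler and updates the chatbot as chunks arrive.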
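
Note: the model is loaded with the from_pretrained() defaults, i.e. float32 weights on CPU, which is very slow for a 7B checkpoint. A hedged sketch of loading in half precision and letting accelerate place the weights; device_map='auto' assumes the accelerate package is installed, which the pip install line in this commit does not add:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = 'Isaak-Carter/J.O.S.I.E.3-Beta12-7B-slerp'
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
    device_map='auto',   # requires accelerate; maps to CPU when no GPU is present
)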