ffreemt committed on
Commit
7336f82
1 Parent(s): 93ccd13

Update dataclass instead of SimpleNamespace

Browse files
Files changed (1) hide show
  1. app.py +36 -15
app.py CHANGED
@@ -3,7 +3,9 @@
3
  import os
4
  import time
5
  from dataclasses import asdict, dataclass
6
- from types import SimpleNamespace
 
 
7
 
8
  import gradio as gr
9
  from about_time import about_time
@@ -12,6 +14,14 @@ from huggingface_hub import hf_hub_download
12
  from loguru import logger
13
  from mcli import predict
14
 
 
 
 
 
 
 
 
 
15
  URL = os.getenv("URL", "")
16
  MOSAICML_API_KEY = os.getenv("MOSAICML_API_KEY", "")
17
  if URL is None:
@@ -19,7 +29,15 @@ if URL is None:
19
  if MOSAICML_API_KEY is None:
20
  raise ValueError("git environment variable must be set")
21
 
22
- ns = SimpleNamespace(response="")
 
 
 
 
 
 
 
 
23
 
24
 
25
  def predict0(prompt, bot):
@@ -27,7 +45,7 @@ def predict0(prompt, bot):
27
  logger.debug(f"{prompt=}, {bot=}")
28
 
29
  ns.response = ""
30
- with about_time() as atime:
31
  try:
32
  # user_prompt = prompt
33
  generator = generate(llm, generation_config, system_prompt, prompt.strip())
@@ -37,21 +55,21 @@ def predict0(prompt, bot):
37
  buff.update(value="diggin...")
38
 
39
  for word in generator:
40
- print(word, end="", flush=True)
 
41
  response += word
42
  ns.response = response
43
  buff.update(value=response)
44
  print("")
45
  logger.debug(f"{response=}")
46
- bot[-1] = [prompt, response]
47
  except Exception as exc:
48
  logger.error(exc)
49
  response = f"{exc=}"
50
 
51
  # bot = {"inputs": [response]}
52
  _ = (
53
- f"(time elapsed: {atime.duration_human}, "
54
- f"{atime.duration/(len(prompt) + len(response)):.1f}s/char)"
55
  )
56
 
57
  # bot[-1] = [prompt, f"{response} {_}"]
@@ -144,7 +162,7 @@ class Chat:
144
  system_format = "<|im_start|>system\n{}<|im_end|>\n"
145
 
146
  def __init__(
147
- self, system: str = None, user: str = None, assistant: str = None
148
  ) -> None:
149
  if system is not None:
150
  self.set_system_prompt(system)
@@ -194,12 +212,16 @@ class Chat:
194
 
195
  return text
196
 
197
- def clear_history(self, history):
 
198
  return []
199
 
200
- def turn(self, user_input: str):
201
- self.user_turn(user_input)
202
- return self.bot_turn()
 
 
 
203
 
204
  def user_turn(self, user_input: str, history):
205
  history.append([user_input, ""])
@@ -235,8 +257,7 @@ def call_inf_server(prompt):
235
  generator = generate(
236
  llm, generation_config, system_prompt, user_prompt.strip()
237
  )
238
- # print(assistant_prefix, end=" ", flush=True)
239
- print(assistant_prefix, flush=True) # vertical
240
  for word in generator:
241
  print(word, end="", flush=True)
242
  response = word
@@ -299,7 +320,7 @@ generation_config = GenerationConfig(
299
  seed=42,
300
  reset=False, # reset history (cache)
301
  stream=True, # streaming per word/token
302
- threads=int(os.cpu_count() / 2), # adjust for your CPU
303
  stop=["<|im_end|>", "|<"],
304
  )
305
 
 
3
  import os
4
  import time
5
  from dataclasses import asdict, dataclass
6
+
7
+ # from types import SimpleNamespace
8
+ from typing import Generator
9
 
10
  import gradio as gr
11
  from about_time import about_time
 
14
  from loguru import logger
15
  from mcli import predict
16
 
17
+ # fix timezone in Linux
18
+ os.environ["TZ"] = "Asia/Shanghai"
19
+ try:
20
+ time.tzset() # type: ignore # pylint: disable=no-member
21
+ except Exception:
22
+ # Windows
23
+ logger.warning("Windows, cant run time.tzset()")
24
+
25
  URL = os.getenv("URL", "")
26
  MOSAICML_API_KEY = os.getenv("MOSAICML_API_KEY", "")
27
  if URL is None:
 
29
  if MOSAICML_API_KEY is None:
30
  raise ValueError("git environment variable must be set")
31
 
32
+
33
+ # ns = SimpleNamespace(
34
+ @dataclass
35
+ class Namespace:
36
+ response: str = ""
37
+ generator: Generator | list = []
38
+
39
+
40
+ ns = Namespace()
41
 
42
 
43
  def predict0(prompt, bot):
 
45
  logger.debug(f"{prompt=}, {bot=}")
46
 
47
  ns.response = ""
48
+ with about_time() as atime: # type: ignore
49
  try:
50
  # user_prompt = prompt
51
  generator = generate(llm, generation_config, system_prompt, prompt.strip())
 
55
  buff.update(value="diggin...")
56
 
57
  for word in generator:
58
+ # print(word, end="", flush=True)
59
+ print(word, flush=True) # vertical stream
60
  response += word
61
  ns.response = response
62
  buff.update(value=response)
63
  print("")
64
  logger.debug(f"{response=}")
 
65
  except Exception as exc:
66
  logger.error(exc)
67
  response = f"{exc=}"
68
 
69
  # bot = {"inputs": [response]}
70
  _ = (
71
+ f"(time elapsed: {atime.duration_human}, " # type: ignore
72
+ f"{atime.duration/(len(prompt) + len(response)):.1f}s/char)" # type: ignore
73
  )
74
 
75
  # bot[-1] = [prompt, f"{response} {_}"]
 
162
  system_format = "<|im_start|>system\n{}<|im_end|>\n"
163
 
164
  def __init__(
165
+ self, system: str | None = None, user: str | None = None, assistant: str | None = None
166
  ) -> None:
167
  if system is not None:
168
  self.set_system_prompt(system)
 
212
 
213
  return text
214
 
215
+ # def clear_history(self, history):
216
+ def clear_history(self):
217
  return []
218
 
219
+ # def turn(self, user_input: str):
220
+ def turn(self, user_input: str, system, history):
221
+ # self.user_turn(user_input)
222
+ self.user_turn(user_input, history)
223
+ # return self.bot_turn()
224
+ return self.bot_turn(system, history)
225
 
226
  def user_turn(self, user_input: str, history):
227
  history.append([user_input, ""])
 
257
  generator = generate(
258
  llm, generation_config, system_prompt, user_prompt.strip()
259
  )
260
+ print(assistant_prefix, end=" ", flush=True)
 
261
  for word in generator:
262
  print(word, end="", flush=True)
263
  response = word
 
320
  seed=42,
321
  reset=False, # reset history (cache)
322
  stream=True, # streaming per word/token
323
+ threads=os.cpu_count() // 2, # type: ignore # adjust for your CPU
324
  stop=["<|im_end|>", "|<"],
325
  )
326