xuqinyang commited on
Commit
4e50f6d
1 Parent(s): e7a0490

Update model.py

Browse files
Files changed (1) hide show
  1. model.py +7 -5
model.py CHANGED
@@ -5,11 +5,13 @@ from typing import Iterator
5
 
6
  model_id = 'xuqinyang/baichuan-13b-chat-ggml-int4'
7
 
8
- from huggingface_hub import snapshot_download
9
-
10
- snapshot_download(model_id, local_dir="./")
 
 
11
  from llama_cpp import Llama
12
- llm = Llama(model_path="./ggml-model-q4_0.bin", n_ctx=4096,seed=-1,n_threads=4)
13
 
14
  def run(message: str,
15
  chat_history: list[tuple[str, str]],
@@ -26,7 +28,7 @@ def run(message: str,
26
  history.append({"role": "assistant", "content": i[1]})
27
  print(history)
28
  history.append({"role": "user", "content": message})
29
- for response in llm.create_chat_completion(history,stream=True,max_tokens=-1,temperature=temperature,top_k=top_k,top_p=top_p,repeat_penalty=1.1):
30
  if "content" in response["choices"][0]["delta"]:
31
  result = result + response["choices"][0]["delta"]["content"]
32
  yield result
 
5
 
6
  model_id = 'xuqinyang/baichuan-13b-chat-ggml-int4'
7
 
8
+ from huggingface_hub import snapshot_download,hf_hub_download
9
+ # old (previous pinned revision)
10
+ #snapshot_download(model_id, local_dir="./",revision="7f71a8abefa7b2eede3f74ce0564abe5fbe6874a")
11
+ snapshot_download(model_id, local_dir="./",revision="b2414a0ceee68fe09c99ace44446cfc9a1c52b08")
12
+ hf_hub_download(repo_id="baichuan-inc/Baichuan-13B-Chat",local_dir="./", filename="tokenizer.model")
13
  from llama_cpp import Llama
14
+ llm = Llama(model_path="./ggml-model-q4_0.bin", n_ctx=4096,seed=-1)
15
 
16
  def run(message: str,
17
  chat_history: list[tuple[str, str]],
 
28
  history.append({"role": "assistant", "content": i[1]})
29
  print(history)
30
  history.append({"role": "user", "content": message})
31
+ for response in llm.create_chat_completion(history,stop=["</s>"],stream=True,max_tokens=-1,temperature=temperature,top_k=top_k,top_p=top_p,repeat_penalty=1.1):
32
  if "content" in response["choices"][0]["delta"]:
33
  result = result + response["choices"][0]["delta"]["content"]
34
  yield result