gathnex committed on
Commit
4e06fbd
1 Parent(s): 5dab16f

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +5 -11
main.py CHANGED
@@ -2,20 +2,22 @@ from ctransformers import AutoModelForCausalLM
2
  from fastapi import FastAPI, Form
3
  from pydantic import BaseModel
4
 
5
-
6
  llm = AutoModelForCausalLM.from_pretrained("zephyr-7b-beta.Q4_K_S.gguf",
7
  model_type='mistral',
8
  max_new_tokens = 1096,
9
- threads = 1,
10
  )
11
 
12
 
13
  #Pydantic object
14
  class validation(BaseModel):
15
  prompt: str
 
16
  #Fast API
17
  app = FastAPI()
18
- #Function contains translator API, RAG API, OpenAI API
 
19
  @app.post("/llm_on_cpu")
20
  async def stream(item: validation):
21
  system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
@@ -23,11 +25,3 @@ async def stream(item: validation):
23
  user, assistant = "<|user|>", "<|assistant|>"
24
  prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
25
  return llm(prompt)
26
-
27
- #def stream(user_prompt)
28
- # system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
29
- # E_INST = "</s>"
30
- # user, assistant = "<|user|>", "<|assistant|>"
31
- # prompt = f"{system_prompt}{E_INST}\n{user}\n{user_prompt.strip()}{E_INST}\n{assistant}\n"
32
- # for text in llm(prompt, stream=True):
33
- # print(text, end="", flush=True)
 
2
  from fastapi import FastAPI, Form
3
  from pydantic import BaseModel
4
 
5
# Model loading: quantized Zephyr-7B (GGUF) served on CPU via ctransformers.
llm = AutoModelForCausalLM.from_pretrained(
    "zephyr-7b-beta.Q4_K_S.gguf",
    model_type="mistral",
    max_new_tokens=1096,
    threads=3,
)
11
 
12
 
13
# Pydantic object
class validation(BaseModel):
    """Request body schema: a single free-text prompt for the model."""
    prompt: str
16
+
17
# FastAPI application instance serving the LLM endpoint.
app = FastAPI()
19
+
20
# Zephyr completion endpoint (blocking generation on CPU).
@app.post("/llm_on_cpu")
async def stream(item: validation):
    """Wrap the request prompt in the Zephyr chat template and return the completion.

    NOTE(review): despite the name, this returns the full completion in one
    response; it does not stream tokens.
    """
    instruction = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
    # End-of-sequence separator used by the Zephyr chat template.
    E_INST = "</s>"
    user_tag, assistant_tag = "<|user|>", "<|assistant|>"
    chat_prompt = f"{instruction}{E_INST}\n{user_tag}\n{item.prompt.strip()}{E_INST}\n{assistant_tag}\n"
    return llm(chat_prompt)