marion Halgrain committed on
Commit
fd21c90
1 Parent(s): ee1e2cc

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +10 -12
main.py CHANGED
@@ -1,24 +1,22 @@
1
- from ctransformers import AutoModelForCausalLM
2
  from fastapi import FastAPI
3
  from pydantic import BaseModel
4
 
 
 
5
 
6
- llm = AutoModelForCausalLM.from_pretrained("phi-2.Q5_K_M.gguf",
7
- model_type='pi',
8
- max_new_tokens = 1096,
9
- threads = 3,
10
- )
11
-
12
- #Pydantic object
13
- class validation(BaseModel):
14
  prompt: str
15
- #Fast API
 
16
  app = FastAPI()
17
 
18
  @app.post("/llm_on_cpu")
19
- async def stream(item: validation):
20
  system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
21
  E_INST = "</s>"
22
  user, assistant = "<|user|>", "<|assistant|>"
23
  prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt}{E_INST}\n{assistant}\n"
24
- return llm(prompt)
 
 
1
from fastapi import FastAPI
from llama_cpp import Llama
from pydantic import BaseModel

# Load the GGUF model once at import time so every request reuses it.
# NOTE: llama-cpp-python's entry class is `Llama` — there is no `LlamaModel`,
# so the previous import crashed at startup. Thread count is a load-time
# option in this library (it cannot be passed per generation call); 3 threads
# matches the value the endpoint previously tried to pass at call time.
llm = Llama(model_path="phi-2.Q5_K_M.gguf", n_threads=3)
7
 
8
# Pydantic request schema
class Validation(BaseModel):
    """Request body for the /llm_on_cpu endpoint: the raw user prompt."""

    prompt: str
11
+
12
# FastAPI application that exposes the LLM over HTTP.
app = FastAPI()
14
 
15
@app.post("/llm_on_cpu")
async def stream(item: Validation):
    """Generate a completion for the submitted prompt and return its text.

    The user prompt is wrapped in the phi-2 chat template
    (<|user|>/<|assistant|> markers separated by </s>) before generation.
    """
    system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
    E_INST = "</s>"
    user, assistant = "<|user|>", "<|assistant|>"
    prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt}{E_INST}\n{assistant}\n"
    # llama-cpp-python does text completion via __call__/create_completion with
    # `max_tokens`; the previous `llm.generate(prompt, max_new_tokens=...,
    # num_threads=...)` call matched no signature in the library (`generate`
    # consumes token sequences, and threads are fixed at model-load time).
    response = llm(prompt, max_tokens=1096)
    # Return just the generated text, as the earlier ctransformers version did.
    return response["choices"][0]["text"]