shahzaib201 committed on
Commit
a28fa2d
1 Parent(s): 48c11de

Update main.py

Files changed (1)
  1. main.py +2 -2
main.py CHANGED
@@ -3,7 +3,7 @@ from fastapi import FastAPI, Form
 from pydantic import BaseModel
 
 #Model loading
-llm = AutoModelForCausalLM.from_pretrained("victunes/TherapyBeagle-11B-v1-GGUF",
+llm = AutoModelForCausalLM.from_pretrained("carl-llama-2-13b.Q3_K_S.gguf",
     model_type='llama',
     max_new_tokens = 1096,
     threads = 3,
@@ -22,6 +22,6 @@ app = FastAPI()
 async def stream(item: validation):
     system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
     E_INST = "</s>"
-    user, assistant = "<|user|>", "<|assistant|>"
+    user, assistant = "<|user|>", "<|Therapy Assistant|>"
     prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
     return llm(prompt)
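
For context, here is a minimal sketch of how the post-commit main.py might fit together, assuming the ctransformers AutoModelForCausalLM API (which matches the model_type/max_new_tokens/threads arguments in the diff). The "/generate" route path and the shape of the validation request model are assumptions, since they fall outside the shown hunks; only the lines in the diff above are confirmed by the commit.

# Hypothetical reconstruction of the updated main.py; assumptions are noted inline.
from ctransformers import AutoModelForCausalLM
from fastapi import FastAPI, Form
from pydantic import BaseModel

# Model loading: ctransformers runs the local GGUF file on CPU.
llm = AutoModelForCausalLM.from_pretrained(
    "carl-llama-2-13b.Q3_K_S.gguf",
    model_type="llama",
    max_new_tokens=1096,
    threads=3,
)

class validation(BaseModel):
    # Assumed request schema: the diff only shows item.prompt being read.
    prompt: str

app = FastAPI()

@app.post("/generate")  # assumed route; the decorator is outside the diff hunks
async def stream(item: validation):
    # Build an instruction-style prompt with the new "<|Therapy Assistant|>" tag.
    system_prompt = (
        "Below is an instruction that describes a task. "
        "Write a response that appropriately completes the request."
    )
    E_INST = "</s>"
    user, assistant = "<|user|>", "<|Therapy Assistant|>"
    prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
    return llm(prompt)

Under these assumptions the app would be served with, for example, uvicorn main:app, and a POST request with a JSON body like {"prompt": "..."} would return the generated completion.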