shahzaib201 committed on
Commit
ba1da52
1 Parent(s): 7bc5c42

Upload 3 files

Files changed (3)
  1. Dockerfile +12 -0
  2. main.py +27 -0
  3. requirements.txt +6 -0
Dockerfile ADDED
@@ -0,0 +1,12 @@
+ FROM python:3.9
+
+ WORKDIR /code
+
+ COPY ./requirements.txt /code/requirements.txt
+
+ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+ COPY ./zephyr-7b-beta.Q4_K_S.gguf /code/zephyr-7b-beta.Q4_K_S.gguf
+ COPY ./main.py /code/main.py
+
+ CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
main.py ADDED
@@ -0,0 +1,27 @@
+ from ctransformers import AutoModelForCausalLM
+ from fastapi import FastAPI, Form
+ from pydantic import BaseModel
+
+ # Model loading
+ llm = AutoModelForCausalLM.from_pretrained("victunes/TherapyBeagle-11B-v1-GGUF",
+     model_type='llama',
+     max_new_tokens=1096,
+     threads=3,
+ )
+
+
+ # Pydantic request schema
+ class validation(BaseModel):
+     prompt: str
+
+ # FastAPI app
+ app = FastAPI()
+
+ # Completion endpoint: wraps the user prompt in Zephyr-style chat markers
+ @app.post("/llm_on_cpu")
+ async def stream(item: validation):
+     system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
+     E_INST = "</s>"
+     user, assistant = "<|user|>", "<|assistant|>"
+     prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
+     return llm(prompt)
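Once the image built from this commit is running, the new /llm_on_cpu endpoint can be exercised with a short client script. The sketch below is illustrative and not part of the commit: it assumes the container is reachable on localhost at port 7860 (the port set in the Dockerfile's CMD), uses the requests package already listed in requirements.txt, and the prompt text is a placeholder.

import requests

# POST a prompt to the FastAPI endpoint defined in main.py.
# The JSON body matches the `validation` schema ({"prompt": ...}).
resp = requests.post(
    "http://localhost:7860/llm_on_cpu",
    json={"prompt": "Give me one tip for managing exam stress."},  # placeholder prompt
    timeout=600,  # CPU-only generation can take a while
)
resp.raise_for_status()
print(resp.json())  # the completion string returned by llm(prompt)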
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ python-multipart
+ fastapi
+ pydantic
+ uvicorn
+ requests
+ ctransformers