Aspik101 committed on
Commit
0d383d6
1 Parent(s): c7f6416

Upload 3 files

Browse files
Files changed (3) hide show
  1. README.md +5 -5
  2. app.py +39 -0
  3. requirements.txt +3 -0
README.md CHANGED
@@ -1,10 +1,10 @@
1
  ---
2
- title: Polish Vicuna1.3
3
- emoji: 🐨
4
- colorFrom: purple
5
- colorTo: yellow
6
  sdk: gradio
7
- sdk_version: 3.39.0
8
  app_file: app.py
9
  pinned: false
10
  license: other
 
1
  ---
2
+ title: Polish Vicuna-13b-v1.3 Instruct
3
+ emoji: 📚
4
+ colorFrom: indigo
5
+ colorTo: red
6
  sdk: gradio
7
+ sdk_version: 3.38.0
8
  app_file: app.py
9
  pinned: false
10
  license: other
app.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Gradio chat demo for a Polish-finetuned Vicuna 13B model served via ctransformers.
import gradio as gr
import random  # NOTE(review): imported but unused in this file — candidate for removal
import time
from ctransformers import AutoModelForCausalLM

# Generation settings passed to the model on every call.
params = {
"max_new_tokens":512,            # cap on generated tokens per reply
"stop":["<end>" ,"<|endoftext|>"],  # stop sequences that end generation
"temperature":0.7,               # sampling temperature
"top_p":0.8,                     # nucleus-sampling cutoff
"stream":True,                   # yield tokens incrementally (used by bot() below)
"batch_size": 8}


# Load the GGML-quantized Polish Vicuna 13B LoRA model (downloads on first run).
llm = AutoModelForCausalLM.from_pretrained("Lajonbot/vicuna-13b-v1.3-PL-lora_GGML", model_type="llama")
17
# Build the chat UI: a chatbot pane, a text input, and a clear button.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history):
        """Append the submitted message as a new (user, pending-bot) turn.

        Returns an empty string (clears the textbox) and the updated history.
        """
        return "", history + [[user_message, None]]

    def bot(history):
        """Stream the model's reply into the last history turn, token by token.

        Fix: the prompt previously interpolated the raw Python repr of the whole
        `history` list (e.g. "[['hi', None]]"), which confused the model. We now
        pass only the latest user message text.
        """
        user_message = history[-1][0]
        stream = llm(prompt = f"Jesteś AI assystentem. Odpowiadaj po polski. <user>: {user_message}. <assistant>:", **params)
        history[-1][1] = ""
        for character in stream:
            history[-1][1] += character
            # Small delay so the streamed text renders smoothly in the UI.
            time.sleep(0.005)
            yield history

    # Submitting the textbox first records the user turn, then streams the reply.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)

demo.queue()
demo.launch()
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ torch
2
+ ctransformers
3
+ gradio