yodiaditya committed
Commit b3c9775 • 1 Parent(s): 9802f86
update readme and re-train

- README.md +41 -0
- pytorch_model.bin +1 -1
README.md CHANGED
@@ -107,6 +107,47 @@ print(result)
 [{'generated_text': 'Given the question:\n{ siapa kamu? }\n---\nAnswer:\nSaya Karina, asisten virtual siap membantu seputar estimasi harga atau pertanyaan lain'}]
 ```

+### Local Inference with Gradio
+
+```python
+from transformers import AutoModelForCausalLM, AutoTokenizer
+from transformers import pipeline
+import re
+
+import gradio as gr
+
+MODEL_NAME = "yodi/karina"
+
+model_4bit = AutoModelForCausalLM.from_pretrained(MODEL_NAME, device_map="cuda:1", load_in_4bit=True)
+tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+
+prompt = f"Given the question:\n{{ siapa kamu? }}\n---\nAnswer:\n"
+
+generator = pipeline('text-generation',
+                     model=model_4bit,
+                     tokenizer=tokenizer,
+                     do_sample=False)
+
+def preprocess(text):
+    return f"Given the question:\n{{ {text} }}\n---\nAnswer:\n"
+
+def generate(text):
+    preprocess_result = preprocess(text)
+    result = generator(preprocess_result, max_length=256)
+    output = re.split(r'Given the question:|Answer:|Answer #|Title:', result[0]['generated_text'])[2]
+
+    return output
+
+with gr.Blocks() as demo:
+    input_text = gr.Textbox(label="Input", lines=1)
+    button = gr.Button("Submit")
+    output_text = gr.Textbox(lines=6, label="Output")
+    button.click(generate, inputs=[input_text], outputs=output_text)
+
+demo.launch(enable_queue=True, debug=True)
+```
+
+Then open the Gradio URL in your browser.
+
 ## Training procedure
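For reference outside the committed README: the `re.split` call in the snippet added above relies on the model echoing the prompt template, and index `[2]` picks out the text that follows `Answer:`. Below is a minimal, self-contained sketch of how that split behaves on an illustrative string; the sample text is made up for demonstration and is not actual model output.

```python
import re

# Illustrative text shaped like the README's prompt template (not real model output).
sample = (
    "Given the question:\n{ siapa kamu? }\n---\n"
    "Answer:\nSaya Karina, asisten virtual siap membantu."
)

# Splitting on the template markers yields:
#   parts[0] -> '' (text before "Given the question:")
#   parts[1] -> the question block
#   parts[2] -> the text after "Answer:", i.e. the reply shown in the UI
parts = re.split(r'Given the question:|Answer:|Answer #|Title:', sample)
print(parts[2].strip())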
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9396aa5934350d8ae5a95c7d043a7e3d3396e4b8eaf8301b8cda55929faad2d4
 size 6005240157
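As a side note on the weight change: in a Git LFS pointer, `oid sha256:` is the SHA-256 digest of the actual file, so the re-trained checkpoint gets a new oid even though the size stays 6005240157 bytes. A minimal sketch for verifying a locally downloaded checkpoint against the new pointer (the file path is a placeholder):

```python
import hashlib

# Placeholder path to the downloaded checkpoint; adjust to your local copy.
WEIGHTS_PATH = "pytorch_model.bin"

# Hash in chunks so the ~6 GB file is never fully loaded into memory.
digest = hashlib.sha256()
with open(WEIGHTS_PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print(digest.hexdigest())
# Expected (from the LFS pointer above):
# 9396aa5934350d8ae5a95c7d043a7e3d3396e4b8eaf8301b8cda55929faad2d4
```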