CaiRou-Huang committed on
Commit b307f4c
1 Parent(s): 103ed49

Create app.py

Files changed (1)
  1. app.py +19 -0
app.py ADDED
@@ -0,0 +1,19 @@
+ # pip install transformers>=4.34
+ # pip install accelerate
+
+ import torch
+ from transformers import pipeline
+
+ pipe = pipeline("text-generation", model="yentinglin/Taiwan-LLM-7B-v2.0-chat", torch_dtype=torch.bfloat16, device_map="auto")
+
+ # We use the tokenizer's chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating
+ messages = [
+     {
+         "role": "system",
+         "content": "你是一個人工智慧助理",  # "You are an AI assistant"
+     },
+     {"role": "user", "content": "東北季風如何影響台灣氣候?"},  # "How does the northeast monsoon affect Taiwan's climate?"
+ ]
+ prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+ outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
+ print(outputs[0]["generated_text"])
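
For reference, the same generation flow can be written without the pipeline helper. The sketch below is not part of the committed app.py; it assumes the model's bundled chat template and uses AutoTokenizer/AutoModelForCausalLM directly with the same sampling parameters.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "yentinglin/Taiwan-LLM-7B-v2.0-chat"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.bfloat16, device_map="auto")

messages = [
    {"role": "system", "content": "你是一個人工智慧助理"},
    {"role": "user", "content": "東北季風如何影響台灣氣候?"},
]

# Tokenize via the chat template and generate with the same sampling settings as app.py.
input_ids = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt").to(model.device)
output_ids = model.generate(input_ids, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)

# Decode only the newly generated tokens (everything after the prompt).
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))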