LH-Tech-AI committed on
Commit
0ee2d12
·
verified ·
1 Parent(s): f16ee11

Create use.py

Browse files
Files changed (1) hide show
  1. use.py +25 -0
use.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Sample a few continuations from a locally fine-tuned causal LM checkpoint.
#
# Fix over the original: the device was hard-coded to "cuda", which raises a
# RuntimeError on CPU-only machines — fall back to CPU when no GPU is present.
path = "spark_v4_fp16_final"
device = "cuda" if torch.cuda.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained(path)
model = AutoModelForCausalLM.from_pretrained(path).to(device)
model.eval()  # inference only: disable dropout and other train-mode behavior

prompts = [
    "Artificial Intelligence is",
    "The main concept of physics is",
    "In the year 1969, "
]

for prompt in prompts:
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    # inference_mode skips autograd bookkeeping entirely — less memory,
    # faster generation; results are identical to running without it.
    with torch.inference_mode():
        outputs = model.generate(
            **inputs,
            max_new_tokens=200,
            do_sample=True,   # stochastic sampling rather than greedy decoding
            top_k=25,         # sample only from the 25 most likely next tokens
            temperature=0.8,  # mildly sharpen the distribution (<1.0)
            pad_token_id=tokenizer.eos_token_id  # silence the missing-pad-token warning
        )
    print(f"PROMPT: {prompt}")
    print(f"OUTPUT: {tokenizer.decode(outputs[0], skip_special_tokens=True)}\n{'-'*40}")