DebasishDhal99 commited on
Commit
b6c246c
·
verified ·
1 Parent(s): ac0fefb

Fix inference code

Browse files
Files changed (1) hide show
  1. README.md +19 -0
README.md CHANGED
@@ -1,3 +1,22 @@
1
  ---
2
  license: cc-by-nc-4.0
3
  ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
  license: cc-by-nc-4.0
3
  ---
4
+
5
+ ```python
6
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

model_id = "OdiaGenAI/odiagenAI_llama7b_base_v1"

# Select the device first: it is needed both for the model and for the
# encoded inputs below. (The previous version used `device` before it
# was defined, raising NameError.)
device = "cuda" if torch.cuda.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_id).to(device)

# Odia-language prompt: "Tell me something about India".
prompt = "ଭାରତ ବିଷୟରେ କିଛି କୁହନ୍ତୁ"
# Inputs must live on the same device as the model before generate().
inputs = tokenizer.encode(prompt, return_tensors="pt").to(device)

outputs = model.generate(inputs, max_new_tokens=100)

print(tokenizer.decode(outputs[0], skip_special_tokens=True))
22
+ ```