Thomas121 committed on
Commit
ffa9b49
1 Parent(s): b8f4870

Create README.md

Files changed (1)
  1. README.md +13 -0
README.md ADDED
@@ -0,0 +1,13 @@
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+
+ tokenizer = AutoTokenizer.from_pretrained("stabilityai/StableBeluga-13B", use_fast=False)
+ model = AutoModelForCausalLM.from_pretrained("stabilityai/StableBeluga-13B", torch_dtype=torch.float16, low_cpu_mem_usage=True, device_map="auto")
+ system_prompt = "### System:\nYou are Stable Beluga 13B, an AI that follows instructions extremely well. Help as much as you can. Remember, be safe, and don't do anything illegal.\n\n"
+
+ message = "Write me a poem please"
+ prompt = f"{system_prompt}### User: {message}\n\n### Assistant:\n"
+ inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
+ output = model.generate(**inputs, do_sample=True, top_p=0.95, top_k=0, max_new_tokens=256)
+
+ print(tokenizer.decode(output[0], skip_special_tokens=True))
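
The committed snippet builds the prompt in the "### System: / ### User: / ### Assistant:" format and decodes the full generated sequence. Below is a minimal sketch, not part of the commit, of how the same steps can be wrapped into a reusable helper; the function name ask_beluga and the step that trims the prompt tokens from the output are illustrative additions, not from the committed README.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Same system prompt as in the committed README.
SYSTEM_PROMPT = (
    "### System:\nYou are Stable Beluga 13B, an AI that follows instructions extremely well. "
    "Help as much as you can. Remember, be safe, and don't do anything illegal.\n\n"
)

tokenizer = AutoTokenizer.from_pretrained("stabilityai/StableBeluga-13B", use_fast=False)
model = AutoModelForCausalLM.from_pretrained(
    "stabilityai/StableBeluga-13B",
    torch_dtype=torch.float16,
    low_cpu_mem_usage=True,
    device_map="auto",
)

def ask_beluga(message: str, max_new_tokens: int = 256) -> str:
    # Hypothetical helper: builds the "### System / ### User / ### Assistant" prompt
    # exactly as in the snippet above, then generates with the same sampling settings.
    prompt = f"{SYSTEM_PROMPT}### User: {message}\n\n### Assistant:\n"
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    output = model.generate(
        **inputs, do_sample=True, top_p=0.95, top_k=0, max_new_tokens=max_new_tokens
    )
    # Decode only the newly generated tokens so the prompt is not echoed back.
    generated = output[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(generated, skip_special_tokens=True)

print(ask_beluga("Write me a poem please"))

Sending the inputs to model.device (rather than hard-coding "cuda") keeps the helper consistent with device_map="auto", which may place the model's first layer on any available device.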