rooa committed
Commit 9de792b (parent: 5f9eb1b)

Update README.md

Files changed (1):
  README.md (+9, -1)
README.md CHANGED
@@ -39,7 +39,15 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 
 tokenizer = AutoTokenizer.from_pretrained("Salesforce/xgen-7b-8k-inst", trust_remote_code=True)
 model = AutoModelForCausalLM.from_pretrained("Salesforce/xgen-7b-8k-inst", torch_dtype=torch.bfloat16)
-inputs = tokenizer("The world is", return_tensors="pt")
+
+header = (
+    "A chat between a curious human and an artificial intelligence assistant. "
+    "The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n"
+)
+article = ""  # insert a document here
+prompt = f"### Human: Please summarize the following article. {article}.\n###"
+
+inputs = tokenizer(header + prompt, return_tensors="pt")
 sample = model.generate(**inputs, max_length=128)
 print(tokenizer.decode(sample[0]))
 ```
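For reference, the snippet below shows the README example as it reads after this change, assembled from the diff context above. It is a minimal sketch, not the verbatim README: the `from transformers import ...` line is taken from the hunk header, `import torch` is assumed because `torch.bfloat16` is used, and the `f` prefix on `prompt` is included so that `{article}` is actually interpolated.

```python
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the instruction-tuned XGen-7B checkpoint with its custom tokenizer.
tokenizer = AutoTokenizer.from_pretrained("Salesforce/xgen-7b-8k-inst", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("Salesforce/xgen-7b-8k-inst", torch_dtype=torch.bfloat16)

# Chat-style preamble followed by an instruction in the "### Human:" format.
header = (
    "A chat between a curious human and an artificial intelligence assistant. "
    "The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n"
)
article = ""  # insert a document here
prompt = f"### Human: Please summarize the following article. {article}.\n###"

# Tokenize the combined prompt and generate a continuation.
inputs = tokenizer(header + prompt, return_tensors="pt")
sample = model.generate(**inputs, max_length=128)
print(tokenizer.decode(sample[0]))
```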