TheCraftySlayer committed on
Commit
5a40416
1 Parent(s): c600fb7

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -4,8 +4,8 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 tokenizer = AutoTokenizer.from_pretrained("TheCraftySlayer/Llama-2-70b-chat-hf")
 model = AutoModelForCausalLM.from_pretrained("TheCraftySlayer/Llama-2-70b-chat-hf")
 input_text =" hello how are you?"
-inputs= tokenizer.encode(input_text, return_tensors='pt')
-outputs= model.generate(inputs,max_length=50,num_return_sequences=5,temperature=0.7)
+inputs= tokenizer.encode(input_text, return_tensors='pt')
+outputs= model.generate(inputs,max_length=50,num_return_sequences=5,temperature=0.7)
 print("Generated Text")
 for i, output in enumerate(outputs):
 print({f"{i}: {tokenizer.decode(output)}")