hywu
/

Text Generation
Transformers
PyTorch
English
camelidae
custom_code
Inference Endpoints
4 papers
hywu committed on
Commit
6a1a68f
1 Parent(s): 5dd03b4

update README

Browse files
Files changed (1) hide show
  1. README.md +2 -1
README.md CHANGED
@@ -62,10 +62,11 @@ tokenizer = AutoTokenizer.from_pretrained("hywu/Camelidae-8x34B", trust_remote_c
62
  # model = AutoModelForCausalLM.from_pretrained("hywu/Camelidae-8x13B", device_map="auto", trust_remote_code=True).eval()
63
  model = AutoModelForCausalLM.from_pretrained("hywu/Camelidae-8x34B", device_map="auto", trust_remote_code=True).eval()
64
 
65
- inputs = tokenizer('### Human:\nHow are you?\n ### Assistant:\n', return_tensors='pt')
66
  inputs = inputs.to(model.device)
67
  pred = model.generate(**inputs)
68
  print(tokenizer.decode(pred.cpu()[0], skip_special_tokens=True))
 
69
  ```
70
 
71
  ## Citation
 
62
  # model = AutoModelForCausalLM.from_pretrained("hywu/Camelidae-8x13B", device_map="auto", trust_remote_code=True).eval()
63
  model = AutoModelForCausalLM.from_pretrained("hywu/Camelidae-8x34B", device_map="auto", trust_remote_code=True).eval()
64
 
65
+ inputs = tokenizer('### Human:\nHow are you?\n### Assistant:\n', return_tensors='pt')
66
  inputs = inputs.to(model.device)
67
  pred = model.generate(**inputs)
68
  print(tokenizer.decode(pred.cpu()[0], skip_special_tokens=True))
69
+ # I am doing well, thank you.
70
  ```
71
 
72
  ## Citation