BAAI
/

ldwang committed on
Commit
973ff4c
1 Parent(s): 89aaa1a

Update README.md

Browse files
Files changed (1)
  1. README.md +7 -9
README.md CHANGED
@@ -25,20 +25,18 @@ The additional details of the Aquila model will be presented in the official tec
25
  ```python
26
  from transformers import AutoTokenizer, AutoModelForCausalLM
27
  import torch
28
- device = torch.device("cuda")
29
  model_info = "BAAI/AquilaChat2-34B"
30
  tokenizer = AutoTokenizer.from_pretrained(model_info, trust_remote_code=True)
31
- model = AutoModelForCausalLM.from_pretrained(model_info, trust_remote_code=True)
32
  model.eval()
33
  model.to(device)
34
  text = "请给出10个要到北京旅游的理由。"
35
- tokens = tokenizer.encode_plus(text)['input_ids']
36
- tokens = torch.tensor(tokens)[None,].to(device)
37
- stop_tokens = ["###", "[UNK]", "</s>"]
38
- with torch.no_grad():
39
- out = model.generate(tokens, do_sample=True, max_length=512, eos_token_id=100007, bad_words_ids=[[tokenizer.encode(token)[0] for token in stop_tokens]])[0]
40
- out = tokenizer.decode(out.cpu().numpy().tolist())
41
- print(out)
42
  ```
43
 
44
 
 
25
  ```python
# Updated (post-commit) inference example for BAAI/AquilaChat2-34B.
# Reconstructed from the diff: extraction garbling (interleaved line numbers
# and '+' diff markers) made the snippet non-runnable as scraped.
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# NOTE(review): assumes a CUDA-capable GPU is available — confirm before running.
device = torch.device("cuda:0")
model_info = "BAAI/AquilaChat2-34B"
# trust_remote_code=True is required because the Aquila model class lives in the repo.
tokenizer = AutoTokenizer.from_pretrained(model_info, trust_remote_code=True)
# bfloat16 halves memory versus fp32 — important for a 34B-parameter model.
model = AutoModelForCausalLM.from_pretrained(model_info, trust_remote_code=True,
                                             torch_dtype=torch.bfloat16)
model.eval()
model.to(device)
text = "请给出10个要到北京旅游的理由。"
# predict.py ships with the model repository (fetched via trust_remote_code),
# so this import resolves only after from_pretrained has run.
from predict import predict
out = predict(model, text, tokenizer=tokenizer, max_gen_len=200, top_p=0.95,
              seed=1234, topk=100, temperature=0.9, sft=True, device=device,
              model_name="AquilaChat2-34B")
print(out)
  ```
41
 
42