KaleiNeely committed on
Commit
e83fbb5
1 Parent(s): c28090d

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +4 -4
README.md CHANGED
@@ -1,4 +1,4 @@
1
- ### Run Huggingface RWKV5 World Model
2
 
3
 
4
  #### CPU
@@ -27,7 +27,7 @@ Assistant:"""
27
 
28
 
29
  model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True).to(torch.float32)
30
- tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True)
31
 
32
  text = "请介绍北京的旅游景点"
33
  prompt = generate_prompt(text)
@@ -83,7 +83,7 @@ Assistant:"""
83
 
84
 
85
  model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, torch_dtype=torch.float16).to(0)
86
- tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True)
87
 
88
  text = "介绍一下大熊猫"
89
  prompt = generate_prompt(text)
@@ -130,7 +130,7 @@ User: {instruction}
130
  Assistant:"""
131
 
132
  model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True).to(torch.float32)
133
- tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True)
134
 
135
  texts = ["请介绍北京的旅游景点", "介绍一下大熊猫", "乌兰察布"]
136
  prompts = [generate_prompt(text) for text in texts]
 
1
+ ### Run Huggingface RWKV6 World Model
2
 
3
 
4
  #### CPU
 
27
 
28
 
29
  model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True).to(torch.float32)
30
+ tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, padding_side='left')
31
 
32
  text = "请介绍北京的旅游景点"
33
  prompt = generate_prompt(text)
 
83
 
84
 
85
  model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, torch_dtype=torch.float16).to(0)
86
+ tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, padding_side='left')
87
 
88
  text = "介绍一下大熊猫"
89
  prompt = generate_prompt(text)
 
130
  Assistant:"""
131
 
132
  model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True).to(torch.float32)
133
+ tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, padding_side='left')
134
 
135
  texts = ["请介绍北京的旅游景点", "介绍一下大熊猫", "乌兰察布"]
136
  prompts = [generate_prompt(text) for text in texts]