KaleiNeely committed f7b77f4
Parent(s): 3a3a37f

Update README.md

Files changed (1): README.md (+6 -6)
README.md CHANGED
@@ -27,8 +27,8 @@ User: {instruction}
 Assistant:"""
 
 
-model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True).to(torch.float32)
-tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, padding_side='left', pad_token="<s>")
+model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-6-world-1b6", trust_remote_code=True).to(torch.float32)
+tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-6-world-1b6", trust_remote_code=True, padding_side='left', pad_token="<s>")
 
 text = "请介绍北京的旅游景点"
 prompt = generate_prompt(text)
@@ -83,8 +83,8 @@ User: {instruction}
 Assistant:"""
 
 
-model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, torch_dtype=torch.float16).to(0)
-tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, padding_side='left', pad_token="<s>")
+model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-6-world-1b6", trust_remote_code=True, torch_dtype=torch.float16).to(0)
+tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-6-world-1b6", trust_remote_code=True, padding_side='left', pad_token="<s>")
 
 text = "介绍一下大熊猫"
 prompt = generate_prompt(text)
@@ -130,8 +130,8 @@ User: {instruction}
 
 Assistant:"""
 
-model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True).to(torch.float32)
-tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-5-world-1b6", trust_remote_code=True, padding_side='left', pad_token="<s>")
+model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-6-world-1b6", trust_remote_code=True).to(torch.float32)
+tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-6-world-1b6", trust_remote_code=True, padding_side='left', pad_token="<s>")
 
 texts = ["请介绍北京的旅游景点", "介绍一下大熊猫", "乌兰察布"]
 prompts = [generate_prompt(text) for text in texts]
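
For reference, a minimal end-to-end sketch of how the updated loading lines are typically used. Only the model/tokenizer loading lines and the `User:`/`Assistant:` template fragment appear in this diff; the `generate_prompt` body and the generation settings below are assumptions, not part of the commit or necessarily the README's exact code.

```python
# Minimal sketch (not part of the commit): single-prompt generation with the
# updated model ID. generate_prompt and the generation settings are assumed;
# only the loading lines and the prompt-template fragment come from the diff.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

def generate_prompt(instruction):
    # Assumed prompt template, reconstructed from the "User:/Assistant:"
    # fragment visible in the diff context.
    return f"""User: {instruction}

Assistant:"""

model = AutoModelForCausalLM.from_pretrained(
    "RWKV/rwkv-6-world-1b6", trust_remote_code=True
).to(torch.float32)
tokenizer = AutoTokenizer.from_pretrained(
    "RWKV/rwkv-6-world-1b6", trust_remote_code=True,
    padding_side="left", pad_token="<s>"
)

prompt = generate_prompt("请介绍北京的旅游景点")
inputs = tokenizer(prompt, return_tensors="pt")

# Illustrative generation settings; adjust as needed.
output = model.generate(
    inputs["input_ids"],
    max_new_tokens=256,
    do_sample=True,
    temperature=1.0,
    top_p=0.3,
)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```

For the batched example in the third hunk, the same flow applies with `tokenizer(prompts, return_tensors="pt", padding=True)`; the `padding_side='left'` and `pad_token="<s>"` arguments in the loading line are what make that left-padded batching work for generation.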