FredZhang7 commited on
Commit
8bd55c1
1 Parent(s): a23e440

update to newer version

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -7,7 +7,7 @@ from huggingface_hub import hf_hub_download
7
  ctx_limit = 4096
8
  # title = "RWKV-5-World-1B5-v2-20231025-ctx4096"
9
  # "BlinkDL/rwkv-5-world"
10
- title = "RWKV-x060-World-1B6-v2-20240208-ctx4096"
11
  model_path = hf_hub_download(repo_id="BlinkDL/rwkv-6-world", filename=f"{title}.pth")
12
  model = RWKV(model=model_path, strategy="cpu bf16")
13
  pipeline = PIPELINE(model, "rwkv_vocab_v20230424")
@@ -176,7 +176,7 @@ def alternative(chatbot, history):
176
  with gr.Blocks(title=title) as demo:
177
  gr.HTML(f'<div style="text-align: center;">\n<h1>🌍World - {title}</h1>\n</div>')
178
  gr.Markdown(
179
- f"100% RNN RWKV-LM **trained on 100+ natural languages**. Demo limited to ctxlen {ctx_limit}. For best results, <b>write short imperative prompts</b> like commands and requests. Example: use \"Tell me what my name is\" instead of \"What's my name?\"."
180
  + "\n\n"
181
  + f"Clone this space for faster inference if you can run the app on GPU or better CPU. To use CUDA, replace <code>strategy='cpu bf16'</code> with <code>strategy='cuda fp16'</code> in `app.py`."
182
  )
 
7
  ctx_limit = 4096
8
  # title = "RWKV-5-World-1B5-v2-20231025-ctx4096"
9
  # "BlinkDL/rwkv-5-world"
10
+ title = "RWKV-x060-World-1B6-v2.1-20240328-ctx4096"
11
  model_path = hf_hub_download(repo_id="BlinkDL/rwkv-6-world", filename=f"{title}.pth")
12
  model = RWKV(model=model_path, strategy="cpu bf16")
13
  pipeline = PIPELINE(model, "rwkv_vocab_v20230424")
 
176
  with gr.Blocks(title=title) as demo:
177
  gr.HTML(f'<div style="text-align: center;">\n<h1>🌍World - {title}</h1>\n</div>')
178
  gr.Markdown(
179
+ f"100% RNN RWKV-LM **trained on 12+ natural languages**. Demo limited to ctxlen {ctx_limit}. For best results, <b>write short imperative prompts</b> like commands and requests. Example: use \"Tell me what my name is\" instead of \"What's my name?\"."
180
  + "\n\n"
181
  + f"Clone this space for faster inference if you can run the app on GPU or better CPU. To use CUDA, replace <code>strategy='cpu bf16'</code> with <code>strategy='cuda fp16'</code> in `app.py`."
182
  )