Spaces:
Running
on
T4
Update app.py
Browse files
app.py
CHANGED
@@ -8,7 +8,7 @@ gpu_h = nvmlDeviceGetHandleByIndex(0)
|
|
8 |
ctx_limit = 1024
|
9 |
gen_limit = 500
|
10 |
gen_limit_long = 800
|
11 |
-
title = "RWKV-x060-World-7B-
|
12 |
|
13 |
os.environ["RWKV_JIT_ON"] = '1'
|
14 |
os.environ["RWKV_CUDA_ON"] = '1' # if '1' then use CUDA kernel for seq mode (much faster)
|
@@ -24,18 +24,18 @@ from rwkv.utils import PIPELINE, PIPELINE_ARGS
|
|
24 |
pipeline = PIPELINE(model, "rwkv_vocab_v20230424")
|
25 |
|
26 |
args = model.args
|
27 |
-
eng_name = '
|
28 |
-
eng_file = hf_hub_download(repo_id="BlinkDL/temp-latest-training-models", filename=f"{eng_name}.pth")
|
29 |
state_eng_raw = torch.load(eng_file)
|
30 |
state_eng = [None] * args.n_layer * 3
|
31 |
|
32 |
-
chn_name = '
|
33 |
-
chn_file = hf_hub_download(repo_id="BlinkDL/temp-latest-training-models", filename=f"{chn_name}.pth")
|
34 |
state_chn_raw = torch.load(chn_file)
|
35 |
state_chn = [None] * args.n_layer * 3
|
36 |
|
37 |
-
wyw_name = '
|
38 |
-
wyw_file = hf_hub_download(repo_id="BlinkDL/temp-latest-training-models", filename=f"{wyw_name}.pth")
|
39 |
state_wyw_raw = torch.load(wyw_file)
|
40 |
state_wyw = [None] * args.n_layer * 3
|
41 |
|
@@ -277,7 +277,7 @@ examples = [
|
|
277 |
[generate_prompt("Write a story using the following information.", "A man named Alex chops a tree down."), gen_limit, 1, 0.3, 0.5, 0.5],
|
278 |
["A few light taps upon the pane made her turn to the window. It had begun to snow again.", gen_limit, 1, 0.3, 0.5, 0.5],
|
279 |
['''Edward: I am Edward Elric from Fullmetal Alchemist.\n\nUser: Hello Edward. What have you been up to recently?\n\nEdward:''', gen_limit, 1, 0.3, 0.5, 0.5],
|
280 |
-
[generate_prompt("Write a simple
|
281 |
['''Japanese: 春の初め、桜の花が満開になる頃、小さな町の片隅にある古びた神社の境内は、特別な雰囲気に包まれていた。\n\nEnglish:''', gen_limit, 1, 0.3, 0.5, 0.5],
|
282 |
["En una pequeña aldea escondida entre las montañas de Andalucía, donde las calles aún conservaban el eco de antiguas leyendas, vivía un joven llamado Alejandro.", gen_limit, 1, 0.3, 0.5, 0.5],
|
283 |
["Dans le cœur battant de Paris, sous le ciel teinté d'un crépuscule d'or et de pourpre, se tenait une petite librairie oubliée par le temps.", gen_limit, 1, 0.3, 0.5, 0.5],
|
@@ -338,7 +338,7 @@ with gr.Blocks(title=title) as demo:
|
|
338 |
data.click(lambda x: x, [data], [prompt, token_count, temperature, top_p, presence_penalty, count_penalty])
|
339 |
|
340 |
with gr.Tab("=== English Q/A ==="):
|
341 |
-
gr.Markdown(f"This is [RWKV-6](https://huggingface.co/BlinkDL/rwkv-6-world) state-tuned to [English Q/A](https://huggingface.co/BlinkDL/
|
342 |
with gr.Row():
|
343 |
with gr.Column():
|
344 |
prompt = gr.Textbox(lines=2, label="Prompt", value="How can I craft an engaging story featuring vampires on Mars?")
|
@@ -358,7 +358,7 @@ with gr.Blocks(title=title) as demo:
|
|
358 |
data.click(lambda x: x, [data], [prompt, token_count, temperature, top_p, presence_penalty, count_penalty])
|
359 |
|
360 |
with gr.Tab("=== Chinese Q/A ==="):
|
361 |
-
gr.Markdown(f"This is [RWKV-6](https://huggingface.co/BlinkDL/rwkv-6-world) state-tuned to [Chinese Q/A](https://huggingface.co/BlinkDL/
|
362 |
with gr.Row():
|
363 |
with gr.Column():
|
364 |
prompt = gr.Textbox(lines=2, label="Prompt", value="怎样写一个在火星上的吸血鬼的有趣故事?")
|
@@ -378,7 +378,7 @@ with gr.Blocks(title=title) as demo:
|
|
378 |
data.click(lambda x: x, [data], [prompt, token_count, temperature, top_p, presence_penalty, count_penalty])
|
379 |
|
380 |
with gr.Tab("=== WenYanWen Q/A ==="):
|
381 |
-
gr.Markdown(f"This is [RWKV-6](https://huggingface.co/BlinkDL/rwkv-6-world) state-tuned to [WenYanWen 文言文 Q/A](https://huggingface.co/BlinkDL/
|
382 |
with gr.Row():
|
383 |
with gr.Column():
|
384 |
prompt = gr.Textbox(lines=2, label="Prompt", value="我和前男友分手了")
|
|
|
8 |
ctx_limit = 1024
|
9 |
gen_limit = 500
|
10 |
gen_limit_long = 800
|
11 |
+
title = "RWKV-x060-World-7B-v3-20241112-ctx4096"
|
12 |
|
13 |
os.environ["RWKV_JIT_ON"] = '1'
|
14 |
os.environ["RWKV_CUDA_ON"] = '1' # if '1' then use CUDA kernel for seq mode (much faster)
|
|
|
24 |
pipeline = PIPELINE(model, "rwkv_vocab_v20230424")
|
25 |
|
26 |
args = model.args
|
27 |
+
eng_name = 'rwkv6-world-v3-7b-eng_QA-20241114-ctx2048'
|
28 |
+
eng_file = hf_hub_download(repo_id="BlinkDL/temp-latest-training-models", filename=f"states/{eng_name}.pth")
|
29 |
state_eng_raw = torch.load(eng_file)
|
30 |
state_eng = [None] * args.n_layer * 3
|
31 |
|
32 |
+
chn_name = 'rwkv6-world-v3-7b-chn_问答QA-20241114-ctx2048'
|
33 |
+
chn_file = hf_hub_download(repo_id="BlinkDL/temp-latest-training-models", filename=f"states/{chn_name}.pth")
|
34 |
state_chn_raw = torch.load(chn_file)
|
35 |
state_chn = [None] * args.n_layer * 3
|
36 |
|
37 |
+
wyw_name = 'rwkv6-world-v3-7b-chn_文言文QA-20241114-ctx2048'
|
38 |
+
wyw_file = hf_hub_download(repo_id="BlinkDL/temp-latest-training-models", filename=f"states/{wyw_name}.pth")
|
39 |
state_wyw_raw = torch.load(wyw_file)
|
40 |
state_wyw = [None] * args.n_layer * 3
|
41 |
|
|
|
277 |
[generate_prompt("Write a story using the following information.", "A man named Alex chops a tree down."), gen_limit, 1, 0.3, 0.5, 0.5],
|
278 |
["A few light taps upon the pane made her turn to the window. It had begun to snow again.", gen_limit, 1, 0.3, 0.5, 0.5],
|
279 |
['''Edward: I am Edward Elric from Fullmetal Alchemist.\n\nUser: Hello Edward. What have you been up to recently?\n\nEdward:''', gen_limit, 1, 0.3, 0.5, 0.5],
|
280 |
+
[generate_prompt("Write a simple webpage. When a user clicks the button, it shows a random joke from a list of 4 jokes."), 500, 1, 0.3, 0.5, 0.5],
|
281 |
['''Japanese: 春の初め、桜の花が満開になる頃、小さな町の片隅にある古びた神社の境内は、特別な雰囲気に包まれていた。\n\nEnglish:''', gen_limit, 1, 0.3, 0.5, 0.5],
|
282 |
["En una pequeña aldea escondida entre las montañas de Andalucía, donde las calles aún conservaban el eco de antiguas leyendas, vivía un joven llamado Alejandro.", gen_limit, 1, 0.3, 0.5, 0.5],
|
283 |
["Dans le cœur battant de Paris, sous le ciel teinté d'un crépuscule d'or et de pourpre, se tenait une petite librairie oubliée par le temps.", gen_limit, 1, 0.3, 0.5, 0.5],
|
|
|
338 |
data.click(lambda x: x, [data], [prompt, token_count, temperature, top_p, presence_penalty, count_penalty])
|
339 |
|
340 |
with gr.Tab("=== English Q/A ==="):
|
341 |
+
gr.Markdown(f"This is [RWKV-6](https://huggingface.co/BlinkDL/rwkv-6-world) state-tuned to [English Q/A](https://huggingface.co/BlinkDL/rwkv-6-misc/tree/main/states). RWKV is a 100% attention-free RNN [RWKV-LM](https://github.com/BlinkDL/RWKV-LM), and we have [300+ Github RWKV projects](https://github.com/search?o=desc&p=1&q=rwkv&s=updated&type=Repositories). Demo limited to ctxlen {ctx_limit}.")
|
342 |
with gr.Row():
|
343 |
with gr.Column():
|
344 |
prompt = gr.Textbox(lines=2, label="Prompt", value="How can I craft an engaging story featuring vampires on Mars?")
|
|
|
358 |
data.click(lambda x: x, [data], [prompt, token_count, temperature, top_p, presence_penalty, count_penalty])
|
359 |
|
360 |
with gr.Tab("=== Chinese Q/A ==="):
|
361 |
+
gr.Markdown(f"This is [RWKV-6](https://huggingface.co/BlinkDL/rwkv-6-world) state-tuned to [Chinese Q/A](https://huggingface.co/BlinkDL/rwkv-6-misc/tree/main/states). RWKV is a 100% attention-free RNN [RWKV-LM](https://github.com/BlinkDL/RWKV-LM), and we have [300+ Github RWKV projects](https://github.com/search?o=desc&p=1&q=rwkv&s=updated&type=Repositories). Demo limited to ctxlen {ctx_limit}.")
|
362 |
with gr.Row():
|
363 |
with gr.Column():
|
364 |
prompt = gr.Textbox(lines=2, label="Prompt", value="怎样写一个在火星上的吸血鬼的有趣故事?")
|
|
|
378 |
data.click(lambda x: x, [data], [prompt, token_count, temperature, top_p, presence_penalty, count_penalty])
|
379 |
|
380 |
with gr.Tab("=== WenYanWen Q/A ==="):
|
381 |
+
gr.Markdown(f"This is [RWKV-6](https://huggingface.co/BlinkDL/rwkv-6-world) state-tuned to [WenYanWen 文言文 Q/A](https://huggingface.co/BlinkDL/rwkv-6-misc/tree/main/states). RWKV is a 100% attention-free RNN [RWKV-LM](https://github.com/BlinkDL/RWKV-LM), and we have [300+ Github RWKV projects](https://github.com/search?o=desc&p=1&q=rwkv&s=updated&type=Repositories). Demo limited to ctxlen {ctx_limit}.")
|
382 |
with gr.Row():
|
383 |
with gr.Column():
|
384 |
prompt = gr.Textbox(lines=2, label="Prompt", value="我和前男友分手了")
|