# qilin / app.py
import gradio as gr
title = "Qilin-Lit-6B"
description = "Qilin-Lit-6B is a fine-tuned version of GPT-J-6B trained on English-translated webnovels. It works as a general-purpose fantasy-novel storyteller."
examples = [
['I had eyes but couldn\'t see Mount Tai!'],
]
# Load the hosted model from the Hugging Face Hub as a Gradio interface.
demo = gr.Interface.load("models/rexwang8/qilin-lit-6b", title=title, description=description, examples=examples)
demo.launch()
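
# The two triple-quoted blocks below are alternative implementations kept for
# reference: the first runs the model locally with transformers instead of the
# hosted Hub model, and the second wires custom input/output textboxes into a
# gr.Interface.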
'''
import os
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the checkpoint and tokenizer once at import time rather than on every call.
model = AutoModelForCausalLM.from_pretrained('rexwang8/qilin-lit-6b')
tokenizer = AutoTokenizer.from_pretrained('rexwang8/qilin-lit-6b')

def GenerateResp(prompt):
    # Encode the prompt and sample a continuation of up to 100 new tokens.
    input_ids = tokenizer.encode(prompt, return_tensors='pt')
    output = model.generate(input_ids, do_sample=True, temperature=1.0, top_p=0.9,
                            repetition_penalty=1.2,
                            max_length=len(input_ids[0]) + 100,
                            pad_token_id=tokenizer.eos_token_id)
    generated_text = tokenizer.decode(output[0])
    return generated_text
'''
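
# Hypothetical usage of the local-inference fallback above (loads the full 6B
# checkpoint, so it needs substantial RAM/VRAM):
# print(GenerateResp("I had eyes but couldn't see Mount Tai!"))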
'''
inputbox = gr.Textbox(label="Input", lines=3, placeholder='Type anything. The longer the better, since it gives Qilin more context. Qilin is trained on English-translated Eastern (mostly Chinese) webnovels.')
outputbox = gr.Textbox(label="Qilin-Lit-6B", lines=8)
# Use the custom textboxes defined above as the interface's input and output.
iface = gr.Interface(fn=GenerateResp, inputs=inputbox, outputs=outputbox)
iface.launch()
'''