import gradio as gr
from transformers import pipeline

# Load the NER pipeline once at startup instead of on every request.
ner = pipeline("ner", model="dbmdz/bert-large-cased-finetuned-conll03-english")


def generate_story(story):
    # Run named entity recognition on the input text and return the raw results as a string.
    return str(ner(story))


demo = gr.Interface(
    fn=generate_story,
    description="Named Entity Recognition Demo with BERT (dbmdz/bert-large-cased-finetuned-conll03-english)",
    examples=[
        ["England won the 2019 world cup vs The 2019 world cup happened in England."],
        ["Washington is the capital of the US vs The first president of the US was Washington."],
        ["My name is Ganesh Kamath and I work at AMD in Bangalore."],
    ],
    inputs=[gr.Textbox(lines=7, label="Text")],
    outputs=[gr.Textbox(lines=7, label="Story NER")],
)

demo.launch(share=True)