Spaces:
Sleeping
Sleeping
import torch
from transformers import AutoTokenizer, AutoModelForQuestionAnswering
import gradio as gr

# Fine-tuned extractive-QA checkpoint; a local directory produced by a
# Transformers Trainer run (presumably a Turkish BERT — TODO confirm).
model_name = "checkpoint-1700"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForQuestionAnswering.from_pretrained(model_name)

# Prefer GPU when available; otherwise run on CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
model.eval()  # inference only: disable dropout / training-mode layers
def answer_question(question, context):
    """Extract the answer span for *question* from *context* using the QA model.

    Parameters
    ----------
    question : str
        The question to answer.
    context : str
        The passage the answer should be extracted from.

    Returns
    -------
    str
        The decoded answer span, or a Turkish notice when the inputs are
        empty or when the model produces no valid span.
    """
    if not question.strip() or not context.strip():
        return "Soru ve metin boş olamaz!"

    # Tokenize the (question, context) pair; truncate to the model's max length.
    inputs = tokenizer(question, context, return_tensors="pt", truncation=True)
    inputs = {k: v.to(device) for k, v in inputs.items()}

    with torch.no_grad():
        outputs = model(**inputs)

    # Most probable start/end token positions; end is exclusive for slicing.
    answer_start = int(torch.argmax(outputs.start_logits))
    answer_end = int(torch.argmax(outputs.end_logits)) + 1

    # argmax of start and end logits are independent, so end may land at or
    # before start; the original code silently returned "" in that case.
    if answer_end <= answer_start:
        return "Cevap bulunamadı."

    input_ids = inputs["input_ids"][0]
    answer = tokenizer.convert_tokens_to_string(
        tokenizer.convert_ids_to_tokens(input_ids[answer_start:answer_end].cpu())
    )
    return answer.strip() or "Cevap bulunamadı."
# Gradio UI: two text inputs (question, passage) -> one text output (answer).
demo = gr.Interface(
    fn=answer_question,
    inputs=[
        gr.Textbox(label="Soru", placeholder="Örn: Türkiye'nin başkenti neresidir ?"),
        gr.Textbox(
            label="Metin",
            placeholder="Metni buraya girin...",
            lines=10,
        ),
    ],
    outputs=gr.Textbox(label="Cevap"),
    title="BERT Soru-Cevap Sistemi",
    description="Metin ve sorunuzu girin ve BERT modeli cevabı metin içerisinden çıkarsın.",
    theme="default",
)
| if __name__ == "__main__": | |
| demo.launch() | |