sk75 committed on
Commit 63f55d3 · verified · 1 parent: 989d028

Create app.py

Files changed (1): app.py +49 -0
app.py ADDED
@@ -0,0 +1,49 @@
+ import torch
+ from transformers import AutoTokenizer, AutoModelForQuestionAnswering
+ import gradio as gr
+
+ model_name = "checkpoint-1700"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForQuestionAnswering.from_pretrained(model_name)
+
+ device = "cuda" if torch.cuda.is_available() else "cpu"
+ model.to(device)
+
+ def answer_question(question, context):
+     if not question.strip() or not context.strip():
+         return "Soru ve metin boş olamaz!"
+
+     inputs = tokenizer(question, context, return_tensors="pt", truncation=True)
+     inputs = {k: v.to(device) for k, v in inputs.items()}
+
+     with torch.no_grad():
+         outputs = model(**inputs)
+
+     answer_start = torch.argmax(outputs.start_logits)
+     answer_end = torch.argmax(outputs.end_logits) + 1
+
+     input_ids = inputs["input_ids"][0]
+     answer = tokenizer.convert_tokens_to_string(
+         tokenizer.convert_ids_to_tokens(input_ids[answer_start:answer_end].cpu())
+     )
+
+     return answer.strip()
+
+ demo = gr.Interface(
+     fn=answer_question,
+     inputs=[
+         gr.Textbox(label="Soru", placeholder="Örn: Türkiye'nin başkenti neresidir ?"),
+         gr.Textbox(
+             label="Metin",
+             placeholder="Metni buraya girin...",
+             lines=10
+         )
+     ],
+     outputs=gr.Textbox(label="Cevap"),
+     title="BERT Soru-Cevap Sistemi",
+     description="Metin ve sorunuzu girin ve BERT modeli cevabı metin içerisinden çıkarsın.",
+     theme="default",
+ )
+
+ if __name__ == "__main__":
+     demo.launch()
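
For reference, a minimal smoke test of the extraction logic outside the Gradio UI could look like the sketch below. It is only an illustration, not part of this commit: it assumes the checkpoint-1700 directory sits next to app.py, that torch, transformers, and gradio are installed, and the sample question and context strings are placeholders chosen here.

# smoke_test.py (hypothetical, not in this commit)
# Importing app loads the tokenizer and model from checkpoint-1700
# but does not launch the UI, thanks to the __main__ guard in app.py.
from app import answer_question

context = "Ankara, Türkiye'nin başkentidir ve ülkenin ikinci en kalabalık şehridir."
question = "Türkiye'nin başkenti neresidir?"

# Prints the span the model extracts from the context, e.g. "Ankara".
print(answer_question(question, context))

Running python app.py itself launches the Gradio interface locally (on port 7860 by default).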