{
    "sourceFile": "app.py",
    "activeCommit": 0,
    "commits": [
        {
            "activePatchIndex": 4,
            "patches": [
                {
                    "date": 1708166138917,
                    "content": "Index: \n===================================================================\n--- \n+++ \n"
                },
                {
                    "date": 1708166825700,
                    "content": "Index: \n===================================================================\n--- \n+++ \n@@ -1,7 +1,35 @@\n import gradio as gr\n+from transformers import AutoTokenizer, AutoModelForSequenceClassification\n+import torch\n \n-def greet(name):\n-    return \"Hello \" + name + \"!!\"\n+# Load the trained model and tokenizer\n\\ No newline at end of file\n+model_path = \"path/to/save/model\"\n+tokenizer_path = \"path/to/save/tokenizer\"\n \n-iface = gr.Interface(fn=greet, inputs=\"text\", outputs=\"text\")\n-iface.launch()\n+model = AutoModelForSequenceClassification.from_pretrained(model_path)\n+tokenizer = AutoTokenizer.from_pretrained(tokenizer_path)\n+model.eval()  # Set model to evaluation mode\n+\n+def predict_paraphrase(sentence1, sentence2):\n+    # Tokenize the input sentences\n+    inputs = tokenizer(sentence1, sentence2, return_tensors=\"pt\", padding=True, truncation=True)\n+    with torch.no_grad():\n+        outputs = model(**inputs)\n+    \n+    # Get probabilities\n+    probs = torch.nn.functional.softmax(outputs.logits, dim=-1).tolist()[0]\n+    \n+    # Assuming the first class (index 0) is 'not paraphrase' and the second class (index 1) is 'paraphrase'\n+    return {\"Not Paraphrase\": probs[0], \"Paraphrase\": probs[1]}\n+\n+# Create Gradio interface\n+iface = gr.Interface(\n+    fn=predict_paraphrase,\n+    inputs=[gr.inputs.Textbox(lines=2, placeholder=\"Enter Sentence 1 Here...\"),\n+            gr.inputs.Textbox(lines=2, placeholder=\"Enter Sentence 2 Here...\")],\n+    outputs=gr.outputs.Label(num_top_classes=2),\n+    title=\"Paraphrase Identification\",\n+    description=\"This model predicts whether two sentences are paraphrases of each other.\"\n+)\n+\n+iface.launch()\n"
                },
                {
                    "date": 1708166830798,
                    "content": "Index: \n===================================================================\n--- \n+++ \n@@ -31,5 +31,5 @@\n     title=\"Paraphrase Identification\",\n     description=\"This model predicts whether two sentences are paraphrases of each other.\"\n )\n \n-iface.launch()\n\\ No newline at end of file\n+iface.launch()\n"
                },
                {
                    "date": 1708167302135,
                    "content": "Index: \n===================================================================\n--- \n+++ \n@@ -1,8 +1,10 @@\n import gradio as gr\n from transformers import AutoTokenizer, AutoModelForSequenceClassification\n import torch\n+from trainml import train_and_save_model  # Import the training function\n \n+\n # Load the trained model and tokenizer\n model_path = \"path/to/save/model\"\n tokenizer_path = \"path/to/save/tokenizer\"\n \n"
                },
                {
                    "date": 1708167312025,
                    "content": "Index: \n===================================================================\n--- \n+++ \n@@ -1,10 +1,10 @@\n import gradio as gr\n from transformers import AutoTokenizer, AutoModelForSequenceClassification\n import torch\n from trainml import train_and_save_model  # Import the training function\n+train_and_save_model()\n \n-\n # Load the trained model and tokenizer\n model_path = \"path/to/save/model\"\n tokenizer_path = \"path/to/save/tokenizer\"\n \n"
                }
            ],
            "date": 1708166138917,
            "name": "Commit-0",
            "content": "import gradio as gr\n\ndef greet(name):\n    return \"Hello \" + name + \"!!\"\n\niface = gr.Interface(fn=greet, inputs=\"text\", outputs=\"text\")\niface.launch()"
        }
    ]
}
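
For reference, below is a sketch of what app.py looks like after applying the recorded patches in order (patch 2 through patch 5 of Commit-0). It is a reconstruction, not a verified artifact: the patches use the legacy gr.inputs.Textbox / gr.outputs.Label wrappers, which were removed in Gradio 4, so this sketch substitutes the current gr.Textbox and gr.Label components. The model_path, tokenizer_path, and the trainml module are placeholders carried over from the patches and are assumed to exist in the Space.

import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
from trainml import train_and_save_model  # training helper referenced by the patches (assumed to exist)

# The final patch calls the training step before serving
train_and_save_model()

# Load the trained model and tokenizer (placeholder paths from the patches)
model_path = "path/to/save/model"
tokenizer_path = "path/to/save/tokenizer"

model = AutoModelForSequenceClassification.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(tokenizer_path)
model.eval()  # Set model to evaluation mode

def predict_paraphrase(sentence1, sentence2):
    # Encode the sentence pair as a single input for the sequence-classification head
    inputs = tokenizer(sentence1, sentence2, return_tensors="pt", padding=True, truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)

    # Convert logits to class probabilities
    probs = torch.nn.functional.softmax(outputs.logits, dim=-1).tolist()[0]

    # Assumes index 0 = "not paraphrase", index 1 = "paraphrase", as noted in the patch
    return {"Not Paraphrase": probs[0], "Paraphrase": probs[1]}

# Gradio 4.x components; the patches use the legacy gr.inputs / gr.outputs API
iface = gr.Interface(
    fn=predict_paraphrase,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter Sentence 1 Here..."),
        gr.Textbox(lines=2, placeholder="Enter Sentence 2 Here..."),
    ],
    outputs=gr.Label(num_top_classes=2),
    title="Paraphrase Identification",
    description="This model predicts whether two sentences are paraphrases of each other.",
)

iface.launch()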