TuringsSolutions committed on
Commit
0d2d507
1 Parent(s): 05c1e81

Create app.py

Files changed (1)
  1. app.py +30 -0
app.py ADDED
@@ -0,0 +1,30 @@
+ import gradio as gr
+ from transformers import AutoModel, AutoTokenizer
+ import torch
+
+ # Load the model and tokenizer
+ model_name = "TuringsSolutions/TechLegalV1"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModel.from_pretrained(model_name)
+
+ # Function to make predictions
+ def predict(text):
+     inputs = tokenizer(text, return_tensors="pt")
+     with torch.no_grad():
+         outputs = model(**inputs)
+     # Assuming we need to extract some specific information from outputs
+     # Modify this part based on your model's output format
+     return outputs.last_hidden_state.mean(dim=1).squeeze().tolist()
+
+ # Create a Gradio interface
+ iface = gr.Interface(
+     fn=predict,
+     inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
+     outputs="json",
+     title="Tech Legal Model",
+     description="A model for analyzing tech legal documents."
+ )
+
+ # Launch the interface
+ if __name__ == "__main__":
+     iface.launch()
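
A minimal usage sketch for the predict helper above (hypothetical input text; assumes the TuringsSolutions/TechLegalV1 checkpoint loads with AutoModel and that the returned list is the mean-pooled last hidden state):

    # Hypothetical check of predict() from app.py
    embedding = predict("This license grants a non-exclusive right to use the software.")
    print(len(embedding))  # length equals the model's hidden size (e.g. 768 for BERT-base-sized models)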