D3V1L1810 committed on
Commit
3ed9870
·
verified ·
1 Parent(s): fc82176

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +69 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import tensorflow as tf
from transformers import BertTokenizer, TFBertForSequenceClassification
import numpy as np
import json
import requests
import gradio as gr
import logging

# Load the fine-tuned tokenizer and classifier once at import time so that
# every prediction request reuses the same in-memory model.
bert_tokenizer = BertTokenizer.from_pretrained('MultiTokenizer_ep10')
bert_model = TFBertForSequenceClassification.from_pretrained('MultiModel_ep10')
11
+
12
# NOTE: API forwarding is intentionally disabled; re-enable together with the
# commented-out result_url lines at the end of predict_text.
# def send_results_to_api(data, result_url):
#     headers = {'Content-Type': 'application/json'}
#     response = requests.post(result_url, json=data, headers=headers)
#
#     if response.status_code == 200:
#         return response.json()  # .json is a method; original comment lacked the call parentheses
#     else:
#         return {'error': f"failed to send result to API: {response.status_code}"}
20
+
21
def predict_text(params):
    """Classify one or more texts with the fine-tuned BERT model.

    Parameters
    ----------
    params : str
        JSON string of the form ``{"texts": ["text1", "text2", ...]}``.

    Returns
    -------
    str or dict
        On success, a JSON string ``{"solutions": [{"text": ..., "label": [...]}]}``.
        On bad input, a plain dict with an ``"error"`` key (Gradio serializes it).
    """
    try:
        params = json.loads(params)
    except json.JSONDecodeError as e:
        # Was bare `JSONDecodeError`, an unimported name: the handler itself
        # raised NameError instead of reporting the bad input.
        logging.error(f"Invalid JSON input: {e.msg} at line {e.lineno} column {e.colno}")
        return {"error": f"Invalid JSON input: {e.msg} at line {e.lineno} column {e.colno}"}

    texts = params.get("texts", [])
    # api = params.get("api", "")
    # job_id = params.get("job_id", "")

    if not texts:
        # Message previously said 'urls'; the required key is 'texts'.
        return {"error": "Missing required parameters: 'texts'"}

    # Class-id -> human-readable label. Constant, so hoisted out of the loop.
    label = {0: 'BUSINESS', 1: 'COMEDY', 2: 'CRIME', 3: 'FOOD & DRINK', 4: 'POLITICS', 5: 'SPORTS', 6: 'TRAVEL'}

    solutions = []

    for text in texts:
        encoding = bert_tokenizer.encode_plus(
            text,
            add_special_tokens=True,
            max_length=128,
            return_token_type_ids=True,
            padding='max_length',
            truncation=True,
            return_attention_mask=True,
            return_tensors='tf'
        )

        # Pass tensors by keyword: the HF TF call signature is
        # (input_ids, attention_mask, token_type_ids), so the original
        # positional list [input_ids, token_type_ids, attention_mask]
        # silently swapped attention_mask and token_type_ids.
        outputs = bert_model(
            input_ids=encoding['input_ids'],
            attention_mask=encoding['attention_mask'],
            token_type_ids=encoding['token_type_ids'],
        )
        pred_label = int(tf.argmax(outputs.logits, axis=1).numpy()[0])

        solutions.append({'text': text, 'label': [label[pred_label]]})

    # result_url = f"{api}/{job_id}"
    # send_results_to_api(solutions, result_url)
    return json.dumps({"solutions": solutions})
63
+
64
+
65
# Gradio UI: one JSON textbox in, JSON out.
# The example in the label previously showed invalid JSON (single quotes and a
# missing closing brace), which would mislead users into sending unparseable
# payloads; it now shows a valid example that json.loads accepts.
inputt = gr.Textbox(label='Parameters in Json Format... Eg. {"texts": ["text1", "text2"]}')
outputt = gr.JSON()

application = gr.Interface(fn=predict_text, inputs=inputt, outputs=outputt, title='Multi Text Classification with API Integration..')
application.launch()
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
gradio
numpy
requests
tensorflow
tf-keras
transformers