menimeni123 commited on
Commit
5237bb2
1 Parent(s): fc9cdc9
Files changed (3) hide show
  1. app.py +57 -47
  2. config.json +0 -15
  3. requirements.txt +2 -1
app.py CHANGED
@@ -1,55 +1,65 @@
1
- import torch
 
2
  import joblib
3
- from flask import Flask, request, jsonify
4
  from transformers import BertTokenizer, BertForSequenceClassification
5
- import torch.nn.functional as F
6
 
7
- # Initialize Flask application
8
- app = Flask(__name__)
9
-
10
- # Load model and tokenizer
11
- model = joblib.load('model.joblib')
12
  tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
13
- model.eval()
14
 
15
- # Set device to CUDA if available
16
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 
 
17
  model.to(device)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
- # Inference function
20
- def classify_text(text):
21
- encoding = tokenizer(str(text), truncation=True, padding=True, max_length=128, return_tensors='pt')
22
- input_ids = encoding['input_ids'].to(device)
23
- attention_mask = encoding['attention_mask'].to(device)
24
-
25
- with torch.no_grad():
26
- outputs = model(input_ids, attention_mask=attention_mask)
27
- logits = outputs.logits
28
- probabilities = F.softmax(logits, dim=-1)
29
- confidence, predicted_class = torch.max(probabilities, dim=-1)
30
-
31
- class_names = ["JAILBREAK", "INJECTION", "PHISHING", "SAFE"]
32
- predicted_label = class_names[predicted_class.item()]
33
- confidence_score = confidence.item()
34
-
35
- return predicted_label, confidence_score
36
-
37
- # Define the inference route
38
- @app.route('/inference', methods=['POST'])
39
- def inference():
40
- data = request.json
41
- if 'text' not in data:
42
- return jsonify({"error": "No text provided"}), 400
43
-
44
- text = data['text']
45
- label, confidence = classify_text(text)
46
-
47
- return jsonify({
48
- 'text': text,
49
- 'classification': label,
50
- 'confidence': confidence
51
- })
52
-
53
- # Start the Flask server
54
- if __name__ == '__main__':
55
- app.run(host='0.0.0.0', port=8080)
 
1
+ # app.py
2
+ import os
3
  import joblib
4
+ import torch
5
  from transformers import BertTokenizer, BertForSequenceClassification
6
+ from torch.nn.functional import softmax
7
 
8
+ # Load the tokenizer and model
 
 
 
 
9
  tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
 
10
 
11
+ # Check if CUDA is available, otherwise use CPU
12
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
13
+
14
+ # Load the saved model
15
+ model = joblib.load('model.joblib')
16
  model.to(device)
17
+ model.eval()
18
+
19
+ # Class names
20
+ class_names = ["JAILBREAK", "INJECTION", "PHISHING", "SAFE"]
21
+
22
+ def preprocess(text):
23
+ # Tokenize the input text
24
+ encoding = tokenizer(
25
+ text,
26
+ truncation=True,
27
+ padding=True,
28
+ max_length=128,
29
+ return_tensors='pt'
30
+ )
31
+ return encoding
32
+
33
+ def inference(model_inputs):
34
+ """
35
+ This function will be called for every inference request.
36
+ """
37
+ try:
38
+ # Get the text input
39
+ text = model_inputs.get('text', None)
40
+ if text is None:
41
+ return {'message': 'No text provided for inference.'}
42
+
43
+ # Preprocess the text
44
+ encoding = preprocess(text)
45
+ input_ids = encoding['input_ids'].to(device)
46
+ attention_mask = encoding['attention_mask'].to(device)
47
+
48
+ # Perform inference
49
+ with torch.no_grad():
50
+ outputs = model(input_ids, attention_mask=attention_mask)
51
+ logits = outputs.logits
52
+ probabilities = softmax(logits, dim=-1)
53
+ confidence, predicted_class = torch.max(probabilities, dim=-1)
54
+
55
+ # Prepare the response
56
+ predicted_label = class_names[predicted_class.item()]
57
+ confidence_score = confidence.item()
58
+
59
+ return {
60
+ 'classification': predicted_label,
61
+ 'confidence': confidence_score
62
+ }
63
 
64
+ except Exception as e:
65
+ return {'error': str(e)}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
config.json DELETED
@@ -1,15 +0,0 @@
1
- {
2
- "model_type": "bert",
3
- "num_labels": 4,
4
- "hidden_size": 768,
5
- "vocab_size": 30522,
6
- "hidden_act": "gelu",
7
- "initializer_range": 0.02,
8
- "layer_norm_eps": 1e-12,
9
- "max_position_embeddings": 512,
10
- "type_vocab_size": 2,
11
- "attention_probs_dropout_prob": 0.1,
12
- "hidden_dropout_prob": 0.1,
13
- "intermediate_size": 3072
14
- }
15
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
requirements.txt CHANGED
@@ -1,3 +1,4 @@
 
1
  torch
2
- transformers=4.44.2
3
  joblib
 
1
+ # requirements.txt
2
  torch
3
+ transformers
4
  joblib