Files changed (1)
  1. README.md +24 -0
README.md CHANGED
@@ -65,6 +65,8 @@ $ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: applica
 Or Python API:
 
 ```
+import torch
+import torch.nn.functional as F
 from transformers import AutoModelForSequenceClassification, AutoTokenizer
 
 model = AutoModelForSequenceClassification.from_pretrained("madhurjindal/autonlp-Gibberish-Detector-492513457", use_auth_token=True)
@@ -74,4 +76,26 @@ tokenizer = AutoTokenizer.from_pretrained("madhurjindal/autonlp-Gibberish-Detect
 inputs = tokenizer("I love Machine Learning!", return_tensors="pt")
 
 outputs = model(**inputs)
+
+probs = F.softmax(outputs.logits, dim=-1)
+
+predicted_index = torch.argmax(probs, dim=1).item()
+
+predicted_prob = probs[0][predicted_index].item()
+
+labels = model.config.id2label
+
+predicted_label = labels[predicted_index]
+
+for i, prob in enumerate(probs[0]):
+    print(f"Class: {labels[i]}, Probability: {prob:.4f}")
+```
+
+Another simplified solution with the transformers pipeline:
+
+```
+from transformers import pipeline
+selected_model = "madhurjindal/autonlp-Gibberish-Detector-492513457"
+classifier = pipeline("text-classification", model=selected_model)
+classifier("I love Machine Learning!")
 ```
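
For reference, the per-class probabilities that the added snippet computes manually with `F.softmax` can also be obtained from the pipeline itself. A minimal sketch, assuming a reasonably recent transformers release where passing `top_k=None` makes the text-classification pipeline return a score for every label (older releases used `return_all_scores=True` instead):

```
from transformers import pipeline

selected_model = "madhurjindal/autonlp-Gibberish-Detector-492513457"
classifier = pipeline("text-classification", model=selected_model)

# top_k=None requests scores for all classes rather than only the top one;
# for a single input string this yields a list of {"label", "score"} dicts.
all_scores = classifier("I love Machine Learning!", top_k=None)

for entry in all_scores:
    print(f"Class: {entry['label']}, Probability: {entry['score']:.4f}")
```

This mirrors the manual softmax loop above while letting the pipeline handle tokenization and post-processing.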