FatHamster committed
Commit 95964a8
1 Parent(s): c120869

Update main.py

Files changed (1)
main.py +13 -7
main.py CHANGED
@@ -1,10 +1,16 @@
 from transformers import AutoTokenizer, AutoModelForSequenceClassification
-
+import torch.nn as nn
 tokenizer = AutoTokenizer.from_pretrained("vikram71198/distilroberta-base-finetuned-fake-news-detection")
 model = AutoModelForSequenceClassification.from_pretrained("vikram71198/distilroberta-base-finetuned-fake-news-detection")
-text = "This movie is really good!"
-inputs = tokenizer(text, return_tensors="pt")
-outputs = model(**inputs)
-
-print(f"Input text: {text}")
-print(f"Predicted label: {outputs[0]['label']}, score: {outputs[0]['score']:.2f}")
+#Following the same truncation & padding strategy used while training
+encoded_input = tokenizer("Enter any news article to be classified. Can be a list of articles too.", truncation = True, padding = "max_length", max_length = 512, return_tensors='pt')
+output = model(**encoded_input)["logits"]
+#detaching the output from the computation graph
+detached_output = output.detach()
+#Applying softmax here for single label classification
+softmax = nn.Softmax(dim = 1)
+prediction_probabilities = list(softmax(detached_output).detach().numpy())
+predictions = []
+for x,y in prediction_probabilities:
+    predictions.append("not_fake_news") if x > y else predictions.append("fake_news")
+print(predictions)
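
For reference, the committed inference logic can also be written with torch.no_grad() and argmax instead of manual detaching. This is a minimal sketch, not part of the commit: the label order (index 0 → "not_fake_news") is assumed from the committed x > y comparison, and the article strings are placeholders.

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("vikram71198/distilroberta-base-finetuned-fake-news-detection")
model = AutoModelForSequenceClassification.from_pretrained("vikram71198/distilroberta-base-finetuned-fake-news-detection")

# Placeholder articles; any list of strings works
articles = ["Example news article one.", "Example news article two."]

# Same truncation & padding strategy as the committed script
encoded = tokenizer(articles, truncation=True, padding="max_length", max_length=512, return_tensors="pt")

# no_grad() skips building the computation graph, so no detach() is needed
with torch.no_grad():
    logits = model(**encoded).logits

labels = ["not_fake_news", "fake_news"]  # index order assumed from the committed x > y check
predictions = [labels[i] for i in logits.argmax(dim=1).tolist()]
print(predictions)

Because softmax is monotonic, argmax over the raw logits yields the same predictions as the committed softmax-then-compare loop; torch.softmax(logits, dim=1) is only needed when the class probabilities themselves are wanted.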