---
language:
- en
metrics:
- f1
pipeline_tag: text-classification
---
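Example usage: the snippet below loads the fine-tuned BART checkpoint and classifies a sentence pair. It is a minimal sketch that assumes the checkpoint generates the token `1` for paraphrase pairs and `0` otherwise, and that a CUDA device is used when available.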
```python
import torch
from transformers import BartTokenizer, BartForConditionalGeneration

device = "cuda" if torch.cuda.is_available() else "cpu"

# Initialize tokenizer and model
tokenizer = BartTokenizer.from_pretrained("ihgn/paraphrase-detection")
model = BartForConditionalGeneration.from_pretrained("ihgn/paraphrase-detection").to(device)

source_sentence = "This was a series of nested angular standards , so that measurements in azimuth and elevation could be done directly in polar coordinates relative to the ecliptic."
target_paraphrase = "This was a series of nested polar scales , so that measurements in azimuth and elevation could be performed directly in angular coordinates relative to the ecliptic"

def paraphrase_detection(model, tokenizer, source_sentence, target_paraphrase):
    # Tokenize the sentence pair, joined by a <sep> marker
    inputs = tokenizer.encode_plus(source_sentence + ' <sep> ' + target_paraphrase, return_tensors='pt')

    # Generate the label token(s) with the model
    with torch.no_grad():
        outputs = model.generate(inputs['input_ids'].to(device))

    # Decode the generated output and map it to a binary label
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True).strip()
    predicted_label = 1 if generated_text == '1' else 0
    print("Predicted Label:", predicted_label)

paraphrase_detection(model, tokenizer, source_sentence, target_paraphrase)
```
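The helper maps the generated token to a binary label, printing `Predicted Label: 1` when the model judges the two sentences to be paraphrases and `Predicted Label: 0` otherwise.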