def scaling(x, min_x, max_x, r1, r2):
    """Linearly map x from the interval [min_x, max_x] onto [r1, r2].

    Works elementwise on numeric scalars and on array-like values
    (e.g. torch tensors) that support arithmetic operators.
    """
    # Same operation order as the inverse in `descaling`, just reversed.
    spanned = (x - min_x) * (r2 - r1) / (max_x - min_x)
    return r1 + spanned
def descaling(x_s, min_x, max_x, r1, r2):
    """Invert `scaling`: map x_s from [r1, r2] back onto [min_x, max_x].

    Accepts scalars or array-like values (e.g. torch tensors) that
    support arithmetic operators.
    """
    return (x_s - r1) * (max_x - min_x) / (r2 - r1) + min_x
# Score one sentence with the (externally defined) model: tokenize, run a
# forward pass without gradient tracking, squash the logits with tanh into
# [-1, 1], then map that range back onto the 1-7 scale via `descaling`.
# NOTE(review): assumes `tokenizer`, `model`, and `device` are defined
# earlier in the file — confirm against the full script.
with torch.no_grad():
    sentence = "They are equally important, absolutely, and just as real as each other."
    encoded = tokenizer([sentence], return_tensors="pt", add_special_tokens=True, padding=True)
    logits = model(**encoded.to(device)).logits
    squashed = torch.tanh(logits).cpu()
    # [0] drops the batch dimension before converting to a NumPy array.
    l_hat = descaling(squashed, 1, 7, -1, 1)[0].numpy()
    print(l_hat)