import torch
from torch.autograd import Variable
from transformers import (
    AutoModelForSeq2SeqLM,
    AutoModelWithLMHead,
    AutoTokenizer,
    pipeline,
)

# Load an English -> Chinese translation model from the Hugging Face hub.
# AutoModelWithLMHead is deprecated in transformers; for an encoder-decoder
# (seq2seq) translation model the correct auto class is AutoModelForSeq2SeqLM.
model_name = 'liam168/trans-opus-mt-en-zh'
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Build a translation pipeline pinned to the CPU.
translation = pipeline(
    "translation_en_to_zh",
    model=model,
    tokenizer=tokenizer,
    device=torch.device('cpu'),
)
# max_length caps the length of the generated translation.
result = translation('I like to study Data Science and Machine Learning.', max_length=400)

print("hello world!!!")  # fixed typo: "word" -> "world"
print(result)

# NOTE(review): torch.cpu.current_device() always returns 'cpu' and only
# exists on recent PyTorch releases — this was possibly intended to be
# torch.cuda.current_device(); confirm intent before changing, since the
# pipeline above is explicitly pinned to CPU.
print(torch.cpu.current_device())

# Autograd demo: requires_grad propagates through tensor operations.
# torch.autograd.Variable is deprecated since PyTorch 0.4 — plain tensors
# carry requires_grad directly, and wrapping in Variable is a no-op.
x = torch.randn(5, 5)
y = torch.randn(5, 5)
z = torch.randn(5, 5, requires_grad=True)

# Neither operand requires grad, so the result does not either.
a = x + y
print(a.requires_grad)  # False

# Any operand requiring grad makes the result require grad.
b = a + z
print(b.requires_grad)  # True



