"""
模型下载：
HuggingFace上申请Token
可直接用命令下载:
huggingface-cli download Unbabel/wmt22-cometkiwi-da --local-dir /root/autodl-tmp/wmt22-cometkiwi-da --token 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
huggingface-cli download Unbabel/wmt23-cometkiwi-da-xl --local-dir /root/autodl-tmp/wmt23-cometkiwi-da-xl --token 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
huggingface-cli download Unbabel/wmt23-cometkiwi-da-xxl --local-dir /root/autodl-tmp/wmt23-cometkiwi-da-xxl --token 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

或者程序.
"""

import os
# Configure the HF environment BEFORE importing any Hub-dependent library:
# - HF_ENDPOINT routes Hub traffic through the hf-mirror.com mirror
#   (useful where huggingface.co is unreachable).
# - HF_HOME moves the download/cache directory onto the large
#   /root/autodl-tmp volume instead of the default ~/.cache location.
os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"
os.environ["HF_HOME"] = "/root/autodl-tmp"

from huggingface_hub import login
# Interactive Hub login is only needed when downloading gated models;
# left disabled since the checkpoint is loaded from a local path below.
# login()

from comet import download_model, load_from_checkpoint
import json
from loguru import logger

# Option A: fetch the checkpoint from the Hub at runtime (disabled; the
# models were pre-downloaded with huggingface-cli, see module docstring).
# model_path = download_model("Unbabel/wmt23-cometkiwi-da-xl")
# print(model_path)
# Option B (active): load the pre-downloaded wmt23-cometkiwi-da-xl
# checkpoint from local disk; swap in the line below to use the smaller
# wmt22 model instead.
model_path = '/root/autodl-tmp/wmt23-cometkiwi-da-xl/checkpoints/model.ckpt'
# model_path = '/root/autodl-tmp/wmt22-cometkiwi-da/checkpoints/model.ckpt'
model = load_from_checkpoint(model_path)

logger.info('comet start')
# Input file: assumed to be a JSON array of objects carrying at least the
# keys 'text' (source) and 'translated' (MT output) — see mapping below.
with open('/root/TransVoice/simultaneous/translated_result.json', 'r', encoding='utf-8') as fs:
    results = json.load(fs)

def _to_comet_inputs(records):
    """Map translation records to COMET-Kiwi input dicts.

    Args:
        records: iterable of dicts, each with at least the keys
            'text' (source sentence) and 'translated' (MT hypothesis).

    Returns:
        list[dict]: one ``{"src": ..., "mt": ...}`` dict per record —
        the reference-free input format consumed by ``model.predict``.

    Raises:
        KeyError: if a record lacks 'text' or 'translated'.
    """
    return [{"src": rec['text'], "mt": rec['translated']} for rec in records]


data = _to_comet_inputs(results)

# batch_size=8 keeps per-step memory modest for the XL checkpoint;
# gpus=1 runs inference on a single device.
model_output = model.predict(data, batch_size=8, gpus=1)
print(model_output)
logger.info('comet end')