import os
import requests
from transformers import AutoModel, AutoTokenizer
from src.constance import model_download

class HuggingFace:
    """Examples of using Hugging Face: hosted Inference API and local model loading."""

    @staticmethod
    def example1():
        """POST a prompt to the hosted Inference API for gpt2 and print the JSON reply.

        Reads the API token from the HUGGING_FACE_API_KEY environment variable.
        """
        # Hoisted out of the f-string: the original nested single quotes inside
        # a single-quoted f-string, which is a SyntaxError before Python 3.12.
        api_key = os.getenv("HUGGING_FACE_API_KEY")
        response = requests.post(
            url="https://api-inference.huggingface.co/models/gpt2",
            json={"inputs": "hello，Hugging Face!"},
            headers={
                "Authorization": f"Bearer {api_key}"
            }
        )
        print(response.json())

    @staticmethod
    def local_model_hugging_face():
        """Load 'bert-base-chinese' model and tokenizer into a local cache.

        A model is made up of two parts together: the model weights and the
        tokenizer (word-embedding) component.

        Returns:
            tuple: (model, tokenizer) loaded from the local cache directory.
        """
        model_name = 'bert-base-chinese'
        # Project helper — presumably resolves/creates the local cache path
        # for this model (TODO confirm against src.constance).
        cache_dir = model_download(model_name)
        model = AutoModel.from_pretrained(model_name, cache_dir=cache_dir)
        tokenizer = AutoTokenizer.from_pretrained(model_name, cache_dir=cache_dir)
        # The original discarded both objects; returning them makes the method
        # useful to callers and stays backward-compatible (return was None).
        return model, tokenizer

    def start(self):
        """Entry point: load the local model and tokenizer and return them."""
        return self.local_model_hugging_face()