```python
import json
import requests

API_TOKEN = "hf_..."  # replace with your Hugging Face access token

API_URL = "https://api-inference.huggingface.co/models/google/tapas-tiny-finetuned-tabfact"
headers = {"Authorization": f"Bearer {API_TOKEN}"}

def query(payload):
    data = json.dumps(payload)
    response = requests.request("POST", API_URL, headers=headers, data=data)
    return json.loads(response.content.decode("utf-8"))

data = query({"inputs": "I like you. I love you"})
```
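Note that the sample input above is a generic placeholder sentence, while TabFact checkpoints verify a statement against a table. A slightly more defensive variant of the same request, sketched under the assumption that your token is exported as `HF_API_TOKEN`:

```python
import os

import requests

API_URL = "https://api-inference.huggingface.co/models/google/tapas-tiny-finetuned-tabfact"
# assumption: the access token is exported as HF_API_TOKEN in your environment
headers = {"Authorization": f"Bearer {os.environ['HF_API_TOKEN']}"}

def query(payload):
    response = requests.post(API_URL, headers=headers, json=payload)
    response.raise_for_status()  # surface HTTP errors (401, 503 while the model loads, ...)
    return response.json()

data = query({"inputs": "I like you. I love you"})
```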
```python
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("google/tapas-tiny-finetuned-tabfact")
model = AutoModelForSequenceClassification.from_pretrained("google/tapas-tiny-finetuned-tabfact")
```
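To actually run the model, pass a pandas DataFrame together with the statement to verify; TAPAS tokenizers require every table cell to be a string. The table, statement, and label lookup below are a minimal illustrative sketch, assuming the checkpoint's `id2label` mapping carries the entailed/refuted labels:

```python
import pandas as pd

# hypothetical table and statement, for illustration only
table = pd.DataFrame(
    {
        "City": ["Paris", "Berlin"],
        "Population": ["2,100,000", "3,600,000"],
    }
)
statement = "Berlin has a larger population than Paris."

inputs = tokenizer(table=table, queries=[statement], padding="max_length", return_tensors="pt")
outputs = model(**inputs)
predicted_class = outputs.logits.argmax(-1).item()
# the checkpoint's own config decides which index means entailed vs. refuted
print(model.config.id2label[predicted_class])
```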
```bash
git lfs install
git clone https://huggingface.co/google/tapas-tiny-finetuned-tabfact

# if you want to clone without large files – just their pointers
# prepend your git clone with the following env var:
GIT_LFS_SKIP_SMUDGE=1
```
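For example, as a single command that fetches only the LFS pointer files instead of the model weights:

```bash
GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/google/tapas-tiny-finetuned-tabfact
```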