import os

import requests
from dotenv import load_dotenv
from supabase import create_client

# Load environment variables from .env file
load_dotenv()

# Supabase API config
SUPABASE_URL = "https://lmpazoxzucnlqqxjoihi.supabase.co"
SUPABASE_KEY = os.getenv("SUPABASE_API_KEY")

if not SUPABASE_KEY:
    raise ValueError("SUPABASE_API_KEY is not set in the environment variables.")

supabase = create_client(SUPABASE_URL, SUPABASE_KEY)

# Hugging Face API config
# HF_API_URL = "https://router.huggingface.co/hf-inference/models/google/gemma-7b"
HF_MODELS = {
    "gemma": "https://api-inference.huggingface.co/models/google/gemma-7b",
    "bart": "https://api-inference.huggingface.co/models/facebook/bart-large-cnn",
}

HF_API_TOKEN = os.getenv("HF_API_TOKEN")

# Ensure the API key is loaded before building the request headers
if not HF_API_TOKEN:
    raise ValueError("Missing Hugging Face API key. Check your .env file.")

HF_HEADERS = {"Authorization": f"Bearer {HF_API_TOKEN}"}


def query(payload, model="gemma"):
    """
    Sends a request to the selected Hugging Face model API.

    :param payload: The input data for inference.
    :param model: Either 'gemma' (google/gemma-7b) or 'bart' (facebook/bart-large-cnn).
    :return: The model's response as parsed JSON, or None if the request fails.
    """
    if model not in HF_MODELS:
        raise ValueError("Invalid model name. Choose 'gemma' or 'bart'.")

    # HF_MODELS already stores the full endpoint URL for each model
    api_url = HF_MODELS[model]

    try:
        response = requests.post(api_url, headers=HF_HEADERS, json=payload)
        if response.status_code == 401:
            print(f"Error querying Hugging Face model '{model}': 401 Unauthorized. Check API key.")
            return None  # Authentication failure
        response.raise_for_status()  # Raise for other failed requests (e.g., 5xx errors)
        return response.json()  # Return the parsed JSON response
    except requests.exceptions.RequestException as e:
        print(f"Error querying Hugging Face model '{model}': {e}")
        return None  # Return None if the API call fails
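

# Example usage (illustrative sketch, not part of the original script). The payload
# shape follows the generic Hugging Face Inference API convention of an "inputs"
# field plus optional "parameters"; the exact keys each hosted model accepts, and
# the "summaries" Supabase table below, are assumptions for demonstration only.
if __name__ == "__main__":
    # Summarization via facebook/bart-large-cnn (model selected by the `model` argument)
    article = "The Hugging Face Inference API exposes hosted models over simple HTTP endpoints."
    summary = query({"inputs": article, "parameters": {"max_length": 60}}, model="bart")
    print(summary)

    # Text generation via google/gemma-7b
    generation = query({"inputs": "Write one sentence about vector databases."}, model="gemma")
    print(generation)

    # Hypothetical follow-up: persist the summary to Supabase. Summarization models
    # typically return a list like [{"summary_text": "..."}]; the table and column
    # names here are assumed, not defined anywhere in this script.
    if summary:
        supabase.table("summaries").insert(
            {"source": article, "summary": summary[0].get("summary_text")}
        ).execute()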