"""FastAPI service exposing a Mistral-7B text-generation endpoint.

On import, this module authenticates with the Hugging Face Hub, downloads
the Mistral-7B-v0.3 weights if they are not already present locally, and
loads them into a transformers pipeline shared by all requests.
"""

import os
from pathlib import Path

from fastapi import FastAPI
from huggingface_hub import login, snapshot_download
from transformers import pipeline

# Single source of truth for the Hub credential. The original code read
# two different env vars (HF_TOKEN for login, HUGGINGFACE_HUB_TOKEN for
# the download) for the same token, which can leave the download
# unauthenticated against a gated repo.
HF_TOKEN = os.getenv("HF_TOKEN")

# Authenticate with the Hugging Face Hub (required for gated models).
login(token=HF_TOKEN)

app = FastAPI()

# Local directory where the model weights are stored.
mistral_models_path = Path("/app/mistralai/Mistral-7B-v0.3")
mistral_models_path.mkdir(parents=True, exist_ok=True)

# Download the weights only when the sentinel file is missing.
# NOTE(review): these allow_patterns fetch the *raw* Mistral checkpoint
# files (params.json / consolidated.safetensors / tokenizer.model.v3),
# but the transformers pipeline below expects HF-format files
# (config.json, tokenizer files, sharded safetensors). Confirm the repo
# layout, or drop allow_patterns to pull the full snapshot.
if not (mistral_models_path / "params.json").exists():
    snapshot_download(
        repo_id="mistralai/Mistral-7B-v0.3",
        allow_patterns=[
            "params.json",
            "consolidated.safetensors",
            "tokenizer.model.v3",
        ],
        local_dir=mistral_models_path,
        token=HF_TOKEN,  # same credential as login() above
    )

# Mistral-7B is a decoder-only (causal) LM, so the correct pipeline task
# is "text-generation". The original "text2text-generation" task targets
# seq2seq models and cannot load this checkpoint.
pipe_mistral = pipeline("text-generation", model=str(mistral_models_path))


@app.get("/mistral")
def mistral_endpoint(input: str):
    """Generate a completion for the ``input`` query parameter.

    The parameter deliberately keeps the name ``input`` (shadowing the
    builtin) because it is the public query-parameter name; renaming it
    would break existing clients.
    """
    output = pipe_mistral(input)
    return {"output": output[0]["generated_text"]}


@app.get("/")
def greet_json():
    """Liveness / hello endpoint."""
    return {"Hello": "World!"}