
```python
from flask import Flask, request, jsonify
from transformers import pipeline
import openai
from newsapi import NewsApiClient
from notion_client import Client
from datetime import datetime, timedelta
import torch
from diffusers import StableDiffusionPipeline

# Initialize Flask app
app = Flask(__name__)

# Load Hugging Face question-answering model
qa_pipeline = pipeline("question-answering", model="distilbert-base-uncased-distilled-squad")

# OpenAI API key (replace with your own)
openai.api_key = "your_openai_api_key"

# NewsAPI key (replace with your own)
newsapi = NewsApiClient(api_key="your_news_api_key")

# Notion API key (replace with your own)
notion = Client(auth="your_notion_api_key")

# Load Stable Diffusion for image generation
device = "cuda" if torch.cuda.is_available() else "cpu"
sd_model = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5").to(device)
```

=== FUNCTION 1: Answer Student Questions ===

@app.route("/ask", methods=["POST"]) def answer_question(): data = request.json question = data.get("question", "") context = "This AI is trained to assist students with questions related to various subjects."

if not question:
    return jsonify({"error": "Please provide a question."}), 400

answer = qa_pipeline(question=question, context=context)
return jsonify({"question": question, "answer": answer["answer"]})

=== FUNCTION 2: Generate Code ===

@app.route("/generate_code", methods=["POST"]) def generate_code(): data = request.json prompt = data.get("prompt", "")

if not prompt:
    return jsonify({"error": "Please provide a prompt for code generation."}), 400

response = openai.Completion.create(
    engine="code-davinci-002",
    prompt=prompt,
    max_tokens=100
)
return jsonify({"code": response.choices[0].text.strip()})

=== FUNCTION 3: Get Daily News ===

@app.route("/news", methods=["GET"]) def get_news(): headlines = newsapi.get_top_headlines(language="en", category="technology") news_list = [{"title": article["title"], "url": article["url"]} for article in headlines["articles"]]

return jsonify({"news": news_list})

=== FUNCTION 4: Create a Planner Task ===

@app.route("/planner", methods=["POST"]) def create_planner(): data = request.json task = data.get("task", "") days = int(data.get("days", 1))

if not task:
    return jsonify({"error": "Please provide a task."}), 400

due_date = datetime.now() + timedelta(days=days)

return jsonify({"task": task, "due_date": due_date.strftime("%Y-%m-%d")})

=== FUNCTION 5: Save Notes to Notion ===

@app.route("/notion", methods=["POST"]) def save_notion_note(): data = request.json title = data.get("title", "Untitled Note") content = data.get("content", "")

if not content:
    return jsonify({"error": "Please provide content for the note."}), 400

notion.pages.create(
    parent={"database_id": "your_notion_database_id"},
    properties={"title": {"title": [{"text": {"content": title}}]}},
    children=[{"object": "block", "type": "paragraph", "paragraph": {"text": [{"type": "text", "text": {"content": content}}]}}]
)

return jsonify({"message": "Note added successfully to Notion!"})

=== FUNCTION 6: Generate AI Images ===

@app.route("/generate_image", methods=["POST"]) def generate_image(): data = request.json prompt = data.get("prompt", "")

if not prompt:
    return jsonify({"error": "Please provide an image prompt."}), 400

image = sd_model(prompt).images[0]
image_path = "generated_image.png"
image.save(image_path)

return jsonify({"message": "Image generated successfully!", "image_path": image_path})

=== RUN THE APP ===

```python
if __name__ == "__main__":
    app.run(debug=True)
```
