# Hugging Face Spaces page residue (status banner: "Spaces — Sleeping");
# kept as a comment so the file remains valid Python.
# Standard library
import os

# Third-party
import requests
from bs4 import BeautifulSoup
from fastapi import FastAPI
from googlesearch import search
from huggingface_hub import InferenceClient

app = FastAPI()

# Chat-completion client for the Gemma model.
# NOTE(review): presumably requires the HF_TOKEN environment variable to be set
# (e.g. as a Space secret); without it, inference calls will likely fail auth.
client = InferenceClient("google/gemma-3-4b-it", token=os.getenv("HF_TOKEN"))
def search_and_scrape(question):
    """Google-search islamweb.net for *question* and scrape the top result.

    Returns:
        (content, link): up to 3000 characters of the article body and the
        page URL, or (None, None) when the search or scrape fails.
    """
    full_query = f"site:islamweb.net {question}"
    try:
        results = search(full_query, num_results=1, lang="ar")
        # iter() tolerates `search` returning either a generator or a list;
        # StopIteration here means the query produced no hits.
        link = next(iter(results))
        headers = {'User-Agent': 'Mozilla/5.0'}
        resp = requests.get(link, headers=headers, timeout=5)
        resp.raise_for_status()  # surface HTTP 4xx/5xx as a failed scrape
        soup = BeautifulSoup(resp.text, 'html.parser')
        body = soup.find('div', {'class': 'item'})
        if body is None:
            # Unexpected page layout — no scrapable article container.
            return None, None
        return body.get_text(strip=True)[:3000], link
    except Exception:
        # Deliberate best-effort boundary: any search/network/parse failure
        # is reported as "no result". (Narrowed from a bare `except:` so
        # KeyboardInterrupt/SystemExit are no longer swallowed.)
        return None, None
@app.post("/ask")
async def ask_ai(data: dict):
    """Answer a question using text scraped from islamweb.net as context.

    Expects a JSON body like ``{"question": "..."}`` and returns
    ``{"answer": str, "source": str}`` (empty source when nothing was found).
    """
    # NOTE(review): the original function had no route decorator, so it was
    # never registered with FastAPI — @app.post("/ask") restores an endpoint;
    # confirm the intended path against the client that calls this service.
    question = data.get("question")
    if not question:
        # Missing/empty question: respond with the same "no results" shape
        # instead of searching for the literal string "None".
        return {"answer": "لم أجد نتائج.", "source": ""}
    context, link = search_and_scrape(question)
    if not context:
        return {"answer": "لم أجد نتائج.", "source": ""}
    # Constrain the model to answer only from the scraped context.
    system_msg = f"Answer only from context: {context}"
    response = client.chat_completion(
        messages=[
            {"role": "system", "content": system_msg},
            {"role": "user", "content": question}
        ],
        max_tokens=500
    )
    return {
        "answer": response.choices[0].message.content,
        "source": link
    }
# Run FastAPI directly (replaces the previous Gradio launcher).
if __name__ == "__main__":
    import uvicorn

    # 0.0.0.0:7860 — presumably the standard binding for a Hugging Face
    # Space container; confirm if deploying elsewhere.
    uvicorn.run(app, host="0.0.0.0", port=7860)