import gradio as gr
import google.generativeai as genai
from duckduckgo_search import DDGS
import os
import textwrap
import traceback
import time

is_api_configured = False
GOOGLE_API_KEY = None
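
# --- Google API key configuration ---
# Reads the GOOGLE_API_KEY secret provided by the Hugging Face Space and
# configures the google.generativeai client; if this fails, generation is
# skipped later and a warning is printed at startup.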
print("⚙️ Attempting to configure Google API Key from HF Space secret...")
try:
    GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
    if GOOGLE_API_KEY:
        genai.configure(api_key=GOOGLE_API_KEY)
        print("✅ Google API Key configured successfully from HF secret.")
        is_api_configured = True
    else:
        print("❌ Error: GOOGLE_API_KEY secret not found or is empty in Space settings.")
        print("➡️ Please go to your Space Settings -> Secrets and ensure 'GOOGLE_API_KEY' is added.")
        is_api_configured = False
except Exception as e:
    print(f"❌ An unexpected error occurred during API Key configuration: {e}")
    is_api_configured = False
    traceback.print_exc()

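# --- Web search helper ---
# Collects DuckDuckGo text results and flattens them into a single
# plain-text context string that is later fed to the prompt.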
def search_web(query, num_results=7, search_timeout=20):
    """Searches the web using DuckDuckGo and returns formatted results."""
    print(f"🔍 Searching the web for: '{query}' (Timeout: {search_timeout}s)...")
    try:
        with DDGS(timeout=search_timeout) as ddgs:
            results = list(ddgs.text(query, region='wt-wt', safesearch='off', max_results=num_results))

        if not results:
            print("⚠️ No search results found.")
            return "No relevant search results found for the query."

        context = f"Search results for query '{query}':\n\n"
        for i, result in enumerate(results):
            context += f"Source [{i+1}]: {result.get('title', 'N/A')}\n"
            context += f"  URL: {result.get('href', 'N/A')}\n"
            snippet = result.get('body', 'N/A')
            context += f"  Snippet: {snippet}\n\n"

        print(f"✅ Found {len(results)} results.")
        return context
    except Exception as e:
        print(f"❌ Error during web search: {e}")
        traceback.print_exc()
        error_detail = f"Details: {e}"
        if "timed out" in str(e):
            error_detail = (
                f"Details: The connection to the search engine timed out after {search_timeout} seconds. "
                f"This might be due to temporary network issues. Error: {e}"
            )
        return f"Error occurred during web search. {error_detail}"

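# --- Case study generation ---
# Builds a grounded prompt from the search context and asks Gemini
# (gemini-1.5-flash-latest) to write the case study in a fixed Markdown format.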
def generate_case_study(topic, search_context):
    """Generates a case study using Gemini based on the topic and search context."""
    print(f"🤖 Generating case study for: '{topic}'...")

    if not is_api_configured:
        print("❌ Cannot generate: Google API Key not configured.")
        return "Error: Google API Key not configured successfully. Check HF Space secrets."

    if "Error occurred during web search" in search_context or "No relevant search results found" in search_context:
        print("❌ Cannot generate: Problem with search results.")
        return f"Cannot generate case study due to search issues:\n{search_context}"

    model_name = 'gemini-1.5-flash-latest'
    try:
        print(f"   Using model: {model_name}")
        model = genai.GenerativeModel(model_name)
    except Exception as e:
        print(f"❌ Error initializing GenerativeModel '{model_name}': {e}")
        traceback.print_exc()
        error_message = f"Error setting up the AI model '{model_name}': {e}."
        return error_message

    prompt = f"""
You are an expert business analyst and case study writer.
Your task is to generate a comprehensive case study based on the following topic: "{topic}"

Use the provided search results as your *only* source of information. Synthesize the information into a well-structured case study.

**Required Case Study Format:**

**1. Title:** Create a concise and informative title.
**2. Introduction/Executive Summary:** Briefly introduce the subject and core topic. State the key outcome from the sources.
**3. The Company/Subject:** Background information from the search results only.
**4. The Challenge/Problem:** The specific issue mentioned in the sources.
**5. The Solution:** The solution that was implemented, based only on the sources.
**6. Implementation/Process:** (Optional) Describe only if details are available in the sources.
**7. Results/Impact:** Quantify the results using data from the sources. State if none are mentioned.
**8. Conclusion:** Summarize the key takeaways based on the provided information.
**9. Sources:** List the relevant URLs from the search results.

**Instructions:**
* Adhere strictly to the format above (use Markdown `##` headings).
* Base your writing ***exclusively*** on the "Provided Search Context". Do not invent information.
* If details are missing, state: "Information not available in the provided sources."
* Maintain an objective tone.
* Format the output using Markdown.

**Provided Search Context:**
---
{search_context}
---

Now, please generate the case study for "{topic}".
"""

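    # Call Gemini and unpack the response. The branches below use the
    # google.generativeai accessors already relied on here: response.parts for
    # returned text, response.prompt_feedback.block_reason for safety blocks,
    # and response.candidates[0].finish_reason when a candidate stopped early.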
    try:
        response = model.generate_content(prompt)

        if response.parts:
            generated_text = "".join(part.text for part in response.parts)
            print("✅ Case study generated successfully.")
            return generated_text
        elif response.prompt_feedback and response.prompt_feedback.block_reason:
            block_reason = response.prompt_feedback.block_reason
            print(f"⚠️ Generation blocked due to: {block_reason}")
            return f"Error: Generation failed. Blocked due to '{block_reason}'. Check content policies."
        elif response.candidates:
            # A candidate came back but contained no text parts; surface its finish reason.
            finish_reason = response.candidates[0].finish_reason
            print(f"⚠️ Generation finished without valid content (Reason: {finish_reason}).")
            return f"Error: AI model finished but produced no usable content (Reason: {finish_reason})."
        else:
            print("⚠️ Generation produced no text content.")
            return "Error: AI model generated an empty response."

    except Exception as e:
        print(f"❌ Error during case study generation: {e}")
        traceback.print_exc()
        error_message = f"An unexpected error occurred during AI generation: {e}"

        if "API key not valid" in str(e) or "PermissionDenied" in str(e):
            error_message = "Error: Invalid/Missing API Key. Check GOOGLE_API_KEY secret and Gemini API enablement."
        elif "Model not found" in str(e):
            error_message = f"Error: AI model ('{model_name}') not found/unsupported."
        elif "Resource has been exhausted" in str(e) or "Quota" in str(e):
            error_message = "Error: API quota exceeded. Check Google Cloud Console."
        return error_message

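# --- Orchestration ---
# Validates the input, runs the web search with retries, then hands the
# collected context to the generator.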
def create_case_study(company_or_topic):
    """Orchestrates the web search (with retries) and case study generation."""
    print("-" * 60)
    if not company_or_topic or not company_or_topic.strip():
        print("⚠️ Input validation failed: Empty topic.")
        return "Please enter a valid company name or topic."

    cleaned_topic = company_or_topic.strip()
    print(f"➡️ Processing request for: '{cleaned_topic}'")

    search_results_context = None
    max_retries = 2
    retry_delay_seconds = 3
    search_timeout_seconds = 25

    for attempt in range(max_retries + 1):
        print(f"   Attempting web search ({attempt + 1}/{max_retries + 1})...")
        search_results_context = search_web(cleaned_topic, search_timeout=search_timeout_seconds)

        if search_results_context and "Error occurred during web search" not in search_results_context:
            print("   Web search successful.")
            break

        if attempt < max_retries:
            print(f"   Search attempt failed. Waiting {retry_delay_seconds}s before retrying...")
            time.sleep(retry_delay_seconds)
        else:
            print(f"   Search failed after {max_retries + 1} attempts.")
            print("-" * 60)
            return f"Failed to retrieve search results after multiple attempts.\nLast error: {search_results_context}"

    case_study_markdown = generate_case_study(cleaned_topic, search_results_context)

    print("-" * 60)
    return case_study_markdown

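# --- Gradio UI ---
# Wires create_case_study into a simple Interface: one textbox in, Markdown out.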
print("\nβοΈ Setting up Gradio interface...") |
|
|
|
|
|
if not is_api_configured: |
|
|
print("\n" + "="*60 + "\nβΌοΈ WARNING: API Key not configured at startup. Generation will fail. Check Secrets.\n" + "="*60 + "\n") |
|
|
|
|
|
iface = gr.Interface( |
|
|
fn=create_case_study, |
|
|
inputs=gr.Textbox( |
|
|
lines=2, |
|
|
placeholder="Enter a company name or topic (e.g., 'Acme Corp uses AI for customer support')", |
|
|
label="Company Name or Topic" |
|
|
), |
|
|
outputs=gr.Markdown(label="Generated Case Study"), |
|
|
title="π AI Case Study Generator (Gemini + DuckDuckGo)", |
|
|
description="Enter a topic. The app searches the web (DDG) and uses Gemini AI to write a case study based *only* on the search results.\n**Requires `GOOGLE_API_KEY` secret in HF Space Settings.**", |
|
|
allow_flagging="never", |
|
|
examples=[ |
|
|
["How Spotify uses AI for music recommendations"], |
|
|
["Tesla Autopilot development challenges"], |
|
|
["Use of AI in drug discovery by Pfizer"], |
|
|
], |
|
|
theme=gr.themes.Soft() |
|
|
) |
|
|
|
|
|
print("π Launching Gradio interface...") |
|
|
try: |
|
|
|
|
|
iface.launch() |
|
|
except Exception as e: |
|
|
print(f"β Failed to launch Gradio interface: {e}") |
|
|
traceback.print_exc() |
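
# Note: the Space is assumed to install gradio, google-generativeai and
# duckduckgo-search via requirements.txt (pip package names assumed here,
# matching the imports at the top of this file).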