""" | |
Integration with AI Co-Scientist. | |
This module provides functions to integrate the MCP agents with the AI Co-Scientist workflow. | |
It handles communication with the AI Co-Scientist API and enhances hypotheses. | |
""" | |
import asyncio | |
import json | |
import logging | |
import os | |
import requests | |
from typing import List, Dict, Any, Optional | |
from integration import detect_research_domain, ResearchDomain | |
from deep_research_integration import enhance_research_hypotheses | |
# Configure logging | |
logging.basicConfig( | |
level=logging.INFO, | |
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" | |
) | |
logger = logging.getLogger("ai-co-scientist-integration") | |
# Get the AI Co-Scientist API URL from environment variable or use default | |
AI_CO_SCIENTIST_URL = os.environ.get("AI_CO_SCIENTIST_URL", "http://ai-co-scientist:8000") | |
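
# Configuration note (an illustrative assumption, not part of the original deployment
# config): when running outside the composed stack, the same environment variable can
# point this integration at a locally running instance, for example:
#
#     export AI_CO_SCIENTIST_URL=http://localhost:8000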
async def generate_hypotheses(query: str, research_goal: str, num_hypotheses: int = 3) -> List[Dict[str, Any]]:
    """
    Generate hypotheses using the AI Co-Scientist API.

    Args:
        query (str): The research query.
        research_goal (str): The research goal.
        num_hypotheses (int): The number of hypotheses to generate.

    Returns:
        List[Dict[str, Any]]: The generated hypotheses.
    """
    logger.info(f"Generating hypotheses for query: {query}")

    try:
        # First, set the research goal
        response = requests.post(
            f"{AI_CO_SCIENTIST_URL}/research_goal",
            json={
                "description": research_goal,
                "constraints": {},
                "num_hypotheses": num_hypotheses,
                "generation_temperature": 0.7,
                "llm_model": os.environ.get("AGENT_MODEL", "gpt-4o-mini")
            },
            timeout=30
        )

        if not response.ok:
            logger.error(f"Error setting research goal: {response.text}")
            return []

        # Then, run a cycle to generate hypotheses
        response = requests.post(
            f"{AI_CO_SCIENTIST_URL}/run_cycle",
            timeout=120  # Longer timeout for hypothesis generation
        )

        if not response.ok:
            logger.error(f"Error running cycle: {response.text}")
            return []

        cycle_details = response.json()

        # Extract the generated hypotheses
        if "steps" in cycle_details and "generation" in cycle_details["steps"]:
            hypotheses = cycle_details["steps"]["generation"].get("hypotheses", [])
            logger.info(f"Generated {len(hypotheses)} hypotheses")
            return hypotheses
        else:
            logger.error("No hypotheses found in cycle details")
            return []
    except Exception as e:
        logger.error(f"Error generating hypotheses: {str(e)}")
        return []
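
# Note: the two blocking requests.post calls above run directly inside an async
# function, so they occupy the event loop while waiting. A minimal non-blocking
# sketch (an assumption, not the original design; requires Python 3.9+ for
# asyncio.to_thread) would offload each call to a worker thread, e.g.:
#
#     response = await asyncio.to_thread(
#         requests.post,
#         f"{AI_CO_SCIENTIST_URL}/run_cycle",
#         timeout=120,
#     )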
async def generate_and_enhance_hypotheses(query: str, research_goal: str, num_hypotheses: int = 3) -> Dict[str, Any]:
    """
    Generate hypotheses using the AI Co-Scientist API and enhance them using the MCP agents.

    Args:
        query (str): The research query.
        research_goal (str): The research goal.
        num_hypotheses (int): The number of hypotheses to generate.

    Returns:
        Dict[str, Any]: The enhanced hypotheses.
    """
    # Generate hypotheses
    hypotheses = await generate_hypotheses(query, research_goal, num_hypotheses)

    if not hypotheses:
        logger.error("No hypotheses generated")
        return {
            "domain": "general",
            "enhanced_hypotheses": {
                "error": "Failed to generate hypotheses"
            }
        }

    # Enhance hypotheses
    enhanced_result = await enhance_research_hypotheses(query, hypotheses, research_goal)

    return enhanced_result
def get_active_hypotheses() -> List[Dict[str, Any]]:
    """
    Get the active hypotheses from the AI Co-Scientist API.

    Returns:
        List[Dict[str, Any]]: The active hypotheses.
    """
    try:
        response = requests.get(
            f"{AI_CO_SCIENTIST_URL}/hypotheses",
            timeout=10
        )

        if not response.ok:
            logger.error(f"Error getting hypotheses: {response.text}")
            return []

        hypotheses = response.json()
        logger.info(f"Retrieved {len(hypotheses)} active hypotheses")
        return hypotheses
    except Exception as e:
        logger.error(f"Error getting hypotheses: {str(e)}")
        return []
if __name__ == "__main__":
    # Example usage
    async def main():
        query = "How can we improve renewable energy storage?"
        research_goal = "Investigate novel approaches to energy storage for renewable energy sources"

        result = await generate_and_enhance_hypotheses(query, research_goal)
        print(json.dumps(result, indent=2))

    asyncio.run(main())
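
    # A possible follow-up (illustrative only, not part of the original example):
    # once a cycle has run, get_active_hypotheses() can be used to inspect what
    # the AI Co-Scientist currently holds, e.g.:
    #
    #     active = get_active_hypotheses()
    #     print(f"Active hypotheses: {len(active)}")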