"""
Integration with AI Co-Scientist.
This module provides functions to integrate the MCP agents with the AI Co-Scientist workflow.
It handles communication with the AI Co-Scientist API and enhances hypotheses.
"""
import asyncio
import json
import logging
import os
from typing import List, Dict, Any, Optional

import requests

from integration import detect_research_domain, ResearchDomain
from deep_research_integration import enhance_research_hypotheses
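
# Note: detect_research_domain and ResearchDomain are not referenced directly in
# this module; presumably they are imported for re-export to downstream callers
# (an assumption based on this file alone).
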
# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("ai-co-scientist-integration")

# Get the AI Co-Scientist API URL from environment variable or use default
AI_CO_SCIENTIST_URL = os.environ.get("AI_CO_SCIENTIST_URL", "http://ai-co-scientist:8000")
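
# Endpoints used on the AI Co-Scientist service (see the functions below):
#   POST /research_goal  - set the research goal and generation parameters
#   POST /run_cycle      - run one generation cycle and return its details
#   GET  /hypotheses     - list the currently active hypotheses
# The default base URL assumes the service is reachable under the hostname
# "ai-co-scientist" (for example as a Docker Compose service name; that is an
# assumption, so set AI_CO_SCIENTIST_URL if your deployment differs).
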
async def generate_hypotheses(query: str, research_goal: str, num_hypotheses: int = 3) -> List[Dict[str, Any]]:
    """
    Generate hypotheses using the AI Co-Scientist API.

    Args:
        query (str): The research query.
        research_goal (str): The research goal.
        num_hypotheses (int): The number of hypotheses to generate.

    Returns:
        List[Dict[str, Any]]: The generated hypotheses.
    """
    logger.info(f"Generating hypotheses for query: {query}")

    try:
        # First, set the research goal
        response = requests.post(
            f"{AI_CO_SCIENTIST_URL}/research_goal",
            json={
                "description": research_goal,
                "constraints": {},
                "num_hypotheses": num_hypotheses,
                "generation_temperature": 0.7,
                "llm_model": os.environ.get("AGENT_MODEL", "gpt-4o-mini")
            },
            timeout=30
        )

        if not response.ok:
            logger.error(f"Error setting research goal: {response.text}")
            return []

        # Then, run a cycle to generate hypotheses
        response = requests.post(
            f"{AI_CO_SCIENTIST_URL}/run_cycle",
            timeout=120  # Longer timeout for hypothesis generation
        )

        if not response.ok:
            logger.error(f"Error running cycle: {response.text}")
            return []

        cycle_details = response.json()

        # Extract the generated hypotheses
        if "steps" in cycle_details and "generation" in cycle_details["steps"]:
            hypotheses = cycle_details["steps"]["generation"].get("hypotheses", [])
            logger.info(f"Generated {len(hypotheses)} hypotheses")
            return hypotheses
        else:
            logger.error("No hypotheses found in cycle details")
            return []
    except Exception as e:
        logger.error(f"Error generating hypotheses: {str(e)}")
        return []
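
# A blocking convenience wrapper is sketched below for callers that do not run an
# event loop. The name generate_hypotheses_sync is an addition made here, not part
# of the surrounding codebase, and it must not be called from inside an already
# running event loop (asyncio.run() would raise a RuntimeError in that case).
def generate_hypotheses_sync(query: str, research_goal: str, num_hypotheses: int = 3) -> List[Dict[str, Any]]:
    """Blocking convenience wrapper around generate_hypotheses() (sketch)."""
    return asyncio.run(generate_hypotheses(query, research_goal, num_hypotheses))
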
async def generate_and_enhance_hypotheses(query: str, research_goal: str, num_hypotheses: int = 3) -> Dict[str, Any]:
    """
    Generate hypotheses using the AI Co-Scientist API and enhance them using the MCP agents.

    Args:
        query (str): The research query.
        research_goal (str): The research goal.
        num_hypotheses (int): The number of hypotheses to generate.

    Returns:
        Dict[str, Any]: The enhanced hypotheses.
    """
    # Generate hypotheses
    hypotheses = await generate_hypotheses(query, research_goal, num_hypotheses)

    if not hypotheses:
        logger.error("No hypotheses generated")
        return {
            "domain": "general",
            "enhanced_hypotheses": {
                "error": "Failed to generate hypotheses"
            }
        }

    # Enhance hypotheses
    enhanced_result = await enhance_research_hypotheses(query, hypotheses, research_goal)

    return enhanced_result
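
# Note on the return shape: on failure this function returns the literal dict above
# ({"domain": "general", "enhanced_hypotheses": {"error": ...}}); on success it
# returns whatever enhance_research_hypotheses() produces, which presumably mirrors
# that shape with the detected domain and the enhanced hypotheses (an assumption;
# see deep_research_integration for the authoritative structure).
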
def get_active_hypotheses() -> List[Dict[str, Any]]:
    """
    Get the active hypotheses from the AI Co-Scientist API.

    Returns:
        List[Dict[str, Any]]: The active hypotheses.
    """
    try:
        response = requests.get(
            f"{AI_CO_SCIENTIST_URL}/hypotheses",
            timeout=10
        )

        if not response.ok:
            logger.error(f"Error getting hypotheses: {response.text}")
            return []

        hypotheses = response.json()
        logger.info(f"Retrieved {len(hypotheses)} active hypotheses")
        return hypotheses
    except Exception as e:
        logger.error(f"Error getting hypotheses: {str(e)}")
        return []
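
# Usage sketch: get_active_hypotheses() is a plain synchronous function, so it can
# be called without an event loop, e.g.:
#
#   active = get_active_hypotheses()
#   print(f"Active hypotheses: {len(active)}")
#
# To target a locally running service instead of the default container hostname,
# set AI_CO_SCIENTIST_URL (for example http://localhost:8000, assuming the service
# exposes the same port locally) before importing or running this file.
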
if __name__ == "__main__":
# Example usage
async def main():
query = "How can we improve renewable energy storage?"
research_goal = "Investigate novel approaches to energy storage for renewable energy sources"
result = await generate_and_enhance_hypotheses(query, research_goal)
print(json.dumps(result, indent=2))
asyncio.run(main())