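"""Streamlit chat entry point for the CrewAI multi-agent demos.

The prompt typed into the chat box is routed, based on the use case selected in
the Streamlit UI, to one of three crews: a multi-agent coder, content generation
backed by a DuckDuckGo search tool, or content generation with image creation via
a Hugging Face inference endpoint. The agents are driven by a Groq-configured LLM.
"""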
import streamlit as st
from crewai import Crew, Process

from configfile import Config
from src.agents.agents import CrewAIAgents, CustomHandler
from src.llmconfig.groqllm import GroqLLM
from src.streamlitUI.loadui import LoadStreamlitUI
from src.supportingtools.tools import SupportingTools
from src.task.tasks import CrewAITasks
# Main entry point
if __name__ == "__main__":
    # config
    obj_config = Config()

    # load ui
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    # Initialize the message log in session state if not already present
    if "messages" not in st.session_state:
        st.session_state["messages"] = [{"role": "assistant", "content": "What do you want us to write?"}]

    # Display existing messages
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Handle user input
    if prompt := st.chat_input():
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)

        # Configure LLM
        obj_llm_config = GroqLLM(user_controls_input=user_input)
        llm = obj_llm_config.groq_llm_config()
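
        # Default result in case the selected use case matches none of the branches below
        final = None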
        if user_input["selected_usecase"] == "MultiAgent Coder":
            # agents
            obj_crewai_agents = CrewAIAgents(llm=llm)
            lst_agents = obj_crewai_agents.crewai_agents()

            # Define tasks for each agent
            obj_crewai_tasks = CrewAITasks(llm=llm, prompt=prompt, lst_agents=lst_agents)
            lst_tasks = obj_crewai_tasks.create_tasks()

            # Set up the crew and process tasks hierarchically
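            # (the manager LLM delegates work to the agents and reviews their output)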
            project_crew = Crew(
                tasks=lst_tasks,
                agents=lst_agents,
                process=Process.hierarchical,
                manager_llm=llm,
                manager_callbacks=[CustomHandler("Crew Manager")],
            )
            final = project_crew.kickoff()
        elif user_input["selected_usecase"] == "MultiAgent SearchTool":
            print("ContentGen with SearchTool")

            # tools
            # API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-1.0"
            obj_supporting_tools = SupportingTools()
            search_tool = obj_supporting_tools.duckduckgosearchtool()
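            # The DuckDuckGo search tool is passed to the agents below so they can look up supporting material on the web.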

            # agents
            obj_crewai_agents = CrewAIAgents(llm=llm)
            lst_agents = obj_crewai_agents.gen_with_crewai_agents(search_tool=search_tool)

            # Define tasks for each agent
            obj_crewai_tasks = CrewAITasks(llm=llm, prompt=prompt, lst_agents=lst_agents)
            lst_tasks = obj_crewai_tasks.create_tasks_gen()

            # Set up the crew and process tasks hierarchically
            project_crew = Crew(
                tasks=lst_tasks,
                agents=lst_agents,
                process=Process.hierarchical,
                manager_llm=llm,
                manager_callbacks=[CustomHandler("Crew Manager")],
            )
            final = project_crew.kickoff()
        elif user_input["selected_usecase"] == "MultiAgent Image":
            print("ContentGen with Image")

            # tools
            API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-1.0"
            obj_supporting_tools = SupportingTools(API_URL=API_URL)
            search_tool = obj_supporting_tools.duckduckgosearchtool()

            # agents (pass the configured instance's image_generate so the API_URL above is actually used)
            obj_crewai_agents = CrewAIAgents(llm=llm)
            lst_agents = obj_crewai_agents.gen_with_image_crewai_agents(image_generate=obj_supporting_tools.image_generate)

            # Define tasks for each agent
            obj_crewai_tasks = CrewAITasks(llm=llm, prompt=prompt, lst_agents=lst_agents)
            lst_tasks = obj_crewai_tasks.create_tasks_image_gen()
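
            # Only the second agent/task pair from the lists is run here, sequentially;
            # the user prompt is passed to kickoff() as the "input" value.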
            # Set up the crew and process tasks
            project_crew = Crew(
                tasks=[lst_tasks[1]],
                agents=[lst_agents[1]],
                process=Process.sequential,
                manager_llm=llm,
                manager_callbacks=[CustomHandler("Crew Manager")],
            )
            final = project_crew.kickoff({"input": prompt})
        # Display the final result
        result = f"## Here is the Final Result \n\n {final}"
        st.session_state.messages.append({"role": "assistant", "content": result})
        st.chat_message("assistant").write(result)
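
# To try this locally (assuming this script is saved as app.py and the src package
# plus the Groq credentials expected by LoadStreamlitUI/GroqLLM are available):
#   streamlit run app.py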