# from dotenv import load_dotenv
# # from crewai import Agent,Task,Crew
# # from crewai_tools import SerperDevTool
# # from langchain_google_genai import ChatGoogleGenerativeAI
# # from langchain_fireworks import Fireworks
# import os
# from typing import List
# # from pydantic import BaseModel
# from langchain_groq import ChatGroq
# load_dotenv()
# # search_internet=SerperDevTool()
# # model_id="mixtral-8x7b-instruct"
# # url=f"accounts/fireworks/models/{model_id}"
# # llm=Fireworks(
# #     model=url,
# #     temperature=0.1
# # )
# # class NamedUrl(BaseModel):
# #     title: str
# #     url: str
# # class ArticlesUrlList(BaseModel):
# #     articles: List[NamedUrl]
# # research_agent=Agent(
# #     role="Company Research Manager",
# #     goal="""Look up the web and find urls and titles for 3 recent blog articles about agents powered by llm's models. It is your job to return this collected
# #     information in a JSON object.
# #     """,
# #     backstory="""As a Company Research Manager, you are responsible for aggregating all the researched information.""",
# #     llm=llm,
# #     tools=[search_internet],
# #     verbose=True,
# #     allow_delegation=True
# # )
# # research_task=Task(
# #     description="""Research for 3 recent blog articles about agents powered by llm's models.
# #     Return this collected information in a JSON object.
# #     Important:
# #     - Once you've found the information, immediately stop searching for additional information.
# #     - Only return the requested information. NOTHING ELSE!
# #     - Do not generate fake information. Only return the information you find. Nothing else!
# #     - Do not stop researching until you find the requested information for each position in the company.
# #     """,
# #     agent=research_agent,
# #     expected_output="""A JSON object containing the researched information such as title of the article and url.""",
# #     output_json=NamedUrl,
# #     async_execution=True
# # )
# # writer_agent=Agent(
# #     role="Company Writer Agent",
# #     goal="""Look up the specific information provided and return a list of JSON object, each json object must contain a title and url field""",
# #     backstory="""As a Company Research Agent, you are responsible for looking up especific information about articles.
# #     Important:
# #     - Once you've found the information, immediately stop searching for additional information.
# #     - Only return the requested information. NOTHING ELSE!
# #     - Do not generate fake information. Only return the information you find. Nothing else!
# #     """,
# #     tools=[search_internet],
# #     llm=llm,
# #     verbose=True
# # )
# # writer_task=Task(
# #     description="""Look up the specific information provided and return a list of JSON object, each json object must contain a title and url field.
# #     Important:
# #     - Once you've found the information, immediately stop searching for additional information.
# #     - Only return the requested information. NOTHING ELSE!
# #     - Do not generate fake information. Only return the information you find. Nothing else!
# #     - Do not stop researching until you find the requested information.
# # """, # # agent=writer_agent, # # expected_output="""A List of JSON objects containing the researched information .""", # # output_json=ArticlesUrlList, # # async_execution=True # # ) # # crew=Crew( # # agents=[research_agent,writer_agent], # # tasks=[research_task,writer_task], # # manager_llm=ChatGoogleGenerativeAI( # # model="gemini-pro", # # google_api_key=os.getenv("GOOGLE_API_KEY"), # # temperature=0.1, # # ), # # verbose=2 # # ) # # crew.kickoff() # from typing import List # from langchain_core.output_parsers import JsonOutputParser # search_internet=SerperDevTool() # model_mistral="mixtral-8x7b-32768" # model_llama="Llama3-8b-8192" # llm=ChatGroq( # api_key=os.environ["GROQ_API_KEY"], # model=model_llama, # ) # class Article(BaseModel): # title:str # date:str # url:str # class ListArticles(BaseModel): # articles: List[Article] # research_agent=Agent( # role="Assistant", # goal="Your work is to search for 2 new articles about agents powered by lenguaje large models and return each of them as a json object", # backstory="""As an assistant you need to search the web and find 2 articles about llm agents.""", # llm=llm, # tools=[search_internet], # verbose=True # ) # research_task=Task( # description="Search the web and return 2 articles, each article must be contain its url, title and date. If the date is not available then the default value is MISSING", # agent=research_agent, # expected_output="A list of json objects , where each object must be contain title, date and url. Only return the list of json objects nothing else", # output_json=ListArticles, # llm=llm # ) # crew=Crew( # agents=[research_agent], # tasks=[research_task], # verbose=True, # manager_llm=ChatGoogleGenerativeAI(model="gemini-pro",google_api_key=os.getenv("GOOGLE_API_KEY"),temperature=0.1) # ) # from flask import Flask,jsonify,abort # from flask_cors import CORS # from langchain_core.output_parsers import StrOutputParser # app=Flask(__name__) # CORS(app, resources={r"/api/*": {"origins": "*"}}) # @app.route("/",methods=["GET"]) # def fetch_news(): # chain=llm|StrOutputParser() # response=chain.invoke(input="Tell me a joke about bears") # if not response: # abort(404,"Something were wrong durion teh petition") # print(response) # return jsonify(response) # if __name__ == '__main__': # app.run(debug=True, port=3001) from crewai import Agent, Task, Crew, Process from langchain_google_genai import ChatGoogleGenerativeAI import gradio as gr from crewai_tools import SerperDevTool from langchain_groq import ChatGroq import os search_internet=SerperDevTool() model_mistral="mixtral-8x7b-32768" model_llama="Llama3-8b-8192" llm=ChatGroq( api_key=os.environ["GROQ_API_KEY"], model=model_llama, ) # Set gemini pro as llm llm = ChatGoogleGenerativeAI(model="gemini-pro", verbose=True, temperature=0.5, google_api_key=os.environ["GOOGLE_API_KEY"]) def create_crewai_setup(topic): # Define Agents search_agent = Agent( role="Expert Researcher", goal=f"""A list of urls article about the {topic}. 
        The articles are supposed to be from a recent date.""",
        backstory="""You are an expert at searching the web and finding the latest news related to AI.""",
        verbose=True,
        llm=llm,
        allow_delegation=True,
        tools=[search_internet],
    )

    # Define the search task
    search_task = Task(
        description=f"""Return 3 URLs of web articles related to the topic of {topic}.
                        The article URLs need to be recent""",
        expected_output="A list of 3 URLs from articles about the specified topic",
        agent=search_agent,
        llm=llm
    )

    search_crew = Crew(
        agents=[search_agent],
        tasks=[search_task],
        verbose=2,
        process=Process.sequential,
    )

    # Create and run the crew
    crew_result = search_crew.kickoff()
    return crew_result


# Gradio interface
def run_crewai_app(topic):
    crew_result = create_crewai_setup(topic)
    return crew_result


iface = gr.Interface(
    fn=run_crewai_app,
    inputs=["text"],
    outputs=["text"],
    title="CrewAI Basic App",
    description="Enter the topic",
)

iface.launch()
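# Usage sketch, assuming this file is saved as "app.py" (a hypothetical name) and run
# from a shell. GROQ_API_KEY and GOOGLE_API_KEY are read via os.environ above, and
# SerperDevTool is expected to pick up SERPER_API_KEY from the environment as well:
#
#   export SERPER_API_KEY=...
#   export GROQ_API_KEY=...
#   export GOOGLE_API_KEY=...
#   python app.py
#
# Gradio then serves the interface locally, by default at http://127.0.0.1:7860.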