Zarzamorati10 commited on
Commit
3dea3a6
1 Parent(s): 55f0aa6

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +246 -0
  2. requirements.txt +13 -0
app.py ADDED
@@ -0,0 +1,246 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ # from dotenv import load_dotenv
3
+ # # from crewai import Agent,Task,Crew
4
+ # # from crewai_tools import SerperDevTool
5
+ # # from langchain_google_genai import ChatGoogleGenerativeAI
6
+ # # from langchain_fireworks import Fireworks
7
+ # import os
8
+ # from typing import List
9
+ # # from pydantic import BaseModel
10
+ # from langchain_groq import ChatGroq
11
+ # load_dotenv()
12
+
13
+
14
+ # # search_internet=SerperDevTool()
15
+
16
+ # # model_id="mixtral-8x7b-instruct"
17
+ # # url=f"accounts/fireworks/models/{model_id}"
18
+
19
+ # # llm=Fireworks(
20
+ # # model=url,
21
+ # # temperature=0.1
22
+
23
+ # # )
24
+
25
+
26
+ # # class NamedUrl(BaseModel):
27
+ # # title: str
28
+ # # url: str
29
+
30
+
31
+ # # class ArticlesUrlList(BaseModel):
32
+ # # articles: List[NamedUrl]
33
+
34
+
35
+
36
+ # # research_agent=Agent(
37
+ # # role="Company Research Manager",
38
+ # # goal="""Look up the web and find urls and titles for 3 recent blog articles about agents powered by llm's models. It is your job to return this collected
39
+ # # information in a JSON object.
40
+ # # """,
41
+ # # backstory="""As a Company Research Manager, you are responsible for aggregating all the researched information.""",
42
+ # # llm=llm,
43
+ # # tools=[search_internet],
44
+ # # verbose=True,
45
+ # # allow_delegation=True
46
+ # # )
47
+ # # research_task=Task(
48
+ # # description="""Research for 3 recent blog articles about agents powered by llm's models.
49
+ # # Return this collected information in a JSON object.
50
+
51
+ # # Important:
52
+ # # - Once you've found the information, immediately stop searching for additional information.
53
+ # # - Only return the requested information. NOTHING ELSE!
54
+ # # - Do not generate fake information. Only return the information you find. Nothing else!
55
+ # # - Do not stop researching until you find the requested information for each position in the company.
56
+ # # """,
57
+ # # agent=research_agent,
58
+ # # expected_output="""A JSON object containing the researched information such as title of the article and url.""",
59
+ # # output_json=NamedUrl,
60
+ # # async_execution=True
61
+ # # )
62
+
63
+ # # writer_agent=Agent(
64
+ # # role="Company Writer Agent",
65
+ # # goal="""Look up the specific information provided and return a list of JSON object, each json object must contain a title and url field""",
66
+ # # backstory="""As a Company Research Agent, you are responsible for looking up especific information about articles.
67
+
68
+ # # Important:
69
+ # # - Once you've found the information, immediately stop searching for additional information.
70
+ # # - Only return the requested information. NOTHING ELSE!
71
+ # # - Do not generate fake information. Only return the information you find. Nothing else!
72
+ # # """,
73
+ # # tools=[search_internet],
74
+ # # llm=llm,
75
+ # # verbose=True
76
+ # # )
77
+ # # writer_task=Task(
78
+ # # description="""Look up the specific information provided and return a list of JSON object, each json object must contain a title and url field.
79
+
80
+ # # Important:
81
+ # # - Once you've found the information, immediately stop searching for additional information.
82
+ # # - Only return the requested information. NOTHING ELSE!
83
+ # # - Do not generate fake information. Only return the information you find. Nothing else!
84
+ # # - Do not stop researching until you find the requested information .
85
+ # # """,
86
+ # # agent=writer_agent,
87
+ # # expected_output="""A List of JSON objects containing the researched information .""",
88
+ # # output_json=ArticlesUrlList,
89
+ # # async_execution=True
90
+ # # )
91
+
92
+ # # crew=Crew(
93
+ # # agents=[research_agent,writer_agent],
94
+ # # tasks=[research_task,writer_task],
95
+ # # manager_llm=ChatGoogleGenerativeAI(
96
+ # # model="gemini-pro",
97
+ # # google_api_key=os.getenv("GOOGLE_API_KEY"),
98
+ # # temperature=0.1,
99
+ # # ),
100
+ # # verbose=2
101
+ # # )
102
+
103
+
104
+ # # crew.kickoff()
105
+
106
+ # from typing import List
107
+ # from langchain_core.output_parsers import JsonOutputParser
108
+
109
+ # search_internet=SerperDevTool()
110
+
111
+ # model_mistral="mixtral-8x7b-32768"
112
+ # model_llama="Llama3-8b-8192"
113
+ # llm=ChatGroq(
114
+ # api_key=os.environ["GROQ_API_KEY"],
115
+ # model=model_llama,
116
+ # )
117
+
118
+
119
+
120
+ # class Article(BaseModel):
121
+ # title:str
122
+ # date:str
123
+ # url:str
124
+
125
+ # class ListArticles(BaseModel):
126
+ # articles: List[Article]
127
+
128
+
129
+ # research_agent=Agent(
130
+ # role="Assistant",
131
+ # goal="Your work is to search for 2 new articles about agents powered by lenguaje large models and return each of them as a json object",
132
+ # backstory="""As an assistant you need to search the web and find 2 articles about llm agents.""",
133
+ # llm=llm,
134
+ # tools=[search_internet],
135
+ # verbose=True
136
+
137
+ # )
138
+ # research_task=Task(
139
+ # description="Search the web and return 2 articles, each article must be contain its url, title and date. If the date is not available then the default value is MISSING",
140
+ # agent=research_agent,
141
+ # expected_output="A list of json objects , where each object must be contain title, date and url. Only return the list of json objects nothing else",
142
+ # output_json=ListArticles,
143
+ # llm=llm
144
+
145
+ # )
146
+
147
+ # crew=Crew(
148
+ # agents=[research_agent],
149
+ # tasks=[research_task],
150
+ # verbose=True,
151
+ # manager_llm=ChatGoogleGenerativeAI(model="gemini-pro",google_api_key=os.getenv("GOOGLE_API_KEY"),temperature=0.1)
152
+ # )
153
+
154
+
155
+ # from flask import Flask,jsonify,abort
156
+ # from flask_cors import CORS
157
+ # from langchain_core.output_parsers import StrOutputParser
158
+ # app=Flask(__name__)
159
+
160
+ # CORS(app, resources={r"/api/*": {"origins": "*"}})
161
+
162
+ # @app.route("/",methods=["GET"])
163
+ # def fetch_news():
164
+ # chain=llm|StrOutputParser()
165
+ # response=chain.invoke(input="Tell me a joke about bears")
166
+ # if not response:
167
+ # abort(404,"Something were wrong durion teh petition")
168
+ # print(response)
169
+ # return jsonify(response)
170
+
171
+ # if __name__ == '__main__':
172
+ # app.run(debug=True, port=3001)
173
# --- Imports, search tool, and LLM setup ----------------------------------
from crewai import Agent, Task, Crew, Process
from langchain_google_genai import ChatGoogleGenerativeAI
import gradio as gr
from crewai_tools import SerperDevTool
from langchain_groq import ChatGroq

import os

# Web-search tool shared by the research agent.
# NOTE(review): presumably requires SERPER_API_KEY in the environment — confirm.
search_internet = SerperDevTool()

# Groq model identifiers kept for reference / future use.
model_mistral = "mixtral-8x7b-32768"
model_llama = "Llama3-8b-8192"
# The original code instantiated ChatGroq here and immediately shadowed it
# with the Gemini client below; the Groq client was never used but still
# forced GROQ_API_KEY to be set at startup. The dead instantiation is removed.

# Gemini Pro is the LLM actually used by the crew (requires GOOGLE_API_KEY).
llm = ChatGoogleGenerativeAI(
    model="gemini-pro",
    verbose=True,
    temperature=0.5,
    google_api_key=os.environ["GOOGLE_API_KEY"],
)
196
+
def create_crewai_setup(topic):
    """Build and run a one-agent CrewAI pipeline that searches the web for
    recent articles about *topic*.

    Parameters
    ----------
    topic : str
        Subject to search recent articles for.

    Returns
    -------
    The value returned by ``Crew.kickoff()`` — the output of the search task.
    """
    # Single research agent backed by the module-level LLM and search tool.
    search_agent = Agent(
        role="Expert Researcher",
        # Prompt typos from the original ("articlesare suposed") fixed —
        # they degrade the instructions the LLM receives.
        goal=f"""A list of article urls about {topic}. The articles are supposed to be from a recent date""",
        backstory="""You are an expert at searching the web and finding the latest news related with AI.""",
        verbose=True,
        llm=llm,
        allow_delegation=True,
        tools=[search_internet],
    )

    # Task: collect 3 recent article URLs on the requested topic.
    # (Original typos "teh web" / "espcified" fixed in the prompt text.)
    search_task = Task(
        description=f"""Return 3 urls from articles in the web related to the topic of {topic}.
        The article urls need to be recent""",
        expected_output="A list of 3 urls from articles about the specified topic",
        agent=search_agent,
        llm=llm,
    )

    # Renamed from the template leftover "health_crew" — this crew does
    # article research, not health checks.
    search_crew = Crew(
        agents=[search_agent],
        tasks=[search_task],
        verbose=2,
        process=Process.sequential,
    )

    # Run the crew synchronously and return its result.
    crew_result = search_crew.kickoff()
    return crew_result
+
233
# Gradio callback
def run_crewai_app(topic):
    """Run the crew for the user-entered *topic* and return its result.

    Bug fix: the original signature was ``(age, gender, disease)`` — a
    leftover from a health-app template — while the Gradio interface supplies
    a single text input and ``create_crewai_setup`` accepts a single
    ``topic``, so every invocation raised TypeError. One parameter matches
    both the caller and the callee.
    """
    return create_crewai_setup(topic)
237
+
238
# Gradio UI: one text box for the topic, plain-text output with the result.
iface = gr.Interface(
    fn=run_crewai_app,
    inputs=["text"],
    outputs=["text"],
    title="CrewAI Basic App",
    description="Enter the topic",  # fixed stray "|" in the user-facing text
)

# Start the Gradio server (blocking call).
iface.launch()
requirements.txt ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ python-dotenv
2
+ langchain-google-genai
3
+ # langchain-fireworks
4
+ # langchain
5
+ pydantic
6
+ crewai
7
+ crewai[tools]
8
+ langchain-groq
9
+ # flask
10
+ # flask-cors
11
+ # gunicorn
12
+ gradio
13
+ #langchain-community