Update app.py
app.py
@@ -6,7 +6,7 @@ from langchain_community.llms import HuggingFaceHub
 
 #from langchain_community.llms import HuggingFaceHub
 
-
+llm_zephyr_7b_beta = HuggingFaceHub(
     repo_id="HuggingFaceH4/zephyr-7b-beta",
     task="text-generation",
     model_kwargs={
@@ -33,7 +33,7 @@ researcher = Agent(
     verbose=True,
     allow_delegation=False,
     tools=[search_tool],
-    llm=
+    llm=llm_zephyr_7b_beta
     # You can pass an optional llm attribute specifying what model you want to use.
     # It can be a local model through Ollama / LM Studio or a remote
     # model like OpenAI, Mistral, Anthropic or others (https://docs.crewai.com/how-to/LLM-Connections/)
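For context, a minimal sketch of how the updated pieces plausibly fit together in app.py. The model_kwargs values, the DuckDuckGo-based search_tool, and the researcher agent's role, goal and backstory are not shown in this diff and are assumed for illustration; the sketch also assumes a crewAI version that accepts a LangChain LLM object directly and a HUGGINGFACEHUB_API_TOKEN set in the environment.

from crewai import Agent
from langchain_community.llms import HuggingFaceHub
from langchain_community.tools import DuckDuckGoSearchRun  # assumed search tool

# Requires HUGGINGFACEHUB_API_TOKEN in the environment for HuggingFaceHub.
llm_zephyr_7b_beta = HuggingFaceHub(
    repo_id="HuggingFaceH4/zephyr-7b-beta",
    task="text-generation",
    model_kwargs={
        # Illustrative values; the real kwargs are truncated in the diff above.
        "max_new_tokens": 512,
        "temperature": 0.7,
    },
)

search_tool = DuckDuckGoSearchRun()  # assumed; defined earlier in app.py

researcher = Agent(
    role="Researcher",  # assumed
    goal="Research the given topic and gather relevant information",  # assumed
    backstory="An analyst who digs up and summarizes information.",  # assumed
    verbose=True,
    allow_delegation=False,
    tools=[search_tool],
    # The llm attribute is optional; it can point to a local model via
    # Ollama / LM Studio or a remote provider (see the crewAI docs linked above).
    llm=llm_zephyr_7b_beta,
)

Assigning the HuggingFaceHub instance to a named variable and passing it through llm= is what resolves the dangling llm= in the previous revision, which left the Agent(...) call syntactically incomplete.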