quocdat25 committed
Commit be6f674
1 parent: d9b1770

Upload folder using huggingface_hub

.ipynb_checkpoints/README-checkpoint.md ADDED
@@ -0,0 +1,6 @@
+---
+title: crypto_crewAI
+app_file: chat_gradio.py
+sdk: gradio
+sdk_version: 4.26.0
+---
.ipynb_checkpoints/agents-checkpoint.py CHANGED
@@ -45,7 +45,8 @@ class CryptoAnalysisAgents():
         'Armed with a flair for storytelling, the Content Writer transforms intricate crypto data into captivating narratives,'
         'illuminating the latest trends for a wide audience.'
       ),
-      tools=[search_tool,
+      tools=[
+        # search_tool,
         # AskWolframAlpha(),
       ],
       allow_delegation=False,
.ipynb_checkpoints/chat_gradio-checkpoint.py CHANGED
@@ -1,32 +1,25 @@
-from langchain_community.llms import HuggingFaceEndpoint
-from langchain.prompts import PromptTemplate
-from langchain.schema import AIMessage, HumanMessage
-from langchain.chains import LLMChain
+# from langchain_community.llms import HuggingFaceEndpoint
+# from langchain.prompts import PromptTemplate
+# from langchain.schema import AIMessage, HumanMessage
+# from langchain.chains import LLMChain
 import gradio as gr
 import os
 
+from crew import CryptoCrew
+
 from dotenv import load_dotenv
+
 load_dotenv()
 
-repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
-llm = HuggingFaceEndpoint(
-    repo_id = repo_id,
-)
-
-template = """You're a good chatbot. Answer this request: {question}
-Answer: Let's think step by step."""
-prompt = PromptTemplate.from_template(template=template)
-llm_chain = LLMChain(llm=llm, prompt=prompt)
-
-
-def predict(message, history):
-    history_langchain_format = []
-    # for human, ai in history:
-    #     history_langchain_format.append(HumanMessage(content=human))
-    #     history_langchain_format.append(AIMessage(content=ai))
-    # history_langchain_format.append(HumanMessage(content=message))
-    # gpt_response = llm(history_langchain_format)
-    response = llm_chain.invoke(message)['text']
+def predict(message):
+    # company = input(
+    #     dedent("""
+    #         Which cryptocurrency are you looking to delve into?
+    #     """))
+
+    crypto_crew = CryptoCrew(company)
+    response = "## Here is the Report\n\n" + crypto_crew.run()
+
     return response
 
 gr.ChatInterface(predict).launch()
@@ -42,3 +35,4 @@ gr.ChatInterface(predict).launch()
 
 
 
+
.ipynb_checkpoints/run_local-checkpoint.py CHANGED
@@ -2,8 +2,6 @@
 # from langchain.prompts import PromptTemplate
 # from langchain.schema import AIMessage, HumanMessage
 # from langchain.chains import LLMChain
-import gradio as gr
-import os
 
 from crew import CryptoCrew
 
@@ -11,41 +9,18 @@ from dotenv import load_dotenv
 
 load_dotenv()
 
-def predict(message):
-    # company = input(
-    #     dedent("""
-    #         Which cryptocurrency are you looking to delve into?
-    #     """))
-    # history_langchain_format = []
-    # for human, ai in history:
-    #     history_langchain_format.append(HumanMessage(content=human))
-    #     history_langchain_format.append(AIMessage(content=ai))
-    # history_langchain_format.append(HumanMessage(content=message))
-    # gpt_response = llm(history_langchain_format)
-    # response = llm_chain.invoke(message)['text']
+if __name__ == "__main__":
+    print("## Welcome to Crypto Analysis Crew")
+    print('-------------------------------')
+    company = input("""Which cryptocurrency are you looking to delve into?
+    """)
 
     crypto_crew = CryptoCrew(company)
-    response = "## Here is the Report\n\n" + crypto_crew.run()
-
-    return response
-
-gr.ChatInterface(predict).launch()
-
-# if __name__ == "__main__":
-#     print("## Welcome to Crypto Analysis Crew")
-#     print('-------------------------------')
-#     company = input(
-#         dedent("""
-#             Which cryptocurrency are you looking to delve into?
-#         """))
-
-#     crypto_crew = CryptoCrew(company)
-#     result = crypto_crew.run()
-#     print("\n\n########################")
-#     print("## Here is the Report")
-#     print("########################\n")
-
-#     print(result)
+    result = crypto_crew.run()
+    print("\n\n########################")
+    print("## Here is the Report")
+    print("########################\n")
+    print(result)
 
 
 
__pycache__/agents.cpython-310.pyc CHANGED
Binary files a/__pycache__/agents.cpython-310.pyc and b/__pycache__/agents.cpython-310.pyc differ
 
__pycache__/crew.cpython-310.pyc ADDED
Binary file (1.06 kB)
 
agents.py CHANGED
@@ -45,7 +45,8 @@ class CryptoAnalysisAgents():
         'Armed with a flair for storytelling, the Content Writer transforms intricate crypto data into captivating narratives,'
         'illuminating the latest trends for a wide audience.'
       ),
-      tools=[search_tool,
+      tools=[
+        # search_tool,
         # AskWolframAlpha(),
       ],
      allow_delegation=False,
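Note: this hunk comments out search_tool, leaving the Content Writer with an empty tool list. For context, below is a minimal sketch of what the surrounding Agent definition presumably looks like; only these eight lines of agents.py appear in the diff, so the crewAI Agent wiring, the DuckDuckGo-based search_tool, and the method name are assumptions.

# Hypothetical sketch of the Content Writer agent around this hunk (not the committed code).
from crewai import Agent
from langchain_community.tools import DuckDuckGoSearchRun

search_tool = DuckDuckGoSearchRun()  # assumption: search_tool is a LangChain search tool

class CryptoAnalysisAgents():
    def content_writer(self, llm=None):  # assumed factory-method name
        return Agent(
            role='Content Writer',
            goal='Turn crypto research into a readable report',  # placeholder goal
            backstory=(
                'Armed with a flair for storytelling, the Content Writer transforms intricate crypto data into captivating narratives,'
                'illuminating the latest trends for a wide audience.'
            ),
            tools=[
                # search_tool,        # disabled in this commit
                # AskWolframAlpha(),  # custom tool, not defined in this diff
            ],
            allow_delegation=False,
            llm=llm,
        )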
chat_gradio.py CHANGED
@@ -1,32 +1,25 @@
-from langchain_community.llms import HuggingFaceEndpoint
-from langchain.prompts import PromptTemplate
-from langchain.schema import AIMessage, HumanMessage
-from langchain.chains import LLMChain
+# from langchain_community.llms import HuggingFaceEndpoint
+# from langchain.prompts import PromptTemplate
+# from langchain.schema import AIMessage, HumanMessage
+# from langchain.chains import LLMChain
 import gradio as gr
 import os
 
+from crew import CryptoCrew
+
 from dotenv import load_dotenv
+
 load_dotenv()
 
-repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
-llm = HuggingFaceEndpoint(
-    repo_id = repo_id,
-)
-
-template = """You're a good chatbot. Answer this request: {question}
-Answer: Let's think step by step."""
-prompt = PromptTemplate.from_template(template=template)
-llm_chain = LLMChain(llm=llm, prompt=prompt)
-
-
-def predict(message, history):
-    history_langchain_format = []
-    # for human, ai in history:
-    #     history_langchain_format.append(HumanMessage(content=human))
-    #     history_langchain_format.append(AIMessage(content=ai))
-    # history_langchain_format.append(HumanMessage(content=message))
-    # gpt_response = llm(history_langchain_format)
-    response = llm_chain.invoke(message)['text']
+def predict(message):
+    # company = input(
+    #     dedent("""
+    #         Which cryptocurrency are you looking to delve into?
+    #     """))
+
+    crypto_crew = CryptoCrew(company)
+    response = "## Here is the Report\n\n" + crypto_crew.run()
+
     return response
 
 gr.ChatInterface(predict).launch()
@@ -42,3 +35,4 @@ gr.ChatInterface(predict).launch()
 
 
 
+
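Note: in the new predict(), company is never defined in chat_gradio.py, and gr.ChatInterface calls its function with (message, history), so the committed version would fail at chat time with a TypeError and then a NameError. A minimal working sketch, assuming CryptoCrew takes the coin name in its constructor and run() returns a markdown string (the interface used in run_local.py):

# Sketch of a working chat_gradio.py wiring (not the committed code).
import gradio as gr
from dotenv import load_dotenv

from crew import CryptoCrew

load_dotenv()

def predict(message, history):
    # Treat the chat message as the cryptocurrency to analyse.
    crypto_crew = CryptoCrew(message)
    return "## Here is the Report\n\n" + crypto_crew.run()

if __name__ == "__main__":
    gr.ChatInterface(predict).launch()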
run_local.py CHANGED
@@ -2,8 +2,6 @@
 # from langchain.prompts import PromptTemplate
 # from langchain.schema import AIMessage, HumanMessage
 # from langchain.chains import LLMChain
-import gradio as gr
-import os
 
 from crew import CryptoCrew
 
@@ -11,41 +9,18 @@ from dotenv import load_dotenv
 
 load_dotenv()
 
-def predict(message):
-    # company = input(
-    #     dedent("""
-    #         Which cryptocurrency are you looking to delve into?
-    #     """))
-    # history_langchain_format = []
-    # for human, ai in history:
-    #     history_langchain_format.append(HumanMessage(content=human))
-    #     history_langchain_format.append(AIMessage(content=ai))
-    # history_langchain_format.append(HumanMessage(content=message))
-    # gpt_response = llm(history_langchain_format)
-    # response = llm_chain.invoke(message)['text']
+if __name__ == "__main__":
+    print("## Welcome to Crypto Analysis Crew")
+    print('-------------------------------')
+    company = input("""Which cryptocurrency are you looking to delve into?
+    """)
 
     crypto_crew = CryptoCrew(company)
-    response = "## Here is the Report\n\n" + crypto_crew.run()
-
-    return response
-
-gr.ChatInterface(predict).launch()
-
-# if __name__ == "__main__":
-#     print("## Welcome to Crypto Analysis Crew")
-#     print('-------------------------------')
-#     company = input(
-#         dedent("""
-#             Which cryptocurrency are you looking to delve into?
-#         """))
-
-#     crypto_crew = CryptoCrew(company)
-#     result = crypto_crew.run()
-#     print("\n\n########################")
-#     print("## Here is the Report")
-#     print("########################\n")
-
-#     print(result)
+    result = crypto_crew.run()
+    print("\n\n########################")
+    print("## Here is the Report")
+    print("########################\n")
+    print(result)
 
 
 
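Note: crew.py itself is not part of this commit (only the compiled __pycache__/crew.cpython-310.pyc is), so the CryptoCrew interface has to be inferred from its call sites: the constructor takes the coin name and run() returns a printable report. A hypothetical minimal implementation consistent with that interface, using typical crewAI Crew/Task wiring (the task description and agent factory-method name are placeholders):

# Hypothetical crew.py sketch matching the usage in run_local.py and chat_gradio.py.
from crewai import Crew, Task

from agents import CryptoAnalysisAgents  # assumed to expose agent factory methods


class CryptoCrew:
    def __init__(self, company: str):
        self.company = company

    def run(self) -> str:
        agents = CryptoAnalysisAgents()
        writer = agents.content_writer()  # assumed factory-method name

        report_task = Task(
            description=f"Write a short market report on {self.company}.",  # placeholder
            expected_output="A markdown report.",
            agent=writer,
        )

        crew = Crew(agents=[writer], tasks=[report_task])
        return str(crew.kickoff())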