Islam YAHIAOUI committed
Commit 94e17b8 • 1 Parent(s): a6156ff

FIX SOME BUGS AND UPDATE UI

Helpers.py CHANGED
@@ -1,8 +1,18 @@
 
 
 import requests
 import json
 import spacy
 import string
 
 def generate_prompt(context, question, history=None):
 
     # history_summary = ""
@@ -139,35 +149,105 @@ def get_docs_by_indices(docs, indices):
     """
     return [docs[index] for index in indices]
 
-def query_rewriter(query):
-    headers = {"Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoiMTA5MTBlYTktOWYwOC00N2E2LTg3MDktOTlhODExZjkwZDA2IiwidHlwZSI6ImFwaV90b2tlbiJ9._wiFq518MhMRvG8waWbg_7Eogf50isgyzqh3e2ypvOU"}
-
-    url = "https://api.edenai.run/v2/text/code_generation"
-    payload = {
-        "providers": "openai",
-        # "instruction": "You are an expert at world knowledge. Your task is to step back and paraphrase a question to a more generic step-back question, which is easier to answer. Here are a few examples:Original Question: Which position did Knox Cunningham hold from May 1955 to Apr 1956? Stepback Question: Which positions have Knox Cunning- ham held in his career? , Now this Question: Who was the spouse of Anna Karina from 1968 to 1974?",
-        "prompt": "",
-        "model": "gpt-3.5-turbo",
-        "instruction": f"""You are an expert in document retrieval and search optimization.
-        Your task is to rewrite the following query to enhance its relevance and usefulness for retrieving information
-        from a database or search engine :
-        Original Query: {query} \n """,
-        "temperature": 0.6,
-        "max_tokens": 512,
-        "fallback_providers": " ['openai']"
-
-    }
-
-    response = requests.post(url, json=payload, headers=headers)
-
-    result = json.loads(response.text)
-    if(len(result['openai']['generated_text']) > len(query)):
-        try:
-            return split_text(result['openai']['generated_text'])
-        except:
-            return result['openai']['generated_text']
-    else:
-        return query
 # ==============================================================================================================================================
 def split_text(text):
-    return text.split(":")[1].strip() if ":" in text else text
+import re
+import time
 import requests
 import json
 import spacy
 import string
 
+from textblob import TextBlob
+import torch
+from transformers import BertTokenizer, BertModel
+from sklearn.metrics.pairwise import cosine_similarity
+
+Bert_tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
+Bert_model = BertModel.from_pretrained('bert-base-uncased')
+
 def generate_prompt(context, question, history=None):
 
     # history_summary = ""
     """
     return [docs[index] for index in indices]
 
 # ==============================================================================================================================================
+
 def split_text(text):
+    return text.split(":")[1].strip() if ":" in text else text
+def query_rewriter(original_query):
+    timing=0
+    if not needs_rewriting(original_query):
+        return original_query
+    try:
+        # headers = {"Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoiMTA5MTBlYTktOWYwOC00N2E2LTg3MDktOTlhODExZjkwZDA2IiwidHlwZSI6ImFwaV90b2tlbiJ9._wiFq518MhMRvG8waWbg_7Eogf50isgyzqh3e2ypvOU"}
+        headers = {"Authorization": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoiMmYzMDE3MTEtOTJmNy00ZDU3LTg4N2MtNjU2MmE5MTU5MWZhIiwidHlwZSI6ImFwaV90b2tlbiJ9.vWvooRwxmr-uY1c61V97uugyDGpXmZGjX8oCFWKCUeM"}
+        url = "https://api.edenai.run/v2/text/code_generation"
+        payload = {
+            "providers": "openai",
+            # "instruction": "You are an expert at world knowledge. Your task is to step back and paraphrase a question to a more generic step-back question, which is easier to answer. Here are a few examples:Original Question: Which position did Knox Cunningham hold from May 1955 to Apr 1956? Stepback Question: Which positions have Knox Cunning- ham held in his career? , Now this Question: Who was the spouse of Anna Karina from 1968 to 1974?",
+            "prompt": "",
+            "model": "gpt-3.5-turbo",
+            "instruction": f"""You are an expert in document retrieval and search optimization.
+            Your task is to rewrite the following query to enhance its relevance and usefulness for retrieving accurate and
+            comprehensive information from a database or search engine. Ensure the rewritten query is clear, specific,
+            and free of ambiguities. Here are a few examples:
+            Original Query: who is Joe Biden? Rewritten Query: Provide detailed information about Joe Biden, including his political career, achievements, current position, and a history of his personal and professional life?
+            Original Query: {original_query}""",
+            "temperature": 0.6,
+            "max_tokens": 512,
+            "fallback_providers": " ['openai']"
+        }
+
+        response = requests.post(url, json=payload, headers=headers, timeout=20)
+        print("response", response)
+        result = json.loads(response.text)
+        print("result", result)
+
+    except:
+        print("Error in API call")
+        return original_query
+    try:
+        print("query revised", split_text(result['openai']['generated_text']))
+        rewrited_query = split_text(result['openai']['generated_text'])
+    except:
+        return original_query
+
+    if validate_revised_query(original_query, rewrited_query):
+        return result['openai']['generated_text']
+    else:
+        return original_query
+
+
+def score_query(query):
+    score = 0
+    salutations = ['hi', 'hello', 'hey', 'dear', 'greetings', 'good morning', 'good afternoon', 'good evening', 'good night', 'good day', 'howdy', 'what\'s up', 'sup', 'yo', 'hiya', 'hi there', 'hello there', 'hey there', 'hiya there', 'howdy there', 'what\'s up there', 'sup there', 'yo there', ]
+    if any(salutation in query.lower() for salutation in salutations):
+        return score
+    # Criterion 1: Length of Query
+    if len(query.split()) < 3 or len(query.split()) > 15:
+        score += 1
+    question_pattern = r'\b(who|what|where|when|why|how|which|whom|whose)\b'
+    if not re.search(question_pattern, query.lower()):
+        score += 2
+
+    # Criterion 2: Spelling Errors
+    blob = TextBlob(query)
+    if len(blob.correct().words) != len(blob.words):
+        score += 1
+
+    # Criterion 3: Grammar Issues (simplified)
+    if blob.correct() != blob:
+        score += 1
+
+    # Criterion 4: Ambiguity (simplified example)
+    ambiguous_terms = ['these', 'such', 'something', 'one', 'those', 'whatchamacallit', 'doohickey', 'whosit', 'matter', 'aspect', 'case', 'concept', 'issue', 'point', 'area', 'facet''data', 'information', 'people', 'stuff', 'business', 'thingy', 'whatnot', 'deal']
+    if any(term in query.lower() for term in ambiguous_terms):
+        score += 1
+
+    # Criterion 5: Complexity
+    if len(set(query.split())) < len(query.split()) * 0.5:
+        score += 1
+    return score
+
+def needs_rewriting(query, threshold=2):
+    print(f"Scoring query: '{query}'")
+    score = score_query(query)
+    print(f"Score: {score}")
+    return score > threshold
+
+def get_bert_embeddings(text):
+    inputs = Bert_tokenizer(text, return_tensors='pt', truncation=True, padding=True, max_length=512)
+    with torch.no_grad():
+        outputs = Bert_model(**inputs)
+    embeddings = outputs.last_hidden_state.mean(dim=1).squeeze()
+    return embeddings
+
+def compute_semantic_similarity(text1, text2):
+    embeddings1 = get_bert_embeddings(text1)
+    embeddings2 = get_bert_embeddings(text2)
+    similarity = cosine_similarity(embeddings1.reshape(1, -1), embeddings2.reshape(1, -1))
+    return similarity[0][0]
+
+def validate_revised_query(original_query, revised_query, threshold=0.6):
+    similarity = compute_semantic_similarity(original_query, revised_query)
+    print(f"Similarity: {similarity}")
+    return similarity > threshold
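To make the intent of the new Helpers.py functions easier to follow, here is a minimal usage sketch. It is illustrative only and not part of the commit: it assumes Helpers.py is importable as a module, and the sample query is made up. needs_rewriting() gates on the heuristic score from score_query(), query_rewriter() calls the EdenAI code_generation endpoint and falls back to the original query on any failure, and validate_revised_query() accepts a rewrite only when the cosine similarity of the mean-pooled BERT embeddings stays above the threshold.

# Illustrative sketch, not from the commit; "bitcoin price why up" is a made-up query.
from Helpers import needs_rewriting, query_rewriter, compute_semantic_similarity, validate_revised_query

query = "bitcoin price why up"
print(needs_rewriting(query, threshold=2))             # True when score_query(query) > threshold
rewritten = query_rewriter(query)                      # returns the original query if the API call or validation fails
print(compute_semantic_similarity(query, rewritten))   # cosine similarity of mean-pooled BERT embeddings
print(validate_revised_query(query, rewritten, threshold=0.6))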
__pycache__/Helpers.cpython-312.pyc CHANGED
Binary files a/__pycache__/Helpers.cpython-312.pyc and b/__pycache__/Helpers.cpython-312.pyc differ
 
__pycache__/app.cpython-312.pyc CHANGED
Binary files a/__pycache__/app.cpython-312.pyc and b/__pycache__/app.cpython-312.pyc differ
 
app.py CHANGED
@@ -1,8 +1,18 @@
 
 import gradio as gr
 from huggingface_hub import InferenceClient
 import os
-from rag import run_rag
 
 # ================================================================================================================================
 TOKEN = os.getenv("HF_TOKEN")
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta" , token=TOKEN)
@@ -43,37 +53,58 @@ state = Int_State()
 def clear_chat(chatbot ):
     state.history = []
 
-    chatbot = gr.Chatbot(
-        elem_id="chatbot",
-        label="Retrieval Augmented Generation (RAG) Chatbot",
-        height=300,
-        layout="bubble",
-        min_width=1200,
-        show_copy_button=True,
-        show_share_button=True,
-        placeholder="Ask a question or type a message...",
-    )
-    yield (chatbot) + (enable_btn,) * 5
-    # yield ("" , chatbot) + (enable_btn,) * 5
 
 # ================================================================================================================================
-
 
 
 
 def upvote_last_response():
     print("Upvoted")
     return (disable_btn,) * 3 + (enable_btn,)*2
 
 def downvote_last_response():
     print("Downvoted")
     return (disable_btn,) * 3 + (enable_btn,)*2
 
 def flag_last_response():
     print("Flagged")
     return (disable_btn,) * 3 + (enable_btn,)*2
 
-
 def chat(
     chatbot,
     message,
@@ -81,15 +112,10 @@ def chat(
     temperature,
     top_p,
 ):
-    print("Message: ", message)
-    print("System Message: ", system_message)
-    print("Max Tokens: ", max_tokens)
-    print("Temperature: ", temperature)
-    print("Top P: ", top_p)
     question= message
-
     messages = [{"role": "system", "content": system_message}]
-
     history= state.get_history()
     state.save_question(message)
 
@@ -98,32 +124,36 @@ def chat(
 
     messages.append({"role": "user", "content": run_rag(message)})
     response = "This is a response to the question"
-    # # chatbot.append((question))
-    # for msg in client.chat_completion(
-    #     messages,
-    #     max_tokens=max_tokens,
-    #     stream=True,
-    #     temperature=temperature,
-    #     top_p=top_p,
-    # ):
 
-    #     token = msg.choices[0].delta.content
-    #     response += str(token)
     # chatbot.append(( response, response))
-
     # yield "" , chatbot
     chatbot.append((question , response))
     state.save_response(response)
-    # btn_list =enable_buttons(btn_list)
     yield ("" , chatbot) + (enable_btn,) * 5
 
-    # yield "" , chatbot
 
 
 # ================================================================================================================================
 
 theme = gr.themes.Base(
-    font=[gr.themes.GoogleFont('Libre Franklin'), gr.themes.GoogleFont('Public Sans'), 'system-ui', 'sans-serif'],
 )
 EXAMPLES = [
     [ "Tell me about the latest news in the world ?"],
@@ -142,7 +172,7 @@ block_css = """
 """
 # ================================================================================================================================
 textbox = gr.Textbox(show_label=False,
-                     placeholder="Enter text and press ENTER",
                      container=False,
                      show_copy_button=True
                      )
@@ -152,12 +182,9 @@ with gr.Blocks(title="RAG", theme=theme, css=block_css , fill_height=True) as de
     gr.Markdown("# **Retrieval Augmented Generation (RAG) Chatbot**" )
     gr.Markdown("This is a demo of a chatbot that uses the RAG system to generate responses to user queries. RAG is a combination of a retriever and a generator, which allows it to generate responses based on the context of the conversation. The chatbot can be used to answer questions, provide information, and engage in conversation with users.")
     with gr.Row(variant="panel"):
-        # with gr.Column(scale=2):
-        #     # gr.Markdown("# ****")
-        #     new_chat=gr.Button(value=" New Chat", variant="secondary", interactive=True)
 
-        # with gr.Column(scale=1):
-        #     exit_btn = gr.Button(value="🚪 Exit", interactive=True , variant="stop")
         with gr.Column(scale=10):
             chatbot = gr.Chatbot(
                 elem_id="chatbot",
@@ -178,13 +205,14 @@ with gr.Blocks(title="RAG", theme=theme, css=block_css , fill_height=True) as de
 
 
         with gr.Row(elem_id="buttons") as button_row:
-            upvote_btn = gr.Button(value="👍 Upvote", interactive=False)
-            downvote_btn = gr.Button(value="👎 Downvote", interactive=False)
-            flag_btn = gr.Button(value="⚠️ Flag", interactive=False)
             #stop_btn = gr.Button(value="⏹️ Stop Generation", interactive=False)
-            regenerate_btn = gr.Button(value="🔄 Regenerate", interactive=False)
         with gr.Column(scale=3):
             clear_btn = gr.Button(value="🗑️ Clear", interactive=False , variant="stop")
     with gr.Accordion("Examples", open=True) as Examples_row:
         gr.Examples(examples=[
             [f"Tell me about the latest news in the world ?"],
@@ -224,15 +252,20 @@ with gr.Blocks(title="RAG", theme=theme, css=block_css , fill_height=True) as de
         [],
         btn_list,
     )
-    # regenerate_btn.click(
-    #     chat,
-    #     [ chatbot, textbox, max_output_tokens, temperature, top_p],
-    #     [textbox, chatbot] + btn_list
-    # )
     clear_btn.click(
         clear_chat,
         [chatbot],
-        [chatbot ] + btn_list,
     )
 
     submit_btn.click(
@@ -240,8 +273,10 @@ with gr.Blocks(title="RAG", theme=theme, css=block_css , fill_height=True) as de
         [ chatbot, textbox , max_output_tokens, temperature, top_p],
         [textbox ,chatbot] + btn_list ,
     )
-
 # ================================================================================================================================
 demo.launch()
 
 # ================================================================================================================================
 
+import csv
 import gradio as gr
 from huggingface_hub import InferenceClient
 import os
 
+from matplotlib import colors
+from rag import run_rag
+from gradio.themes.utils import (
+    colors,
+    fonts,
+    get_matching_version,
+    get_theme_assets,
+    sizes,
+)
+
 # ================================================================================================================================
 TOKEN = os.getenv("HF_TOKEN")
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta" , token=TOKEN)
 
 def clear_chat(chatbot ):
     state.history = []
 
+    chatbot.clear()
+
+    yield ("" , chatbot) + (enable_btn,) * 5
 
 # ================================================================================================================================
+def save_chat( question, answer, upvote, downvote, flag):
+    file_path = "chat_data.csv"
+    with open(file_path, 'r', newline='') as file:
+        reader = csv.reader(file)
+        data = list(reader)
+
+    # Add new row with provided data
+    new_row = [question, answer, upvote, downvote, flag]
+    data.append(new_row)
+
+    # Write updated data back to CSV file
+    with open(file_path, 'w', newline='') as file:
+        writer = csv.writer(file)
+        writer.writerows(data)
+
+    print("New row added successfully to", file_path)
 
 
 
 def upvote_last_response():
     print("Upvoted")
+    save_chat(state.current_query, state.current_response, 1, 0, 0)
     return (disable_btn,) * 3 + (enable_btn,)*2
 
 def downvote_last_response():
     print("Downvoted")
+    save_chat(state.current_query, state.current_response, 0, 1, 0)
     return (disable_btn,) * 3 + (enable_btn,)*2
 
 def flag_last_response():
     print("Flagged")
+    save_chat(state.current_query, state.current_response, 0, 0, 1)
     return (disable_btn,) * 3 + (enable_btn,)*2
 
+def remove_last_response(chatbot):
+    print("Regenerated")
+    textbox =state.current_query
+    state.history.pop()
+    state.history.pop()
+    chatbot.clear()
+
+    return (textbox ,chatbot ) + (enable_btn,) * 5
+def quit_chat():
+    return demo.close()
+
+# ================================================================================================================================
+
 def chat(
     chatbot,
     message,
 
     temperature,
     top_p,
 ):
     question= message
+    chatbot.append((question,""))
+    yield ("" , chatbot) + (disable_btn,) * 5
     messages = [{"role": "system", "content": system_message}]
     history= state.get_history()
     state.save_question(message)
 
 
     messages.append({"role": "user", "content": run_rag(message)})
     response = "This is a response to the question"
+    chatbot.append((question,""))
+    for msg in client.chat_completion(
+        messages,
+        max_tokens=max_tokens,
+        stream=True,
+        temperature=temperature,
+        top_p=top_p,
+    ):
 
+        token = msg.choices[0].delta.content
+        response += str(token)
     # chatbot.append(( response, response))
     # yield "" , chatbot
+    chatbot.clear()
     chatbot.append((question , response))
     state.save_response(response)
     yield ("" , chatbot) + (enable_btn,) * 5
 
+
 
 
 # ================================================================================================================================
 
 theme = gr.themes.Base(
+    primary_hue=colors.emerald,
+    secondary_hue=colors.cyan,
+    neutral_hue=colors.stone,
+    radius_size=sizes.radius_lg,
+    spacing_size=sizes.spacing_sm,
+    font=[gr.themes.GoogleFont('Poppins'), gr.themes.GoogleFont('Reddit Sans'), 'system-ui', 'sans-serif'],
 )
 EXAMPLES = [
     [ "Tell me about the latest news in the world ?"],
 
 """
 # ================================================================================================================================
 textbox = gr.Textbox(show_label=False,
+                     placeholder="Enter a question or message...",
                      container=False,
                      show_copy_button=True
                      )
 
     gr.Markdown("# **Retrieval Augmented Generation (RAG) Chatbot**" )
     gr.Markdown("This is a demo of a chatbot that uses the RAG system to generate responses to user queries. RAG is a combination of a retriever and a generator, which allows it to generate responses based on the context of the conversation. The chatbot can be used to answer questions, provide information, and engage in conversation with users.")
     with gr.Row(variant="panel"):
+
 
+
         with gr.Column(scale=10):
             chatbot = gr.Chatbot(
                 elem_id="chatbot",
 
 
 
         with gr.Row(elem_id="buttons") as button_row:
+            upvote_btn = gr.Button(value="👍 Upvote", interactive=False , variant="secondary")
+            downvote_btn = gr.Button(value="👎 Downvote", interactive=False , variant="secondary")
+            flag_btn = gr.Button(value="⚠️ Flag", interactive=False , variant="secondary")
             #stop_btn = gr.Button(value="⏹️ Stop Generation", interactive=False)
+            regenerate_btn = gr.Button(value="🔄 Regenerate", interactive=False ,variant="secondary")
         with gr.Column(scale=3):
             clear_btn = gr.Button(value="🗑️ Clear", interactive=False , variant="stop")
+
     with gr.Accordion("Examples", open=True) as Examples_row:
         gr.Examples(examples=[
             [f"Tell me about the latest news in the world ?"],
 
         [],
         btn_list,
     )
+    regenerate_btn.click(
+        remove_last_response,
+        [chatbot],
+        [textbox , chatbot] + btn_list,
+    ).then(
+        chat,
+        [ chatbot, textbox, max_output_tokens, temperature, top_p],
+        [textbox, chatbot] + btn_list
+    )
+
     clear_btn.click(
         clear_chat,
         [chatbot],
+        [textbox , chatbot] + btn_list,
     )
 
     submit_btn.click(
 
         [ chatbot, textbox , max_output_tokens, temperature, top_p],
         [textbox ,chatbot] + btn_list ,
     )
+
 # ================================================================================================================================
 demo.launch()
 
+
+
 # ================================================================================================================================
chat_data.csv ADDED
@@ -0,0 +1,30 @@
+ Tell me about the increase in the price of Bitcoin ?,"This is a response to the questionThe recent surge in Bitcoin's price can be attributed to several key factors. Firstly, the growing interest from institutional investors and Wall Street in spot Bitcoin ETFs listed on Nasdaq has led to an influx of institutional capital into the cryptocurrency markets. This has contributed to the upward momentum of Bitcoin's price, as these investors seek to diversify their portfolios and capitalize on the potential returns of Bitcoin.
+
+ Secondly, the concentration of trading volume within these ETFs has led to a surge in Bitcoin's value. This is because these ETFs provide a regulated and secure way for investors to gain exposure to Bitcoin, which has helped to increase demand and drive up the price.
+
+ Despite the soaring price, on-chain transaction volumes in terms of US dollars remain subdued compared to the levels witnessed during the bull market of 2021. This discrepancy suggests a prevailing sentiment among investors to hold onto their Bitcoin, anticipating further price appreciation.
+
+ Analysts from Blockware Solutions have further speculated that a significant Bitcoin price movement will precipitate an increase in on-chain volume as seasoned Bitcoin holders may begin moving assets to exchanges for sale.
+
+ Moreover, even with Bitcoin hitting a record high of $73,750, there is a noticeable decline in economic transactions on its blockchain. This discrepancy between the soaring price and the muted on-chain dollar volume has led some analysts to suggest that Bitcoin's price could be detached from its underlying fundamentals.
+
+ However, projections from experts suggest that Bitcoin's price could ascend to six figures in the near term. For instance, Standard Chartered projects that it will breach $150,000 in 2024.
+
+ In summary, the recent price surge in Bitcoin can be attributed to the growing interest from institutional investors, the concentration of trading volume within ETFs, and the anticipation of further price appreciation among long-term investors. While some analysts have raised concerns about the disconnect between price and on-chain activity, the overall trend suggests that Bitcoin's value is likely to continue increasing in the coming months and years.</s>",1,0,0
+ Tell me about the increase in the price of Bitcoin ?,"This is a response to the questionThe recent price surge of Bitcoin has been nothing short of remarkable, with the cryptocurrency breaking through the $70,000 barrier for the first time in its history. This surge in price can be attributed to a number of factors, including growing institutional adoption, increasing mainstream acceptance, and a broader recognition of Bitcoin as a legitimate store of value and hedge against inflation.
+
+ One significant factor driving Bitcoin's price increase is the growing interest from institutional investors and Wall Street in spot Bitcoin ETFs listed on Nasdaq. This influx of institutional capital into the cryptocurrency markets has contributed to the upward momentum of Bitcoin's price, as these ETFs provide a more regulated and accessible way for institutional investors to gain exposure to Bitcoin.
+
+ Another factor contributing to Bitcoin's price surge is the increasing mainstream acceptance and integration of cryptocurrencies into traditional financial systems. This trend is being driven by major financial institutions and corporations, such as JPMorgan Chase, Goldman Sachs, and Tesla, which have all made significant investments in Bitcoin and other cryptocurrencies.
+
+ The concentration of trading volume within these ETFs has also led to a surge in Bitcoin's value, as investors seek to capitalize on the growing demand for Bitcoin as a store of value and hedge against inflation. This trend is reflected in the growing number of institutional investors who are adding Bitcoin to their portfolios as a way to diversify their holdings and mitigate the risks associated with traditional assets.
+
+ However, despite the soaring price, on-chain transaction volumes in terms of US dollars remain subdued compared to the levels witnessed during the bull market of 2021. This discrepancy suggests a prevailing sentiment among investors to hold onto their Bitcoin, anticipating further price appreciation.
+
+ In addition, even with Bitcoin hitting a record high of $73,750, there is a noticeable decline in economic transactions on its blockchain. Analysts from Blockware Solutions highlight a discrepancy between the soaring price and the muted on-chain dollar volume. Current data shows average transfer volumes to be under $200,000, a stark contrast to the 2021 bull market where figures exceeded $1 million.
+
+ However, Blockware Solutions analysts further speculate that a significant Bitcoin price movement will precipitate an increase in on-chain volume as seasoned Bitcoin holders may begin moving assets to exchanges for sale. The data also shows a growing portion of Bitcoin that has remained inactive for 3 to 4 years, reflecting the confidence of long-term investors.
+
+ Projections from experts suggest that Bitcoin's price could ascend to six figures in the near term. For instance, Standard Chartered projects that it will breach $150,000 in 2024. As Bitcoin jumps back into the limelight, new and existing investors are positioning themselves in response to these developments. Some are choosing to invest in Bitcoin directly, while others are exploring alternative options such as Bitcoin-based dog-themed projects like Bitcoin Dogs.
+
+ In conclusion, the recent price surge of Bitcoin is a testament to its growing popularity and mainstream acceptance. As more institutional investors and corporations adopt Bitcoin as a store of value and hedge against inflation, we can expect to see continued price appreciation in the coming months and years. However, it's important for investors to approach Bitcoin with caution and to do their own research before making any investment decisions. As with any high-risk, high-reward investment, there are inherent risks and uncertainties associated with investing in Bitcoin, and it's important to understand these risks before making any investment decisions.</s>",1,0,0
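Finally, a small sketch (not part of the commit) of how the rows that save_chat() appends to chat_data.csv can be read back. The column order question, answer, upvote, downvote, flag mirrors the new_row list in app.py, and csv.reader copes with the newlines embedded in the quoted answer field; the unpacking below assumes every row has exactly those five fields, as in the two rows above.

# Read back the feedback log written by save_chat(); column order follows new_row in app.py.
import csv

with open("chat_data.csv", newline="") as f:
    for question, answer, upvote, downvote, flag in csv.reader(f):
        print(f"upvote={upvote} downvote={downvote} flag={flag} question={question[:60]!r}")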