taratrankennedy committed
Commit 401487d
1 Parent(s): 1178e4b
Update app.py
app.py
CHANGED
@@ -2,9 +2,8 @@ import gradio as gr
 from sentence_transformers import SentenceTransformer, util
 import openai
 import os
-import os
-os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
+os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
 # Initialize paths and model identifiers for easy configuration and maintenance
 filename = "output_chess_details.txt"  # Path to the file storing chess-specific details
@@ -12,7 +11,6 @@ retrieval_model_name = 'output/sentence-transformer-finetuned/'
 
 openai.api_key = os.environ["OPENAI_API_KEY"]
 
-
 # Attempt to load the necessary models and provide feedback on success or failure
 try:
     retrieval_model = SentenceTransformer(retrieval_model_name)
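A side note on the unchanged `openai.api_key = os.environ["OPENAI_API_KEY"]` line above: indexing `os.environ` directly raises a bare `KeyError` when the Space secret is missing. A minimal sketch of a friendlier guard (not part of this commit; the error message is illustrative):

```python
import os
import openai

# Sketch only: fail with a clear message if the OPENAI_API_KEY secret is not configured.
api_key = os.environ.get("OPENAI_API_KEY")
if not api_key:
    raise RuntimeError("OPENAI_API_KEY is not set; add it as a secret for this Space.")
openai.api_key = api_key
```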
@@ -38,41 +36,41 @@ segments = load_and_preprocess_text(filename)
 def find_relevant_segment(user_query, segments):
     """
     Find the most relevant text segment for a user's query using cosine similarity among sentence embeddings.
-    This version
+    This version finds the best match based on the content of the query.
     """
     try:
         # Lowercase the query for better matching
         lower_query = user_query.lower()
-        # Filter segments to include only those containing country names mentioned in the query
-        country_segments = [seg for seg in segments if any(country.lower() in seg.lower() for country in ['Guatemala', 'Mexico', 'U.S.', 'United States'])]
-
-        # If no specific country segments found, default to general matching
-        if not country_segments:
-            country_segments = segments
 
+        # Encode the query and the segments
         query_embedding = retrieval_model.encode(lower_query)
-        segment_embeddings = retrieval_model.encode(
+        segment_embeddings = retrieval_model.encode(segments)
+
+        # Compute cosine similarities between the query and the segments
         similarities = util.pytorch_cos_sim(query_embedding, segment_embeddings)[0]
+
+        # Find the index of the most similar segment
         best_idx = similarities.argmax()
-
+
+        # Return the most relevant segment
+        return segments[best_idx]
     except Exception as e:
         print(f"Error in finding relevant segment: {e}")
         return ""
 
-
 def generate_response(user_query, relevant_segment):
     """
-    Generate a response emphasizing the bot's capability in providing
+    Generate a response emphasizing the bot's capability in providing chess information.
     """
     try:
         system_message = "You are a chess chatbot specialized in providing information on chess rules, strategies, and terminology."
-        user_message = f"Here's the information on
+        user_message = f"Here's the information on chess: {relevant_segment}"
         messages = [
             {"role": "system", "content": system_message},
             {"role": "user", "content": user_message}
         ]
         response = openai.ChatCompletion.create(
-            model="gpt-3.5-turbo",
+            model="gpt-3.5-turbo",
             messages=messages,
             max_tokens=150,
             temperature=0.2,
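The rewritten `find_relevant_segment` above calls `retrieval_model.encode(segments)` on every query. A minimal sketch of an alternative, assuming the same `retrieval_model` and `segments` objects defined earlier in app.py: encode the segments once at startup and reuse the embeddings (the name `find_relevant_segment_cached` is hypothetical, not part of this commit).

```python
from sentence_transformers import util

# Sketch only: encode the corpus once at startup instead of on every query.
segment_embeddings = retrieval_model.encode(segments, convert_to_tensor=True)

def find_relevant_segment_cached(user_query):
    """Hypothetical variant of find_relevant_segment that reuses precomputed embeddings."""
    query_embedding = retrieval_model.encode(user_query.lower(), convert_to_tensor=True)
    similarities = util.pytorch_cos_sim(query_embedding, segment_embeddings)[0]
    return segments[int(similarities.argmax())]
```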
@@ -85,14 +83,9 @@ def generate_response(user_query, relevant_segment):
         print(f"Error in generating response: {e}")
         return f"Error in generating response: {e}"
 
-
-
-
-# Define and configure the Gradio application interface to interact with users.
-# Define and configure the Gradio application interface to interact with users.
 def query_model(question):
     """
-
+    Process a question, find relevant information, and generate a response.
     """
     if question == "":
         return "Welcome to ChessBot! Ask me anything about chess rules, strategies, and terminology."
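`openai.ChatCompletion.create` in `generate_response` above is the legacy (pre-1.0) interface of the `openai` Python package and was removed in `openai>=1.0`. If the Space ever pins a newer client, a roughly equivalent call would look like the sketch below; the same model and parameters as in the diff are assumed, and the user message content is a placeholder.

```python
import os
from openai import OpenAI

# Sketch for openai>=1.0 only; this commit keeps the legacy openai.ChatCompletion API.
client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])

messages = [
    {"role": "system", "content": "You are a chess chatbot specialized in providing information on chess rules, strategies, and terminology."},
    {"role": "user", "content": "Here's the information on chess: ..."},  # placeholder for the retrieved segment
]

completion = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=messages,
    max_tokens=150,
    temperature=0.2,
)
answer = completion.choices[0].message.content
```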
@@ -102,10 +95,7 @@ def query_model(question):
     response = generate_response(question, relevant_segment)
     return response
 
-
-
-
-# Define the welcome message and specific topics and countries the chatbot can provide information about.
+# Define the welcome message and specific topics the chatbot can provide information about
 welcome_message = """
 # Welcome to ChessBot!
 
@@ -123,27 +113,6 @@ topics = """
 - Chess tactics
 """
 
-
-# Define and configure the Gradio application interface to interact with users.
-def query_model(question):
-    """
-    Process a question, find relevant information, and generate a response.
-
-    Args:
-        question (str): User input question.
-
-    Returns:
-        str: Generated response or a default welcome message if no question is provided.
-    """
-    if question == "":
-        return welcome_message
-    relevant_segment = find_relevant_segment(question, segments)
-    response = generate_response(question, relevant_segment)
-    return response
-
-
-
-
 # Setup the Gradio Blocks interface with custom layout components
 with gr.Blocks() as demo:
     gr.Markdown(welcome_message)  # Display the formatted welcome message
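The hunks above only show `gr.Markdown(welcome_message)` inside the `gr.Blocks()` context; the rest of the interface is unchanged and therefore not visible in this diff. For orientation, a typical wiring of `query_model` into such a Blocks layout looks like the sketch below; the component names (`question_box`, `answer_box`, `submit_btn`) are illustrative and not taken from this commit.

```python
import gradio as gr

# Illustrative sketch only; the actual components in app.py are outside the changed hunks.
# Assumes welcome_message, topics, and query_model from app.py are in scope.
with gr.Blocks() as demo:
    gr.Markdown(welcome_message)   # formatted welcome text
    gr.Markdown(topics)            # topics the bot can cover
    question_box = gr.Textbox(label="Your question", placeholder="e.g. What is castling?")
    answer_box = gr.Textbox(label="ChessBot's answer")
    submit_btn = gr.Button("Submit")
    submit_btn.click(fn=query_model, inputs=question_box, outputs=answer_box)
```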
@@ -161,4 +130,3 @@ with gr.Blocks() as demo:
 
 # Launch the Gradio app to allow user interaction
 demo.launch(share=True)
-
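Since `demo.launch(share=True)` only matters once the script actually runs (on Spaces the platform serves the app itself, so `share=True` is mainly relevant for local runs), a quick way to exercise the retrieval-plus-generation path without the UI is a small smoke test like the sketch below. Assumptions: `find_relevant_segment`, `generate_response`, and `segments` from app.py are in scope, `OPENAI_API_KEY` is set, and `output_chess_details.txt` plus the fine-tuned retrieval model are available.

```python
# Sketch only: exercise the retrieval + generation path without the Gradio UI.
question = "What is castling in chess?"
segment = find_relevant_segment(question, segments)
print("Retrieved segment:", segment[:200])
print("Answer:", generate_response(question, segment))
```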