Adrian Cowham committed
Commit 60b86d7 • 1 Parent(s): 0c47d68

adding ben hogan

.gitattributes CHANGED
@@ -36,3 +36,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  resources/design-by-fire.pdf filter=lfs diff=lfs merge=lfs -text
  resources/lets-talk.pdf filter=lfs diff=lfs merge=lfs -text
  resources/progit.pdf filter=lfs diff=lfs merge=lfs -text
+ resources filter=lfs diff=lfs merge=lfs -text
+ resources/Ben_Hogans_Five_Lessons.pdf filter=lfs diff=lfs merge=lfs -text
+ resources/How_To_Win_Friends_And_Influence_People_-_Dale_Carnegie.pdf filter=lfs diff=lfs merge=lfs -text
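The new attribute lines route the added PDFs (and the bare resources pattern) through Git LFS. As a hedged sketch, not part of this commit, rules like these are normally produced with git lfs track; scripted in Python it might look like this (assumes git and git-lfs are installed and on the PATH):

import subprocess

# Hypothetical helper: add LFS tracking rules equivalent to the new
# .gitattributes lines ("git lfs track" appends the filter/diff/merge rule).
patterns = [
    "resources/Ben_Hogans_Five_Lessons.pdf",
    "resources/How_To_Win_Friends_And_Influence_People_-_Dale_Carnegie.pdf",
]
for pattern in patterns:
    subprocess.run(["git", "lfs", "track", pattern], check=True)

# Stage the updated attributes file so the rules ship with the commit.
subprocess.run(["git", "add", ".gitattributes"], check=True)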
README.md CHANGED
@@ -1,6 +1,6 @@
  ---
- title: Pyrocene
- emoji: 🌖
+ title: Ben Hogan
+ emoji: 🏌🏽
  colorFrom: green
  colorTo: green
  sdk: gradio
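The README change only swaps the Hugging Face Spaces front-matter title and emoji; the rest of the config (colorFrom, colorTo, sdk) is unchanged. A minimal sketch, assuming PyYAML is available, of reading that front matter back out to confirm the new values:

import yaml

# Hedged example, not part of this commit: the Space config sits between the
# first two '---' markers at the top of README.md.
with open("README.md", encoding="utf-8") as fh:
    _, front_matter, _ = fh.read().split("---", 2)

config = yaml.safe_load(front_matter)
print(config["title"], config["emoji"])  # expected: Ben Hogan 🏌🏽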
resources/Ben_Hogans_Five_Lessons.pdf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7131303648e0817fe35bc9fcacfe333420e8f4f4f6b5e7cf2b0ceb2914c13e9
+ size 3247151
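What is committed here is a Git LFS pointer rather than the PDF itself: three lines recording the spec version, the SHA-256 of the real file, and its size in bytes. A hedged sketch of checking a locally fetched copy against the pointer (file names are placeholders; assumes the pointer was checked out unsmudged, e.g. with GIT_LFS_SKIP_SMUDGE=1):

import hashlib

def parse_pointer(path):
    # An LFS pointer file is "key value" pairs, one per line.
    fields = {}
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_pointer("resources/Ben_Hogans_Five_Lessons.pdf")
expected_oid = pointer["oid"].split(":", 1)[1]
expected_size = int(pointer["size"])

with open("Ben_Hogans_Five_Lessons.local.pdf", "rb") as fh:  # hypothetical local copy
    data = fh.read()

assert len(data) == expected_size
assert hashlib.sha256(data).hexdigest() == expected_oid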
src/app.py CHANGED
@@ -23,8 +23,8 @@ K = 5
  USE_VERBOSE = True
  API_KEY = os.environ["OPENAI_API_KEY"]
  system_template = """
- The context below contains excerpts from 'Design by Fire,' by Emily Elizabeth Schlickman and Brett Milligan. You must only use the information in the context below to formulate your response. If there is not enough information to formulate a response, you must respond with
- "I'm sorry, but I can't find the answer to your question in, the book Design by Fire."
+ The context below contains excerpts from 'Ben Hogan's Five Lessons'. You must only use the information in the context below to formulate your response. If there is not enough information to formulate a response, you must respond with
+ "I'm sorry, but I can't find the answer to your question in Ben Hogan's Five Lessons."
 
  Here is the context:
  {context}
@@ -43,7 +43,7 @@ class AnswerConversationBufferMemory(ConversationBufferMemory):
  return super(AnswerConversationBufferMemory, self).save_context(inputs,{'response': outputs['answer']})
 
  def getretriever():
- with open("./resources/design-by-fire.pdf", 'rb') as uploaded_file:
+ with open("./resources/Ben_Hogans_Five_Lessons.pdf", 'rb') as uploaded_file:
  try:
  file = read_file(uploaded_file)
  except Exception as e:
@@ -95,26 +95,19 @@ def load_chain(inputs = None):
  combine_docs_chain_kwargs={"prompt": qa_prompt})
  return chain
 
- CSS ="""
- .contain { display: flex; flex-direction: column; }
- .gradio-container { height: 100vh !important; }
- #component-0 { height: 100%; }
- #chatbot { flex-grow: 1; overflow: auto;}
- """
-
  with gr.Blocks() as block:
  with gr.Row():
  with gr.Column(scale=0.75):
  with gr.Row():
- gr.Markdown("<h1>Design by Fire</h1>")
+ gr.Markdown("<h1>Ben Hogan's Five Lessons</h1>")
  with gr.Row():
- gr.Markdown("by Emily Elizabeth Schlickman and Brett Milligan")
+ gr.Markdown("by Ben Hogan")
  chatbot = gr.Chatbot(elem_id="chatbot").style(height=600)
 
  with gr.Row():
  message = gr.Textbox(
  label="",
- placeholder="Design by Fire",
+ placeholder="Ask Ben...",
  lines=1,
  )
  with gr.Row():
@@ -129,11 +122,11 @@ with gr.Blocks() as block:
  with gr.Column(scale=0.25):
  with gr.Row():
  gr.Markdown("<h1><center>Suggestions</center></h1>")
- ex1 = gr.Button(value="What are the main factors and trends discussed in the book that contribute to the changing behavior of wildfires?", variant="primary")
+ ex1 = gr.Button(value="What is this book about?", variant="primary")
  ex1.click(getanswer, inputs=[chain_state, ex1, state], outputs=[chatbot, state, message])
- ex2 = gr.Button(value="How does the book explore the relationship between fire and different landscapes, such as wilderness and urban areas?", variant="primary")
+ ex2 = gr.Button(value="What are the core fundamentals Ben teaches?", variant="primary")
  ex2.click(getanswer, inputs=[chain_state, ex2, state], outputs=[chatbot, state, message])
- ex3 = gr.Button(value="What are the three approaches to designing with fire that the book presents?", variant="primary")
+ ex3 = gr.Button(value="How did Ben change the way the game is taught?", variant="primary")
  ex3.click(getanswer, inputs=[chain_state, ex3, state], outputs=[chatbot, state, message])
 
  block.launch(debug=True)
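Only the system_template text, the retriever's source PDF, and the UI strings change; the chain construction in load_chain is untouched by this commit. As a hedged sketch (assumed from the surrounding context, not shown in the diff), a template with a {context} slot is typically wired into the qa_prompt that load_chain passes through combine_docs_chain_kwargs roughly like this:

# Assumed wiring, reusing names from app.py (system_template, API_KEY,
# getretriever, AnswerConversationBufferMemory, USE_VERBOSE); the memory
# settings are guesses, not taken from the diff.
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.prompts.chat import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    SystemMessagePromptTemplate,
)

qa_prompt = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template(system_template),  # fills {context}
    HumanMessagePromptTemplate.from_template("{question}"),
])

chain = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(temperature=0, openai_api_key=API_KEY),
    retriever=getretriever(),
    memory=AnswerConversationBufferMemory(memory_key="chat_history", return_messages=True),
    verbose=USE_VERBOSE,
    combine_docs_chain_kwargs={"prompt": qa_prompt},
)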
src/core/embedding.py CHANGED
@@ -1,7 +1,7 @@
  from typing import List, Type
 
  from langchain.docstore.document import Document
- from langchain.embeddings import OpenAIEmbeddings
+ from langchain.embeddings import HuggingFaceBgeEmbeddings
  from langchain.embeddings.base import Embeddings
  from langchain.vectorstores import VectorStore
  from langchain.vectorstores.faiss import FAISS
@@ -50,27 +50,16 @@ class FolderIndex:
  def embed_files(
  files: List[File], embedding: str, vector_store: str, **kwargs
  ) -> FolderIndex:
- """Embeds a collection of files and stores them in a FolderIndex."""
-
- supported_embeddings: dict[str, Type[Embeddings]] = {
- "openai": OpenAIEmbeddings,
- "debug": FakeEmbeddings,
- }
- supported_vector_stores: dict[str, Type[VectorStore]] = {
- "faiss": FAISS,
- "debug": FakeVectorStore,
- }
-
- if embedding in supported_embeddings:
- _embeddings = supported_embeddings[embedding](**kwargs)
- else:
- raise NotImplementedError(f"Embedding {embedding} not supported.")
-
- if vector_store in supported_vector_stores:
- _vector_store = supported_vector_stores[vector_store]
- else:
- raise NotImplementedError(f"Vector store {vector_store} not supported.")
-
+ model_name = "BAAI/bge-small-en"
+ model_kwargs = {'device': 'cpu'}
+ encode_kwargs = {'normalize_embeddings': True} # set True to compute cosine similarity
+ model_norm = HuggingFaceBgeEmbeddings(
+ model_name=model_name,
+ model_kwargs=model_kwargs,
+ encode_kwargs=encode_kwargs
+ )
+ # embeddings = OpenAIEmbeddings
+ embeddings = model_norm
  return FolderIndex.from_files(
- files=files, embeddings=_embeddings, vector_store=_vector_store
+ files=files, embeddings=embeddings, vector_store=FAISS
  )
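The functional change here is swapping the hosted OpenAIEmbeddings for a local BGE model with normalized vectors (so FAISS similarity behaves like cosine similarity); the embedding and vector_store arguments of embed_files are now effectively ignored. A minimal hedged sketch of the same pairing in isolation, with placeholder texts and query:

from langchain.embeddings import HuggingFaceBgeEmbeddings
from langchain.vectorstores.faiss import FAISS

# Same model and settings as the diff above; the documents below are placeholders.
embeddings = HuggingFaceBgeEmbeddings(
    model_name="BAAI/bge-small-en",
    model_kwargs={"device": "cpu"},
    encode_kwargs={"normalize_embeddings": True},  # normalized vectors -> cosine similarity
)

texts = [
    "The grip is the foundation of the swing.",
    "Keep the left wrist firm through impact.",
]
index = FAISS.from_texts(texts, embeddings)

for doc in index.similarity_search("What does the book say about the grip?", k=1):
    print(doc.page_content)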