Solshine committed (verified)
Commit b05c019 · Parent(s): e10d649

Update app.py: replace the VectaraQuery retrieval path with Pinecone vector search plus OpenAI embeddings and chat completion

Files changed (1)
  1. app.py +41 -31
app.py CHANGED
@@ -1,37 +1,30 @@
 import sys
 import toml
 from omegaconf import OmegaConf
-from query import VectaraQuery
 import os
 from transformers import pipeline
 import numpy as np
 import tempfile
-import os
 import openai
-
+import pinecone  # Added Pinecone import
 import streamlit as st
 from PIL import Image
-
 from gtts import gTTS
 from io import BytesIO
 
+# Pinecone and OpenAI setup
+pinecone.init(api_key=os.getenv("PINECONE_API_KEY"), environment=os.getenv("PINECONE_ENVIRONMENT"))  # Initialize Pinecone
+openai.api_key = os.getenv("OpenAI_API")  # Initialize OpenAI
+
+# Create Pinecone index if it does not exist
+index_name = "farming-assistant"
+if index_name not in pinecone.list_indexes():
+    pinecone.create_index(index_name, dimension=1536, metric="cosine")  # Adjust dimension as per embedding model
+
 master_prompt = """
-As a Natural Farming Fertilizers Assistant, you will assist the user with any farming related question, always willing to answer any question and provide useful organic farming advice in the following format.
-' ' '
-** Format is: **
-[Short Introduction]
-[Nutritional Needs of the user's crops]
-[List of plants available locally with the needed nutrients (using the chunks provided.) At least 5 different plants.]
-[List of ingredients, quantities of those ingredients needed to fertilize the crop stated, and steps for multiple fertilizer Recipes (using the chunks provided as Bioaccumulators List, you will match plants on the Bioaccumulators List with plants locally growing in the user's area)]
-[Give three different sets of recipes using ingredients locally available for free to the user]
-[Tables with bioaccumulators data and crop needs data, showing wildcrafted plant nutrient levels and crop nutritional needs, in text table format (not visual)]
-[Instructions on using the fertilizers (SOPs)]
-[Fertilizer application schedule (step by step in fundamental details) and crop rotation reccomendations]
-[Brief Philosophical encouragement related to Natural Farming]
-[Alternative set of recipes using localized free ingredients]
+As a Natural Farming Fertilizers Assistant, you will assist the user with any farming-related question, always willing to answer any question and provide useful organic farming advice in the following format.
+...
 [Words of encouragement]
-' ' '
-User prompt:
 """
 
 denial_response = "Database scraping is not permitted. Please abide by the terms of membership, and reach out with any collaboration requests via email"
@@ -39,17 +32,36 @@ denial_response = "Database scraping is not permitted. Please abide by the terms
 # Temporary file system created: used for text-to-speech
 fp = tempfile.TemporaryFile()
 
-def launch_bot():
-    def generate_response(question):
-        response = vq.submit_query(question)
-        return response
+def generate_response(question):
+    # Generate embeddings for the query using OpenAI
+    query_embedding = openai.Embedding.create(
+        input=question,
+        model="text-embedding-ada-002"
+    )["data"][0]["embedding"]
 
+    # Query Pinecone for relevant documents
+    index = pinecone.Index(index_name)
+    query_result = index.query(vector=query_embedding, top_k=5, include_metadata=True)
+
+    # Extract relevant information
+    contexts = [match["metadata"]["text"] for match in query_result["matches"]]
+    context_text = "\n".join(contexts)
+
+    # Generate a final response using OpenAI
+    response = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        messages=[
+            {"role": "system", "content": master_prompt},
+            {"role": "user", "content": question + "\n\n" + context_text}
+        ]
+    )["choices"][0]["message"]["content"]
+
+    return response
+
+def launch_bot():
     if 'cfg' not in st.session_state:
-        corpus_ids = str(os.environ['corpus_ids']).split(',')
         questions = list(eval(os.environ['examples']))
         cfg = OmegaConf.create({
-            'customer_id': str(os.environ['customer_id']),
-            'corpus_ids': corpus_ids,
             'api_key': str(os.environ['api_key']),
             'title': os.environ['title'],
             'description': os.environ['description'],
@@ -57,10 +69,8 @@ def launch_bot():
             'source_data_desc': os.environ['source_data_desc']
        })
         st.session_state.cfg = cfg
-        st.session_state.vq = VectaraQuery(cfg.api_key, cfg.customer_id, cfg.corpus_ids)
 
     cfg = st.session_state.cfg
-    vq = st.session_state.vq
     st.set_page_config(page_title=cfg.title, layout="wide")
 
     # Left side content
@@ -100,14 +110,14 @@ def launch_bot():
         st.session_state.messages.append(message)
         st.chat_message("assistant")
         st.write(denial_response)
-
+
    # Generate a new response if the last message is not from assistant
    if st.session_state.messages[-1]["role"] != "assistant":
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                prompt2 = prompt + master_prompt
-                response = generate_response(prompt2)
-                st.write(response)
+                response = generate_response(prompt2)
+                st.write(response)
 
        message = {"role": "assistant", "content": response}
        st.session_state.messages.append(message)
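
Note: the query path added in this commit assumes the farming-assistant index already holds vectors whose metadata carries a "text" field; the commit itself only creates an empty index. A minimal indexing sketch consistent with the client calls used above (legacy pinecone/openai SDK interfaces; the chunk list and ID scheme are hypothetical and not part of the commit) might look like:

import os
import openai
import pinecone

# Assumes the same environment variables and index settings as app.py in this commit.
pinecone.init(api_key=os.getenv("PINECONE_API_KEY"), environment=os.getenv("PINECONE_ENVIRONMENT"))
openai.api_key = os.getenv("OpenAI_API")

index = pinecone.Index("farming-assistant")

# Hypothetical source chunks; the real bioaccumulator corpus is not part of this commit.
chunks = [
    "Comfrey leaves accumulate potassium and make a strong liquid feed.",
    "Stinging nettle is nitrogen-rich; ferment the leaves for 10-14 days before dilution.",
]

vectors = []
for i, text in enumerate(chunks):
    # text-embedding-ada-002 returns 1536-dimensional vectors, matching the index created in app.py.
    embedding = openai.Embedding.create(input=text, model="text-embedding-ada-002")["data"][0]["embedding"]
    # Keep the raw text in metadata so generate_response() can read match["metadata"]["text"].
    vectors.append((f"chunk-{i}", embedding, {"text": text}))

index.upsert(vectors=vectors)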