# Streamlit chat app: answers natural-farming questions via Vectara retrieval,
# with optional text-to-speech playback (gTTS) and an optional OpenAI escalation.
import os
import tempfile
from io import BytesIO

import openai
import streamlit as st
from gtts import gTTS
from omegaconf import OmegaConf
from PIL import Image

from query import VectaraQuery


master_prompt = """
As a Natural Farming Fertilizers Assistant, you will assist the user with any farming-related question, always willing to answer and provide useful organic farming advice in the following format.
' ' '
** Format is: **
[Short Introduction]
[Nutritional Needs of the user's crops]
[List of plants available locally with the needed nutrients (using the chunks provided). At least 5 different plants.]
[List of ingredients, quantities of those ingredients needed to fertilize the stated crop, and steps for multiple fertilizer recipes (using the chunks provided as a Bioaccumulators List, you will match plants on the Bioaccumulators List with plants growing locally in the user's area)]
[Give three different sets of recipes using ingredients locally available to the user for free]
[Tables with bioaccumulator data and crop needs data, showing wildcrafted plant nutrient levels and crop nutritional needs, in text table format (not visual)]
[Instructions on using the fertilizers (SOPs)]
[Fertilizer application schedule (step by step, in fundamental detail) and crop rotation recommendations]
[Brief Philosophical encouragement related to Natural Farming]
[Alternative set of recipes using localized free ingredients]
[Words of encouragement]
' ' '
User prompt:
"""

denial_response = "Database scraping is not permitted. Please abide by the terms of membership, and reach out with any collaboration requests via email."

# Temporary file handle for text-to-speech output (currently unused; the audio
# below is written to an in-memory BytesIO buffer instead).
fp = tempfile.TemporaryFile()

def launch_bot():
    def generate_response(question):
        response = vq.submit_query(question)
        return response
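
    # App configuration comes from environment variables (e.g. host/Space secrets):
    # customer_id, corpus_ids (comma-separated), api_key, title, description,
    # examples, and source_data_desc. It is read once and cached, together with the
    # VectaraQuery client, in st.session_state so it survives Streamlit reruns.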

    if 'cfg' not in st.session_state:
        corpus_ids = str(os.environ['corpus_ids']).split(',')
        questions = list(eval(os.environ['examples']))
        cfg = OmegaConf.create({
            'customer_id': str(os.environ['customer_id']),
            'corpus_ids': corpus_ids,
            'api_key': str(os.environ['api_key']),
            'title': os.environ['title'],
            'description': os.environ['description'],
            'examples': questions,
            'source_data_desc': os.environ['source_data_desc']
        })
        st.session_state.cfg = cfg
        st.session_state.vq = VectaraQuery(cfg.api_key, cfg.customer_id, cfg.corpus_ids)

    cfg = st.session_state.cfg
    vq = st.session_state.vq
    st.set_page_config(page_title=cfg.title, layout="wide")

    # left side content
    with st.sidebar:
        image = Image.open('Vectara-logo.png')
        st.markdown(f"## Welcome to {cfg.title}\n\n"
                    f"This demo uses an AI organic farming expert and carefully currated library system to achieve greater accuracy in agronomics and agricultural methodology. Created by Copyleft Cultivars, a nonprofit, we hope you enjoy this beta-test early access version.\n\n")

        st.markdown("---")
        st.markdown(
            "## Democratizing access to farming knowledge.\n"
            "This app was built with the support of our Patreon subscribers. Thank you! [Click here to join our patreon or upgrade your membership.](https://www.patreon.com/CopyleftCultivarsNonprofit). \n"
            "Use of this app indicates agreement to our terms of membership, available on Copyleftcultivars.com as well as an agreement not to attempt to access our databases in any way. \n"
        )
        st.markdown("---")
        st.image(image, width=250)

    st.markdown(f"<center> <h2> Copyleft Cultivars AI Agriculture Assistant demo: {cfg.title} </h2> </center>", unsafe_allow_html=True)
    st.markdown(f"<center> <h4> {cfg.description} </h4> </center>", unsafe_allow_html=True)

    if "messages" not in st.session_state.keys():
        st.session_state.messages = [{"role": "assistant", "content": "How may I help you?"}]
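
    # Streamlit reruns this script on every interaction, so the full chat history is
    # re-rendered from st.session_state.messages each time.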

    # Display chat messages
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    # User-provided prompt
    if prompt := st.chat_input():
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)
        # Guard against prompts that look like attempts to dump the corpus as JSON.
        if "json" in prompt.lower():
            if "ADMINISTRATION" not in prompt:
                with st.chat_message("assistant"):
                    st.write(denial_response)
                message = {"role": "assistant", "content": denial_response}
                st.session_state.messages.append(message)
    
    # Generate a new response if last message is not from assistant
    if st.session_state.messages[-1]["role"] != "assistant":
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                prompt2 = prompt + master_prompt
                response = generate_response(prompt2)
                # TODO: if Vectara reports that the results did not contain sufficient
                # information, reroute the query to a fallback LLM (e.g. Mistral) here.
                prompt3 = master_prompt + prompt + " context: " + response
                print("Here's where we would call in Mistral")
                print(response)
                st.write(response)

                message = {"role": "assistant", "content": response}
                st.session_state.messages.append(message)

                # Convert the response to audio inside an expander (this replaced an
                # earlier "Convert to Audio" button flow).
                text = ":blue[Convert to Audio] 🔊"
                with st.expander(text, expanded=False):
                    sound_file = BytesIO()
                    tts = gTTS(response, lang='en')
                    tts.write_to_fp(sound_file)
                    st.audio(sound_file)
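
                # "Escalate this response to Premium": optionally re-ask the question
                # through an OpenAI chat model, using the Vectara answer as extra
                # context via prompt3.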

                agree = st.checkbox('Escalate this response to Premium')

                if agree:
                    print('OpenAI, Oh Great!')
                    with st.chat_message("assistant"):
                        with st.spinner("Thinking..."):
                            # TODO: configure the OpenAI API key before this call,
                            # e.g. openai.api_key = os.environ["OPENAI_API_KEY"].
                            completion = openai.ChatCompletion.create(
                                model="gpt-3.5-turbo",
                                messages=[
                                    {"role": "system", "content": "You are a helpful Natural Farming assistant with extensive experience in accessible science and technical writing, and the heart of a teacher."},
                                    {"role": "user", "content": prompt3}
                                ]
                            )

                            response = completion.choices[0].message["content"]
                            st.write(response)

                    message = {"role": "assistant", "content": response}
                    st.session_state.messages.append(message)

                
if __name__ == "__main__":
    launch_bot()