import sys
import toml
from omegaconf import OmegaConf
from query import VectaraQuery
import os
from transformers import pipeline
import numpy as np
import tempfile
import openai
import streamlit as st
from PIL import Image
from gtts import gTTS
from io import BytesIO

master_prompt = """
As a Natural Farming Fertilizers Assistant, you will assist the user with any farming-related question, always willing to answer any question and provide useful organic farming advice in the following format.
' ' '
** Format is: **
[Short Introduction]
[Nutritional Needs of the user's crops]
[List of plants available locally with the needed nutrients (using the chunks provided). At least 5 different plants.]
[List of ingredients, quantities of those ingredients needed to fertilize the crop stated, and steps for multiple fertilizer Recipes (using the chunks provided as Bioaccumulators List, you will match plants on the Bioaccumulators List with plants locally growing in the user's area)]
[Give three different sets of recipes using ingredients locally available for free to the user]
[Tables with bioaccumulators data and crop needs data, showing wildcrafted plant nutrient levels and crop nutritional needs, in text table format (not visual)]
[Instructions on using the fertilizers (SOPs)]
[Fertilizer application schedule (step by step in fundamental details) and crop rotation recommendations]
[Brief philosophical encouragement related to Natural Farming]
[Alternative set of recipes using localized free ingredients]
[Words of encouragement]
' ' '
User prompt:
"""

denial_response = "Database scraping is not permitted. Please abide by the terms of membership, and reach out with any collaboration requests via email."

# Temporary file, created for text-to-speech output
fp = tempfile.TemporaryFile()


def launch_bot():
    def generate_response(question):
        response = vq.submit_query(question)
        return response

    if 'cfg' not in st.session_state:
        corpus_ids = str(os.environ['corpus_ids']).split(',')
        questions = list(eval(os.environ['examples']))
        cfg = OmegaConf.create({
            'customer_id': str(os.environ['customer_id']),
            'corpus_ids': corpus_ids,
            'api_key': str(os.environ['api_key']),
            'title': os.environ['title'],
            'description': os.environ['description'],
            'examples': questions,
            'source_data_desc': os.environ['source_data_desc']
        })
        st.session_state.cfg = cfg
        st.session_state.vq = VectaraQuery(cfg.api_key, cfg.customer_id, cfg.corpus_ids)

    cfg = st.session_state.cfg
    vq = st.session_state.vq
    st.set_page_config(page_title=cfg.title, layout="wide")

    # Left side content
    with st.sidebar:
        image = Image.open('Vectara-logo.png')
        st.markdown(f"## Welcome to {cfg.title}\n\n"
                    f"This demo uses an AI organic farming expert and a carefully curated library system to achieve greater accuracy in agronomics and agricultural methodology. Created by Copyleft Cultivars, a nonprofit, we hope you enjoy this beta-test early access version.\n\n")
        st.markdown("---")
        st.markdown(
            "## Democratizing access to farming knowledge.\n"
            "This app was built with the support of our Patreon subscribers. Thank you! [Click here to join our Patreon or upgrade your membership.](https://www.patreon.com/CopyleftCultivarsNonprofit). \n"
            "Use of this app indicates agreement to our terms of membership, available on Copyleftcultivars.com, as well as an agreement not to attempt to access our databases in any way. \n"
        )
        st.markdown("---")
        st.image(image, width=250)

    st.markdown(f"<center> <h2> Copyleft Cultivars AI Agriculture Assistant demo: {cfg.title} </h2> </center>", unsafe_allow_html=True)
    st.markdown(f"<center> <h4> {cfg.description} </h4> </center>", unsafe_allow_html=True)

    if "messages" not in st.session_state.keys():
        st.session_state.messages = [{"role": "assistant", "content": "How may I help you?"}]

    # Display chat messages
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    # User-provided prompt
    if prompt := st.chat_input():
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.write(prompt)
        if "json" in prompt.lower():
            if "ADMINISTRATION" not in prompt:
                message = {"role": "assistant", "content": denial_response}
                st.session_state.messages.append(message)
                with st.chat_message("assistant"):
                    st.write(denial_response)

    # Generate a new response if the last message is not from the assistant
    if st.session_state.messages[-1]["role"] != "assistant":
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                prompt2 = master_prompt + prompt
                response = generate_response(prompt2)
                # if response == 'The returned results did not contain sufficient information to be summarized into a useful answer for your query. Please try a different search or restate your query differently.':
                #     st.write("reroute to LLM")
                #     call in Mistral here
                prompt3 = prompt2 + " Context: " + response
                print("Here's where we would call in Mistral")
                print(response)
                # ADD IN LLM
                # st.write("Mistral:")  # Needs finishing
                # else:
                st.write(response)

        message = {"role": "assistant", "content": response}
        st.session_state.messages.append(message)

        # audio_result = st.button("Convert to Audio 🔊")
        # if audio_result:
        #     print("audio button pressed")
        #     st.session_state.messages.append({"role": "user", "content": "Convert to Audio 🔊"})
        #     with st.chat_message("user"):
        #         st.write("Convert to Audio 🔊")

        text = " :blue[Convert to Audio ] 🔊 "
        # Converts the response to audio
        with st.expander(text, expanded=False):
            sound_file = BytesIO()
            tts = gTTS(response, lang='en')
            tts.write_to_fp(sound_file)
            st.audio(sound_file)

        agree = st.checkbox('Escalate this response to Premium')
        if agree:
            print('OpenAI, Oh Great!')
            with st.chat_message("assistant"):
                with st.spinner("Thinking..."):
                    # Change this to include the OpenAI API key
                    # Get the assistant's response from OpenAI
                    completion = openai.ChatCompletion.create(
                        model="gpt-3.5-turbo",
                        messages=[
                            {"role": "system", "content": "You are a helpful Natural Farming assistant with extensive experience in accessible science and technical writing, and the heart of a teacher."},
                            {"role": "user", "content": prompt3}
                        ]
                    )
                    response = completion.choices[0].message.content
                    st.write(response)
            message = {"role": "assistant", "content": response}
            st.session_state.messages.append(message)


if __name__ == "__main__":
    launch_bot()