import sys
import toml
from omegaconf import OmegaConf
import os
from transformers import pipeline
import numpy as np
import tempfile
import openai
from pinecone.grpc import PineconeGRPC as Pinecone  # Corrected Pinecone import
from pinecone import ServerlessSpec
import streamlit as st
from PIL import Image
from gtts import gTTS
from io import BytesIO
import time  # For delay during index readiness check

# Pinecone and OpenAI setup
pinecone_api_key = os.getenv("PINECONE_API_KEY")
openai.api_key = os.getenv("OPENAI_API_KEY")

# Initialize Pinecone client
pc = Pinecone(api_key=pinecone_api_key)

# Create or retrieve Pinecone index
index_name = "farming-assistant"
dimension = 1536  # Matches OpenAI text-embedding-ada-002 embeddings

if not pc.has_index(index_name):
    pc.create_index(
        name=index_name,
        dimension=dimension,
        metric="cosine",
        spec=ServerlessSpec(
            cloud='aws',
            region='us-east-1'
        )
    )

# Wait for the index to be ready
while not pc.describe_index(index_name).status['ready']:
    time.sleep(1)

index = pc.Index(index_name)  # Corrected method to connect to the index

master_prompt = """
As a Natural Farming Fertilizers Assistant, you will assist the user with any farming-related question, always willing to answer any question and provide useful organic farming advice in the following format.
...
[Words of encouragement]
"""

denial_response = "Database scraping is not permitted. Please abide by the terms of membership, and reach out with any collaboration requests via email."

# Temporary file used for text-to-speech audio
fp = tempfile.TemporaryFile()


def generate_response(question):
    # Generate embeddings for the query using OpenAI
    query_embedding = openai.Embedding.create(
        input=question,
        model="text-embedding-ada-002"
    )["data"][0]["embedding"]

    # Query Pinecone for relevant documents
    query_result = index.query(
        vector=query_embedding,
        top_k=5,
        include_metadata=True,
        namespace="farming-assistant"
    )

    # Extract relevant information
    contexts = [match["metadata"]["text"] for match in query_result["matches"]]
    context_text = "\n".join(contexts)

    # Generate a final response using OpenAI
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": master_prompt},
            {"role": "user", "content": question + "\n\n" + context_text}
        ]
    )["choices"][0]["message"]["content"]

    return response


def upsert_vectors(vectors):
    # Upsert vectors into the Pinecone index
    index.upsert(
        vectors=vectors,
        namespace="farming-assistant"
    )
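
# Illustrative sketch only (not invoked by this app): upsert_vectors expects
# Pinecone's standard record format -- a list of dicts with "id", "values",
# and "metadata". The "text" metadata key is assumed here because
# generate_response reads match["metadata"]["text"] when building context.
# The docs list below is hypothetical.
#
#   docs = ["Compost tea supports soil biology.", "Mulch retains moisture."]
#   vectors = [
#       {
#           "id": f"doc-{i}",
#           "values": openai.Embedding.create(
#               input=doc, model="text-embedding-ada-002"
#           )["data"][0]["embedding"],
#           "metadata": {"text": doc},
#       }
#       for i, doc in enumerate(docs)
#   ]
#   upsert_vectors(vectors)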


def launch_bot():
    if 'cfg' not in st.session_state:
        questions = list(eval(os.environ['examples']))
        cfg = OmegaConf.create({
            'api_key': str(os.environ['api_key']),
            'title': os.environ['title'],
            'description': os.environ['description'],
            'examples': questions,
            'source_data_desc': os.environ['source_data_desc']
        })
        st.session_state.cfg = cfg

    cfg = st.session_state.cfg
    st.set_page_config(page_title=cfg.title, layout="wide")

    # Left side content
    with st.sidebar:
        image = Image.open('Pinecone-logo.png')  # Update with appropriate logo
        st.markdown(f"## Welcome to {cfg.title}\n\n"
                    f"This demo uses an AI organic farming expert and a carefully curated library system to achieve greater accuracy in agronomics and agricultural methodology. Created by Copyleft Cultivars, a nonprofit. We hope you enjoy this beta-test early access version.\n\n")
        st.markdown("---")
        st.markdown(
            "## Democratizing access to farming knowledge.\n"
            "This app was built with the support of our Patreon subscribers. Thank you! "
            "[Click here to join our Patreon or upgrade your membership.](https://www.patreon.com/CopyleftCultivarsNonprofit) \n"
            "Use of this app indicates agreement to our terms of membership, available on Copyleftcultivars.com, as well as an agreement not to attempt to access our databases in any way. \n"
        )
        st.markdown("---")
        st.image(image, width=250)
        st.markdown(f"