Ari committed · Commit 9918408 · verified · 1 Parent(s): 227c1b8

Update app.py

Files changed (1):
  1. app.py (+75 −38)
app.py CHANGED
@@ -3,31 +3,47 @@ import streamlit as st
 import pandas as pd
 import sqlite3
 import openai
-from langchain_openai import AzureChatOpenAI, OpenAIEmbeddings
-from langchain.agents import create_sql_agent
-from langchain.sql_database import SQLDatabase
-from langchain.vectorstores import FAISS
+from langchain_openai import AzureChatOpenAI
+from langchain_community.agent_toolkits.sql.base import create_sql_agent
+from langchain_community.utilities import SQLDatabase
+from langchain_community.document_loaders import CSVLoader
+from langchain_community.vectorstores import FAISS
+from langchain_openai.embeddings import AzureOpenAIEmbeddings
 from langchain.chains import RetrievalQA
-from langchain.document_loaders import CSVLoader
-from langchain.prompts import ChatPromptTemplate, FewShotPromptTemplate
-from langchain.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate
 import sqlparse
 import logging
+from dotenv import load_dotenv
 
-# Load environment variables for Azure OpenAI
-openai.api_key = os.getenv("OPENAI_API_KEY")
+# Load environment variables from .env file
+load_dotenv()
+
+# Set up API credentials and environment variables
 api_key = os.getenv("OPENAI_API_KEY")
-endpoint = os.getenv("azure_endpoint")
-api_type = os.getenv("OPENAI_API_TYPE")
-api_version = os.getenv("OPENAI_API_VERSION", "2023-05-15")
+azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
+api_version = os.getenv("OPENAI_API_VERSION", "2023-05-15")  # Set a default if not provided
+chat_model = os.getenv("CHAT_MODEL")
+chat_deployment = os.getenv("CHAT_DEPLOYMENT")
+embed_model = os.getenv("EMBED_MODEL")
+embed_deployment = os.getenv("EMBED_DEPLOYMENT")
+
+# Default to a specific endpoint if the environment variable is missing
+if not azure_endpoint:
+    azure_endpoint = "https://<your-azure-endpoint>.openai.azure.com"  # Replace with your actual endpoint
+
+# OpenAI API key (ensure it is securely stored)
+openai.api_key = api_key
 
-# Models
-chat_model = os.getenv("chat_model")
-embed_model = os.getenv("embed_model")
-chat_deployment = os.getenv("chat_deployment")
-embed_deployment = os.getenv("embed_deployment")
+# Initialize Azure OpenAI LLM (Language Model)
+llm = AzureChatOpenAI(
+    temperature=0,
+    model=chat_model,
+    deployment_name=chat_deployment,
+    api_key=api_key,
+    azure_endpoint=azure_endpoint,
+    api_version=api_version
+)
 
-# Load CSV file for data
+# Step 1: Upload CSV data file (or use default)
 csv_file = st.file_uploader("Upload your CSV file", type=["csv"])
 if csv_file is None:
     data = pd.read_csv("default_data.csv")  # Use default CSV if no file is uploaded
@@ -37,7 +53,8 @@ else:
     st.write(f"Data Preview ({csv_file.name}):")
     st.dataframe(data.head())
 
-# Use a persistent SQLite database instead of in-memory
+# Step 2: Load CSV data into a persistent SQLite database
+# Use a persistent database file instead of in-memory to retain schema context
 db_file = 'my_database.db'
 conn = sqlite3.connect(db_file)
 table_name = csv_file.name.split('.')[0] if csv_file else "default_table"
@@ -45,59 +62,79 @@ data.to_sql(table_name, conn, index=False, if_exists='replace')
 
 # SQL table metadata (for validation and schema)
 valid_columns = list(data.columns)
+
+# Debug: Display valid columns for user to verify
 st.write(f"Valid columns: {valid_columns}")
 
-# Set up the SQL Database for LangChain with AzureOpenAI configuration
-llm = AzureChatOpenAI(
-    temperature=0,
-    model=chat_model,
-    deployment_name=chat_deployment,
-    api_key=api_key,
-    azure_endpoint=endpoint,
-    api_version=api_version
-)
+# Step 3: Set up the SQL Database for LangChain
 db = SQLDatabase.from_uri(f'sqlite:///{db_file}')
-db.raw_connection = conn
+db.raw_connection = conn  # Use the persistent database connection for LangChain
 
-# Create the SQL agent with prompt and toolkit for SQL querying
+# Step 4: Create the SQL agent with increased iteration and time limits
 sql_agent = create_sql_agent(
-    llm=llm,
+    llm,
     db=db,
     verbose=True,
    max_iterations=20,  # Increased iteration limit
    max_execution_time=90  # Set timeout limit to 90 seconds
 )
 
-# Set up FAISS for retrieval using OpenAIEmbeddings
-embeddings = OpenAIEmbeddings()
+# Step 5: Use FAISS with RAG for context retrieval
+embeddings = AzureOpenAIEmbeddings(
+    model=embed_model,
+    deployment_name=embed_deployment,
+    azure_endpoint=azure_endpoint,
+    api_key=api_key,
+    api_version=api_version
+)
 loader = CSVLoader(file_path=csv_file.name if csv_file else "default_data.csv")
 documents = loader.load()
+
 vector_store = FAISS.from_documents(documents, embeddings)
 retriever = vector_store.as_retriever()
 rag_chain = RetrievalQA.from_chain_type(llm=llm, retriever=retriever)
 
-# Insight extraction and SQL query generation
+# Step 6: Define SQL validation helpers
+def validate_sql(query, valid_columns):
+    """Validates the SQL query by ensuring it references only valid columns."""
+    parsed = sqlparse.parse(query)
+    for token in parsed[0].tokens:
+        if token.ttype is None:  # If it's a column name
+            column_name = str(token).strip()
+            if column_name not in valid_columns:
+                return False
+    return True
+
+def validate_sql_with_sqlparse(query):
+    """Validates SQL syntax using sqlparse."""
+    parsed_query = sqlparse.parse(query)
+    return len(parsed_query) > 0
+
+# Step 7: Generate SQL query based on user input and run it with LangChain SQL Agent
 user_prompt = st.text_input("Enter your natural language prompt:")
 if user_prompt:
     try:
-        # Add column hints to the user prompt
+        # Step 8: Add valid column names to the prompt
         column_hints = f" Use only these columns: {', '.join(valid_columns)}"
         prompt_with_columns = user_prompt + column_hints
 
-        # Retrieve context using FAISS and RAG
+        # Step 9: Retrieve context using RAG
         context = rag_chain.run(prompt_with_columns)
         st.write(f"Retrieved Context: {context}")
 
-        # Generate SQL query using SQL agent
+        # Step 10: Generate SQL query using SQL agent
         generated_sql = sql_agent.run(f"{prompt_with_columns} {context}")
+
+        # Debug: Display generated SQL query for inspection
         st.write(f"Generated SQL Query: {generated_sql}")
 
-        # Validate SQL query and execute
+        # Step 11: Validate SQL query
         if not validate_sql_with_sqlparse(generated_sql):
             st.write("Generated SQL is not valid.")
         elif not validate_sql(generated_sql, valid_columns):
             st.write("Generated SQL references invalid columns.")
         else:
+            # Step 12: Execute SQL query
             result = pd.read_sql(generated_sql, conn)
             st.write("Query Results:")
             st.dataframe(result)
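
The new load_dotenv() call reads its settings from a local .env file. A minimal sketch using exactly the variable names the updated code reads; every value below is a placeholder, not something taken from this commit (it also assumes os is imported in lines 1-2 of app.py, which are outside this diff):

    OPENAI_API_KEY=<your-azure-openai-api-key>
    AZURE_OPENAI_ENDPOINT=https://<your-resource>.openai.azure.com
    OPENAI_API_VERSION=2023-05-15
    CHAT_MODEL=<your-chat-model-name>
    CHAT_DEPLOYMENT=<your-chat-deployment-name>
    EMBED_MODEL=<your-embedding-model-name>
    EMBED_DEPLOYMENT=<your-embedding-deployment-name>

OPENAI_API_VERSION can be omitted, since the code falls back to "2023-05-15".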
 
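A quick usage sketch of the two validation helpers added in this commit; the query string and column list are invented for illustration:

    # Hypothetical query and columns, purely illustrative
    query = "SELECT region FROM default_table"
    print(validate_sql_with_sqlparse(query))           # True: sqlparse parsed one statement
    print(validate_sql(query, ["region", "revenue"]))  # False here: the table identifier
                                                       # is also a ttype-None token

Note that sqlparse leaves ttype as None for identifier lists, table names, and whole clauses as well as single column names, so validate_sql compares more than just columns against valid_columns and may reject queries the comment suggests it would accept.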
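One caveat on the retrieval step: st.file_uploader returns an in-memory UploadedFile, while CSVLoader(file_path=csv_file.name) expects a file with that name on disk in the working directory. A sketch of one way to bridge the two; the write-out step is an assumption, not part of this commit:

    # Persist the uploaded bytes so CSVLoader can read them from disk
    # (csv_file is the UploadedFile returned by st.file_uploader; not in the commit)
    if csv_file is not None:
        with open(csv_file.name, "wb") as f:
            f.write(csv_file.getbuffer())
    loader = CSVLoader(file_path=csv_file.name if csv_file else "default_data.csv")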