import os
import streamlit as st
from dotenv import load_dotenv  # Import load_dotenv to load environment variables
from langchain import HuggingFaceHub

# Load environment variables from the .env file
load_dotenv()

# Set your Hugging Face API token from the environment variable
HUGGINGFACE_API_TOKEN = os.getenv("HUGGINGFACE_API_TOKEN")
# Function to return the SQL query generated from natural language input
def load_sql_query(question):
    try:
        # Initialize the Hugging Face model using LangChain's HuggingFaceHub class
        llm = HuggingFaceHub(
            repo_id="Salesforce/grappa_large_jnt",  # Hugging Face model repo for text-to-SQL
            huggingfacehub_api_token=HUGGINGFACE_API_TOKEN,  # Pass your API token
            model_kwargs={"temperature": 0.3}  # Optional: adjust response randomness
        )
        # Call the model with the user's question and get the SQL query
        sql_query = llm.predict(question)
        return sql_query
    except Exception as e:
        # Capture and return any exceptions or errors
        return f"Error: {str(e)}"
# Streamlit App UI starts here
st.set_page_config(page_title="Text-to-SQL Demo", page_icon=":robot:")
st.header("Text-to-SQL Demo")

# Function to get user input
def get_text():
    input_text = st.text_input("Ask a question (related to a database):", key="input")
    return input_text

# Get user input
user_input = get_text()

# Create a button for generating the SQL query
submit = st.button('Generate SQL')

# If the generate button is clicked and user input is not empty
if submit and user_input:
    response = load_sql_query(user_input)
    st.subheader("Generated SQL Query:")
    st.write(response)
elif submit:
    st.warning("Please enter a question.")  # Warning for empty input
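
To try the app locally, the script expects a .env file in the same directory holding your Hugging Face token, and it can then be launched with Streamlit. The filename app.py below is an assumption; use whatever name you saved the script under.

    # .env
    HUGGINGFACE_API_TOKEN=hf_your_token_here

    streamlit run app.py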