# Delhi_Irish_visa_decisions / loading_file.py
# Source: Hugging Face Space page export — "Update loading_file.py" by SR05,
# commit a990f92 (verified); original raw file size 2.24 kB.
import requests
import pandas as pd
from io import BytesIO
from bs4 import BeautifulSoup
import streamlit as st
# Cache the data loading process for efficiency (entries expire after 1 hour).
@st.cache_data(ttl=3600)
def fetch_data():
    """Scrape the Irish embassy (New Delhi) visa-decisions page, download the
    linked .ods spreadsheet, and return it as a DataFrame.

    Returns:
        pandas.DataFrame with exactly two string-named columns,
        ["Application Number", "Decision"], or None on any failure
        (an error message is shown via st.error in that case).
    """
    # URL of the website to scrape.
    url = "https://www.ireland.ie/en/india/newdelhi/services/visas/processing-times-and-decisions/"
    # Browser-like User-Agent — the site may reject default requests clients.
    headers = {
        "User-Agent": (
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
            "(KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36"
        )
    }

    # Fetch the webpage. A timeout prevents the Streamlit app from hanging
    # indefinitely on a stalled connection; RequestException covers DNS,
    # connection, and timeout failures that would otherwise crash the app.
    try:
        response = requests.get(url, headers=headers, timeout=30)
    except requests.RequestException:
        st.error("Failed to fetch the webpage. Please try again later.")
        return None
    if response.status_code != 200:
        st.error("Failed to fetch the webpage. Please try again later.")
        return None

    # Parse the HTML to find the .ods link.
    soup = BeautifulSoup(response.content, "html.parser")
    file_url = None
    for link in soup.find_all("a"):
        href = link.get("href")
        # Guard: anchors without an href would crash .startswith() below.
        if href and "Visa decisions made from" in link.get_text():
            # Resolve relative links against the page URL.
            file_url = href if href.startswith("http") else requests.compat.urljoin(url, href)
            break
    if not file_url:
        st.error("Could not find the visa decisions file link on the website.")
        return None

    # Fetch the .ods file (same timeout/error handling as above).
    try:
        ods_response = requests.get(file_url, headers=headers, timeout=60)
    except requests.RequestException:
        st.error("Failed to download the visa decisions file.")
        return None
    if ods_response.status_code != 200:
        st.error("Failed to download the visa decisions file.")
        return None

    # Process the .ods file (the "odf" engine requires the odfpy package);
    # drop rows that are entirely empty and renumber the index.
    ods_file = BytesIO(ods_response.content)
    df = pd.read_excel(ods_file, engine="odf")
    df.dropna(how="all", inplace=True)
    df.reset_index(drop=True, inplace=True)

    if len(df.columns) < 2:
        st.error("Insufficient data columns detected.")
        return None
    # Keep only the first two columns BEFORE renaming: assigning two names
    # to a frame with more than two columns raises ValueError, which the
    # original code did when the sheet carried extra columns.
    df = df.iloc[:, :2]
    df.columns = ["Application Number", "Decision"]
    # Normalise application numbers to strings for consistent lookup/display.
    df["Application Number"] = df["Application Number"].astype(str)
    return df
# Load the data at import time; repeated reruns of the Streamlit script hit
# the st.cache_data cache (1-hour TTL). df is None if any fetch step failed.
df = fetch_data()