import streamlit as st
import os
import glob
from datetime import datetime
from besins_selenium_downloaders.config import get_website_names
from besins_selenium_downloaders.file_manager import *
from besins_selenium_downloaders.streamlit_tools import configure_page_navigation


class LogViewer:
    """Streamlit page for browsing per-site download logs by date folder.

    Constructing the object renders the whole page: it configures
    Streamlit, injects CSS, draws the title, and shows the date-folder /
    site selectors plus the styled, searchable log output.
    """

    def __init__(self):
        self.setup_page_config()
        configure_page_navigation("log_viewer")
        st.markdown(
            '<h1 class="common-title">📋 Log Viewer</h1>',
            unsafe_allow_html=True,
        )
        self.main()

    def setup_page_config(self):
        """Set the Streamlit page options and inject the common + page CSS."""
        st.set_page_config(
            page_title="Log Viewer",
            page_icon="📋",
            layout="centered",
            initial_sidebar_state="expanded",
        )

        # Inject the shared stylesheet first so page-specific rules can
        # override it. Explicit UTF-8 avoids locale-dependent decoding.
        for rel_path in ("../css/common.css", "../css/log_viewer.css"):
            css_path = os.path.join(os.path.dirname(__file__), rel_path)
            with open(css_path, encoding="utf-8") as f:
                st.markdown(f"<style>{f.read()}</style>", unsafe_allow_html=True)

    def main(self):
        """Render the folder/site selectors and display the matching logs."""
        # st.query_params values are plain strings (or None when absent).
        # The previous [None] defaults were a leftover from the deprecated
        # list-based experimental API and could never match a real value.
        query_params = st.query_params
        default_site = query_params.get("site")
        default_date = query_params.get("date")

        # Most recent date folder is the default selection.
        date_folders = get_list_date_folders()
        if not date_folders:
            st.error("No date folders found in the current directory.")
            return
        date_folders.sort(reverse=True)

        # Honour the ?date=... URL parameter when it names a known folder.
        if default_date and default_date in date_folders:
            default_folder_index = date_folders.index(default_date)
        else:
            default_folder_index = 0

        # Two columns: date-folder selector (left) and site selector (right).
        col1, col2 = st.columns(2)

        with col1:
            folder_path = st.selectbox(
                "📅 Select Date Folder",
                options=date_folders,
                index=default_folder_index,
                help="Choose the date folder to view logs",
            )

        if not folder_path:
            return

        # Bail out early when the selected folder holds no log files at all.
        if not glob.glob(os.path.join(folder_path, "*.log")):
            st.warning("The selected folder does not contain any log files. Please select a folder with logs.")
            return

        try:
            df = get_date_folder_analysis_as_dataframe(folder_path)

            if df.empty:
                st.warning("No valid log files found in the selected folder.")
                return

            if 'SiteName' not in df.columns:
                st.warning("The DataFrame does not contain the 'SiteName' column.")
                return

            sites = sorted(df["SiteName"].unique().tolist())
            if not sites:
                st.warning("No sites found in the selected folder.")
                return

            # Honour the ?site=... URL parameter when it names a known site.
            if default_site and default_site in sites:
                default_site_index = sites.index(default_site)
            else:
                default_site_index = 0

            with col2:
                selected_site = st.selectbox(
                    "🌐 Select Site",
                    options=sites,
                    index=default_site_index,
                    help="Choose the site to view logs",
                )

            self.display_logs(folder_path, selected_site)

        except Exception as e:
            st.error(f"Error processing folder: {str(e)}")
            st.exception(e)

    def parse_log_line(self, line):
        """Parse one log line into ``(timestamp, level, message)``.

        Expected format: ``"2024-03-14 10:30:45,123 - INFO - Message"``.
        Lines that do not match return ``(None, "UNKNOWN", escaped_line)``.
        The message is HTML-escaped because it is later rendered with
        ``unsafe_allow_html=True``.
        """
        escaped = line.replace("<", "&lt;").replace(">", "&gt;")
        try:
            parts = line.split(" - ", 2)
            if len(parts) == 3:
                timestamp, level, message = parts
                message = message.replace("<", "&lt;").replace(">", "&gt;")
                return timestamp, level, message
            return None, "UNKNOWN", escaped
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # are never swallowed.
            return None, "UNKNOWN", escaped

    def analyze_log_content(self, content):
        """Count lines per log level and return a dict of counters."""
        lines = content.split("\n")
        stats = {
            "total_lines": len(lines),
            "error_count": 0,
            "warning_count": 0,
            "info_count": 0,
            "debug_count": 0,
        }

        # A plain substring test like "ERROR" also matches "[ERROR]", so the
        # previous double check per level was redundant. First match wins,
        # in descending severity order.
        for line in lines:
            if "ERROR" in line:
                stats["error_count"] += 1
            elif "WARNING" in line:
                stats["warning_count"] += 1
            elif "INFO" in line:
                stats["info_count"] += 1
            elif "DEBUG" in line:
                stats["debug_count"] += 1

        return stats

    def display_logs(self, folder_path, site_name):
        """Render every ``<site_name>*.log`` in *folder_path*, newest first.

        Each file gets a stats summary, a search box, a level filter, and
        the styled log body.
        """
        log_pattern = os.path.join(folder_path, f"{site_name}*.log")
        log_files = glob.glob(log_pattern)

        if not log_files:
            st.warning(f"No log files found for {site_name} in this folder.")
            return

        # Most recently modified files first.
        log_files.sort(key=os.path.getmtime, reverse=True)

        for log_file in log_files:
            try:
                with open(log_file, "r", encoding="utf-8") as f:
                    content = f.read()

                stats = self.analyze_log_content(content)

                # Summary bar: total line count plus per-level counts.
                st.markdown(
                    f"""
                    <div class="log-stats">
                        <div class="stat-item">
                            <div class="stat-value">{stats['total_lines']}</div>
                            <div class="stat-label">Total Lines</div>
                        </div>
                        <div class="stat-item">
                            <div class="stat-value log-error">{stats['error_count']}</div>
                            <div class="stat-label">Errors</div>
                        </div>
                        <div class="stat-item">
                            <div class="stat-value log-warning">{stats['warning_count']}</div>
                            <div class="stat-label">Warnings</div>
                        </div>
                        <div class="stat-item">
                            <div class="stat-value log-info">{stats['info_count']}</div>
                            <div class="stat-label">Info</div>
                        </div>
                    </div>
                    """,
                    unsafe_allow_html=True,
                )

                # Per-file widget keys so several files on one page
                # don't collide in Streamlit's session state.
                search_term = st.text_input(
                    "🔍 Search in logs", key=f"search_{log_file}"
                )

                show_only = st.multiselect(
                    "Show only:",
                    ["ERROR", "WARNING", "INFO", "DEBUG"],
                    default=["ERROR", "WARNING", "INFO", "DEBUG"],
                    key=f"filter_{log_file}"
                )

                # Build the filtered, styled HTML lines.
                log_lines = []
                for line in content.split("\n"):
                    timestamp, level, message = self.parse_log_line(line)

                    # Level filter ("UNKNOWN" lines are never shown because
                    # they cannot appear in show_only).
                    if level.upper() not in show_only:
                        continue

                    # Case-insensitive search over the raw line.
                    if search_term and search_term.lower() not in line.lower():
                        continue

                    css_class = f"log-{level.lower()}"

                    # NOTE: message is already HTML-escaped, so a search term
                    # containing < or > will not be highlighted.
                    if search_term:
                        message = message.replace(
                            search_term, f'<span class="highlight">{search_term}</span>'
                        )

                    # Unparsable lines have no timestamp; avoid printing
                    # a literal "[None]".
                    ts_part = f"[{timestamp}] " if timestamp else ""
                    log_lines.append(
                        f'<span class="{css_class}">{ts_part}[{level}] {message}</span>'
                    )

                # Assemble the scrollable log body.
                log_content = '<div class="log-content">'
                if log_lines:
                    log_content += '<br>'.join(log_lines)
                else:
                    log_content += '<span class="log-info">No log entries match the current filters.</span>'
                log_content += '</div>'

                st.markdown(
                    f"""
                    <div class="log-container">
                        <div class="log-header">
                            <span>{os.path.basename(log_file)}</span>
                            <span class="log-timestamp">Last modified: {datetime.fromtimestamp(os.path.getmtime(log_file)).strftime('%Y-%m-%d %H:%M:%S')}</span>
                        </div>
                        {log_content}
                    </div>
                    """,
                    unsafe_allow_html=True,
                )

            except Exception as e:
                st.error(
                    f"Error reading log file {os.path.basename(log_file)}: {str(e)}"
                )


if __name__ == "__main__":
    log_viewer = LogViewer()
