from flask import Blueprint, render_template_string, request

from collections import deque
from pathlib import Path
import json
import os

from app.middleware import require_auth_static_user
from config import GlobalContextData

app = Blueprint('log', __name__, url_prefix='/log')

# Directory containing the viewer's HTML template (index.html).
log_dir = Path.cwd() / 'static' / 'log'
# Path of the JSON-lines log file that load_new_logs() tails.
log_file_path = GlobalContextData.LogPath

# In-memory cache of parsed log entries; oldest entries sit at the left end.
LOG_CACHE = deque()
CACHE_SIZE_LIMIT = 10 * 1024 * 1024  # 10MB
# NOTE(review): last_modification_time is never read or written elsewhere in
# this file — looks vestigial; confirm before removing.
last_modification_time = None
# Byte offset of the last read position in the log file (incremental tailing).
last_file_size = 0
# Approximate serialized size (in characters of json.dumps output) of all
# entries currently held in LOG_CACHE.
cache_size = 0

def load_new_logs():
    """Read newly appended JSON-lines entries from the log file into LOG_CACHE.

    Tracks the last read byte offset in ``last_file_size`` so each call only
    parses bytes appended since the previous call. If the file shrank
    (truncation / log rotation), reading restarts from the beginning instead
    of stalling forever. The cache is size-bounded: after appending new
    entries, the OLDEST entries are evicted from the left of the deque until
    ``cache_size`` is back under CACHE_SIZE_LIMIT, so the cache always holds
    the most recent logs.

    Best-effort: any I/O failure is logged and swallowed so the viewer keeps
    serving whatever is already cached.
    """
    global last_file_size, cache_size

    try:
        current_file_size = os.path.getsize(log_file_path)

        if current_file_size < last_file_size:
            # File was truncated or rotated: restart from the beginning.
            last_file_size = 0
        elif current_file_size == last_file_size:
            return  # No new data.

        # Binary mode so seek()/tell() are plain byte offsets — the same unit
        # os.path.getsize() reports. (Text-mode tell() returns an opaque
        # cookie that is not guaranteed to match a byte count.)
        with open(log_file_path, 'rb') as f:
            f.seek(last_file_size)

            new_logs = []
            for raw_line in f:
                line = raw_line.decode('utf-8', errors='replace').strip()
                if not line:
                    continue
                try:
                    new_logs.append(json.loads(line))
                except json.JSONDecodeError:
                    # Skip malformed / partially written lines.
                    continue

            last_file_size = f.tell()

        # Append the new entries first, then evict from the oldest end until
        # the cache fits. (Previously a `break` dropped NEW entries once the
        # cache filled, which permanently froze the cache on stale logs and
        # made the eviction loop below unreachable.)
        for log_obj in new_logs:
            LOG_CACHE.append(log_obj)
            cache_size += len(json.dumps(log_obj))

        while LOG_CACHE and cache_size > CACHE_SIZE_LIMIT:
            cache_size -= len(json.dumps(LOG_CACHE.popleft()))
    except Exception as e:
        print(f"Error loading logs: {e}")

# Announce the viewer URL at import time so it appears in the server's
# startup output alongside Flask's own banner.
print(f' * Startup Logger search webui: http://localhost:5000/log/')

# http://localhost:5000/log/
@app.route('/')
@require_auth_static_user(GlobalContextData.LogManageUsername, GlobalContextData.LogManagePassword)
def index():
    """Render the paginated, searchable log viewer (newest entries first).

    Query parameters:
        q        -- case-insensitive substring matched against each entry's
                    JSON serialization; empty means no filtering.
        page     -- 1-based page number; malformed or < 1 falls back to 1.
        per_page -- entries per page; malformed or < 1 falls back to 10 / 1.
    """
    load_new_logs()

    search_query = request.args.get('q', '').lower()

    # Previously `int(...)` on raw query strings raised ValueError (HTTP 500)
    # for malformed input, and per_page=0 caused a ZeroDivisionError in the
    # total_pages computation below. Clamp both to sane minimums instead.
    try:
        page = max(1, int(request.args.get('page', 1)))
    except ValueError:
        page = 1
    try:
        per_page = max(1, int(request.args.get('per_page', 10)))
    except ValueError:
        per_page = 10

    # Snapshot the deque and reverse so the newest entries come first.
    logs = list(LOG_CACHE)
    logs.reverse()
    if search_query:
        logs = [log for log in logs if search_query in json.dumps(log).lower()]

    total_logs = len(logs)
    start = (page - 1) * per_page
    end = start + per_page
    page_logs = logs[start:end]

    pagination_info = {
        'current_page': page,
        'total_pages': -(-total_logs // per_page),  # ceiling division
        'total_logs': total_logs,
        'has_prev': page > 1,
        'has_next': end < total_logs,
    }

    return render_template_string(
        (log_dir / 'index.html').read_text('utf-8'),
        logs=page_logs,
        pagination_info=pagination_info,
    )