#!/usr/bin/env python3
import http.server
import socketserver
import requests
import threading
from urllib.parse import urlparse, urlunparse
import os
import time
import io

from cachemanager import CACHE_MANAGER
from configmanager import CONFIG
from logger import LOGGER
from response_intrerface import ResponseManager
from response_handlers import init_response_handlers

# Register all response interceptors at import time, before any request is served.
init_response_handlers()


class ProxyHandler(http.server.SimpleHTTPRequestHandler):
    """Reverse-proxy request handler.

    Forwards GET requests whose path matches a configured mirror prefix to
    the corresponding upstream target, optionally serving from / storing to
    CACHE_MANAGER. Bodies are always relayed uncompressed: the upstream
    request pins ``Accept-Encoding: identity`` and hop-by-hop / encoding
    headers are stripped before being echoed to the client.
    """

    # Connection bookkeeping is shared by every handler instance, so it lives
    # on the class and is guarded by a class-level lock.
    active_connections = 0
    max_connections = CONFIG.server.max_connections
    connections_lock = threading.Lock()
    response_handlers = []

    def __init__(self, *args, **kwargs):
        with self.connections_lock:
            if ProxyHandler.active_connections >= self.max_connections:
                raise ConnectionError("Maximum connections reached")
            # BUG FIX: `self.active_connections += 1` read the class attribute
            # but wrote an *instance* attribute, so the shared counter never
            # moved and the max_connections limit was never enforced.
            # Mutate the class attribute explicitly.
            ProxyHandler.active_connections += 1
        # NOTE: BaseRequestHandler.__init__ runs handle() itself, so the
        # matching decrement in handle()'s finally always executes.
        super().__init__(*args, **kwargs)

    def handle(self):
        """Serve the connection, always releasing the connection slot."""
        try:
            super().handle()
        finally:
            with self.connections_lock:
                # Decrement the shared class counter (see __init__ fix).
                ProxyHandler.active_connections -= 1

    def _build_target_url(self, proxy_path, target_base):
        """Map the request path onto the mirror target URL.

        The part of self.path after the proxy prefix is appended to the
        target base path.
        """
        target_path = self.path[len(proxy_path):]
        parsed = urlparse(target_base)
        # BUG FIX: the scheme was hard-coded to 'https', which broke mirrors
        # configured with a plain 'http' target. Honour the configured
        # scheme and fall back to https only when none is given.
        scheme = parsed.scheme or 'https'
        return urlunparse((scheme, parsed.netloc,
                           parsed.path.rstrip('/') + '/' + target_path,
                           '', '', ''))

    def _forward_request(self, target_url):
        """Issue the upstream GET as a stream.

        Forces an uncompressed transfer so the relayed/cached bytes are the
        payload verbatim. Returns the (streaming) requests.Response.
        """
        headers = {
            'User-Agent': CONFIG.server.user_agent,
            # Key point: refuse compressed data from upstream; we only
            # accept an identity-encoded body.
            'Accept-Encoding': 'identity'
        }
        # Forward the client's headers, minus Host (wrong for the upstream
        # origin) and Accept-Encoding (would override the identity pin above).
        for header, value in self.headers.items():
            if header.lower() not in ('host', 'accept-encoding'):
                headers[header] = value
        self.log_message(f"Proxying to: {target_url}")
        return requests.get(
            target_url,
            stream=True,
            timeout=(
                CONFIG.server.timeouts.connect,
                CONFIG.server.timeouts.read,
            ),
            headers=headers
        )

    def _process_and_send_response(self, content_source, is_cache=False, original_resp=None):
        """Relay a response to the client (never compressed).

        content_source is either cached bytes (is_cache=True) or a
        response-like object exposing iter_content(). original_resp, when
        given, supplies the status code and headers to echo; otherwise a
        bare 200 is sent. Re-raises ConnectionResetError so do_GET can
        abort quietly.
        """
        send_bytes = 0
        last_data_time = time.time()

        # Status line first.
        self.send_response(original_resp.status_code if original_resp else 200)

        for key, value in (original_resp.headers.items() if original_resp else {}):
            key_lower = key.lower()
            # Drop hop-by-hop headers, the original Content-Encoding (body is
            # identity-encoded) and Content-Length (BUG FIX: it is re-sent
            # explicitly below; forwarding it here duplicated the header).
            if key_lower in ('connection', 'transfer-encoding',
                             'content-encoding', 'content-length'):
                continue
            self.send_header(key, value)

        try:
            if is_cache:
                # Cached content is a complete in-memory byte string.
                send_bytes = len(content_source)
                self.send_header('Content-Length', str(send_bytes))
                self.end_headers()
                self.wfile.write(content_source)
            else:
                # Streaming path: announce the length when upstream gave one.
                # BUG FIX: intercepted responses arrive with original_resp
                # None; the old code dereferenced it unconditionally here.
                content_length = (original_resp.headers.get('content-length', 0)
                                  if original_resp else 0)
                if content_length:
                    self.send_header('Content-Length', str(content_length))
                self.end_headers()
                for chunk in content_source.iter_content(chunk_size=CONFIG.server.chunk_size):
                    if chunk:
                        self.wfile.write(chunk)
                        send_bytes += len(chunk)
                        last_data_time = time.time()
                    elif time.time() - last_data_time > CONFIG.server.timeouts.no_data:
                        # Upstream has stalled: only empty keep-alive chunks.
                        raise requests.exceptions.Timeout("No data received for too long")

            LOGGER.info(f"Sent {round(send_bytes / 1024, 2)} KB")
        except ConnectionResetError:
            LOGGER.warning("Client connection reset during data transfer")
            raise

    def _cache_response_if_needed(self, target_url, resp):
        """Tee a cacheable 200 response into the cache while streaming it.

        Returns resp unchanged when caching is disabled, the status is not
        200, or the content type is not cacheable; otherwise returns a
        wrapper whose iter_content() copies every chunk into the cache.
        """
        if not CACHE_MANAGER or resp.status_code != 200:
            return resp

        content_type = resp.headers.get('content-type', '')
        if not CACHE_MANAGER.should_cache(content_type):
            return resp

        # Create a streaming cache slot (stores the uncompressed bytes);
        # commit() finalizes the entry.
        temp_file, commit = CACHE_MANAGER.stream_set(target_url)

        class StreamingCachedResponse:
            """Response proxy that mirrors each chunk into the cache file."""

            def __init__(self, original_resp, temp_file, commit):
                self.original_resp = original_resp
                self.temp_file = temp_file
                self.commit = commit
                self.headers = dict(original_resp.headers)
                # BUG FIX: dict() loses requests' case-insensitivity, so
                # popping only 'Content-Length' could leave a differently
                # cased variant behind. Remove it case-insensitively (the
                # tee'd stream has no reliable length up front).
                for key in [k for k in self.headers if k.lower() == 'content-length']:
                    del self.headers[key]
                self.status_code = original_resp.status_code

            def iter_content(self, chunk_size=1024):
                try:
                    for chunk in self.original_resp.iter_content(chunk_size=chunk_size):
                        if chunk:
                            self.temp_file.write(chunk)
                            yield chunk
                finally:
                    # Always finalize the cache entry, even on early abort.
                    self.commit()

        return StreamingCachedResponse(resp, temp_file, commit)

    def do_GET(self):
        """Route the request to the first matching mirror prefix; 404 otherwise."""
        for proxy_path, target_base in CONFIG.mirrors.items():
            if not self.path.startswith(proxy_path):
                continue
            try:
                target_url = self._build_target_url(proxy_path, target_base)

                # Serve straight from cache when a hit exists.
                cached_content = CACHE_MANAGER.get(target_url) if CACHE_MANAGER else None
                if cached_content:
                    self._process_and_send_response(cached_content, is_cache=True)
                    return

                # Fetch upstream; a registered interceptor may replace the
                # response entirely.
                resp = self._forward_request(target_url)
                intercepted_resp = ResponseManager.handle(proxy_path, self.headers['host'], resp)
                if intercepted_resp:
                    self._process_and_send_response(intercepted_resp)
                    return

                resp = self._cache_response_if_needed(target_url, resp)
                self._process_and_send_response(resp, original_resp=resp)
                return
            except (ConnectionResetError, ConnectionAbortedError):
                # Client went away mid-transfer; nothing more to send.
                return
            except requests.RequestException as e:
                LOGGER.error(f"Request error: {self._sanitize_error(e)}", exc_info=True)
                self.send_error(500, f"Request error: {self._sanitize_error(e)}")
                return
            except Exception as e:
                LOGGER.error(f"Unexpected error: {self._sanitize_error(e)}", exc_info=True)
                self.send_error(500, f"Unexpected error: {self._sanitize_error(e)}")
                return

        self.send_error(404, "Not Found")

    def _sanitize_error(self, error):
        """Strip characters above Latin-1 so the message is safe for HTTP error pages."""
        return ''.join(c for c in str(error) if ord(c) < 256)

    def log_message(self, format, *args):
        """Redirect BaseHTTPRequestHandler's logging into the shared LOGGER."""
        message = format % args if args else format
        LOGGER.info(f"{self.log_date_time_string()} {message}")


def run_server():
    """Start the threaded proxy server and block until it dies.

    Hard-exits the whole process via os._exit — status 0 on Ctrl-C, 1 on
    any other fatal error — so lingering handler threads cannot keep the
    process alive.
    """
    bind_address = (CONFIG.server.host, CONFIG.server.port)
    try:
        with socketserver.ThreadingTCPServer(bind_address, ProxyHandler) as httpd:
            LOGGER.info(f"Starting proxy server on port {CONFIG.server.port}")
            paths = list(CONFIG.mirrors.keys())
            LOGGER.info("Available proxy paths:\n* " + "\n* ".join(paths))
            LOGGER.info('--------------------------------------------------')
            httpd.serve_forever()
    except KeyboardInterrupt:
        LOGGER.info("Proxy server stopped by user")
        os._exit(0)
    except Exception as e:
        LOGGER.error(f"Proxy server error: {str(e)}", exc_info=True)
        os._exit(1)


# Script entry point: run the proxy only when executed directly, not on import.
if __name__ == "__main__":
    run_server()