# uncompyle6 version 3.2.3
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.6.8 |Anaconda custom (64-bit)| (default, Feb 21 2019, 18:30:04) [MSC v.1916 64 bit (AMD64)]
# Embedded file name: server\routers\static.py
__author__ = "Attila Gerendi (Sanyi)"
import datetime, gzip, logging, mimetypes, os
from fnmatch import fnmatch
from io import BytesIO
from threading import Lock
from zlib import compress
from werkzeug.http import http_date, parse_date
from server.http_responses.http_errors import Http404
from server.routers import Router

logger = logging.getLogger("server.static.router")


def gzip_compress(file_content):
    """Return *file_content* (bytes) wrapped in the gzip container format.

    :param file_content: raw bytes to compress
    :returns: gzip-framed compressed bytes (suitable for Content-Encoding: gzip)
    """
    out = BytesIO()
    # The context manager guarantees the gzip trailer (CRC32 + size) is
    # written and the file object closed even if write() raises.
    with gzip.GzipFile(fileobj=out, mode="w") as zipfile:
        zipfile.write(file_content)
    return out.getvalue()


class StaticRouter(Router):
    """
    Serve files from a directory tree, keeping file contents in an in-memory
    cache and optionally compressing text responses with gzip or deflate.

    This class is based on from werkzeug.wsgi import SharedDataMiddleware
    """

    def __init__(
        self,
        root_path,
        base_path=None,
        index="index.html",
        host=None,
        schemes=None,
        excludes=None,
        enable_cashing_headers=True,
        cache_timeout=43200,
        fallback_mime_type="text/plain",
        enable_gzip_compression=False,
        enable_zlib_compression=False,
        max_http_chunk_length=8192,
        extra_headers=None,
    ):
        """
        :param root_path: directory whose files are served (indexed eagerly)
        :param base_path: URL prefix handled by this router
        :param index: file served when the request path is "/" or ""
        :param excludes: iterable of fnmatch patterns that must not be served
        :param fallback_mime_type: Content-Type used when guessing fails
        :param max_http_chunk_length: max size of chunks yielded per response
        :param extra_headers: optional mapping of extra response headers
        """
        logger.info("creating route for %s (base path %s)", root_path, base_path)
        super().__init__(
            base_path=base_path, methods={"head", "get"}, host=host, schemes=schemes
        )
        self.extra_headers = extra_headers
        self.excludes = excludes if excludes else tuple()
        # NOTE(review): the enable_* flags and cache_timeout are stored but
        # never consulted anywhere in this class — compression always runs
        # for text responses regardless of the flags. Kept for interface
        # compatibility; confirm intended semantics with callers.
        self.enable_cashing_headers = enable_cashing_headers
        self.cache_timeout = cache_timeout
        self.fallback_mime_type = fallback_mime_type
        self.index = index
        self.enable_gzip_compression = enable_gzip_compression
        self.enable_zlib_compression = enable_zlib_compression
        self.max_http_chunk_length = max_http_chunk_length
        self.file_cache_lock = Lock()
        root_path = root_path.replace("\\", "/")
        if root_path[-1] != "/":
            root_path += "/"
        prefix_len = len(root_path) - 1
        self.root_path = root_path
        self.files = dict()
        files_indexed = 0
        files_skipped = 0
        for dir_name, _sub_dirs, file_list in os.walk(root_path):
            relative_dir_name = dir_name[prefix_len + 1 :]
            for file_name in file_list:
                file_path = relative_dir_name + "/" + file_name
                for exclude in self.excludes:
                    if fnmatch(file_path, exclude):
                        files_skipped += 1
                        # BUG FIX: without this break the for/else below ran
                        # for excluded files too (for-else fires when the loop
                        # is not broken), indexing them anyway.
                        break
                else:
                    self.process_file(file_path)
                    files_indexed += 1

        logger.info("indexed %d files", files_indexed)
        logger.info("skipped %d files", files_skipped)

    def process_file(self, file_path, accepted_compression=None):
        """
        Ensure *file_path* is present and up to date in the in-memory cache.

        Re-reads the file when its size or mtime changed on disk, evicts it
        when it disappeared, and (for text/* mime types only) lazily builds
        the compressed variant named by *accepted_compression*.

        :param file_path: path relative to ``self.root_path``
        :param accepted_compression: "gzip", "deflate" or None
        :returns: True when the file exists and may be served, False when it
                  is excluded or missing.
        """
        for exclude in self.excludes:
            if fnmatch(file_path, exclude):
                return False

        file_changed = False
        with self.file_cache_lock:
            real_file_path = self.root_path + file_path
            if not os.path.isfile(real_file_path):
                # Evict stale cache entries for files deleted on disk.
                self.files.pop(file_path, None)
                return False
            file_timestamp = os.path.getmtime(real_file_path)
            file_size = int(os.path.getsize(real_file_path))
            guessed_type = mimetypes.guess_type(real_file_path)
            mime_type = guessed_type[0] or self.fallback_mime_type
            entry = self.files.get(file_path)
            if entry is None:
                with open(real_file_path, "rb") as f:
                    file_content = f.read()
                # Each cached payload is a (bytes, int length) pair; the
                # length feeds the Content-Length header.
                entry = self.files[file_path] = dict(
                    mime_type=mime_type,
                    size=file_size,
                    timestamp=file_timestamp,
                    raw=(file_content, len(file_content)),
                    gzip=None,
                    deflate=None,
                )
                file_changed = True
            elif file_size != entry["size"] or file_timestamp != entry["timestamp"]:
                with open(real_file_path, "rb") as f:
                    file_content = f.read()
                # BUG FIX: the length was stored as str() here but as int on
                # the initial read; keep it an int consistently.
                entry["raw"] = (file_content, len(file_content))
                # BUG FIX: "size" was never refreshed, so every subsequent
                # request saw a size mismatch and re-read the file from disk.
                entry["size"] = file_size
                entry["timestamp"] = file_timestamp
                entry["gzip"] = None
                entry["deflate"] = None
                file_changed = True
            if accepted_compression and "text/" in mime_type:
                # Compress lazily: only on demand, only for text payloads,
                # and only when the cached variant is missing or stale.
                if file_changed or entry[accepted_compression] is None:
                    if accepted_compression == "gzip":
                        data = gzip_compress(entry["raw"][0])
                        entry["gzip"] = (data, len(data))
                    elif accepted_compression == "deflate":
                        data = compress(entry["raw"][0])
                        entry["deflate"] = (data, len(data))
        return True

    def handle_request(self, method, scheme, path, environment, start_response):
        """
        Generator handler: yields the (possibly compressed) file body in
        chunks of at most ``max_http_chunk_length`` bytes.

        :raises Http404: on path-traversal attempts, excluded or missing files.
        """
        file_path = path.replace("\\", "/").strip("/")
        # Rebuild the path without empty, "." and ".." segments; if that
        # changes the path, the client attempted traversal -> 404.
        file_path = "/" + "/".join(
            x for x in file_path.split("/") if x and x not in (".", "..")
        )
        if path != file_path:
            raise Http404()
        file_path = file_path[self._len_base_path :]
        if file_path in ("/", ""):
            file_path = self.index
        accepted_encodings = environment.get("HTTP_ACCEPT_ENCODING", "")
        accepted_compression = None
        if "gzip" in accepted_encodings:
            accepted_compression = "gzip"
        elif "deflate" in accepted_encodings:
            accepted_compression = "deflate"
        if not self.process_file(file_path, accepted_compression=accepted_compression):
            raise Http404()
        entry = self.files[file_path]
        headers = [
            ("Content-Type", "%s; charset=utf8" % (entry["mime_type"],)),
            ("Date", http_date()),
            ("Last-Modified", http_date(entry["timestamp"])),
        ]
        if self.extra_headers:
            headers += self.extra_headers.items()
        modified_since = environment.get("HTTP_IF_MODIFIED_SINCE", None)
        if modified_since:
            modified_since = parse_date(modified_since)
            # BUG FIX: the Last-Modified we advertise is GMT (http_date), so
            # compare in UTC; the original local-time fromtimestamp() broke
            # the comparison in any non-UTC timezone.
            last_modified = datetime.datetime.utcfromtimestamp(entry["timestamp"])
            # BUG FIX: use >= and drop sub-second precision — clients echo
            # back the second-granular Last-Modified we sent, so the original
            # strict ">" never matched and 304 was never returned for an
            # unchanged file. Also guard against parse_date() returning None
            # for a malformed header (was a TypeError).
            if modified_since is not None and modified_since >= last_modified.replace(
                microsecond=0
            ):
                start_response("304 Not Modified", headers)
                return
        # BUG FIX: in the original this whole section was nested inside
        # "if accepted_compression == 'gzip'", so deflate-only clients and
        # clients accepting no compression got no response at all
        # (start_response was never called and nothing was yielded).
        if accepted_compression == "gzip" and entry["gzip"] is not None:
            headers.append(("Content-Length", str(entry["gzip"][1])))
            headers.append(("Content-Encoding", "gzip"))
            data = entry["gzip"][0]
        elif accepted_compression == "deflate" and entry["deflate"] is not None:
            headers.append(("Content-Length", str(entry["deflate"][1])))
            headers.append(("Content-Encoding", "deflate"))
            data = entry["deflate"][0]
        else:
            # Uncompressed fallback (binary files, or no acceptable encoding).
            headers.append(("Content-Length", str(entry["raw"][1])))
            data = entry["raw"][0]
        start_response("200 OK", headers)
        if method == "head":
            return
        # Stream the body in bounded chunks to cap per-write memory use.
        for start in range(0, len(data), self.max_http_chunk_length):
            yield data[start : start + self.max_http_chunk_length]