import logging as _logging
from typing import (
    TypeVar as _TypeVar,
    Iterator as _Iterator,
    Iterable as _Iterable,
    Sequence as _Sequence,
    BinaryIO as _BinaryIO,
)
from collections import (
    deque as _deque,
)
from itertools import (
    cycle as _cycle,
    zip_longest as _zip,
)
from functools import (
    cache as _cache,
)
from pathlib import (
    Path as _Path,
)



_T = _TypeVar("_T")

# Raw (unencoded) token payload length limits, in bytes.
RAW_LEN_MIN: int = 1
RAW_LEN_MAX: int = 32
# Dictionary-match length limits: 3-bit flags 001..110 map to lengths
# 3..8, flag 111 adds one extended-length byte (up to +255) on top of 9.
MATCH_LEN_MIN: int = 3
MATCH_LEN_DELTA: int = MATCH_LEN_MIN - 1
MATCH_LEN_MAX: int = 7 + MATCH_LEN_DELTA + 255
# Dictionary-match distance limits: 13 bits, stored zero-based.
MATCH_DISTANCE_MIN: int = 1
MATCH_DISTANCE_MAX: int = 8192
# Log records carry only the message text (tab-separated token traces).
LOGGER_FORMATTER = _logging.Formatter('%(message)s')


class TokenHeader (bytes):
    '''Token header.
    
    Raw bytes:
    000x xxxx
    x = read length

    Encoded bytes:
    xxxy yyyy yyyy yyyy
    x = dictionary match length (MUST LESS THAN 7)
    y = dictionary match distance

    Extended encoded bytes:
    xxxy yyyy zzzz zzzz yyyy yyyy
    x = dictionary match length
    y = dictionary match distance
    z = addition value to match length

    3-bit flag mapping:
    000 => Raw/no encode/no match
    001 => match length 3 bytes
    010 => match length 4 bytes
    011 => match length 5 bytes
    ...
    110 => match length 8 bytes
    111 => match length 9 bytes + extended length
    '''
    @property
    @_cache
    def is_raw (self) -> bool:
        return self[0] >> 5 == 0b000

    @property
    @_cache
    def is_extended (self) -> bool:
        return self[0] >> 5 == 0b111

    @property
    @_cache
    def distance (self) -> int:
        '''
        13 bits => 0 to 8191 + 1 => 1-8192 bytes.
        '''
        if self.is_raw:
            raise AttributeError( 'Raw value does not have a dictionary distance', name='distance', obj=self )
        xBits = (self[0] & 0b11111) # Take 5 bits from right
        xBits <<= 8 # move those bits 8 steps left
        yBits = self[1] # non-extended length use second byte as last 8 bits
        if self.is_extended:
            yBits = self[2] # otherwise, use third byte
        # bit: 000xxxxx yyyyyyyy
        return (xBits | yBits) + MATCH_DISTANCE_MIN
    
    def __add__ (self, s: bytes):
        return (type(self))(super().__add__(s))

    @property
    @_cache
    def size (self) -> int:
        '''
        3 bits => if x == 0 => next 5 bits is raw length => 0 to 31 => 1-32 bytes.
               => if x < 7 => 1 to 6 + [match length delta] => 3-8 bytes.
               => otherwise => next byte is extended length => 7 + [match length delta] + [extended length] => 9-264 bytes.
        '''
        if self.is_raw:
            return (self[0] & 0b11111) + 1
        size: int = self[0] >> 5 # Take 3 bits from left
        size += MATCH_LEN_DELTA
        if self.is_extended:
            size += self[1] # extended length
        return size
    
    @property
    @_cache
    def unused_data (self) -> bytes:
        if self.is_raw:
            return self[1:]
        if not self.is_extended:
            return self[2:]
        return self[3:]

    @classmethod
    def new (cls, size: int, distance: int|None = None):
        # unencoded/raw
        if distance is None:
            if size < RAW_LEN_MIN or size > RAW_LEN_MAX:
                raise ValueError( 'size is out of range' )
            size -= 1
            first_byte = size & 0b11111
            # sanity check
            assert first_byte >= 0b00000000 and first_byte <= 0b00011111, 'first byte is out of range'
            # bit: 000xxxxx
            return cls([first_byte])
        if distance < MATCH_DISTANCE_MIN or distance > MATCH_DISTANCE_MAX:
            raise ValueError( 'distance is out of range' )
        if size < MATCH_LEN_MIN or size > MATCH_LEN_MAX:
            raise ValueError( 'size is out of range' )
        size -= MATCH_LEN_DELTA
        distance -= MATCH_DISTANCE_MIN
        if size < 0b111:
            # encoded
            first_byte = size << 5 # 3 bits for size, 5 bits for distance
            first_byte |= distance >> 8 # 13 bits - 5 bits = 8 bits popped out
            second_byte = distance & 0b11111111 # and moved into second byte
            # sanity check
            assert first_byte >= 0b00100000 and first_byte <= 0b11011111, 'first byte is out of range'
            assert second_byte >= 0 and second_byte <= 0b11111111, 'second byte is out of range'
            # bit: xxxyyyyy yyyyyyyy
            return cls([first_byte, second_byte])
        # extended encoded
        size -= 0b111 # 264 - [match length delta] - 7 (3 bits in decimal) = 255
        first_byte = 0b111 << 5 # first 3 bits for size, last 5 bits for distance
        first_byte |= distance >> 8 # 13 bits - 5 bits = 8 bits from right popped out
        third_byte = distance & 0b11111111 # and moved into third byte
        second_byte = size # extended length
        # sanity check
        assert first_byte >= 0b11100000 and first_byte <= 0b11111111, 'first byte is out of range'
        assert second_byte >= 0 and second_byte <= 0b11111111, 'second byte is out of range'
        assert third_byte >= 0 and third_byte <= 0b11111111, 'third byte is out of range'
        # bit: xxxyyyyy zzzzzzzz yyyyyyyy
        return cls([first_byte, second_byte, third_byte])


class TokenDictionary (_deque[_T]):
    '''Sliding-window LZSS dictionary backed by a (typically bounded) deque.'''

    def find_token (
        self,
        token: _Sequence[_T],
        minsize: int=MATCH_LEN_MIN,
        maxsize: int|None=None,
        *,
        take_shortest_distance: bool=False,
        compress_repetition_at_start: bool=False,
    ) -> tuple[int, int]:
        '''Search the dictionary for a match of the head of *token*.

        Returns ``(distance, size)``: *distance* counts backwards from the
        newest dictionary entry, *size* is the number of matched symbols.
        ``(0, 0)`` means no match of at least *minsize* symbols was found.

        take_shortest_distance: stop scanning as soon as any match is
            found (nearest match) instead of continuing to look for a
            longer one further back.
        compress_repetition_at_start: before the normal scan, try to match
            *token* against a repeating cycle at the end of the dictionary,
            so the reported size may exceed the distance (overlapping,
            run-length-style match).
        '''
        # https://github.com/andre-a-alves/Lzss_Encoder_Cpp/blob/cd0c12c02fdb709714523059eb1a8f33da982c19/Lzss.cpp#L191
        dictionary = self
        dictionary_len: int = len( dictionary )
        token_len: int = len( token )
        # Too little data on either side to ever reach minsize.
        if dictionary_len < minsize or token_len < minsize:
            return (0, 0,)
        distance: int = 0
        size: int = 0
        next_len: int = minsize  # target length: current best + 1
        max_len: int = dictionary_len if dictionary_len < token_len else token_len
        if maxsize and maxsize > 0:
            max_len = min( max_len, max(minsize, maxsize) )
        dictionary_last_pos: int = dictionary_len - 1
        # Repetition
        if compress_repetition_at_start:
            # Grow a candidate period backwards from the dictionary tail and
            # count how far *token* keeps following that cycle.
            repetition_max_len: int = max_len // 2
            repetition: _deque[_T] = _deque( maxlen=repetition_max_len )
            i: int = dictionary_last_pos
            while i > dictionary_last_pos - repetition_max_len and not distance:
                repetition.appendleft( dictionary[i] )
                it: _Iterator[_T] = _cycle( repetition )  # replay period forever
                count: int = 0
                j: int = 0
                while j < repetition_max_len:
                    if token[j] != next(it):
                        break
                    count += 1
                    j += 1
                repetition_len: int = len( repetition )
                if count >= minsize:
                    # Distance is the period length; size may exceed it.
                    distance = repetition_len
                    size = count
                i -= 1
        # Longest token
        if not distance:
            # Scan match start positions from newest to oldest; a position
            # only improves the result if it matches next_len symbols.
            i: int = dictionary_last_pos
            while i >= 0 and next_len <= max_len:
                if take_shortest_distance and distance:
                    break
                j: int = 0
                # Compare strictly inside the dictionary (no overlap here).
                while j < next_len and j < max_len and i + j < dictionary_len:
                    if token[j] != dictionary[i + j]:
                        break
                    if j == next_len - 1:
                        size = next_len
                        distance = dictionary_len - i
                        next_len += 1  # next candidate must beat this one
                    j += 1
                i -= 1
        if not distance:
            size = 0
        return (distance, size,)

    def itake (self, distance: int, size: int) -> _Iterator[_T]:
        '''Yield *size* symbols starting *distance* back, re-appending each.

        Appending while reading lets a match overlap the write position:
        when size > distance the just-emitted symbols are replayed.
        '''
        offset: int = len( self ) - distance
        maxlen = self.maxlen
        while size:
            data: _T = self[offset]
            # Once the deque is full, each append drops the leftmost element
            # and shifts every index down by one, leaving `offset` already on
            # the next symbol; while not full, advance it manually.
            if maxlen and len(self) < maxlen:
                offset += 1
            self.append( data )
            yield data
            size -= 1

    def extend (self, iterable: _Iterable[_T]):
        # NOTE(review): with a maxlen this appends element-wise, which
        # matches deque.extend's own overflow behavior — the override looks
        # redundant; confirm whether hooking subclass append was the intent.
        if not self.maxlen:
            return super().extend( iterable )
        for x in iterable:
            self.append( x )

    def extendleft (self, iterable: _Iterable[_T]):
        # NOTE(review): same element-wise mirror as extend, on the left end.
        if not self.maxlen:
            return super().extendleft( iterable )
        for x in iterable:
            self.appendleft( x )


# Child logger that traces every decoded token into decompress.log.
logger_decompress = _logging.getLogger(__name__).getChild('decompress')
logger_decompress.setLevel(_logging.INFO)
_decompress_log_handler = _logging.FileHandler(filename='decompress.log', mode='w')
_decompress_log_handler.setLevel(_logging.INFO)
_decompress_log_handler.setFormatter(LOGGER_FORMATTER)
logger_decompress.addHandler(_decompress_log_handler)


def decompress (inputstream: _BinaryIO, outputstream: _BinaryIO):
    '''Decompress an LZSS token stream from *inputstream* into *outputstream*.

    Raw tokens copy their payload verbatim; match tokens replay ``size``
    bytes starting ``distance`` bytes back in the sliding-window dictionary.
    Each emitted token is traced through ``logger_decompress``.
    '''
    dictionary: TokenDictionary[int] = TokenDictionary( maxlen=MATCH_DISTANCE_MAX )
    while first_byte := inputstream.read(1):
        token = TokenHeader( first_byte )
        if token.is_raw:
            # Raw token: the next `size` bytes are literal data.
            # NOTE(review): a truncated stream yields a short read here and
            # is passed through silently — confirm that is acceptable.
            data = inputstream.read( token.size )
            dictionary.extend( data )
            outputstream.write( data )
            # Lazy %-args: the message is only built if a handler emits it.
            logger_decompress.info( '%s\t%s', (0, token.size,), data )
            continue
        # Match token: one extra header byte normally, two when extended.
        token += inputstream.read( 2 if token.is_extended else 1 )
        data = bytes( dictionary.itake(token.distance, token.size) )
        outputstream.write( data )
        logger_decompress.info( '%s\t%s', (token.distance, token.size,), data )


# Child logger that traces every emitted token into compress.log.
logger_compress = _logging.getLogger(__name__).getChild('compress')
logger_compress.setLevel(_logging.INFO)
_compress_log_handler = _logging.FileHandler(filename='compress.log', mode='w')
_compress_log_handler.setLevel(_logging.INFO)
_compress_log_handler.setFormatter(LOGGER_FORMATTER)
logger_compress.addHandler(_compress_log_handler)


def compress (inputstream: _BinaryIO, outputstream: _BinaryIO):
    '''Compress *inputstream* into LZSS tokens written to *outputstream*.

    Keeps a sliding-window dictionary of the most recent MATCH_DISTANCE_MAX
    bytes plus a pending raw-byte buffer; dictionary matches are emitted as
    encoded tokens, unmatched bytes are accumulated into raw tokens.
    '''
    dictionary: TokenDictionary[int] = TokenDictionary( maxlen=MATCH_DISTANCE_MAX )
    raw_data: _deque[int] = _deque( maxlen=RAW_LEN_MAX )
    token = _deque[int]( maxlen=MATCH_LEN_MAX )  # look-ahead buffer

    def __raw_write ():
        # Flush the pending literal bytes as one raw token (header + payload).
        if not raw_data:
            return
        outputstream.write( TokenHeader.new(len(raw_data)) )
        outputstream.write( bytes(raw_data) )
        # Lazy %-args: message built only when a handler emits the record.
        logger_compress.info( '%s\t%s', (0, len(raw_data),), bytes(raw_data) )
        raw_data.clear()

    def __raw_append (x: int):
        # Buffer one literal byte; flush as soon as a raw token is full.
        raw_data.append( x )
        if len(raw_data) == raw_data.maxlen:
            __raw_write()

    def __token_fill () -> bool:
        # Top up the look-ahead buffer; False once the input is exhausted.
        len_delta: int = token.maxlen - len( token )
        if len_delta == 0:
            return True
        token.extend( inputstream.read(len_delta) )
        return bool( token )

    while __token_fill():
        distance, size = dictionary.find_token( token, take_shortest_distance=True, compress_repetition_at_start=True )
        if not distance:
            data: int = token.popleft()
            dictionary.append( data )
            __raw_append( data )
        else:
            matched: tuple[int, ...] = tuple( token.popleft() for _ in range(size) )
            if not len(token):
                # EOF is directly dumped into output
                for x in matched:
                    __raw_append( x )
                continue
            dictionary.extend( matched )
            # Pending literals must precede the match token in the stream.
            __raw_write()
            outputstream.write( TokenHeader.new(size, distance) )
            logger_compress.info( '%s\t%s', (distance, size), bytes(matched) )
    __raw_write()


if __name__ == '__main__':
    sample_name = 'trackerscheme'
    sample_dir = _Path('samples') / sample_name

    # Decode the pre-compressed sample blob...
    with _Path('decompress.lzss').open('wb') as outstream, \
            (sample_dir / 'compressed.bin').open('rb') as instream:
        decompress(instream, outstream)

    # ...then re-compress the raw reference data.
    with _Path('compress.lzss').open('wb') as outstream, \
            (sample_dir / 'raw.bin').open('rb') as instream:
        compress(instream, outstream)