# -*- coding:utf-8 -*-
## author : cypro666
## note   : python3.4+
"""
Wrapper of file reading/writing and other IO operations
"""
import os, sys
import time
import mmap
from _io import open, BytesIO
from _thread import RLock
from _collections import defaultdict
from pickle import dump as pickle_dump
from pickle import load as pickle_load
try:
    import fcntl
except ImportError:
    # fcntl is POSIX-only; on platforms without it (e.g. Windows) install a
    # no-op stand-in so FileLock degrades to doing nothing instead of failing
    # at import time.
    class fcntl:
        """ the dummy fcntl """
        LOCK_SH, LOCK_NB, LOCK_UN = 0, 0, 0
        @staticmethod
        def flock(*args, **kwargs):
            # intentionally a no-op: real file locking is unavailable here
            pass
    sys.stdout.write(fcntl.__doc__)

from .debug import print_exception, time_meter
from .iteralgos import BisectList 
from .debug import stdtime


__all__ = ['ONE_GB', 'ONE_MB', 'ONE_PAGE', 'FileLock', 'FileIO', 'BEST_IO_BUFFER',
           'FileMemoryMapping', 'FilePointerRecorder']

# Size constants used as buffering defaults and mmap window granularity.
ONE_GB = (1024*1024*1024) # 1073741824 bytes
ONE_MB = (1024*1024)      # 1048576 bytes
ONE_PAGE = (4096)         # virtual memory page size, 4KB; mmap/stream sizes must be multiples of this
BEST_IO_BUFFER = (262144) # 256KB, buffer size handed to open() for binary files


class FileLock(object):
    """ Thread-safe wrapper around an fcntl advisory file lock.

    The RLock serializes concurrent lock/unlock calls from threads of this
    process; the flock() provides inter-process locking on the descriptor.
    """
    __slots__ = ('__fileno', '__locktype', '__mtlock')
    def __init__(self, fileobj, locktype = fcntl.LOCK_SH|fcntl.LOCK_NB):
        """ `fileobj` must expose fileno(); default lock type is a
        shared, non-blocking lock (note the default params) """
        assert hasattr(fileobj, 'fileno')
        self.__fileno = fileobj.fileno()
        self.__locktype = locktype
        self.__mtlock = RLock()

    def lock(self):
        """ acquire the file lock.
        Bug fix: the original called acquire(blocking=False), ignored the
        result and then unconditionally release()d -- under contention that
        raised RuntimeError and provided no mutual exclusion at all. """
        with self.__mtlock:
            fcntl.flock(self.__fileno, self.__locktype)

    def unlock(self):
        """ release the file lock (same fix as lock()) """
        with self.__mtlock:
            fcntl.flock(self.__fileno, fcntl.LOCK_UN)



class FileIO(object):
    """ Thin wrapper around the builtin open() adding size/path helpers,
    chunked streaming and exception-logged writes.

    Binary files ('b' in mode) are opened with a 256KB buffer; text files
    are opened with the given encoding and errors='replace'.
    """
    __slots__ = ('__file', '__fn', '__flock', '__mode', 'closed')

    def __init__(self, filename, mode = 'r', encoding = 'utf-8'):
        """ 'replace' is the strategy used when text decoding fails.
        Raises FileNotFoundError up front when reading a missing file. """
        if 'r' in mode:
            if not os.path.exists(filename):
                raise FileNotFoundError(filename)
        super().__init__()
        self.__fn = filename
        self.__mode = mode
        self.closed = False  # bug fix: the 'closed' slot was declared but never assigned
        if 'b' in mode:
            self.__file = open(filename, mode=mode, buffering=BEST_IO_BUFFER)
        else:
            self.__file = open(filename, mode=mode, encoding=encoding, errors='replace')
        if __debug__:
            sys.stdout.write('FileIO: %s opened in mode:%s with encoding:%s/replace\n' % (filename, mode, encoding))

    def close(self):
        """ close the underlying file; safe to call more than once.
        Bug fix: this method was missing although __exit__ (and
        FileMemoryMapping.__del__ in this module) expect to close. """
        if not self.closed:
            self.__file.close()
            self.closed = True

    def fd(self):
        """ file descriptor """
        return self.__file.fileno()

    def __str__(self):
        """ file name """
        return self.__fn

    def __enter__(self):
        """ with statement """
        assert self.__fn and self.__file
        return self

    def __exit__(self, type, value, traceback):
        """ with statement: close the file, never suppress exceptions """
        self.close()
        return False

    def exist(self):
        """ whether the file currently exists on disk """
        return os.path.exists(self.__fn)

    def size(self):
        """ file size in bytes """
        return os.path.getsize(self.__fn)

    def __len__(self):
        """ same as `size` """
        return self.size()

    def abspath(self):
        """ absolute path of file """
        return os.path.abspath(self.__fn)

    def basename(self):
        """ base name of file """
        return os.path.basename(self.__fn)

    def dirname(self):
        """ directory name of file """
        return os.path.dirname(self.__fn)

    def __iter__(self):
        """ iterate over the underlying file (lines in text mode) """
        return iter(self.__file)

    def tell(self):
        """ current file pointer position """
        return self.__file.tell()

    def seek(self, offset, whence = os.SEEK_SET):
        """ file seek; whence should be os.SEEK_SET, os.SEEK_CUR or os.SEEK_END """
        return self.__file.seek(offset, whence)

    def read(self, bufsize = -1):
        """ read at most `bufsize` bytes/chars (whole file by default) """
        return self.__file.read(bufsize)

    def readall(self):
        """ read the whole file; uses a single os.read for binary files """
        if 'b' in self.__mode:
            return os.read(self.fd(), self.size())
        else:
            return self.__file.read()

    def readline(self):
        """ read a single line """
        return self.__file.readline()

    def streamer(self, bufsize = BEST_IO_BUFFER):
        """ return a chunk generator: BytesIO-wrapped chunks in binary mode,
        plain str chunks in text mode. `bufsize` must be page-aligned;
        a non-positive bufsize falls back to readall(). """
        if bufsize < 0 or not bufsize:
            return self.readall()
        assert (bufsize % ONE_PAGE) == 0
        if 'b' in self.__mode:
            fd = self.fd()
            _os_read = os.read
            _read = lambda: _os_read(fd, bufsize)
            _wrap = BytesIO
        else:
            # bug fix: the original wrapped text (str) chunks in BytesIO,
            # which raises TypeError; yield the str chunk as-is instead
            _file_read = self.__file.read
            _read = lambda: _file_read(bufsize)
            _wrap = lambda chunk: chunk
        def _stream():
            try:
                buf = _read()
                while buf:
                    yield _wrap(buf)
                    buf = _read()
            except IOError:
                print_exception('FileIO.streamer')
        return _stream()

    def readlines(self, bufsize = 0):
        """ read all lines at once, or (with a page-aligned bufsize) return a
        generator yielding batches of lines of roughly `bufsize` bytes """
        if bufsize < 0 or not bufsize:
            return self.__file.readlines()
        assert (bufsize % ONE_PAGE) == 0
        def _read():
            try:
                buf = self.__file.readlines(bufsize)
                while buf:
                    yield buf
                    buf = self.__file.readlines(bufsize)
            except IOError:
                print_exception('FileIO.readlines')
        return _read()

    def write(self, data, sync = False):
        """ write data to file; flush when `sync`; IOError is logged, not raised """
        try:
            if data:
                self.__file.write(data)
            if sync:
                self.__file.flush()
        except IOError:
            print_exception('FileIO.write')

    def writelines(self, lines, sync = False):
        """ write lines to file; flush when `sync`; IOError is logged, not raised """
        try:
            if lines:
                self.__file.writelines(lines)
            if sync:
                self.__file.flush()
        except IOError:
            # bug fix: was mislabelled 'FileIO.write'
            print_exception('FileIO.writelines')
    
    

class FileMemoryMapping(object):
    """ Wrapper of memory mapping over a file.

    The backing file is always opened 'rb+' so both ACCESS_READ and
    ACCESS_WRITE mappings can be created regardless of `mode`.
    """
    __slots__ = ('__file',)  # bug fix: was the bare string ('__file'), not a tuple
    def __init__(self, filename, mode = 'r'):
        """ `mode` is accepted for compatibility but currently unused;
        the file must exist and be non-empty """
        super().__init__()
        self.__file = FileIO(filename, 'rb+', encoding = 'ascii')
        assert self.__file.exist()
        assert self.__file.size()

    def close(self):
        """ release the backing file.
        New method: callers (see test() in this module) already expected it. """
        try:
            self.__file.close()
        except AttributeError:
            # FileIO may not provide close(); release the raw descriptor instead
            os.close(self.__file.fd())

    def __del__(self):
        # bug fix: never let an AttributeError (missing FileIO.close) escape
        # from the finalizer
        try:
            self.close()
        except Exception:
            pass

    def size(self):
        """ file size in bytes """
        return self.__file.size()

    def __create(self, fd, access, length, offset):
        """ implementation: yield successive mmap windows of `length` bytes
        until the end of the file is reached """
        if length > self.size() or length <= 0:
            length = self.size()
        def _mmap():
            current = offset
            m = mmap.mmap(fd, length, access=access, offset=current)
            assert self.size() == m.size()  # mmap.size() reports the whole file size
            while True:
                yield m
                m.close()
                current += length
                if current >= self.size():
                    break
                try:
                    m = mmap.mmap(fd, length, access=access, offset=current)
                except ValueError:
                    # tail window shorter than `length`: map through to EOF
                    m = mmap.mmap(fd, 0, access=access, offset=current)
        return _mmap()

    def entire(self, fd, access, length, offset = 0):
        """ return a single memory mapping of `length` bytes from `offset` """
        return mmap.mmap(fd, length, access=access, offset=offset)

    def streamer(self, mode, length, offset = 0):
        """ return a generator of mmap windows; `length` must be a multiple
        of the 4KB page size; mode 'w' maps writable, anything else read-only """
        if length % ONE_PAGE:
            # bug fix: the message referenced a non-existent method name
            raise ValueError('FileMemoryMapping.streamer : length error')
        if mode == 'w':
            access = mmap.ACCESS_WRITE
        else:
            access = mmap.ACCESS_READ
        return self.__create(self.__file.fd(), access, length, offset)

    def __enter__(self):
        """ with statement """
        assert self.__file
        return self

    def __exit__(self, type, value, traceback):
        """ with statement: close the backing file (bug fix: the original
        leaked it), never suppress exceptions """
        self.close()
        return False



class FilePointerRecorder(object):
    """ experiment... """
    __slots__ = ('append', '_keeporder', '__rec')
    def __init__(self, keeporder = False):
        super().__init__()
        if keeporder:
            self.__rec = defaultdict(lambda:[])
            self.append = self._lappend
        else: 
            self.__rec = defaultdict(lambda:BisectList())
            self.append = self._sappend
        self._keeporder = keeporder
        
    def _sappend(self, key, offset): 
        self.__rec[key].insort(offset)
    
    def _lappend(self, key, offset): 
        self.__rec[key].append(offset)
    
    def __iter__(self):
        return self.__rec.__iter__()

    def __len__(self):
        return self.__rec.__len__() 
    
    def size(self, key):
        return self.__rec[key].__len__()
    
    def foreach(self, callback):
        for key in self.__rec:
            callback(key, self.__rec[key]) 
    
    def readline_by_offsets(self, key, filename):
        whence = os.SEEK_SET
        with open(filename, 'rb') as file:
            _seek = file.seek
            _readline = file.readline
            offsets = self.__rec[key]
            if self._keeporder:
                offsets.sort(reverse=False)
            for offset in offsets:
                _seek(offset, whence)
                yield _readline()

    def read_by_offsets(self, key, filename, size):
        with open(filename, 'rb', buffering=BEST_IO_BUFFER) as file:
            _pread = os.pread
            _fd = file.fileno()
            offsets = self.__rec[key]
            if self._keeporder:
                offsets.sort(reverse=False)
            for offset in offsets:
                yield _pread(_fd, size, offset)
    
    def dump(self, path):
        if path[-1] != os.sep:
            path += os.sep
        filename = '%s%s.%d.fpr' % (path, stdtime(), id(self))
        pickle_dump(self, filename)
        sys.stdout.write('FilePointerRecorder object dump to %s\n' % filename)
    
    @staticmethod
    def load(self, fprfile):
        assert os.path.exists(fprfile)
        return pickle_load(fprfile)



@time_meter(__name__)
def test(testfile):
    """ Ad-hoc benchmark: scan `testfile` for the marker b'abc123xyz' four
    different ways (mmap windows, text line iteration, buffered binary
    chunks, one whole-file read) and print the wall time of each. """
    # strategy 1: 4MB memory-mapped windows
    t1 = time.time()
    f = FileMemoryMapping(testfile)
    bufsize = ONE_MB*4
    for mm in f.streamer('r', bufsize):
        lines = mm.read(bufsize).split(b'\n')
        for s in lines:
            if b'abc123xyz' in s:
                print('bingo')
    # NOTE(review): FileMemoryMapping as defined above has no close() method,
    # so this call raises AttributeError -- confirm the intended API.
    f.close()
    print('mmap:', time.time()-t1)
    
    # strategy 2: text-mode line iteration
    t1 = time.time()
    with FileIO(testfile, encoding='ascii', mode='r') as f:
        for line in f:
            if 'abc123xyz' in line:
                print('bingo')
    print('read line:', time.time()-t1)
    
    # strategy 3: buffered binary chunks
    t1 = time.time()
    with FileIO(testfile, encoding='ascii', mode='rb') as f:
        # NOTE(review): streamer() yields BytesIO objects here, so `in`
        # iterates whole lines (equality per line) rather than doing the
        # substring search the other strategies do -- verify intent.
        for line in f.streamer(BEST_IO_BUFFER):
            if b'abc123xyz' in line:
                print('bingo')
    print('read buf:', time.time()-t1)

    # strategy 4: read the whole file at once, then split on newlines
    t1 = time.time()
    with FileIO(testfile, encoding='ascii', mode='rb') as f:
        lines = f.readall().split(b'\n')
        for s in lines:
            if b'abc123xyz' in s:
                print('bingo')
    print('read buf all:', time.time()-t1)
    



