# 需求：
#
# 1. 从多个来源下载文件，每个来源只使用单线程
# 2. 多个来源的下载结果写到同一个文件中
# 3. 支持在下载过程中添加新的下载源
# 4. 每一个下载文件对应一个日志文件，用来记录下载的状态
# 5. 下载完成后删除日志文件
# 6. 支持断点续传
#
# 课后拓展:
# 1. 启动新进程时扫描一遍所有标为busy的块，确认工作进程存活。
# 2. 当一个进程获取空块得到空时，尝试扫描busy块，如果没有
#    busy块，或者busy块的进程是存活的，当前进程就退出。
# 3. 当所有的块都是已完成时，删除日志文件。
# 4. 支持查询状态：总大小，块大小，已下载，正在下载，未下载，
#    预计剩余时间。
# 5. 支持从一个文本文件读取多个url，同时启动多个下载进程。
#

import sys
import os
import json
import hashlib
import fcntl
import math
from io import BytesIO
from datetime import datetime

import pycurl

import thinap


def process_alive(pid):
    """Return True if a process with *pid* is currently alive.

    Used to decide whether a busy block's owner is still working on it.
    A status of zero means "claimed but download not yet started"; it
    is treated as not alive so the block can be reclaimed.

    NOTE(review): this only checks liveness; verifying the process is
    actually a downloader (e.g. via /proc/<pid>/cmdline) is still TODO.
    """
    if not pid:
        # 0 is the "locked, not started" sentinel; os.kill(0, ...) would
        # signal our own process group, so never pass it through.
        return False
    try:
        # Signal 0 performs permission/existence checks without
        # delivering a signal (POSIX kill semantics).
        os.kill(pid, 0)
    except ProcessLookupError:
        return False
    except PermissionError:
        # The process exists but belongs to another user.
        return True
    return True


class Lock:

    """A system-wide exclusive lock to synchronize processes.

    Implemented with the operating system's exclusive file locking
    (``flock``) on a file under /tmp whose name is derived from the
    lock's key text.
    """

    def __init__(self, text):
        # Hash the key so arbitrary text (e.g. file paths containing
        # slashes) maps to a valid, collision-resistant file name.
        digest = hashlib.sha1(text.encode()).hexdigest()
        self.path = os.path.join('/tmp', digest)
        self.fd = None  # open descriptor while the lock is held

    def acquire(self):
        """Block until the lock is held by this process."""
        self.fd = os.open(self.path, os.O_WRONLY | os.O_CREAT)
        fcntl.flock(self.fd, fcntl.LOCK_EX)

    def release(self):
        """Release the lock and close its file descriptor.

        The original implementation never closed the descriptor, so
        every acquire/release cycle leaked one fd.
        """
        fcntl.flock(self.fd, fcntl.LOCK_UN)
        os.close(self.fd)
        self.fd = None

    def __enter__(self):
        self.acquire()
        return self  # allow `with Lock(...) as lock:`

    def __exit__(self, extype, exvalue, tb):
        self.release()
        return False  # never suppress exceptions


class Block:

    """One slice (byte range) of the file being downloaded.

    The ``status`` field encodes the block's state:

    * ``True``  -- the block has been downloaded completely.
    * ``False`` -- the block is free to be claimed.
    * ``int``   -- the block is claimed: zero means locked but the
      download has not yet started, any other value is the pid of the
      worker process downloading it.
    """

    def __init__(self, number, first, last, status=False):
        """Record the block's position and byte range.

        number: index of the block within the data file.
        first:  offset of the block's first byte.
        last:   offset of the block's last byte (inclusive).
        status: initial state, defaults to "free".
        """
        self.number = number
        self.first = first
        self.last = last
        self.status = status
        self.data = None  # downloaded bytes, filled in by the worker

    def is_done(self):
        """Return True when the block has been fully downloaded."""
        return self.status is True

    def is_empty(self):
        """Return True when the block is unclaimed."""
        return self.status is False

    def is_busy(self):
        """Return True when a live worker process owns this block."""
        # A type check keeps bools and pids apart: a pid of 1 would
        # compare equal to True under ==, so identity/type matters.
        if isinstance(self.status, bool):
            return False
        return process_alive(pid=self.status)

    def set_done(self):
        """Mark the block as downloaded."""
        self.status = True

    def set_busy(self):
        """Claim the block for the current process."""
        self.status = os.getpid()


class Journal:

    """Persist the download state of a single file.

    The journal is a JSON document ``{size, bs, blocks}`` stored in its
    own file; every worker process reads and updates it under a
    system-wide lock keyed on the journal's path.
    """

    def __init__(self, fname):
        # fname: path of the journal file.
        self.fname = fname
        self.lock = Lock('journal' + fname)

    def create(self, size, bs=5242880):
        """Create a journal file and save info into it.

        size: total size of the file to download.
        bs: block size per task, defaults to 5MB.

        blocks structure:

            [stat, stat, stat, ...]

        Value of the status of each block can be one of these:

            True: the block had been downloaded.
            False: the block is available for download.
            int: an integer signifies the block is busy, either being
            downloaded or about to be downloaded, by a process, and
            the integer is the process' pid, if the download not yet
            started, the integer is zero.
        """
        count = math.ceil(size / bs)
        blocks = [False] * count
        self.info = self.make_info(size, bs, blocks)
        self._write(json.dumps(self.info))

    def read(self):
        """Load self.info from the journal file.

        This is for subsequent workers to get the blocks info.
        The initial worker will create the blocks info file.
        """
        self.info = self._read()

    def update_block(self, block):
        """Persist the status of the provided block.

        Re-reads the on-disk journal under the lock and changes only
        this block's slot, so statuses written by concurrent workers
        since our last read are preserved. (The original merged a
        possibly stale full blocks list over the file, which could
        silently revert another worker's updates.)
        """
        with self.lock:
            with open(self.fname, 'r+') as f:
                info = self._read(f)
                info['blocks'][block.number] = block.status
                f.truncate(0)
                f.seek(0)
                self._write(json.dumps(info), f)
            self.info = info

    def _update(self):
        # Merge self.info over the on-disk journal and write it back.
        # NOTE(review): this replaces the whole 'blocks' list with a
        # possibly stale copy; prefer update_block for block changes.
        with self.lock:
            with open(self.fname, 'r+') as f:
                info = self._read(f)
                info.update(self.info)
                f.truncate(0)
                f.seek(0)
                self._write(json.dumps(info), f)
            self.info = info

    def _read(self, f=None):
        # Read and return the parsed journal, from f or from self.fname.
        if f is not None:
            return json.loads(f.read())
        # Use a context manager; the original leaked the handle.
        with open(self.fname) as fp:
            return json.loads(fp.read())

    def _write(self, data, f=None):
        # Write journal data to f, or to self.fname (truncating).
        if f is not None:
            f.write(data)
            return
        # Use a context manager; the original leaked the handle.
        with open(self.fname, 'w') as fp:
            fp.write(data)

    @staticmethod
    def make_info(size, bs, blocks):
        """Assemble the journal dict from its three fields."""
        return dict(size=size, bs=bs, blocks=blocks)


class Worker:

    """The worker process.

    Command example:

        $ mdown -o abc.iso http://xx.com/abc.iso

    If the path abc.iso.jnl exists, it's a subsequent worker of an
    existing download task. If the journal file does not exist but
    the data file does, exit the program.
    """

    def __init__(self, outpath, url):
        self.outpath = outpath
        self.ofile = None  # output file handle, opened lazily
        self.journal_file = outpath + '.jnl'
        self.url = url
        self.lock = Lock('worker' + outpath)

    def start(self):
        """Run the download loop until no empty block remains."""
        #
        # read or populate the journal file
        #
        with self.lock:
            # If the out file exists but no journal file found, the
            # out file was not created by this program; refuse to
            # touch it.
            outfile_exists = os.path.exists(self.outpath)
            journal_exists = os.path.exists(self.journal_file)
            if outfile_exists and not journal_exists:
                print('file %s exists, but no journal found' % self.outpath,
                      file=sys.stderr)
                # sys.exit, not the site-provided exit(): the latter is
                # absent in some non-interactive deployments.
                sys.exit(1)

            jnl = Journal(self.journal_file)
            if not journal_exists:
                # first worker: probe the remote size, create journal
                size = self.get_src_size(self.url)
                jnl.create(size)
            else:
                # subsequent worker: load the existing journal
                jnl.read()
            self.journal = jnl

        # The download loop: claim a block, fetch its range, write it.
        while True:
            block = self.allocate_block(self.lock, self.journal)
            if block is None:
                print('no empty block available')
                break
            rng = (block.first, block.last)
            block.data = self.download_range(self.url, rng)

            # length differed, data corruption.
            if len(block.data) != block.last - block.first + 1:
                print('downloaded data corrupted on range %s:%s' %
                      (block.first, block.last), file=sys.stderr)
                # Return the block to the pool; the original left it
                # marked busy with our live pid, so no worker (including
                # us) would ever retry it.
                block.status = False
                self.journal.update_block(block)
                continue

            self._write_block(block)
            block.set_done()
            self.journal.update_block(block)

        # Close the output file; the original leaked the handle.
        if self.ofile is not None:
            self.ofile.close()
            self.ofile = None

    @staticmethod
    def all_empty_blocks(journal):
        """Return the indexes of all blocks available for download."""
        blocks = journal.info['blocks']
        return [i for i, x in enumerate(blocks) if x is False]

    @staticmethod
    def allocate_block(lock, journal):
        """Find, claim and return an empty block, or None if none left.

        Runs under the lock so two workers cannot claim the same block.
        """
        with lock:
            journal.read()  # update blocks info from file
            emptyblks = Worker.all_empty_blocks(journal)
            if not emptyblks:
                # no empty blocks available
                return None
            number = emptyblks[0]
            size = journal.info['size']
            bs = journal.info['bs']
            first = bs * number
            # the final block may be shorter than bs
            last = min((first + bs - 1), (size - 1))
            block = Block(number, first, last)
            block.set_busy()
            journal.update_block(block)
            return block

    @staticmethod
    def download_range(url, rng):
        """Download and return the inclusive byte range rng=(first, last)."""
        rng = '%s-%s' % rng
        expected_code = 206  # HTTP Partial Content
        buf = BytesIO()
        c = pycurl.Curl()
        c.setopt(c.URL, url)
        c.setopt(c.RANGE, rng)
        c.setopt(c.WRITEFUNCTION, buf.write)
        c.perform()
        code = c.getinfo(c.HTTP_CODE)
        assert code == expected_code, "unexpected http code: %s" % code
        data = buf.getvalue()
        return data

    @staticmethod
    def get_src_size(url):
        """Return the total size in bytes reported by a HEAD request."""
        expected_code = 200
        c = pycurl.Curl()
        c.setopt(c.URL, url)
        c.setopt(c.HEADER, True)
        c.setopt(c.NOBODY, True)
        # Discard any response bytes; close the sink afterwards (the
        # original leaked the devnull file handle).
        with open(os.devnull, 'wb') as devnull:
            c.setopt(c.WRITEFUNCTION, devnull.write)
            c.perform()
            code = c.getinfo(c.HTTP_CODE)
        assert code == expected_code, "unexpected http code: %s" % code
        size = c.getinfo(c.CONTENT_LENGTH_DOWNLOAD)
        return int(size)

    def _write_block(self, block):
        """Write the data of the block at its offset in the out file."""
        if self.ofile is None:
            # Open under the lock so concurrent workers do not race on
            # creating versus reusing the file.
            with self.lock:
                if os.path.exists(self.outpath):
                    # 'rb+' preserves the existing data
                    self.ofile = open(self.outpath, 'rb+')
                else:
                    self.ofile = open(self.outpath, 'wb')
        self.ofile.seek(block.first)
        self.ofile.write(block.data)
        self.ofile.flush()
        return True


def help(file=sys.stdout):
    """Print the one-line usage string to *file*."""
    prog = os.path.basename(sys.argv[0])
    print('usage: %s -o file url' % prog, file=file)


if __name__ == '__main__':

    # Parse: -o <outfile> plus positional urls.
    argv = sys.argv[1:]
    spec = {'outfile': {'flag': '-o', 'arg': 1}}
    mapping, urls = thinap.ArgParser().parse_args(argv, spec)[:2]

    # Both an output path and at least one url are required.
    outpath = mapping.get('outfile')
    if outpath is None or not urls:
        help(file=sys.stderr)
        exit(1)

    worker = Worker(outpath, urls[0])
    worker.start()
