#!/usr/bin/env python
import os
import sys
import time
import argparse
import itertools

from bz2 import BZ2File
from gzip import GzipFile
from datetime import datetime, timedelta

from connection import NNTPFetcher

CHUNK_SIZE = 60000
FILE_COUNT_PLACEHOLDER = "{0}"

# Per-user directory that persists the last fetched article id per group.
STATE_DIRECTORY = "%s/.usenet-indexer/" % os.path.expanduser("~")

# makedirs(exist_ok=True) is race-free: the previous isdir()+mkdir() pair
# could raise if another process created the directory between the two calls.
os.makedirs(STATE_DIRECTORY, exist_ok=True)

# Adapted from http://stackoverflow.com/questions/7133179/python-yield-and-delete0
def chunker(iterable, chunksize):
    """
    Yield successive lists of up to `chunksize` elements from `iterable`.

    The last chunk may be shorter when the iterable's length is not a
    multiple of `chunksize`.

    >>> list(chunker(range(10), 3))
    [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
    """
    iterator = iter(iterable)
    while True:
        # islice() consumes up to `chunksize` items from the iterator; an
        # empty result means the iterator is exhausted.  (The original
        # wrapped the chunk in a one-element list — a Python 2 GC trick
        # that serves no purpose on Python 3.)
        chunk = list(itertools.islice(iterator, int(chunksize)))
        if not chunk:
            break
        yield chunk

def get_state(group_name):
    """
    Return the last saved article id for `group_name` as a string, or None
    when the state file is missing, unreadable, or empty.
    """
    try:
        # `with` guarantees the handle is closed even if readline() raises;
        # the original leaked the handle on a read error.
        with open("%s/%s" % (STATE_DIRECTORY, group_name)) as state_handle:
            article_id = state_handle.readline().strip()
    except OSError:
        # A missing state file simply means "no saved state yet".  The
        # original bare `except:` would also have hidden real bugs
        # (NameError, KeyboardInterrupt, ...).
        return None
    return article_id if article_id else None

def write_state(group_name, article_id):
    """
    Persist `article_id` as the last fully fetched article for `group_name`,
    so an interrupted run can resume from it (see get_state).
    """
    assert (article_id is not None)
    # `with` guarantees the file is flushed and closed even on a write error.
    with open("%s/%s" % (STATE_DIRECTORY, group_name), "w") as state_handle:
        state_handle.write(str(article_id))

class FetcherFilename:
    """
    Generates numbered output filenames from a template containing a `{0}`
    placeholder, e.g. "out-{0}.gz" -> "out-1.gz", "out-2.gz", ...

    A template without a placeholder formats to itself, so a plain filename
    is returned unchanged on every call.
    """

    def __init__(self, output_filename):
        assert(len(output_filename) > 0)
        self.output_filename = output_filename
        # Per-instance counter.  The original declared this as a class
        # attribute, which only worked because `self.file_count += 1`
        # happens to shadow it with an instance attribute.
        self.file_count = 0

    def get_name(self):
        """Return the next filename in the sequence, numbered from 1."""
        self.file_count += 1
        return self.output_filename.format(self.file_count)

class RateLimiter:
    """
    Throttles resource consumption (e.g. kbytes written) to at most
    `max_rate_per_second` units per second by sleeping between calls.
    """

    def __init__(self, max_rate_per_second):
        assert(max_rate_per_second > 0)
        self.max_rate = max_rate_per_second
        # Snapshot of the previous call; None until the first call, which
        # only records the starting point.
        self.last_res = None
        self.last_time = None

    def sleep_if_required(self, current_resource):
        """
        Record progress and, when the just-finished period exceeded
        `max_rate`, sleep long enough to bring its average rate down to it.

        `current_resource` is a monotonically increasing counter in the same
        unit as the limit.  Returns the period's observed rate, or 0 on the
        first call when no period exists yet.
        """
        current_time = time.time()

        if self.last_res is None or self.last_time is None:
            self.last_time = current_time
            self.last_res = current_resource
            return 0

        assert(current_resource - self.last_res > 0)

        period_time = current_time - self.last_time
        period_resource = current_resource - self.last_res

        # Guard against a zero-length period (two calls within the clock's
        # resolution), which would otherwise divide by zero below.
        if period_time <= 0:
            period_time = 1e-9

        period_rate = period_resource / period_time

        if period_rate > self.max_rate:
            # Stretching the period to period_resource / max_rate seconds
            # brings its average down to exactly max_rate; sleep the missing
            # part.  The original computed (period_rate - max_rate) /
            # max_rate, which is dimensionless (it dropped the period_time
            # factor) and therefore slept the wrong amount.
            sleep_time = period_resource / self.max_rate - period_time
            time.sleep(sleep_time)

        self.last_time = current_time
        self.last_res = current_resource

        return period_rate

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Fetches a NNTP group article headers.')
    parser.add_argument('-g', '--group',    action='store', required=True,  help='Group name',               dest='group_name')
    parser.add_argument('-o', '--output',   action='store', required=True,  help='Output filename',          dest='output_filename')
    parser.add_argument('-s', '--start',    action='store', required=False, help='Start article id',         dest='start_id',   default=None, type=int)
    parser.add_argument('-e', '--end',      action='store', required=False, help='End article id',           dest='end_id',     default=None, type=int)
    parser.add_argument('-m', '--max-size', action='store', required=False, help='Maximum output size (MB)', dest='max_size',   default=None, type=int)
    parser.add_argument('-r', '--max-rate', action='store', required=False, help='Maximum download rate (kb/s)', dest='max_rate',   default=None, type=int)
    parser.add_argument('-l', '--loop',     action='store_true', required=False, help='When the maximum output size is reached, open a new file. Use the {0} placeholder in your output filename.', dest='loop', default=False)
    parser.add_argument('-j',               action='store', required=False, help='Number of threads to use (defaults to 1)', dest='num_threads', default=1, type=int)
    arguments = parser.parse_args()

    # --loop rotates through numbered files, so the template must contain the
    # "{0}" placeholder that FetcherFilename substitutes the file counter into.
    if arguments.loop and FILE_COUNT_PLACEHOLDER not in arguments.output_filename:
        sys.stderr.write('-l|--loop option specified but no placeholder {0} found in output filename %s' % (arguments.output_filename))
        sys.exit(1)

    # stdout cannot be rotated, so --loop makes no sense with "-".
    if arguments.loop and arguments.output_filename == "-":
        sys.stderr.write('-l|--loop option specified but output file is stdout (-).')
        sys.exit(1)

    nntp_fetcher = NNTPFetcher()

    article_count = 0
    last_fetched_article = -1
    # Highest article id that was fully fetched AND written; persisted at exit
    # so an interrupted run can resume from it.
    last_known_good_fetched_article = None

    # NOTE(review): presumably returns the group's (first, last) available
    # article ids — confirm against connection.NNTPFetcher.
    start_id, end_id = nntp_fetcher.get_article_count(arguments.group_name)
    if arguments.start_id is None and arguments.end_id is None:
        # Neither bound given on the command line: resume from saved state if
        # it exists, otherwise fetch the whole group.
        last_article_id = get_state(arguments.group_name)
        if last_article_id is not None:
            arguments.start_id = last_article_id
            arguments.end_id = end_id
        else:
            if arguments.end_id is None: arguments.end_id = end_id
            if arguments.start_id is None: arguments.start_id = start_id
    else:
        # At least one bound supplied: fill the other in from the server.
        if arguments.end_id is None: arguments.end_id = end_id
        if arguments.start_id is None: arguments.start_id = start_id

    # NOTE(review): the range is treated as inclusive on both ends below, so
    # this undercounts by one (end - start + 1); only affects the percentage /
    # ETA display and the single-thread warning.
    total_articles = arguments.end_id - arguments.start_id

    if total_articles < CHUNK_SIZE and arguments.num_threads > 1:
        sys.stderr.write("Warning: %d articles to fetch so using only 1 thread.\n" % total_articles)
        arguments.num_threads = 1

    # current_size: kbytes in the current output file (reset on rotation);
    # total_size: kbytes over the whole run (feeds the rate limiter).
    current_size = total_size = 0.0
    output_file = None
    start_time = time.time()
    ffname = FetcherFilename(arguments.output_filename)

    rate_limiter = None
    if arguments.max_rate is not None:
        rate_limiter = RateLimiter(arguments.max_rate)

    try:
        output_str = ""
        current_rate = 0

        # Split [start_id, end_id] into inclusive (first, last) chunks of at
        # most CHUNK_SIZE articles each.
        ranges = map(lambda n_: (n_, min(n_ + CHUNK_SIZE - 1, arguments.end_id)), range(arguments.start_id, arguments.end_id + 1, CHUNK_SIZE))

        for range_ in ranges:
            # (Re)open the output: first iteration, and after a --loop
            # rotation closed the previous file and reset output_file to None.
            if output_file is None:
                current_filename = ffname.get_name()

                if os.path.exists(current_filename):
                    sys.stderr.write('The file %s already exists. Please remove this file or specify another one.\n' % (current_filename))
                    nntp_fetcher.terminate()
                    sys.exit(1)

                # Compression is selected by filename extension; "-" is stdout.
                if current_filename.endswith(".bz2"):
                    output_file = BZ2File(current_filename, 'wb')
                elif current_filename.endswith(".gz"):
                    output_file = GzipFile(current_filename, 'wb')
                elif current_filename == '-':
                    # NOTE(review): headers are written as bytes below, but
                    # sys.stdout is a text stream on Python 3 — writing bytes
                    # to it raises TypeError; sys.stdout.buffer is likely
                    # what is meant here.  Confirm the targeted interpreter.
                    output_file = sys.stdout
                else:
                    output_file = open(current_filename, 'wb')
                assert(output_file is not None)

            headers = []

            range_start_id = range_[0]
            range_end_id = range_[1]

            # NOTE(review): fetch_article_headers appears to append parsed
            # header sequences into `headers` in place — confirm against
            # connection.NNTPFetcher.
            nntp_fetcher.fetch_article_headers(arguments.group_name, range_, headers)
            # NOTE(review): inclusive chunk, so this undercounts by one
            # article per chunk (display-only impact).
            article_count += range_end_id - range_start_id
            last_known_good_fetched_article = range_end_id

            for header in headers:
                # surrogateescape round-trips undecodable bytes that NNTP
                # servers occasionally emit in header values.
                output_string = '\t'.join(header).encode('utf-8', 'surrogateescape')
                output_file.write(output_string)
                output_file.write(b'\n')
                total_size += len(output_string) / 1024 	# sizes are tracked in kbytes
                current_size += len(output_string) / 1024

            if article_count > 0:
                # ETA = remaining fraction of the work times the projected
                # total duration, extrapolated from throughput so far.
                estimated_time = (1 - article_count / total_articles) * (total_articles / article_count * (time.time() - start_time))
                eta = str(datetime.now() + timedelta(seconds=estimated_time))
                remaining_articles = total_articles - article_count
                # Blank out the previous status line before repainting it.
                if len(output_str) > 0:
                    sys.stdout.write(" "*len(output_str))
                output_str = "\r[%s] %d%% - ETA: %s [%d kbytes/s]\r" % (current_filename, article_count / total_articles * 100, eta, current_rate)
                sys.stdout.write(output_str)
                sys.stdout.flush()

            # --max-size is given in MB while current_size is kbytes, hence /1024.
            if arguments.max_size is not None:
                if (current_size / 1024) >= arguments.max_size:
                    output_file.close()
                    if arguments.loop:
                        # Rotate: the next loop iteration opens a fresh file.
                        output_file = None
                        current_size = 0
                        sys.stdout.write("\n")
                        sys.stdout.flush()
                    else:
                        break

            if rate_limiter is not None:
                # Throttle on the cumulative kbytes written so far.
                current_rate = rate_limiter.sleep_if_required(total_size)

    except KeyboardInterrupt:
        sys.stderr.write('CTRL-C - Last known fully fetched/written article: %s.\n' % (last_known_good_fetched_article))

    # Persist resume state even after CTRL-C so the next run continues here.
    if last_known_good_fetched_article is not None:
        write_state(arguments.group_name, last_known_good_fetched_article)

    sys.stdout.write("\n")
    nntp_fetcher.terminate()