#!/usr/bin/env python

from __future__ import print_function
import re
import os
from tempfile import NamedTemporaryFile

from _internal import (
    get_repo_root,
    execute,
)

def __handler(args):
    """Handle the ``crawl`` sub-command.

    Merges every URL-list file in ``args.filelist`` into a single
    temporary file, exports the crawler configuration through
    environment variables, then runs the scrapy crawler from the
    repository root.  The temporary file is removed afterwards.
    """

    if args.folder:
        # resolve to an absolute path now, before the chdir below,
        # so a relative --folder stays meaningful.
        os.environ["CRAWLER_DATA_FOLDER"] = os.path.abspath(args.folder)

    if args.index > 0:
        # update env first, before changing directory.
        os.environ["CRAWLER_FILENAME_IDX"] = f"{args.index}"

    try:
        os.chdir(get_repo_root())
    except OSError as exp:
        print(exp)
        return

    # all the lists are merged into a single temporary file.
    filename = __merge_files(args.filelist)
    # BUG FIX: this used to export a literal placeholder string; the
    # crawler needs the path of the merged URL-list file.
    os.environ["CRAWLER_URL_LIST"] = filename
    try:
        execute(["scrapy", "crawl", "--loglevel", args.loglevel.upper(), args.crawler])
    finally:
        # always clean up the temporary merged file, even when the
        # crawl fails, so we do not leak temp files.
        os.remove(filename)


def __merge_files(filelist):
    """Merge the given URL-list files into one temporary file.

    Blank lines are dropped; comment lines (starting with ``#``) are
    always kept; every other line is de-duplicated while preserving its
    first-seen order.  Returns the name of the temporary file — the
    caller is responsible for removing it.
    """

    contents = ""
    for name in filelist:
        with open(name) as fh:
            contents += fh.read() + "\n\n"

    # remove blank and duplicate lines.  A parallel ``seen`` set makes
    # the duplicate check O(1) instead of re-scanning the result list
    # for every line (the old check was quadratic).
    results = []
    seen = set()
    for line in re.split(r"\r|\n", contents):
        line = line.strip()
        if not line:
            continue

        if line[0] == '#':
            # comments are kept unconditionally (even when repeated),
            # but still count for the duplicate check below, matching
            # the original list-membership behaviour.
            results.append(line)
            seen.add(line)
            continue

        if line in seen:
            continue

        seen.add(line)
        results.append(line)
    contents = "\n".join(results)

    # delete=False: the file must outlive this function; the caller
    # removes it once the crawl has finished.
    tmp = NamedTemporaryFile(delete=False)
    tmp.write(contents.encode())
    tmp.close()

    return tmp.name


def add_parser(subparsers):
    '''
    add a sub-command parser for the ``crawl`` command and register
    __handler as its callback via ``set_defaults(func=...)``.
    '''
    cmd_parser = subparsers.add_parser('crawl', help='crawl')
    cmd_parser.add_argument("-q", "--quiet",
                            dest="quiet",
                            default=False, action="store_true",
                            help="to show build logs or not")

    cmd_parser.add_argument("-i", "--index",
                            dest="index",
                            default=0,
                            type=int,
                            help="file name index start value (0 indicates auto detect)")

    cmd_parser.add_argument("-c", "--crawler",
                            dest="crawler",
                            default="generic",
                            choices = ["generic"],
                            help="crawler name")

    cmd_parser.add_argument("-f", "--folder",
                            dest="folder",
                            default=None,
                            help="target folder to create files")

    # BUG FIX: the help text was a copy-paste of the --folder help
    # ("target folder to create files"); describe the log level instead.
    cmd_parser.add_argument("-l", "--loglevel",
                            dest="loglevel",
                            default="info",
                            choices = ["debug", "info", "warning", "error", "critical"],
                            help="crawler log level")

    cmd_parser.add_argument('filelist',
                            nargs='+',
                            help='filename of URL list')

    cmd_parser.set_defaults(func=__handler)

