#!/usr/bin/env python3
import requests
import argparse
import sys
import re
from termcolor import cprint, colored
from queue import Queue
from concurrent.futures import ThreadPoolExecutor

# mongoHelper is optional: it is only needed for the --record feature.
# NOTE(review): on failure `dbhelper` stays undefined, so using --record
# without the module installed will raise NameError downstream.
try:
    from mongoHelper import dbhelper
except ImportError:
    cprint("No db modules; install mongoHelper from https://github.com/", file=sys.stderr)

# Desktop-browser User-Agent string.
# NOTE(review): USER_AGENT is never referenced in the code shown here —
# RAW_HEADERS carries its own 'User-Agent' — so this looks dead; confirm
# before removing.
USER_AGENT = "Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.35 (KHTML, like Gecko) Ubuntu/10.10 Chromium/13.0.764.0 Chrome/13.0.764.0 Safari/534.35"
# Headers sent with every request in check(); mimics a desktop Chrome browser.
RAW_HEADERS  = {
    'Accept-Encoding': 'gzip, deflate, sdch',
    'Accept-Language': 'en-US,en;q=0.8',
    'Cache-Control': 'max-age=0',
    'Connection': 'keep-alive',
    # 'Host': None,
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36'
}

# Terminal colour per HTTP status class; keys are the class floor
# (200/300/400/500) computed from the response code in the main loop.
COLORS = {
    500: 'blue',
    400: 'red',
    300: 'yellow',
    200: 'green',
}

def check(url):
    """Fetch *url* and return ``(url, status_code)``.

    Request failures are reported on stderr and mapped to status 400 so
    the caller can still bucket the result by status class.
    """
    code = 400  # fallback status when the request itself fails
    try:
        # A timeout keeps one dead host from hanging the whole scan;
        # RequestException covers connection/timeout/HTTP-level errors
        # without swallowing unrelated bugs like the old bare Exception.
        code = requests.get(url, headers=RAW_HEADERS, timeout=10).status_code
    except requests.RequestException:
        print(colored(url, 'red', attrs=['bold']), file=sys.stderr)
    return url, code

def check_uri(line):
    """Normalise one input line into a usable URL.

    Returns False when the line is empty or holds more than one token,
    the line unchanged when it already carries an http/https scheme, and
    the line prefixed with ``http://`` otherwise.
    """
    l = line.strip()
    if len(l.split()) != 1:
        return False
    # BUG FIX: the original fell through (returned None) when the line
    # already started with "http://", silently dropping valid URLs.
    if l.startswith(("http://", "https://")):
        return l
    return "http://" + l

class Exe:
    """Thin wrapper around a :class:`ThreadPoolExecutor`."""

    # Shared across ALL instances: results accumulated by submit().
    Results = []

    def __init__(self, num):
        self.num = num
        self.exe = ThreadPoolExecutor(max_workers=num)

    def submit(self, funcs, *args, **kargs):
        """Run one task and append its result to ``Exe.Results``.

        NOTE(review): waiting on ``.result()`` here makes submit() fully
        synchronous — successive calls never overlap. Kept as-is for
        backward compatibility; use :meth:`map` for real concurrency.
        """
        future = self.exe.submit(funcs, *args, **kargs)
        Exe.Results.append(future.result())

    def map(self, func, args_iterable):
        """Yield ``func`` applied to each item, in input order."""
        yield from self.exe.map(func, args_iterable)


def asy_urls(f):
    """Lazily yield normalised URLs from the file at path *f*.

    Lines that check_uri rejects (empty / multi-token) are skipped.
    """
    with open(f) as handle:
        yield from filter(None, map(check_uri, handle))

def record(db, url):
    """Persist the path component of *url* into the "urls" collection."""
    match = re.search(r'(?:http\://.+?)(/.+)', url)
    if match:
        db.insert("urls", uri=match.group(1))


def main():
    """Parse and return the command-line arguments for the URL checker."""
    doc = """
     test url's code
    --- write by qingluan
    """
    parser = argparse.ArgumentParser(usage=" how to use this "
        , description=doc)
    parser.add_argument("-f", "--file", default=None, help="test file path")
    # type=int so args.threads is always an integer; the default already
    # was one, and callers doing int(args.threads) keep working.
    parser.add_argument("--threads", default=8, type=int, help="set threads number")
    parser.add_argument("-s", "--save", default=None, help="save result to a file : exm : --save test.csv ")
    parser.add_argument("-g", "--grep", default=None, help="grep field , this can easily extract urls which is (200/300/400/500) ")
    parser.add_argument("-r", "--record", default=False, action='store_true', help="record uri to local mongodb")
    parser.add_argument("--args", default='', help="args for options")
    return parser.parse_args()

if __name__== "__main__":
    cprint("-- init ---", 'blue', end="", file=sys.stderr)
    args = main()
    exe = Exe(int(args.threads))
    fp = sys.stdout
    # --grep "200 404" style filters: collect the numeric class values.
    grep_fields = re.findall(r'(\d+)', args.grep) if args.grep else []
    count = 0
    # Only open MongoDB when --record was requested; this keeps the script
    # usable when the optional mongoHelper module is not installed.
    db = dbhelper("local") if args.record else None
    # stderr, to complete the "-- init ---" line started above.
    cprint("[ok]", 'green', file=sys.stderr)

    if args.save:
        fp = open(args.save, "w")

    try:
        if args.file:
            gen_ips = asy_urls(args.file)
            for ip, code in exe.map(check, gen_ips):
                # Floor to the status class (200/300/400/500). Integer
                # division replaces round(), whose banker's rounding would
                # misclassify x50 codes.
                field = code // 100 * 100
                # .get with a fallback colour: informational 1xx codes are
                # not in COLORS and used to raise KeyError here.
                msg = "%s [%s]" % (ip, colored(str(code), COLORS.get(field, 'red')))
                if grep_fields:
                    # Print only the requested status classes.
                    for wanted in grep_fields:
                        if int(wanted) == field:
                            print(msg, file=fp)
                else:
                    print(msg, file=fp)
                count += 1
                if args.record:
                    record(db, ip)
    finally:
        # Close the --save file (the original leaked it); never close stdout.
        if fp is not sys.stdout:
            fp.close()