#!/usr/bin/env python
#coding:utf-8
"""
Define the command-line arguments used by the program.
"""
import argparse
# Default values for the command-line options (keys match the argparse
# dest names used below).
# NOTE(review): 'allowedDomains' defaults to int 0 while its option is
# declared with type=str -- presumably 0 means "no domain restriction";
# confirm against the code that consumes this value.
_default_opt = {'threadNum':10,'dbFile':'data.db','keywords':'','downLoadFile':'*','logFile':'spider.log',
                'logLevel':5,'allowedDomains':0}
def url(value):
    """Argparse type function: normalize *value* into an absolute URL.

    Prepends 'http://' when the value carries no explicit scheme, so a
    bare host like 'example.com' is accepted on the command line.

    Returns the (possibly prefixed) URL string.
    """
    # Test for a real scheme prefix, not just the substring 'http':
    # the original check skipped prefixing for hosts such as
    # 'httpfoo.com', producing an invalid URL.
    if not value.startswith(('http://', 'https://')):
        value = 'http://' + value
    return value
def number(value):
    """Argparse type function: parse *value* as a positive integer.

    Raises argparse.ArgumentTypeError when the value is not an integer
    or is less than 1.
    """
    try:
        parsed = int(value)
    except ValueError:
        raise argparse.ArgumentTypeError('Not a number')
    # Guard clause: reject zero and negatives.
    if parsed < 1:
        raise argparse.ArgumentTypeError('The number must more than 0')
    return parsed

# Build the shared command-line parser for the spider.
parser = argparse.ArgumentParser()

# Required: starting URL (normalized by the url() type function above).
parser.add_argument('-u', type=url, required=True, metavar='URL', dest='url',
                   help='Set the start url to crawl')

# Required: maximum crawl depth (positive integer).
parser.add_argument('-d', type=number, required=True, metavar='DEPTH', dest='depth',
                   help='Set the max crawling depth')

# Worker thread count.
parser.add_argument('--thread', type=number, metavar='NUM', default=_default_opt['threadNum'], dest='threadNum',
                   help='The amount of threads num. Default:%d' % _default_opt['threadNum'])

# SQLite database file used to store crawl results.
parser.add_argument('--dbfile', type=str, metavar='FILE', default=_default_opt['dbFile'], dest='dbFile',
                   help='Set the sqlite database file path, Default: %s' % _default_opt['dbFile'])

# Optional keyword filter; empty string (the default) means no filtering.
parser.add_argument('--key', type=str, metavar='KEYWORD', default=_default_opt['keywords'], dest='keywords',
                   help='The keyword of page for crawling. By default: crawl all pages')

# File extension filter for downloads; '*' (the default) means all files.
parser.add_argument('--downloadfile', type=str, metavar='FILE EXT', default=_default_opt['downLoadFile'], dest='downLoadFile',
                   help='Set the file type to download. By default: download all file ')

# Log file path.
parser.add_argument('--logfile', type=str, metavar='FILE', default=_default_opt['logFile'], dest='logFile',
                   help='Set the log file path, Default: %s' % _default_opt['logFile'])

# Log verbosity, 1 (quietest) to 5 (most verbose).
parser.add_argument('--loglevel', type=int, choices=[1, 2, 3, 4, 5], default=_default_opt['logLevel'], dest='logLevel',
                   help='Set the level of logging from num [1-5],The more number you set the more log info you get. Default:%d' % _default_opt['logLevel'])

# Domain restriction. NOTE(review): default is int 0 (from _default_opt)
# while type=str converts only command-line input -- presumably 0 means
# "no restriction"; confirm against the consumer of this option.
parser.add_argument('--allowedDomains', type=str, default=_default_opt['allowedDomains'], dest='allowedDomains',
                   help='Set the domain to limit crawling. By default: Crawler All')
# Flag: run the self-test suite instead of crawling.
parser.add_argument('--testself',action='store_true',dest='testSelf',
                   help='Run the test case')

if __name__ == '__main__':
    # Smoke test: parse argv and display the resulting namespace.
    args = parser.parse_args()
    # Parenthesized call form prints identically under Python 2 (single
    # parenthesized expression) and is valid Python 3, unlike the bare
    # Python-2-only 'print args' statement.
    print(args)