#!/usr/bin/env python
#coding:utf-8
import time
from datetime import datetime
import threading
from spider import Spider
from config.options import parser
from utils import logHandle
class PrintProcess(threading.Thread):
    """Daemon thread that periodically prints the spider's runtime status.

    Every ``interval`` seconds (default 10) while the spider is running,
    prints the current crawl depth, pending task count, and the number of
    successfully / unsuccessfully crawled URLs.
    """
    def __init__(self, spider, interval=10):
        """
        :param spider: the Spider being monitored; must expose ``is_runing``,
            ``current_depth``, ``thread_pool``, ``finished`` and ``error_urls``.
        :param interval: seconds between status reports (default 10).
        """
        threading.Thread.__init__(self)
        self.start_time = datetime.now()
        self.interval = interval
        self.spider = spider
        # Daemon thread: terminates automatically when the main thread exits.
        self.daemon = True
    def run(self):
        while True:
            # NOTE: 'is_runing' (sic) is the attribute name the Spider exposes.
            if self.spider.is_runing:
                print("\n" * 2)
                print("=" * 50)
                print("\n")
                print('Crawling in depth %d' % self.spider.current_depth)
                print('Have %d tasks in threadpool.' % self.spider.thread_pool.get_task_left())
                print('Success crawl %d urls' % len(self.spider.finished))
                print('Fail crawl %d urls' % len(self.spider.error_urls))
                print("\n")
                print("=" * 50)
            # Sleep on every iteration: the original slept only while the
            # spider was running, so the loop busy-spun at 100% CPU when idle.
            time.sleep(self.interval)
    @property
    def print_time(self):
        """Print a farewell banner with start/end/elapsed time.

        Triggered by plain attribute access (``p.print_time``); returns None.
        """
        self.end_time = datetime.now()
        print("*" * 50)
        print('By-by!')
        print('Start time :%s' % self.start_time)
        print('End time :%s' % self.end_time)
        print('Cost time: %s ' % (self.end_time - self.start_time))
        print("*" * 50)
def main():
    """Parse CLI options, set up logging, and run the spider with a
    periodic status reporter."""
    args = parser.parse_args()
    # Best effort: crawling still proceeds even if log setup fails.
    if not logHandle.handle(args.logFile, args.logLevel):
        print('logging in log file: %s is error' % args.logFile)
    spider = Spider(args)
    if args.testSelf:
        # Run the self-test; exit with a failure code if it does not pass.
        if not spider.test_self(args):
            exit(-1)
    # Print status information every 10 seconds.
    printProcess = PrintProcess(spider, interval=10)
    printProcess.start()
    # NOTE(review): 'spider.run' is accessed without parentheses. If
    # Spider.run is a property (like PrintProcess.print_time) this is fine;
    # if it is a plain method, the crawl never starts -- confirm in spider.py.
    spider.run
    printProcess.print_time
# Script entry point: run only when executed directly, not when imported.
if __name__ == '__main__':
    main()