# -*-coding:utf-8-*-
import requests
import threading
import datetime
import Queue
from props import properties
from crawler_func import tag_crawler, booklist_crawler, update_bookinfo, update_api, crawler_base
from DAO.tag import tag
from DAO.book import book
from DAO.proxies import init_proxy_queue, del_proxy
from props.sql_connect import DBSession
from proxy_crawler import proxy_crawler_run

from threads import book_task_queue, proxy_queue, book_task_fail_queue, save_queue, work_threads, main_threads

props = properties.props
# Number of concurrent worker/saver threads, read from configuration key 'thread.count'.
thread_count = int(props.get('thread.count'))
# Crawler class used by worker threads; reassigned in run() according to the crawl order.
work_crawler = crawler_base.crawler_base
# Current crawl order ('tag' / 'list' / 'info' / 'api'); set by run(), read by work_book().
order_global = ''


def check_status():
    """Probe the target host over HTTP and return the response status code.

    Returns:
        int: the HTTP status code (200 reachable, 403 forbidden, ...).

    The request carries a timeout so a silently-hung connection cannot
    block startup forever (the original call had no timeout and could
    hang indefinitely); a timeout now raises requests.Timeout instead.
    """
    host_url = 'http://' + properties.props.get('header.bookHost')
    status_code = requests.get(host_url, timeout=10).status_code
    return status_code


def put_tag_run():
    """Producer loop: feed level-1 tag names into book_task_queue forever.

    After each full tag scan, drains book_task_fail_queue so failed tasks
    are retried.  Never returns; intended as a thread target.
    """
    session = DBSession()
    query = session.query(tag)
    while True:
        for t in query.filter(tag.tag_level == 1).all():
            book_task_queue.put(t.tag_name)
        # Drain the failure queue completely.  The original used the racy
        # empty()/get() pair and re-enqueued at most ONE failed task per
        # full tag scan; get_nowait() is atomic and drains everything.
        while True:
            try:
                book_task_queue.put(book_task_fail_queue.get_nowait())
            except Queue.Empty:
                break


def put_book_run():
    """Producer loop: feed stale book ids into book_task_queue forever.

    A book qualifies when it has never been updated (last_update IS NULL)
    or when its last update is older than 'update.day' days.  After each
    scan, drains book_task_fail_queue so failed tasks are retried.
    Never returns; intended as a thread target.
    """
    session = DBSession()
    query = session.query(book.book_id)
    time_delta = datetime.timedelta(days=int(props.get('update.day')))
    while True:
        # Recompute the cutoff on every pass.  The original computed it once
        # at startup, so the staleness threshold never advanced and books
        # updated after startup were never re-flagged as overdue.
        time_offset = datetime.datetime.now() - time_delta

        # '== None' is required by SQLAlchemy to emit 'IS NULL'; 'is None'
        # would not build the right SQL expression.
        for b in query.filter(book.last_update == None).all():
            book_task_queue.put(str(b.book_id))

        for b in query.filter(book.last_update <= time_offset).all():
            book_task_queue.put(str(b.book_id))

        # Drain the failure queue completely (the original re-enqueued at
        # most one failed task per scan, via a racy empty()/get() pair).
        while True:
            try:
                book_task_queue.put(book_task_fail_queue.get_nowait())
            except Queue.Empty:
                break


class work_book_thread(threading.Thread):
    success = True
    proxy = None
    lock = None
    work_queue = None
    results = None

    def __init__(self, proxy):
        self.proxy = proxy
        self.lock = threading.Lock()
        self.work_queue = Queue.Queue(thread_count)
        self.results = []
        threading.Thread.__init__(self)

    def run(self):
        work_count = 0
        while True:
            self.work_queue.put(work_count)
            work_count += 1
            self.lock.acquire()
            results = self.results
            self.lock.release()
            if results.__contains__(False):
                del_proxy(self.proxy)
                break
            book_task = str(book_task_queue.get(timeout=10))
            t = work_book_task_thread(self.proxy, book_task, self.work_queue, self.results, self.lock)
            print '^^^^^^^^^^^^^^^^^^^^^^^^^' + t.name + '^^^^^^^^^^^^^^^^^^^^^^^^^' + 'proxy:' + self.proxy[
                'http'] + 'task:' + book_task
            work_threads.append(t)
            t.start()


class work_book_task_thread(threading.Thread):
    """One crawl task: run work_crawler on a single book task via a proxy.

    Appends the crawl result (True/False) to the results list shared with
    the dispatcher (guarded by the shared lock), then frees one slot in the
    shared bounded work queue so the dispatcher can launch another task.
    """

    def __init__(self, proxy, task, q, res, lock):
        # Instance state only; the original also kept a class-level mutable
        # default (results = []), a classic shared-state pitfall.
        self.success = True
        self.proxy = proxy
        self.book_task = task
        self.work_queue = q   # bounded slot queue shared with the dispatcher
        self.results = res    # shared results list, guarded by `lock`
        self.lock = lock
        threading.Thread.__init__(self)

    def run(self):
        self.success = work_crawler().work(self.book_task, self.proxy)
        self.lock.acquire()
        try:
            self.results.append(self.success)
        finally:
            # Release even if append somehow raises, so the dispatcher
            # can never deadlock on this lock.
            self.lock.release()
        # Free the slot (the original bound this to an unused `sig` local).
        self.work_queue.get()


def save_entity_back_in_bundle():
    session = DBSession()
    while True:
        tobe_saved_bundle = save_queue.get()
        for i in range(len(tobe_saved_bundle)):
            tobe_saved = tobe_saved_bundle[i]
            session.add(tobe_saved)
            print tobe_saved, 'is saving!'
        try:
            session.commit()
        except Exception, e:
            session.rollback()
            print e.message


def save_entity_back():
    session = DBSession()
    while True:
        tobe_saved = save_queue.get()
        session.add(tobe_saved)
        try:
            session.commit()
            print tobe_saved, 'has saved!'
        except Exception, e:
            session.rollback()
            print e.message


def work_book_back():
    while True:
        proxy = proxy_queue.get()
        t = work_book_thread(proxy)
        print '*******************************' + t.name + '*******************************'
        work_threads.append(t)
        t.start()


def work_book(param_func):
    """Start the crawl pipeline: proxy feeder, dispatcher, producer, savers.

    Args:
        param_func: producer callable (put_tag_run / put_book_run) that
            fills book_task_queue; run in its own thread.

    The saver function is chosen from the global crawl order: 'list' saves
    single entities, 'info'/'api' save bundles.  All spawned threads are
    appended to main_threads so run() can join them.
    """
    work_proxy()
    w = threading.Thread(target=work_book_back)
    w.start()
    p = threading.Thread(target=param_func)
    p.start()

    # Dispatch table instead of the if/elif chain.
    save_func = {
        'list': save_entity_back,
        'info': save_entity_back_in_bundle,
        'api': save_entity_back_in_bundle,
    }.get(order_global)
    # Only spawn savers when a saver exists; the original started
    # thread_count threads with target=None for unknown orders.
    if save_func is not None:
        for _ in range(thread_count):
            s = threading.Thread(target=save_func)
            main_threads.append(s)
            s.start()
    main_threads.append(w)
    main_threads.append(p)


def work_proxy_back():
    """Seed the proxy queue once, then crawl fresh proxies in an endless loop."""
    init_proxy_queue()
    crawl = proxy_crawler_run.craw
    while True:
        crawl()


def work_proxy():
    """Launch the background proxy-feeding thread and register it for join()."""
    proxy_thread = threading.Thread(target=work_proxy_back)
    main_threads.append(proxy_thread)
    proxy_thread.start()


def run(order):
    global work_crawler
    global order_global
    order_global = order
    if check_status() == 200:
        if order == 'tag':
            work_crawler = tag_crawler.tag_crawler
            print "starting crawling tag"
        elif order == 'list':
            work_book(put_tag_run)
            work_crawler = booklist_crawler.book_list_crawler
            print "starting crawling book list"
        elif order == 'info':
            work_book(put_book_run)
            work_crawler = update_bookinfo.update_bookinfo_crawler
            print "starting crawling book info"
        elif order == 'api':
            work_book(put_book_run)
            work_crawler = update_api.update_api_crawler
    elif check_status() == 403:
        print 'Forbidden'
    else:
        print 'Can not access DouBan'

    for t in main_threads:
        t.join()
    del main_threads[:]
    print 'end'
