# -*-coding:utf-8-*-
import requests
import threading
import Queue
from props import properties
from crawler_func import crawler_base
from YX_crawler.crawler_func import yx_crawler
from DAO.proxies import init_proxy_queue, del_proxy
from props.sql_connect import DBSession
from YX_crawler.crawler_func import proxy_crawler_run

from threads import yx_queue, proxy_queue, save_queue, work_threads, main_threads

# Application properties loaded from the props package.
props = properties.props
# Number of saver threads to spawn (from the 'thread.count' property).
thread_count = int(props.get('thread.count'))
# Crawler class used by worker task threads; run() rebinds this to
# yx_crawler.yx_crawler for the 'run' order.
work_crawler = crawler_base.crawler_base
# Last order string passed to run(); kept as a module global.
order_global = ''


def check_status():
    """Probe the target site and return the HTTP status code of its base host.

    Used as a reachability check before the crawl is started.
    """
    host_url = 'http://' + properties.props.get('header.baseHost')
    # Without a timeout, requests.get can block forever on a dead host
    # and the whole program would hang before any thread is started.
    status_code = requests.get(host_url, timeout=10).status_code
    return status_code

def put_item_id_run():
    """Feed every 7-digit item id (1000000 up to, excluding, 9999999) into yx_queue.

    put() blocks when the queue is full, so production is throttled to the
    speed at which worker threads consume ids.
    """
    # 'item_id' instead of 'id' to avoid shadowing the builtin id().
    for item_id in range(1000000, 9999999):
        yx_queue.put(item_id)

class work_yx_thread(threading.Thread):
    success = True
    proxy = None
    lock = None
    work_queue = None
    results = None

    def __init__(self, proxy):
        self.proxy = proxy
        self.lock = threading.Lock()
        self.work_queue = Queue.Queue(thread_count)
        self.results = []
        threading.Thread.__init__(self)

    def run(self):
        work_count = 0
        while True:
            self.work_queue.put(work_count)
            work_count += 1
            self.lock.acquire()
            results = self.results
            self.lock.release()
            if results.__contains__(False):
                del_proxy(self.proxy)
                break
            yx_task = str(yx_queue.get(timeout=10))
            t = work_yx_task_thread(self.proxy, yx_task, self.work_queue, self.results, self.lock)
            print '^^^^^^^^^^^^^^^^^^^^^^^^^' + t.name + '^^^^^^^^^^^^^^^^^^^^^^^^^' + 'proxy:' + self.proxy[
                'http'] + 'task:' + yx_task
            work_threads.append(t)
            t.start()


class work_yx_task_thread(threading.Thread):
    """Runs one crawl task against one proxy and reports the outcome.

    Appends the boolean result of work_crawler().work() to the shared
    results list (under the shared lock), then frees its slot in the
    bounded work queue so the supervisor can launch the next task.
    """

    def __init__(self, proxy, task, q, res, lock):
        # Outcome of the last crawl attempt; True until proven otherwise.
        self.success = True
        self.proxy = proxy
        self.yx_task = task
        # Bounded slot queue shared with the supervising work_yx_thread.
        self.work_queue = q
        # Shared results list; must only be mutated while holding 'lock'.
        self.results = res
        self.lock = lock
        threading.Thread.__init__(self)

    def run(self):
        try:
            self.success = work_crawler().work(self.yx_task, self.proxy)
            with self.lock:
                self.results.append(self.success)
        finally:
            # Always release the slot, even if work() raised; otherwise the
            # supervisor's bounded queue leaks capacity and eventually its
            # put() blocks forever.
            self.work_queue.get()


def save_entity_back():
    session = DBSession()
    while True:
        tobe_saved = save_queue.get()
        session.add(tobe_saved)
        try:
            session.commit()
            print tobe_saved, 'has saved!'
        except Exception, e:
            session.rollback()
            print e.message


def work_book_back():
    while True:
        proxy = proxy_queue.get()
        t = work_yx_thread(proxy)
        print '*******************************' + t.name + '*******************************'
        work_threads.append(t)
        t.start()


def work_yx(param_func):
    """Boot the crawl pipeline.

    Starts, in order: the proxy refresher, the per-proxy supervisor loop,
    the task producer given by param_func, and thread_count saver threads.
    All long-lived threads are registered in main_threads for join().
    """
    work_proxy()

    w = threading.Thread(target=work_book_back)
    p = threading.Thread(target=param_func)
    for booted in (w, p):
        booted.start()

    # One saver thread per configured worker slot.
    for _ in range(thread_count):
        saver = threading.Thread(target=save_entity_back)
        main_threads.append(saver)
        saver.start()

    main_threads.append(w)
    main_threads.append(p)


def work_proxy_back():
    # Seed the proxy queue from the database once, then keep crawling for
    # fresh proxies forever (craw() is assumed to block/pace itself --
    # NOTE(review): verify it does not busy-spin).
    init_proxy_queue()
    while True:
        proxy_crawler_run.craw()


def work_proxy():
    """Launch the background proxy-refreshing thread and track it in main_threads."""
    refresher = threading.Thread(target=work_proxy_back)
    main_threads.append(refresher)
    refresher.start()


def run(order):
    global work_crawler
    global order_global
    order_global = order
    if check_status() == 200:
        if order == 'run':
            work_yx(put_item_id_run())
            work_crawler = yx_crawler.yx_crawler
    elif check_status() == 403:
        print 'Forbidden'
    else:
        print 'Can not access YX'

    for t in main_threads:
        t.join()
    del main_threads[:]
    print 'end'
