#!/usr/bin/python
#-*-coding:utf-8-*-
#Author : Manning
#Date : 2015-10-17
"""
MSpider global_scheduling
"""
import time
import logging
import sqlite3
import sys,os
sys.path.append(os.getcwd()+'/data')
sys.path.append(os.getcwd()+'/UIlib')
from UIlib.URLdata import UrlData
from DataBaseHelper import dataBaseHelper
spider_logger = logging.getLogger('MSpiderLogs')


def global_scheduling(spider_global_variable):
    """Forward url_nodes from the global queue into the spider queue.

    Runs forever: every url_node pulled from
    ``spider_global_variable.global_urlnode_queue`` is pushed onto
    ``spider_global_variable.spider_urlnode_queue`` and handed to
    ``save_data``. When ``UrlData.end_flag`` is set, the loop ends via
    ``sys.exit(0)`` (raises SystemExit — presumably this runs in its own
    thread, so only that thread terminates; verify against the caller).

    This is a convenient hook point: every url_node (structure defined
    in UrlData.py) passes through here, so extra processing can be
    inserted in this loop.

    :param spider_global_variable: shared state object exposing
        ``global_urlnode_queue`` and ``spider_urlnode_queue``.
    """
    while True:
        if spider_global_variable.global_urlnode_queue.qsize() > 0:
            node = spider_global_variable.global_urlnode_queue.get()
            spider_global_variable.spider_urlnode_queue.put(node)
            save_data(node)
        else:
            # Yield briefly when the queue is empty instead of
            # busy-spinning at 100% CPU.
            time.sleep(0.05)
        if UrlData.end_flag:
            sys.exit(0)

def save_data(node):
    """Hand a url_node over to the data/UI layer.

    NOTE: earlier revisions persisted the node into sqlite
    (``get_url`` table, via sqlite3 or dataBaseHelper); that dead
    commented-out code has been removed. The current behavior is only
    to forward the node to ``UrlData.data_queue`` for consumption
    elsewhere.

    :param node: a url_node tuple; index 1 carries the URL record
        (url, method, depth, referer, data) per UrlData.py.
    """
    UrlData.data_queue.put(node)