#!/usr/bin/python
# -*- coding:UTF-8 -*-
import MySQLdb
from com.xdtech.python.crawler import ImageFetchTool
import threading
import Queue
import time

# NOTE(review): this module-level ``result`` is never read — excuteQuery()
# builds and returns its own local ``result`` — it appears to be dead code.
result = set()
# Work queue of (localpath, url, referer) rows produced by the main block
# and consumed by MutilThread workers.
queue = Queue.Queue()

class MutilThread(threading.Thread):
    def __init__(self,queue):
        threading.Thread.__init__(self)
        self.queue = queue
    def run(self):
        #按照顺序执行，先put所有的的queue然后再逐步处理；
        #while queue.qsize() != 0:
        while True:
           rs = self.queue.get()
           size = queue.qsize()
           print "Thread's Size: %s" % size
           t.gDownload(rs[1], rs[2], path)
           self.queue.task_done()
           time.sleep(1000);

def getConnection(host,userName,password,db):
    try:
        connection = MySQLdb.connect(host=host, user=userName, passwd=password, db=db)
    except:
        print "couldn\'t connect to mysql"
        exit(0)

def excuteQuery(host,userName,password,db,sql):
    connection = MySQLdb.connect(host=host, user=userName, passwd=password, db=db)
    cursor = connection.cursor()
    cursor.execute(sql)
    print "records selected:",cursor.rowcount
    
    result = set()
    if cursor.rowcount > 0 :
        for row in cursor.fetchall():
            result.add(row)
    cursor.close();
    connection.close();
    return result

if __name__ == '__main__':
    # Local directory that replaces the server-side image root.
    #base = sys.argv[1]
    base = "e:\\hightech\\image\\"
    pageSize = 100

    # Count the failed downloads so we can page through them.
    sql = "select count(*) from failwebdb where downloadstate=-1"
    res = excuteQuery("123.124.135.103", "root", "123456", "gxqradar", sql)
    total = res.pop()[0]
    print('共有' + str(total) + "条记录")

    # Python 2 integer division; round up to cover a partial last page.
    pages = total / pageSize if total % pageSize == 0 else (total / pageSize) + 1
    print('共有' + str(pages) + "页")

    t = ImageFetchTool.ImageFetchTool()

    # BUG FIX: the original created one NEW thread per row on EVERY page,
    # leaking hundreds of threads.  Start a small fixed worker pool once.
    for _ in range(10):
        worker = MutilThread(queue)
        worker.setDaemon(True)
        worker.start()

    for page in range(1, pages + 1):
        start = (page - 1) * pageSize
        # NOTE(review): start/pageSize are trusted ints, but a
        # parameterized query would still be safer than concatenation.
        sql = "select localpath,url,referer from failwebdb where downloadstate=-1 limit " + str(start) + ", " + str(pageSize)
        print(sql)
        res = excuteQuery("123.124.135.103", "root", "123456", "gxqradar", sql)
        for rs in res:
            # Keep assigning the module-level ``path``: MutilThread.run()
            # reads this global, so it must exist before items are consumed.
            path = rs[0].replace("/home/hightech/image/", base)
            print("download image:" + rs[1] + " to localpath:" + path)
            queue.put(rs)
        # Block until every row of this page has been task_done()'d.
        queue.join()