#! /usr/bin/python
#  -*- coding: utf-8 -*-
import time


import Queue
from urlLib import  *
from   dataer  import *


from libs import sqliteDB
from libs.functions import *
import stackless 
import threading
import urllib2

def sqler():
    DB=connectDB()   
    while 1:       
        for i in range(0, sqlquene.qsize(), 1):
      
            sql = sqlquene.get()
          
            try:
                DB.execsql(sql)
               
            except TypeError:
                print TypeError
        DB.commit()

def urlsreturn(tab, q):
    """Worker thread: fetch batches of pending URLs from table *tab*
    and put them onto queue *q* for the downstream workers.

    :param tab: name of the URL table to read from (e.g. 'article')
    :param q:   Queue.Queue the URLs are put() onto

    Runs forever; intended as a ``threading.Thread`` target.
    """
    DB = connectDB()
    urler = urlAdminer(DB)
    while 1:
        # getUrls(5, tab) presumably returns up to 5 pending URLs —
        # TODO confirm against urlAdminer.
        batch = urler.getUrls(5, tab)
        for u in batch:
            q.put(u)
        if not batch:
            # Bug fix: without this pause the loop hammered the database
            # in a tight busy spin whenever no URLs were pending.
            time.sleep(0.5)
          


def connectDB():
    """Open and return a fresh sqlite_db handle on the module-level
    database path ``db`` (set in ``__main__``)."""
    # ``global db`` in the original was a no-op: reading a module-level
    # name needs no declaration.
    return sqliteDB.sqlite_db(db)
    
    
    

def GET_url():
    '''
    处理队列中的url（取得地址）
    
    '''
    
    DB=connectDB()
    urler = urlAdminer(DB)
    
    while 1:
         
        url = urlqueu.get()
                
        try:
            urlpath = hostParser(url)
            
            uh = urllib2.urlopen(url)
            html = uh.read()
            htmers = htmlparser.htmler(html=html)
            arr = htmers.getTags('a')
      
           
            j = 1
            for i in arr:
                try:                    
                    u = replairUrl(urlpath, i['href'])
                    pass
                                
                    urler.setUrls([u])
                                   
                except:
                    pass
        except  BaseException, e:
            DB.execsql("delete from urls where urls='%s'" % url)
            
            print "ERR0:%s %s" % (url,str(e))
            
            
            
def GET_art(dataers):
    """Worker thread: pull article URLs off the global
    ``url_article_quene`` and hand each one to *dataers* for processing.

    :param dataers: a dataerContrl instance providing do_article(url)

    Runs forever; intended as a ``threading.Thread`` target.
    """
    while True:
        dataers.do_article(url_article_quene.get())
         
     
 
if __name__ == '__main__':
    # Host regex fragments — presumably a blog-host blacklist/whitelist
    # consumed by dataChecker; verify against its implementation.
    iniXml = ['blog\.sohu\.com', 'blog\.sina', 'blog\.163', 'hi\.baidu']
    # Per-site crawl rules: 'dom' matches the site root, 'list' matches
    # listing pages, 'article' matches article pages (meishichina.com).
    ini = [{'list':'[\w\/]+\//(List_\d+\.html)?','dom':'^http\:\/\/\w*\.meishichina\.com','article':'\/(recipe-)?\d+\.html'}]
    
    urlcheck= dataChecker(ini ,iniXml)
    # Module-level queues — the worker functions above read these as
    # globals (sqler/GET_url/GET_art).
    sqlquene = Queue.Queue()
    urlqueu = Queue.Queue()
    url_article_quene=Queue.Queue()
    theards = []
    # SQLite database path read by connectDB().
    db='data/db.s3db'
    
    #mydb1 = mysql.mysql(inc.dbIni, inc.cacheDir)


    # NOTE(review): tags, readRss, mysql, inc, dataerContrl come from the
    # star imports at the top of the file — confirm in dataer/urlLib.
    mytags = tags.tags('data/dict')
    
    rss = readRss.readRss('item')

    # Four article workers, each with its own dataerContrl (and its own
    # MySQL connection, so threads don't share one handle).
    for i in range(1,5):
        
        theards.append(threading.Thread(target=GET_art,args=(dataerContrl(mysql.mysql(inc.dbIni, inc.cacheDir),mytags,rss),)))


    
   
    
    #theards.append(threading.Thread(target=urlsreturn, args=('urls',urlqueu )))
    # Feeder thread: pulls pending article URLs from the DB into the queue.
    theards.append(threading.Thread(target=urlsreturn, args=('article',url_article_quene )))

   

    #theards.append(threading.Thread(target=GET_url))

    #theards.append(threading.Thread(target=sqler))

    # Threads are non-daemon, so the process keeps running after the main
    # thread falls off the end here; there is no join/shutdown path.
    for t  in theards:
        t.start()