#! /usr/bin/python
#  -*- coding: utf-8 -*-

import time

import Queue
from dataer import *
from libs import sqliteDB
from libs.functions import *
import stackless
import threading
from urlLib import *
import urllib2


def sqler(sqlchannel):
    DB = connectDB()
    while sqlchannel.receive():       
        for i in range(0, sqlquene.qsize(), 1):
      
            sql = sqlquene.get()
            print sql
          
            try:
                DB.execsql(sql)
               
            except TypeError:
                print TypeError
        DB.commit()

def urlsreturn_urls():
    '''
    
    把待处理url 放入队列
    
    '''

    DB = connectDB()
    urler = urlAdminer(DB, urlcheck)



    while channel_art_s_url.receive():
        print '--'
        url = urler.getUrls('urls')
        urlqueu.put(url)
               
      
           
           
        channel_art_r_url.send('dd')
           
          


def connectDB():
    """Return a fresh sqlite wrapper bound to the module-level ``db`` path.

    NOTE(review): the original declared ``global db``; the name is only
    read here, so the declaration is not required and has been dropped.
    """
    return sqliteDB.sqlite_db(db)

def GET_url():
    '''
    处理队列中的url（取得地址）
    
    '''
    
    DB = connectDB()
    urler = urlAdminer(DB, urlcheck)

    
    while  channel_art_r_url.receive():

                        
        url = urlqueu.get()
        print url
        try:
            urlpath = hostParser(url)

            uh = urllib2.urlopen(url)
            html = uh.read()
            htmers = htmlparser.htmler(html=html)
            arr = htmers.getTags('a')
            j = 1
            for i in arr:
                try:
                    u = replairUrl(urlpath, i['href'])
                        

                    urler.setUrls([u])

                except:
                    pass
        except  BaseException, e:
            DB.execsql("delete from urls where urls='%s'" % url)


            print "ERR0:%s %s" % (url, str(e))
        stackless.schedule()
        channel_art_s_url.send('dd')

            
def urlsreturn_art():
    '''
    
    把待处理url 放入队列
    
    '''

    DB = connectDB()
    urler = urlAdminer(DB, urlcheck)



    while channel_art_s.receive():
        print '--'
        url = urler.getUrls('article')
        artqueu.put(url)
               
      
           
           
        channel_art_r.send('dd')            
            
def GET_art(dataers):

             
    while  channel_art_r.receive():
        url = artqueu.get()
        print url
        dataers.do_article(url)
        stackless.schedule()
        channel_art_s.send('dd')
         
         
         
         
         
     
 
if __name__ == '__main__':
    # Regex fragments matching known blog hosts (sohu / sina / 163 / baidu).
    iniXml = ['blog\.sohu\.com', 'blog\.sina', 'blog\.163', 'hi\.baidu']
    # Per-site crawl rules: list-page pattern, allowed domain, article-page pattern.
    ini = [{'list':'[\w\/]+\//(List_\d+\.html)?', 'dom':'^http\:\/\/\w*\.meishichina\.com', 'article':'\/(recipe-)?\d+\.html'}]

    urlcheck = dataChecker(ini, iniXml)
    # Stackless channels pairing each producer/consumer tasklet:
    # *_s channels wake the producers, *_r channels wake the consumers.
    channel_art_s = stackless.channel()
    channel_art_r = stackless.channel()
    channel_art_s_url = stackless.channel()
    channel_art_r_url = stackless.channel()
    # NOTE(review): channel_sql is created but never used -- sqler() is never started.
    channel_sql = stackless.channel()
    artqueu = Queue.Queue()
    urlqueu = Queue.Queue()
   
    # NOTE(review): unused (and misspelled "threads") -- candidate for removal.
    theards = []
    # sqlite database path read by connectDB().
    db = 'data/db.s3db'
    
    # NOTE(review): mysql/inc/tags/readRss are not visibly imported here --
    # presumably provided by one of the star imports; confirm.
    mydb1 = mysql.mysql(inc.dbIni, inc.cacheDir)
  


    mytags = tags.tags('data/dict')
    
    rss = readRss.readRss('item')
    dataers = dataerContrl(mydb1, mytags, rss)
    # Article-collection tasklets (translated from Chinese: "info collection process").
    stackless.tasklet(urlsreturn_art)()
    stackless.tasklet(GET_art)(dataers)
    stackless.tasklet(channel_art_s.send)('startup')

    
    
    # URL-collection tasklets (translated from Chinese: "collect urls").
    stackless.tasklet(urlsreturn_urls)()
    stackless.tasklet(GET_url)()

    stackless.tasklet(channel_art_s_url.send)('startup')
    
    stackless.run()
    
