from BeautifulSoup import BeautifulSoup as bs
from apps.wuliu.models import *
from datetime import datetime as dt
from libs.kronos import *
from pyquery import PyQuery as pq
import mechanize
import urllib


def urlread(url):
    """Fetch *url* with urllib and return the raw response body (bytes)."""
    # def instead of a named lambda (PEP8 E731); same name and signature.
    return urllib.urlopen(url).read()

def scan_baidu_interval():
    s = ThreadedScheduler()
    s.add_interval_task(scan_baidu, "test1", 0, 4, method.threaded, ["task 1"], None)
    s.start()
    
    print '#' * 40
    add_interval_task(action=scan_baidu, taskname="test1", interval=30 * 60 * 60, initialdelay=0)

def scan_baidu():
    keywords = Keyword.find_all()
    domains = Domain.find_all()
    br = mechanize.Browser()
    for i in keywords:
        #c1 = open('crawler/baidu.htm', 'rb').read().decode('gb18030')
        c1 = br.open('http://www.baidu.com/s?%s' % urllib.urlencode({'wd': unicode.encode(i.name, 'gbk')})).read().decode('gb18030')
        soup = bs(c1).prettify()
        d = pq(soup)
        top_list, normal_list, right_list = [], [], []
        for j in d('.EC_mr15'):
            if not pq(j).attr('id'):
                _d = pq(j).find('.EC_PP').eq(0)
                top_list.append(_d('a').children('font').eq(1).text().lower())
        for j in d('#ec_im_container .EC_PP'):
            _d = pq(j)
            normal_list.append(_d('a').eq(1).children('font').eq(1).text().lower())
        for j in d('.ec_pp_f .EC_PP .m'):
            _d = pq(j)
            right_list.append(_d.parent().prev().text().split(' ')[0].lower())
        for j in domains:
            flag = False
            for index, name in enumerate(top_list):
                if name.find(j.name.lower()) >= 0:
                    flag = True
                    KeywordRank.save(** dict(keyword_id=i.id, domain_id=j.id, rank=index + 1, html=c1, location='top'))
            for index, name in enumerate(normal_list):
                if name.find(j.name.lower()) >= 0:
                    flag = True
                    KeywordRank.save(** dict(keyword_id=i.id, domain_id=j.id, rank=index + 1, html=c1, location='normal'))
            for index, name in enumerate(right_list):
                if name.find(j.name.lower()) >= 0:
                    flag = True
                    KeywordRank.save(** dict(keyword_id=i.id, domain_id=j.id, rank=index + 1, html=c1, location='right'))
            if not flag:
                KeywordRank.save(** dict(keyword_id=i.id, domain_id=j.id, html=c1))
            print '%s-save domain: %s' % (dt.now(), j.name)
