# -*- coding: utf-8 -*-
"""
Created on 15-12-24 上午10:24
@file: baidurank.py
@author: Liangrong Li
@contact: liliangrong0@126.com
# 增加过滤百度自身产品后的排名
"""

import random
import re
import time
import urllib
import urllib2


# Scrape Baidu search-result pages to find a site's keyword ranking
class BaiduRank:
    def __init__(self, keywords, site_url):
        self.rank = 0
        self.baidu_num = 0
        self.baidu_url = "baidu.com"
        self.page = 1
        self.site_url = site_url
        self.keywords = keywords
        self.site_url = site_url
        self.user_agent = ['Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999',
                           'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)',
                           'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',
                           'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)',
                           'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',
                           'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser)',
                           'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)',
                           'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',
                           'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; The World)',
                           'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)',
                           'Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+',
                           'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)',
                           'Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0',
                           'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
                           'Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13',
                           'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
                           'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
                           'Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124',
                           'MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
                           'NOKIA5700/ UCWEB7.0.2.37/28/999',
                           'Openwave/ UCWEB7.0.2.37/28/999',
                           'Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10',
                           'UCWEB7.0.2.37/28/999',
                           'User-Agent:Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)',
                           'User-Agent:Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',
                           'User-Agent:Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;',
                           'User-Agent:Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
                           'User-Agent:Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
                           'User-Agent:Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
                           'User-Agent:Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
                           'User-Agent:Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
                           'User-Agent:Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
                           'User-Agent:Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11',
                           'User-Agent:Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11']
        self.ip = [
            'www.baidu.com',
            '58.217.200.13',
            '58.217.200.15',
            '58.217.200.37',
            '58.217.200.39',
            '61.135.185.31',
            '61.135.185.32',
            '61.135.169.103',
            '61.135.169.107',
            '61.135.169.113',
            '61.135.169.114',
            '61.135.169.115',
            '61.135.169.121',
            '61.135.169.125',
            '111.13.12.139',
            '111.13.12.142',
            '111.13.100.91',
            '111.13.100.92',
            '115.239.210.25',
            '115.239.210.26',
            '115.239.210.27',
            '115.239.210.28',
            '115.239.211.109',
            '115.239.211.112',
            '115.239.211.113',
            '115.239.211.114',
            '119.75.213.50',
            '119.75.213.51',
            '119.75.213.61',
            '119.75.216.20',
            '119.75.217.11',
            '119.75.217.26',
            '119.75.217.56',
            '119.75.217.63',
            '119.75.217.109',
            '119.75.218.11',
            '119.75.218.45',
            '119.75.218.70',
            '119.75.218.77',
            '119.75.218.143',
            '123.125.114.107',
            '123.125.114.220',
            '123.125.114.238',
            '123.125.115.140',
            '123.125.115.165',
            '123.125.65.78',
            '123.125.65.82',
            '123.125.65.88',
            '123.125.65.90',
            '180.149.131.98',
            '180.149.132.151',
            '180.149.132.166',
            '180.149.132.168',
            '180.97.33.67',
            '180.97.33.71',
            '180.97.33.107',
            '180.97.33.108',
            '202.108.22.5',
            '202.108.22.142',
            '220.181.111.111',
            '220.181.111.149',
            '220.181.111.188',
            '220.181.111.22',
            '220.181.111.37',
            '220.181.111.83',
            '220.181.112.12',
            '220.181.112.18',
            '220.181.112.147',
            '220.181.112.195',
            '220.181.112.21',
            '220.181.112.244',
            '220.181.112.76',
            '220.181.112.89',
            '220.181.37.55',
        ]

    def set_url(self, keywords):
        keywords = keywords.strip(' ')  # 去处关键词的空格
        value = {"q1": keywords.encode('utf-8', 'ignore')}  # 以 encoding 指定的编码格式编码字符串
        kw_data = urllib.urlencode(value)  # 生成URL格式字符串
        domain = random.choice(self.ip)     # 随机使用百度代理IP
        search_url = "http://%s/s?%s&q2=&q3=&q4=&rn=50&lm=0&ct=0&ft=&q5=&q6=&tn=baiduadv" % (domain, kw_data)
        # print search_url
        return search_url

    # 获取页面html
    def get_html(self, keywords):
        # 随机选择User-Agent
        """
        :param search_url:
        :param callback:
        :rtype: object
        """
        header = {
            'User-Agent': random.choice(self.user_agent)
        }
        search_url = self.set_url(keywords)
        try:
            req = urllib2.Request(search_url, data=None, headers=header)
            # 增加请求超时处理，超时自动退出，然后抛出异常
            resp = urllib2.urlopen(req, timeout=5)
            source_code = resp.read()
            if not source_code:
                print u"加载页面失败……"
                return None

            # 获取排序num和domain
            pattern1 = re.compile(r'<div .*? id="(\d{1,2})" .*?>.*?<.*? class="c-showurl".*?>(.*?)</[^b].*?>.*?</div>',
                                  re.S)
            data = re.findall(pattern1, source_code)

            # 获取下一页URL
            pattern = re.compile(
                r'<div id="page" >.*?<a .*?><span class="pc">10</span></a>.*?<a href="(.*?)" class="n">.*?</a></div>')
            next_item = re.search(pattern, source_code)
            if next_item:
                # print next_item.group(1)
                next_url = "http://www.baidu.com" + next_item.group(1)
            else:
                print u"获取下一页链接失败"
            return data, next_url
        except Exception, e:
            print e

    # 解析页面html
    def parse_html(self, keywords):
        data, next_url = self.get_html(keywords)

        # 查询排名

        for num, domain in data:
            # 过滤的抓取的domain中的加粗项
            filter_domain = re.sub(r'(<b>|</b>)', '', domain)
            # 查询有都少个百度产品的排名
            if self.baidu_url in filter_domain:
                self.baidu_num += 1
            # 查询指定网站的关键词排名
            if self.site_url in filter_domain:
                self.rank = num
                break
            else:
                self.rank = 0
                continue

                # g_rank = self.rank - self.baidu_num

        return self.rank, self.baidu_num, next_url

    # 检查排名
    def check_rank(self, keywords):
        rank, baidu_num, next_url = self.parse_html(keywords)
        g_rank = int(rank) - int(baidu_num)
        if rank == 0:  # 如果不在页面中
            # print u"在前50位中未查询到结果"
            # print u"%s - %s ： 不在前50位中" % (site_url, keywords)
            time.sleep(2)
            while self.page <= 2:
                rank, baidu_num, next_url = self.parse_html(next_url)
                g_rank = int(rank) - int(baidu_num)
                # rank -= int(1)
                if rank > 0:
                    print u"%s - %s ： %s | %s 位" % (site_url, keywords, rank, g_rank)
                    break
                else:
                    print u"%s - %s ： 不在前%s位中" % (site_url, keywords, int(self.page * 50))
                self.page += 1
                # if rank == 0:
                #     print u"网站【%s】的关键词【%s】不在百度前100位中 " % (site_url, keywords)
                # print self.page
        else:
            print u"%s - %s ： %s | %s 位" % (site_url, keywords, rank, g_rank)
            # return rank

if __name__ == '__main__':
    import time

    start = time.clock()

    site_url = 'kumhosunny.com'
    # u""把字符转换成UTF-8格式
    keywords_list = [
		u'mybatis'
		u'soa架构'
		u'元数据'
		u'ad域'
		u'空间数据库'
		u'灾备'
		u'元数据管理'
		u'webservice框架'
		u'triggerevent'
		u'集中部署'
		u'soa应用平台'
		u'协同软件 典型案例'
		u'soa软件'
		u'信息技术解决方案'
		u'webservice安全'
		u'开放info共享平台'
		u'soa成功案例'
		u'flushprivileges'
		u'验证安装文件签名失败'
		u'soa应用案例'
        # u'轿车车灯',    #排名在50-100位之间，用于测试
    ]
    # 循环查询关键词
    for k in keywords_list:

        # 调用模块查询关键词排名
        spider = BaiduRank(k, site_url)
        spider.check_rank(k)
        # 延时1s查询，以免被百度屏蔽
        time.sleep(2)

    end = time.clock()
    print "run time is %f s" % (end - start)
