import requests
import pymongo
from urllib.parse import urlencode
from requests.exceptions import ConnectionError
from pyquery import PyQuery as pq


# MongoDB connection used to persist crawled articles (database "weixin").
client=pymongo.MongoClient("localhost")
db=client["weixin"]
# Sogou WeChat-article search endpoint; query-string parameters are appended later.
baseUrl='http://weixin.sogou.com/weixin?'

# Request headers copied from a logged-in browser session.
# NOTE(review): the Cookie below is a hard-coded session snapshot and will
# expire -- refresh it from a live browser before running the crawler.
headers={
    'Cookie': 'GOTO=Af12583;IPLOC=CN2102; SUID=0464DD311F13940A000000005A9C069E; usid=o58xdmuBiRglUGWy; SUV=007B896531DD64045A9C069F91803234; ABTEST=0|1520174756|v1; SNUID=BEDE648BB9BFDC2EFDE2A997BA18D654; weixinIndexVisited=1; sct=2; JSESSIONID=aaaN2SJxMmVnharAnsxhw; ppinf=5|1520174810|1521384410|dHJ1c3Q6MToxfGNsaWVudGlkOjQ6MjAxN3x1bmlxbmFtZTozMzolRTglQjclODMlNUIlRTUlOTglQkYlRTUlOTMlODglNUR8Y3J0OjEwOjE1MjAxNzQ4MTB8cmVmbmljazozMzolRTglQjclODMlNUIlRTUlOTglQkYlRTUlOTMlODglNUR8dXNlcmlkOjQ0Om85dDJsdU55T1RzcjExYVZMd241YnBYVmdCX0lAd2VpeGluLnNvaHUuY29tfA; pprdig=uFwZAgM50c8SXzWqz4Q5BWZQnPPGwtzmfuM2cBLQuVFEc3kl1PcZkJzQi2y91wfBb7KcYFaSjUaQTNs4dwSF9NMmWflL8FaUPmzJKhvxBCBFW8-ZbibFY_O_dYe6_kkVuVBJRsnE75VNm1Oh4m3GfdKZT4Nz_bSSgTS6kZIKq-g; sgid=14-33650361-AVqcBtoj87FNLicF2vs3DQqc; ppmdig=1520174811000000e177c9a657d65ab2d73727fb50ab528d',
    'Host': 'weixin.sogou.com',
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36'
}

# Local proxy-pool service: GET /get is expected to return one "host:port" proxy.
proxyPoolUrl='http://127.0.0.1:5000/get'

# Currently active proxy ("host:port" string or None); shared mutable state
# updated inside getHtml() whenever the current connection gets banned.
proxy=None

# Maximum retry attempts per URL before giving up.
maxCount=5

def getProxy():
    """Fetch one proxy address from the local proxy pool.

    Returns the pool's response body (a "host:port" string) on HTTP 200;
    returns None on any other status or when the pool itself is unreachable.
    """
    try:
        resp = requests.get(proxyPoolUrl)
    except ConnectionError:
        return None
    return resp.text if resp.status_code == 200 else None

def getHtml(url,count=1):
    """Fetch *url*, rotating through pool proxies on failure.

    Retries recursively (incrementing *count*) when the request raises a
    connection error, comes back non-200 (Sogou 302-redirects banned
    clients, and redirects are disabled below), or Sogou serves its
    "无效" (invalid-link) interstitial page. Returns the page HTML on
    success, or None once maxCount attempts are exhausted or no proxy
    can be obtained.
    """
    global proxy  # shared so a working proxy is reused across calls
    print('crawrl: ',url)
    print('count: ',count)
    if count >=maxCount:
        # Too many consecutive failures for this URL -- give up.
        print('请求错误次数达到上限')
        return None
    try:
        if proxy:
            # Only an http proxy entry is configured; https URLs would
            # bypass the proxy.
            proxies={
                'http':'http://'+proxy
            }
            # allow_redirects=False: a 302 here means the client was
            # banned, so it is handled as "switch proxy", not followed.
            response=requests.get(url,allow_redirects=False,headers=headers,proxies=proxies)
        else:
            response=requests.get(url,allow_redirects=False,headers=headers)
        print(response.status_code)
        if response.status_code==200:
            print('text: ',11)  # NOTE(review): leftover debug print
            doc=pq(response.text)
            con=doc('h2').text()
            if '无效' in con:
                # Sogou returned its "invalid link" page -- count it as a
                # failure and retry.
                count+=1
                return getHtml(url,count)
            return response.text
        if response.status_code!=200:
            # Non-200 (typically a 302 ban) -- fetch a fresh proxy and retry.
            proxy=getProxy()
            if proxy:
                print('Using proxy',proxy)
                count+=1
                return getHtml(url,count)
            else:
                print('获取代理失败')
                return None
            
    except ConnectionError:
        # Network/proxy failure -- grab a new proxy and retry.
        print('error ')
        proxy=getProxy()
        count+=1
        return getHtml(url,count)

def getIndex(keyword,page):
    """Fetch one Sogou search-result page for *keyword*.

    Builds the query string (type=2 selects article search), delegates the
    download to getHtml(), and returns the HTML (or None on failure).
    """
    params={
        'query':keyword,
        'type':2,
        'page':page,
        'ie':'utf8'
    }
    url=baseUrl+urlencode(params)
    html=getHtml(url)
    print("html: ",html)
    return html
    
def parseIndex(html):
    """Yield the article URL of each result on a search-result page."""
    selector='.news-box .news-list li .txt-box h3 a'
    for anchor in pq(html)(selector).items():
        yield anchor.attr('href')

def getDetail(url):
    """Download one article page; return its HTML, or None on any failure."""
    try:
        response=requests.get(url)
    except ConnectionError:
        return None
    if response.status_code==200:
        return response.text
    return None


def paresDeatil(html):
    """Extract the article fields from a WeChat article page.

    Returns a dict with title, content, publication date, account
    nickname and WeChat id, all as plain-text strings.
    """
    # NOTE(review): function name has a typo ("paresDeatil"); kept to avoid
    # breaking existing callers.
    doc=pq(html)
    return {
        'title': doc('.rich_media_title').text(),
        'content': doc('.rich_media_content').text(),
        'date': doc('#post-date').text(),
        'nickName': doc('#js_profile_qrcode > div > strong').text(),
        'wechat': doc('#js_profile_qrcode > div > p:nth-child(3) > span').text()
    }

def saveToMongo(data):
    """Upsert an article into the ``articles`` collection, keyed by its title.

    *data* is the dict produced by paresDeatil(). Existing documents with
    the same title are updated in place; new titles are inserted.
    """
    # Collection.update(filter, doc, True) is deprecated since pymongo 3 and
    # removed in pymongo 4 -- use update_one(..., upsert=True) instead.
    result=db['articles'].update_one({'title':data['title']},{'$set':data},upsert=True)
    if result.acknowledged:
        print('保存成功 ',data['title'])
    else:
        print('保存失败',data['title'])

def main():
    """Crawl 100 result pages for the keyword '风景' and persist every article."""
    for page in range(1,101):
        html=getIndex('风景',page)
        if not html:
            continue
        for articleUrl in parseIndex(html):
            article=getDetail(articleUrl)
            if article:
                saveToMongo(paresDeatil(article))



# Script entry point: only crawl when run directly, not when imported.
if __name__=='__main__':
    main()
