#!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from pyquery import PyQuery as pq
import jieba.analyse

url="http://blog.csdn.net/"
url_proxy="http://127.0.0.1:5000/get"


def get_proxy_ip(max_retries=3):
    """Fetch one proxy address from the local proxy-pool service.

    Args:
        max_retries: how many additional attempts to make on failure
            (default 3; the original retried forever via unbounded recursion).

    Returns:
        The proxy address text returned by the service, or None once all
        attempts are exhausted.
    """
    try:
        res = requests.get(url_proxy)
        if res.status_code == 200:
            return res.text
        raise Exception("code!=200")
    except Exception as e:
        # Original printed e.__cause__, which is almost always None;
        # print the exception itself so the failure reason is visible.
        print(e)
        if max_retries > 0:
            print("try again")
            # Original dropped this return value, so retries always
            # yielded None to the caller even when they succeeded.
            return get_proxy_ip(max_retries - 1)
        return None




def get_index_html():
    """Download the blog index page.

    Returns:
        The page HTML text on HTTP 200, otherwise None (the error is
        printed before returning).
    """
    try:
        response = proxyRequest(url)
        if response.status_code != 200:
            raise Exception("code error")
        return response.text
    except Exception as err:
        print(err)
        return None


def proxyRequest(url, timeout=10):
    """Perform a plain GET request and return the raw response.

    Proxy support is currently disabled; to re-enable it, fetch an address
    with get_proxy_ip() and pass proxies={'http': addr} to requests.get.

    Args:
        url: address to fetch.
        timeout: seconds before aborting the request — without it a dead
            server would hang the whole script indefinitely.

    Returns:
        The requests Response object; status is NOT checked here, callers
        inspect .status_code themselves.
    """
    print(url)  # trace which page is being fetched
    return requests.get(url, timeout=timeout)


def get_context_list(html):
    """Parse the index page HTML and yield one dict per article entry.

    Each yielded dict has the keys: title, type, detail_url, read_num,
    author. Entries missing a title or a detail link are skipped.

    Args:
        html: the index page HTML as returned by get_index_html().

    Yields:
        dict: one record per ``#feedlist_id li`` item on the page.
    """
    doc = pq(html)
    for item in doc("#feedlist_id li").items():
        title = item.find(".title h2 a").text()
        # Renamed from `type`, which shadowed the builtin.
        category = item.find(".name > a").text()
        detail_url = item.find(".title h2 a").attr("href")
        read_num = item.find(".read_num > p.num").text()
        # NOTE(review): same selector as `category` — confirm the author
        # really lives under .name > a, or this field duplicates `type`.
        author = item.find(".name > a").text()
        # .attr("href") returns None when the attribute is absent; the
        # original `== ""` comparison missed that case, so use truthiness.
        if not title or not detail_url:
            continue
        yield {
            "title": title,
            "type": category,
            "detail_url": detail_url,
            "read_num": read_num,
            "author": author,
        }

def get_context_detail(url):
    """Fetch a single article page.

    Returns:
        The page HTML text on HTTP 200; None on any other status or on a
        request error (which is printed).
    """
    try:
        response = proxyRequest(url)
    except Exception as err:
        print(err)
        return None
    return response.text if response.status_code == 200 else None
# Driver: crawl the index, then tag each article page with its top keywords.
html = get_index_html()

if html:
    for entry in get_context_list(html):
        page = get_context_detail(entry.get("detail_url"))
        if page:
            # Attach the five highest-weight keywords extracted from the page.
            entry["key_word"] = jieba.analyse.extract_tags(page, topK=5)
            print(entry)

