# -*- coding: utf-8 -*-
import random
import re
from urlparse import urljoin
import MySQLdb
import requests
import hashlib

from lxml import etree
import sys
import time
reload(sys)
sys.setdefaultencoding('utf-8')

from gcpy_utils.proxy.adsl import get_proxy_list
import requests

def get_html():
    config = {"host": "80.240.20.67",
              "port": 3306,
              'user': 'root',
              "passwd": '123456',
              "db": 'zhp_blog',
              "charset": "utf8"}
    category_id = 7
    add_time = int(time.time())
    edit_time = int(time.time())
    edit_user_name = '苹果'
    count = 0
    urls = [('http://news.foodmate.net/guonei/', 3978), ('http://news.foodmate.net/quanwei/', 342),
            ('http://news.foodmate.net/guoji/', 483),
            ('http://news.foodmate.net/yujing/', 343), ('http://news.foodmate.net/keji/', 144),
            ('http://news.foodmate.net/wangyou/', 166), ('http://news.foodmate.net/qiye/', 940)]

    for url, pg in urls:
        new_url = url
        pages = pg
        for i in range(pages):  # 页数
            new_url = url + 'list_%s.html' % str(i)

            proxy_ips = get_proxy_list(dynamic=True)
            proxy_ip = proxy_ips[0]
            headers = {

                'Host': 'news.foodmate.net',

                'Connection': 'keep-alive ',

                'Cache-Control': 'max-age=0',

                'Upgrade-Insecure-Requests': '1',

                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36',

                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',

                'Referer': 'http://news.foodmate.net/from_270_2.html',

                'Accept-Encoding': 'gzip, deflate',

                'Accept-Language': 'zh-CN,zh;q=0.9'}
            cookie = {
                '__gads=ID': '9957a70f15584486:',
                'T': '1537238907',
                'S': 'ALNI_Mb-Veckucyzg-QYDLarOBiapu8llg',
                'bc08_f0d8_saltkey': 'eW61r6A4',
                'bc08_f0d8_lastvisit': '1537328675',
                'Hm_lvt_2aeaa32e7cee3cfa6e2848083235da9f': '1537238907,1537532474',
                '__51cke__': '',
                'yunsuo_session_verify': '5d6d81358f6b675d57b2090ca8e2289c',
                'Hm_lpvt_2aeaa32e7cee3cfa6e2848083235da9f': '1537840654',
                '__tins__1636283': '%7B%22sid%22%3A%201537840642542%2C%20%22vd%22%3A%202%2C%20%22expires%22%3A%201537842453733%7D',
                '__51laig__': '15'
            }
            try:
                html = requests.get(new_url,cookies=cookie,headers=headers,proxies={"http":proxy_ip,
                                                 "https":proxy_ip})
            except:
                html=get_html()
            html.encoding = 'utf-8'
            html = html.text

            dom = etree.HTML(html)
            urls = dom.xpath('//*[@class="catlist"]/ul/li/a[1]/@href')
            for i in urls:
                new_url = i
                try:
                    html2 = requests.get(new_url,cookies=cookie,headers=headers,proxies={"http":proxy_ip,
                                                 "https":proxy_ip})
                except:
                    html2 = get_html()
                html2.encoding = 'utf-8'
                html2 = html2.text

                dom2 = etree.HTML(html2)
                try:
                    title = dom2.xpath('//h1[@id="title"]/text()')[0]

                    text = dom2.xpath('//*[@id="article"]')[0]
                    text = etree.tostring(text, encoding="utf-8", method='html')

                    pattern = re.compile(r'<strong>.*?</strong>')
                    content = pattern.sub('', text)

                    pattern = re.compile(r'"http.*?class="lazy" original=')
                    content = pattern.sub('', content)

                    pattern = re.compile(r'本文由食品伙伴网.*?@foodmate.net')
                    content = pattern.sub('', content)

                except:
                    title = None
                    content = None
                    pass
                if title:
                    detail = content
                    # print detail

                    con = MySQLdb.connect(**config)
                    con.ping()
                    cue = con.cursor()
                    try:
                        cue.execute(
                            "insert into zhp_blog.article(title,category_id,add_time,edit_time,edit_user_name,`detail`,count) values(%s,%s,%s,%s,%s,%s,%s)",
                            (title, category_id, add_time, edit_time, edit_user_name, str(detail), count))

                        print title
                        print ("存储成功。。。",)
                        con.commit()
                        print 'insert ---------------------success'
                    except Exception as e:
                        print e, "-----------2233"
                        con.rollback()
                    else:
                        con.commit()
                    cue.close()
                    con.close()
# Script entry point: kick off the crawl when run directly.
if __name__ == '__main__':
    get_html()