# -*- coding: utf-8 -*-

import random
import re
from urlparse import urljoin
import MySQLdb
import requests
import hashlib

from lxml import etree
import sys
reload(sys)
sys.setdefaultencoding('utf-8')


from gcpy_utils.proxy.adsl import get_proxy_list
import requests

# NOTE: a duplicate coding cookie used to sit here; PEP 263 only honors
# lines 1-2, so it was inert. Crawler entry point follows.
def get_html():

    config = {"host": "192.168.14.90",
                    "port": 3306,
                    'user': 'root',
                    "passwd": '123456',
                    "db": 'zhp_blog',
                    "charset": "utf8"}


    for i in range(1,16):
        proxy_ips = get_proxy_list(dynamic=True)
        proxy_ip = proxy_ips[0]
        use_hea = [
            'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36',
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
            "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
            "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
            "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
            "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
            "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
            "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
        ]

        url = 'https://news.16888.com/gnxc/index_list_%s.html' % str(i)

        try:

            html = requests.get(url, headers=headers, proxies={"http": proxy_ip,
                                                                         "https": proxy_ip})
        except:
            html=get_html()
        html.encoding = 'utf-8'
        html = html.text

        dom = etree.HTML(html)
        urls = dom.xpath('//*[@class="news_list"]/ul/li/a[2]/@href')

        for new_url in urls:
            new_url = new_url

            try:
                html2 = requests.get(new_url,headers=headers,proxies={"http":proxy_ip,
                                             "https":proxy_ip})
            except:
                html2 = get_html()

            html2.encoding = 'utf-8'
            html2= html2.text

            dom2 = etree.HTML(html2)
            try:
                title = dom2.xpath('//*[@class="news_title"]/text()')[0]

                content = dom2.xpath('//div[@class="news_con_text clearfix"]')[0]
                content = etree.tostring(content, encoding="utf-8", method='html')
                pattern = re.compile(r'【车主之家 新闻】')
                content = pattern.sub('', content)
                # print content
            except:
                title = None
                content = None
                pass
            if title:
                source_url = new_url
                desc = content
                print desc
                md5 = hashlib.md5(source_url).hexdigest()[-8:]
                num = int(md5, 16) % 10
                con = MySQLdb.connect(**config)
                con.ping()
                cue = con.cursor()

                try:

                    if not cue.execute('select * from gc_info.info_%s where source_url = %s',
                                       (num, source_url)):
                        cue.execute(
                            "insert into gc_info.info_%s (title,source,source_url,`desc`,industry_id,cate1_id,cate2_id) values(%s,%s,%s,%s,%s,%s,%s)",
                            (num, title, source, source_url, desc, industry, cate1_id, cate2_id))

                    print title
                    print ("存储成功。。。", num, source_url)
                    # 提交sql语句
                    con.commit()
                    print 'insert ---------------------success'
                except Exception as e:
                    print e,"-----------2233"
                    con.rollback()
                else:
                    con.commit()
                cue.close()
                con.close()
if __name__ == '__main__':
    # Script entry point: crawl the news listing and persist articles to MySQL.

    get_html()