# -*- coding:utf-8 -*-
import re

from concurrent.futures import ThreadPoolExecutor as Pool

import requests, pymysql
from requests.exceptions import ConnectionError


class Spider():
    """Crawler for Chinese neurology news listings on dxy.cn.

    Fetches paginated listing pages, extracts article metadata with a
    regex, and inserts each item into the `chinese_news` MySQL table.
    """

    # Compiled once instead of per page.  Groups, in order:
    # (1) image src, (2) date in <span class="fr">, (3) article href,
    # (4) title, (5) summary text.
    ITEM_PATTERN = re.compile(
        r'<dl class="x_box12">.*?src="(.*?)".*?class.*?<span class="fr">(.*?)</span>'
        r'.*?href="(.*?)" title="(.*?)".*?<p class="summary">(.*?)</p>',
        re.S)

    def __init__(self):
        # Listing URL template; page number is appended in get_pages().
        self.base_url = 'http://neuro.dxy.cn/tag/news/p-'
        self.headers = {
            'user-agent': 'Mozilla / 5.0(X11;Linuxx86_64) AppleWebKit / 537.36(KHTML, likeGecko) Chrome / 68.0.3440.75Safari / 537.36'
        }
        # NOTE(review): credentials are hard-coded; consider moving them to
        # config/environment.  Connection/cursor live for the spider's lifetime.
        self.conn = pymysql.connect(host="localhost", user="root", password="123456", db="little_pig", port=3306,
                                    charset='utf8')
        self.cursor = self.conn.cursor()

    def get_pages(self, num, retries=3):
        """Fetch listing page ``num`` and hand the HTML to the parser.

        Returns 404 on a non-200 response (kept for compatibility with
        the original interface), the parsed item list on success, or
        None if all ``retries`` attempts hit a connection error.
        Bounded retries replace the original unbounded recursion.
        """
        url = self.base_url + str(num)
        for _ in range(retries):
            try:
                response = requests.get(url, headers=self.headers, timeout=10)
            except ConnectionError:
                continue  # transient network failure: retry
            if response.status_code != 200:
                return 404
            print('正在爬取url:' + url)
            return self.parse_pages(response.text)
        return None

    def parse_pages(self, data):
        """Extract news items from listing-page HTML and persist them.

        Each item is a 5-tuple ``(img_src, date, href, title, summary)``.
        Returns the extracted list so callers/tests can inspect it.
        """
        items = self.ITEM_PATTERN.findall(data)
        self.save_pages(items)
        return items

    def save_pages(self, items):
        """Insert extracted items into `chinese_news`.

        Column order is (title, img, href, date, summary); summaries of
        50+ characters are truncated with an ellipsis.  Uses a
        parameterized query so scraped content cannot inject SQL
        (the original interpolated values with str.format).
        """
        sql = 'insert into chinese_news values (%s, %s, %s, %s, %s)'
        for img, date, href, title, summary in items:
            desc = summary[:50] + '...' if len(summary) >= 50 else summary
            try:
                self.cursor.execute(sql, (title, img, href, date, desc))
                self.conn.commit()
            except Exception:
                # Skip the bad row (e.g. duplicate key) but keep inserting the rest.
                self.conn.rollback()

    def run(self):
        """Crawl listing pages 1-10 sequentially."""
        for page in range(1, 11):
            self.get_pages(page)


if __name__ == '__main__':
    # Script entry point: build the crawler and walk all listing pages.
    Spider().run()


