#!/usr/bin/python3
# -*- coding: utf-8 -*-
import configparser
import datetime
import random
import sys
import time

import pymysql
import redis
import requests
from bs4 import BeautifulSoup

# Load crawler configuration (expects [redis] and [db] sections) from
# config.ini in the working directory at module import time.
cfg = configparser.ConfigParser()
cfg.read("config.ini")


class NgaSpider:
    """Crawler for an NGA forum thread-list board (fid=-7).

    Downloads thread-list pages, extracts per-thread metadata
    (title, link, reply count, author), de-duplicates threads through
    the redis hash 'item_url' and inserts new ones into the MySQL
    table `nga_list`.
    """

    def __init__(self, page, max_page):
        # page:     first list page to crawl (1-based).
        # max_page: exclusive upper bound -- pages [page, max_page) are fetched.
        self.page = page
        self.max_page = max_page
        self.base_url = "https://bbs.nga.cn/thread.php?fid=-7&page={}"
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) "
                          "Chrome/70.0.3538.110 Safari/537.36 "
        }
        # NOTE(review): a live session cookie (account credentials) is
        # hard-coded here; it should be moved into config.ini instead of
        # being kept in source control.
        self.cookie = "taihe_bi_sdk_uid=1b7956ba96a3d5712889f88af9b1984d; " \
                      "UM_distinctid=1780f64d40d13e-038eb18ccee1ed-53e356a-1fa400-1780f64d40e95d; " \
                      "taihe=4dbdbd86986cdf4d06a583c18790ee4c; " \
                      "UM_distinctid=1787cc30e0635a-07f220fb9b26cd-53e356a-1fa400-1787cc30e07cc6; " \
                      "ngaPassportUid=60556366; ngaPassportUrlencodedUname=daixu_y; " \
                      "ngaPassportCid=X94e27dsvr29diljh3k77kss3l4b1e3bptarldfd; " \
                      "taihe_bi_sdk_session=b40c2bc8c247b16cf9d91343c309a6a1; " \
                      "CNZZDATA30043604=cnzz_eid%3D1785528018-1608790865-https%253A%252F%252Fbbs.nga.cn%252F%26ntime" \
                      "%3D1617152497; bbsmisccookies=%7B%22pv_count_for_insad%22%3A%7B0%3A-34%2C1%3A1617210004%7D%2C" \
                      "%22insad_views%22%3A%7B0%3A1%2C1%3A1617210004%7D%2C%22uisetting%22%3A%7B0%3A%22b%22%2C1" \
                      "%3A1617154487%7D%7D; _cnzz_CV30043604=forum%7Cfid-343809%7C0; " \
                      "lastpath=/thread.php?fid=-7&page=1&lite=js&noprefix; lastvisit=1617175961; " \
                      "ngacn0comUserInfo=daixu_y%09daixu_y%0939%0939%09%0911%09116400%094%090%090%0961_47%2C53_30; " \
                      "ngacn0comUserInfoCheck=cd368a2b4e8987f3192b75b56c363c30; ngacn0comInfoCheckTime=1617175961 "

        # Configuration parsed from config.ini at module import time.
        self.config = cfg

        # Initialize the redis connection (used as the seen-URL set).
        try:
            redis_host = self.config.get("redis", "host")
            # getint: the raw option value is a str; the port should be an int.
            redis_port = self.config.getint("redis", "port")
            self.redis_con = redis.Redis(host=redis_host, port=redis_port, db=0)
            # Uncomment to wipe the de-duplication cache:
            # self.redis_con.flushdb()
            print('redis_host', redis_host)
            print('redis_port', redis_port)
        except Exception as err:
            print("请安装redis或检查redis连接配置", err)
            sys.exit()

        # Initialize the MySQL connection.
        try:
            db_host = self.config.get("db", "host")
            db_port = self.config.getint("db", "port")
            db_user = self.config.get("db", "user")
            db_pass = self.config.get("db", "password")
            db_db = self.config.get("db", "db")
            db_charset = self.config.get("db", "charset")
            self.db = pymysql.connect(host=db_host, port=db_port, user=db_user, passwd=db_pass, db=db_db,
                                      charset=db_charset)
            self.db_cursor = self.db.cursor()
            print('db_host', db_host)
        except Exception as err:
            print("请检查数据库配置", err)
            sys.exit()

    def get_url_list(self):
        """Return list-page URLs for pages [self.page, self.max_page).

        Bug fix: the original hard-coded the start page to 1 and
        silently ignored the `page` constructor argument.
        """
        return [self.base_url.format(pn) for pn in range(self.page, self.max_page)]

    def _cookie_dict(self):
        """Parse the raw Cookie header string into a name -> value dict.

        Splits each pair on the FIRST '=' only: several cookie values
        (e.g. `lastpath=/thread.php?fid=-7&...`) contain '=' themselves,
        so the original `split("=")[-1]` truncated them.
        """
        cookies = {}
        for pair in self.cookie.split("; "):
            name, _, value = pair.partition("=")
            cookies[name] = value
        return cookies

    def get_content(self, url, timeout=15):
        """GET one list page and return the raw response body as bytes.

        timeout: seconds before the request is aborted -- requests blocks
        indefinitely without one. Backward-compatible keyword default.
        """
        response = requests.get(url=url, headers=self.headers,
                                cookies=self._cookie_dict(), timeout=timeout)
        return response.content

    def add_item_url(self, item_url, item):
        """Record one thread unless it was already seen.

        The redis hash 'item_url' acts as the seen-set; new threads are
        also persisted to MySQL via add_item_db().
        """
        if not self.redis_con.hexists('item_url', item_url):
            item_replies = item.find(class_='c1').find(class_='replies').string
            item_name = item.find(class_='topic').string
            item_author = item.find(class_='author').string
            self.redis_con.hset('item_url', item_url, item_name)
            self.add_item_db(item_name, item_url, item_replies, item_author)
            # Some threads carry an extra "silver" label inside titleadd2;
            # both no-label branches printed the same thing, so they are merged.
            title_add = item.find(class_='titleadd2')
            if title_add is not None and title_add.find(class_='silver') is not None:
                item_label = title_add.find(class_='silver').string
                print(item_replies, item_name, item_url, item_label, item_author)
            else:
                print(item_replies, item_name, item_url, item_author)
        else:
            print(item_url, '已存在')

    def add_item_db(self, item_name, item_url, item_replies, item_author):
        """Insert one thread row into the `nga_list` table.

        On any database error the transaction is rolled back and the
        error is logged; the crawl continues (best-effort persistence).
        """
        # Parameterized SQL -- values are bound by the driver, not formatted in.
        sql = 'INSERT INTO nga_list (title, link, replies, author) VALUES (%s, %s, %s, %s)'
        try:
            self.db_cursor.execute(sql, (item_name, item_url, item_replies, item_author))
            self.db.commit()
        except Exception as err:
            print('err', err)
            self.db.rollback()

    def get_items(self, content):
        """Extract every thread row (<tbody>) from one list page body.

        The page bytes are decoded as gb18030 before parsing -- presumably
        the forum serves a GBK-family encoding (TODO confirm).
        """
        html = str(content, 'gb18030')
        soup = BeautifulSoup(html, 'lxml')
        for item in soup.findAll('tbody'):
            item_url = 'https://bbs.nga.cn/' + item.find(class_='c1').find('a').get('href')
            self.add_item_url(item_url, item)

    def run(self):
        """Crawl every configured list page, then close the DB connection."""
        try:
            for url in self.get_url_list():
                # Fetch the page, then extract and persist its threads.
                content = self.get_content(url)
                time_stamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                print('time_stamp', time_stamp)
                self.get_items(content)
                # Randomized pause between requests to stay polite / unblocked.
                time.sleep(random.randint(3, 5))
        finally:
            # Bug fix: the original leaked the connection when any
            # request/parse step raised; always close it.
            self.db.close()


if __name__ == '__main__':
    # Crawl list pages starting at 1 with an exclusive upper bound of 10.
    spider = NgaSpider(page=1, max_page=10)
    spider.run()
