import logging
import os
import time
import warnings

import pymysql
import requests
from pyquery import PyQuery as pq

# Logging setup: DEBUG level so per-joke save messages are visible
logging.basicConfig(level="DEBUG")
log = logging.getLogger('糗百爬虫')

# Database configuration, overridable via environment variables
MYSQL_HOST = os.getenv('MYSQL_HOST', 'localhost')
MYSQL_USER = os.getenv('MYSQL_USER', 'root')
MYSQL_PASSWORD = os.getenv('MYSQL_PASSWORD', '1234')
MYSQL_DATABASE = os.getenv('MYSQL_DATABASE', 'qiubai')
# os.getenv returns a *str* whenever the variable is set; pymysql.connect
# requires an int port, so normalize here (the fallback also becomes an int).
MYSQL_PORT = int(os.getenv('MYSQL_PORT', 3306))

# 糗事百科爬虫 — Qiushibaike text-joke scraper
def qiubaispider(url='https://www.qiushibaike.com/textnew/', timeout=10):
    """Fetch the joke listing page and extract the jokes on it.

    Args:
        url: listing page to scrape.
        timeout: seconds to wait for the HTTP response. New parameter with a
            default, so existing callers are unaffected; previously the
            request had no timeout and could hang indefinitely.

    Returns:
        list of ``{"id": ..., "content": ...}`` dicts on success,
        ``False`` when the page could not be fetched (non-200 status).
    """
    # Present a desktop-browser User-Agent so the site serves the normal page
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'}
    r = requests.get(url, headers=headers, timeout=timeout)

    # Guard clause: anything but 200 is treated as a failed fetch
    if r.status_code != 200:
        return False

    log.info(" 成功获取 {} ".format(url))
    doc = pq(r.content)
    articles = [
        {
            # href looks like '/article/<id>' — skip the 9-char '/article/' prefix
            "id": content.attr['href'][9:],
            "content": content.text().strip(),
        }
        for content in doc('#content-left  .contentHerf').items()
    ]

    log.info("成功爬取 {} 个笑话  ".format(len(articles)))
    return articles

# 保存糗事百科笑话到数据库 — persist scraped jokes to MySQL
def savearticles(articles):
    """Save jokes to MySQL, creating the database and table on first run.

    Args:
        articles: iterable of ``{"id": ..., "content": ...}`` dicts as
            produced by ``qiubaispider()``.

    Duplicate ids are silently skipped via ``INSERT IGNORE``. The connection
    is always closed, even when a query fails.
    """
    # int(...) because MYSQL_PORT may be a str when taken from the
    # environment, and pymysql.connect requires an int port.
    db = pymysql.connect( host=MYSQL_HOST, user=MYSQL_USER, password=MYSQL_PASSWORD,
        port=int(MYSQL_PORT), charset='utf8', )

    try:
        with warnings.catch_warnings():
            # Suppress MySQL warnings raised by the IF NOT EXISTS DDL below
            warnings.simplefilter("ignore")

            with db.cursor() as cursor:
                # Create the target database on first run
                sql = "create database if not exists {} character set utf8 collate utf8_general_ci".format(MYSQL_DATABASE)
                cursor.execute(sql)

                # Select it for the following statements
                sql = "use {}".format(MYSQL_DATABASE)
                cursor.execute(sql)

                # Create the jokes table on first run
                sql = "create table if not exists xiaohua(id int not null, content text default null,primary key(id) )  default charset=utf8 "
                cursor.execute(sql)

                # Insert the jokes; INSERT IGNORE skips ids already stored
                sql = "insert ignore into xiaohua(id, content) values(%s, %s) "
                for article in articles:
                    cursor.execute(sql, (article['id'], article['content']) )
                    log.debug("笑话 {} 保存成功 {}".format(article['id'], article['content'][:20]))

        # Commit only after every insert succeeded
        db.commit()
    finally:
        db.close()

if __name__ == '__main__':
    # 获取糗百笑话 — scrape one page of jokes
    articles = qiubaispider()
    # qiubaispider returns False on HTTP failure. An empty list is still
    # passed to savearticles (which creates the database/table as a side
    # effect), so only the literal False is excluded — preserving the
    # original `!= False` behavior in idiomatic form.
    if articles is not False:
        # 保存到数据库
        savearticles(articles)

    # 再次爬取等待时间 — wait (seconds, configurable via QIUBAI_WAIT) before
    # an external scheduler re-runs the script
    time.sleep( int(os.getenv('QIUBAI_WAIT', '10')) )