# encoding: utf-8
"""
@author: 夏洛
@QQ: 1972386194
@file: spider.py
"""

# encoding: utf-8
"""
@author: 夏洛
@QQ: 1972386194
@file: crawl.py
"""

import traceback
import pymysql
import requests

def get_conn():
    """Open a connection to the local ``x_xls`` MySQL database.

    Returns:
        tuple: ``(connection, cursor)`` — caller is responsible for
        closing both (see :func:`close_conn`).
    """
    connection = pymysql.connect(
        host='127.0.0.1',
        port=3306,
        user='root',
        password='',
        db='x_xls',
        charset='utf8',
    )
    return connection, connection.cursor()

def close_conn(conn, cursor):
    """Close the cursor first, then the connection.

    Either argument may be falsy (e.g. ``None``); such values are
    skipped silently.
    """
    for resource in (cursor, conn):
        if resource:
            resource.close()

# Entry page: the tutorial's left-column navigation lists every article link.
url = 'https://www.runoob.com/python3/python3-interpreter.html'
headers = {
    # Desktop Chrome user-agent so the site serves the regular page layout.
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36'
}
from lxml import etree
from pyquery import PyQuery as pq

def crawl():
    """Crawl the runoob Python3 tutorial and persist each article.

    Fetches the index page, walks every link in the left-hand
    navigation (``#leftcolumn``), downloads the linked page, extracts
    the article body (``.article-intro``) and stores the title/body
    pair via :func:`save`.
    """
    # timeout prevents the crawl from hanging forever on a stalled server.
    res = requests.get(url, headers=headers, timeout=10)
    res.raise_for_status()
    html = etree.HTML(res.text)

    def first(values):
        # xpath() returns a list; take the first match or '' when empty.
        return values[0] if values else ''

    for link in html.xpath('//div[@id="leftcolumn"]/a'):
        src = first(link.xpath('./@href'))
        if not src.startswith('/'):
            # Relative hrefs on these pages are relative to /python3/.
            src = '/python3/' + src
        title = first(link.xpath('./@title'))
        c_url = 'https://www.runoob.com' + src
        res1 = requests.get(c_url, headers=headers, timeout=10)
        res1.raise_for_status()
        body = pq(res1.text)('.article-intro')

        print('*' * 50)
        save(title, body)

def save(title, body):
    """Insert one crawled article into the ``tb_cat`` table.

    Args:
        title: Article title (may be '' when the link had no title).
        body: Extracted article body (PyQuery selection; the DB driver
            stores its string form — TODO confirm intended storage).

    Errors are printed (not re-raised) so a single bad page does not
    abort the whole crawl; the connection is always closed.
    """
    # Pre-bind so the finally block never hits a NameError when
    # get_conn() itself fails.
    conn, cursor = None, None
    try:
        conn, cursor = get_conn()
        item = (title, body, 0)
        # `desc` and `read` are MySQL reserved words, hence the backticks.
        sql = """insert into tb_cat (title,`desc`,`read`) values (%s,%s,%s)"""
        cursor.execute(sql, item)
        conn.commit()
    except Exception:
        # Best-effort: report the failure and let the crawl continue.
        traceback.print_exc()
    finally:
        close_conn(conn, cursor)

if __name__ == '__main__':
    crawl()