# Target site: https://www.ryjiaoyu.com/book
import requests
from bs4 import BeautifulSoup
import csv
# import pandas as pd
from urllib.parse import urljoin
import pymysql

def get_html(url, timeout=10):
    """Download *url* and return its HTML text, or None on failure.

    Args:
        url: Page URL to fetch.
        timeout: Seconds to wait for the server before giving up. New
            parameter with a default, so existing callers are unaffected;
            the original request had no timeout and could hang forever.

    Returns:
        The decoded response body as str, or None if the request failed
        (network error or non-2xx HTTP status).
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Referer': 'https://www.baidu.com/',
        'Connection': 'keep-alive'}
    try:
        r = requests.get(url=url, headers=headers, timeout=timeout)
        # Fail fast on 4xx/5xx before touching the body.
        r.raise_for_status()
        # Use the detected encoding so Chinese pages decode correctly.
        r.encoding = r.apparent_encoding
        return r.text
    except requests.RequestException as e:
        # Narrow catch: only request-related failures; return None explicitly
        # so callers can test the result.
        print(e)
        return None

def parser(html):
    """Extract book rows from the listing page HTML.

    Args:
        html: Full HTML text of https://www.ryjiaoyu.com/book.

    Returns:
        A list of ``[name, author, price, full_url]`` lists, one per book.
        Rows missing any expected element are skipped — the original
        ``select(...)[0]`` indexing raised IndexError on the first
        malformed row and aborted the whole scrape.
    """
    soup = BeautifulSoup(html, 'lxml')
    base_url = 'https://www.ryjiaoyu.com/'
    out_list = []
    for row in soup.select('#tab-book > div.col-md-8.col-sm-8.main > div.g-main > div > ul > li'):
        name_tag = row.select_one('div.book-info > h4 > a')
        author_tag = row.select_one('div.book-info > div > span')
        price_tag = row.select_one('div.book-info > span > span')
        link_tag = row.select_one('div.book-img > a')
        # Skip incomplete entries instead of crashing on a missing element.
        if not all((name_tag, author_tag, price_tag, link_tag)):
            continue
        full_url = urljoin(base_url, link_tag.attrs['href'])
        out_list.append([name_tag.text.strip(),
                         author_tag.text.strip(),
                         price_tag.text.strip(),
                         full_url])
    return out_list

def save_mysql(sql, **dbdata):
    """Execute *sql* against a MySQL database and print the fetched rows.

    Args:
        sql: SQL statement to execute.
        **dbdata: Keyword arguments forwarded to ``pymysql.connect``
            (host, user, password, db, charset, cursorclass, ...).

    Bug fixes versus the original:
    - If ``pymysql.connect`` raised, ``cursor`` (and ``connect``) were
      referenced while unbound in the except/finally blocks, raising
      NameError/UnboundLocalError that masked the real error.
    - On the error path the connection was closed twice (once in
      ``except``, again in ``finally``).
    """
    connect = None
    cursor = None
    try:
        connect = pymysql.connect(**dbdata)
        cursor = connect.cursor()
        cursor.execute(sql)
        print(cursor.fetchall())
        connect.commit()
    except Exception as err:
        # Best-effort behavior preserved: report the error, don't re-raise.
        print(err)
    finally:
        # Close only what was actually opened, exactly once.
        if cursor is not None:
            cursor.close()
        if connect is not None:
            connect.close()

if __name__ == '__main__':
    url = 'https://www.ryjiaoyu.com/book'
    html = get_html(url)
    # get_html returns None on network/HTTP failure; guard so parser()
    # is not handed a None argument (the original crashed here).
    if html:
        out_list = parser(html)
        # NOTE(review): out_list is computed but never persisted — the SQL
        # below only SELECTs from bookinfo1. TODO: insert out_list if that
        # is the intent.
        data = {'host': '127.0.0.1',
                'user': 'root',
                'password': 'root',
                'db': 'spider',
                'charset': 'utf8',
                'cursorclass': pymysql.cursors.DictCursor
                }
        sql = 'select * from bookinfo1'
        save_mysql(sql, **data)
    else:
        print('failed to download', url)