import requests
from bs4 import BeautifulSoup
import csv
def get_html(url):
    """Fetch *url* and return the decoded HTML text, or None on failure.

    Sends a desktop-browser User-Agent so the site serves the normal page.
    Any request/HTTP error is printed and swallowed; the caller must check
    for a None return before parsing.
    """
    head = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36"}
    try:
        # timeout prevents the script from hanging forever on a dead host
        r = requests.get(url=url, headers=head, timeout=10)
        # validate the HTTP status *before* spending time decoding the body
        r.raise_for_status()
        # apparent_encoding sniffs the real charset (site may not declare UTF-8)
        r.encoding = r.apparent_encoding
        return r.text
    except Exception as e:
        print(e)
        return None  # explicit: callers must handle the failure case
def parser(html):
    """Extract product names from the page HTML.

    Collects the text of every <h1> inside <li class="row-3"> elements,
    followed by every <h1> inside <li class="row-6"> elements.  Each name
    is wrapped in its own single-element list so the result is a list of
    CSV-ready rows.
    """
    soup = BeautifulSoup(html, 'lxml')
    rows = [[node.text] for node in soup.select('li[class="row-3"] h1')]
    rows.extend([node.text] for node in soup.select('li[class="row-6"] h1'))
    return rows
def save_csv(path, shuju):
    """Write *shuju* (a list of rows, each a list of cells) to a CSV file.

    Bug fix: the original used writerow(), which dumps the whole list of
    rows into a single CSV line (each cell becoming the repr of a list,
    e.g. "['name']").  writerows() emits one CSV line per row, matching
    the list-of-rows shape produced by parser().
    """
    # 'w' (not 'w+') — we only write; newline='' is required by the csv module
    with open(path, 'w', newline='', encoding='utf-8') as f:
        csv.writer(f).writerows(shuju)
if __name__ == '__main__':
    # Scrape the MXBC product page and dump the product names to CSV.
    url = 'https://www.mxbc.com/product'
    yuanma = get_html(url)
    # get_html returns None on any network/HTTP error; guard before parsing,
    # otherwise BeautifulSoup would raise on a None document.
    if yuanma is not None:
        out_list = parser(yuanma)
        path = 'mxbc.csv'
        save_csv(path, out_list)
    else:
        print('Failed to fetch page; no CSV written.')