import json
from urllib.parse import urljoin

import requests
from bs4 import BeautifulSoup

base_url = 'https://www.azone-int.co.jp/azonet/'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36'
}

all_datas = []


def start():
    # 发起调用
    response = requests.get(base_url, headers=headers)
    soup = BeautifulSoup(response.content, 'html.parser')
    list_a = soup.find('div', class_='col-xs-3').find('div', class_='side-menu').findAll('div')
    for a in list_a:
        item = a.find('a')
        if item != None:
            parseCategory(item['href'])
    writeToFile(toJson(all_datas), 'data')


def parseCategory(url):
    response = requests.get(url, headers=headers)
    soup = BeautifulSoup(response.content, 'html.parser')
    list = soup.findAll('div', class_='row category-list-item')
    for item in list:
        a = item.find('p', class_='text-bold').find('a')
        parseDetails(a['href'])


def parseDetails(url):
    goods_data = {}
    response = requests.get(url, headers=headers)
    soup = BeautifulSoup(response.content, 'html.parser')
    # 名称
    name = soup.find('div', class_='col-xs-9').find('h1').text
    goods_data['name'] = name
    # 图片
    images_span = soup.findAll('span', class_='thumbnail')
    images = []
    for image in images_span:
        images.append(image.find('img')['src'])
    goods_data['images'] = images
    # 价格
    price_div = soup.find('div', class_='col-xs-7 item-price')
    price = price_div.find('span', class_='text-xl text-red').text
    goods_data['price'] = price
    # 价格含税
    price_1 = price_div.find('span', class_='inline-block').text
    goods_data['price1'] = price_1
    # 商品番号
    tables = soup.find('div', class_='col-xs-12 item-detail').findAll('div', class_='table-row')
    for table in tables:
        spans = table.findAll('span')
        for i in range(len(spans)):
            if spans[i].text.strip() == '【商品番号】':  # 商品番号
                goods_data['goodsNumber'] = spans[i + 1].text
            if spans[i].text.strip() == '【カラー】':  # 彩色
                goods_data['color'] = spans[i + 1].text
            if spans[i].text.strip() == '【サイズ】':  # 比例
                goods_data['size'] = spans[i + 1].text
            if spans[i].text.strip() == '【発売予定】':  # 发售预定时间
                goods_data['subscribeAt'] = spans[i + 1].text
            if spans[i].text.strip() == '【メーカー】':  # 制造商
                goods_data['makeBy'] = spans[i + 1].text
    try:
        brs = soup.find('div', class_='col-xs-12 text-s item-detail-comment').findAll('br')
        introduction = ''
        for br in brs:
            if br.text.strip() != '':
                introduction += br.text.strip()
        goods_data['introduction'] = introduction
    except Exception as e:
        pass
    print(goods_data)
    all_datas.append(goods_data)


def toJson(obj):
    return json.dumps(obj)


def writeToFile(data, fileName):
    with open(f'{fileName}.json', 'a') as file:
        file.write(data)


start()
