import csv

import requests
from bs4 import BeautifulSoup
import pandas as pd
import numpy as np


class Display:
    """Holds one monitor's spec fields scraped from a ZOL product parameter page.

    All fields are plain strings; unscraped fields keep the caller-supplied
    placeholder (the script passes 'null').
    """

    # Maps a Chinese spec label from ZOL's parameter table to the attribute(s)
    # it fills in. The original if/elif chain tested '屏幕曲率' twice, leaving
    # the Curvature branch unreachable; '屏幕曲率' (screen curvature) now updates
    # both Shape (preserving the previously reachable behavior) and Curvature.
    # NOTE(review): Shape's intended source label is unclear — confirm against
    # the site's parameter table and give Shape its own key if one exists.
    _LABEL_TO_ATTRS = {
        '类型': ('Platform',),
        '屏幕曲率': ('Shape', 'Curvature'),
        '屏幕尺寸': ('Size',),
        '最佳分辨率': ('Resolution',),
        '刷新率': ('RefreshRate',),
        '面板类型': ('Panel',),
        '色域': ('ColorGamut',),
    }

    def __init__(self, Name, ID, PID, Platform, Price, Shape, Size, Resolution, RefreshRate, Panel, ColorGamut,
                 Curvature):
        self.Name = Name
        self.ID = ID
        self.PID = PID
        self.Platform = Platform
        self.Price = Price
        self.Shape = Shape
        self.Size = Size
        self.Resolution = Resolution
        self.RefreshRate = RefreshRate
        self.Panel = Panel
        self.ColorGamut = ColorGamut
        self.Curvature = Curvature

    def tranTo(self, begin, after):
        """Assign scraped value *after* to the attribute(s) named by label *begin*.

        Unrecognized labels are silently ignored, matching the original chain.
        """
        for attr in self._LABEL_TO_ATTRS.get(begin, ()):
            setattr(self, attr, after)


def get_html(url):
    """Fetch *url* with browser-like headers and return its HTML text.

    Returns None (after printing a message) on a non-200 status or a network
    error, so callers must check before parsing.
    """
    # Browser-like headers so the site serves the normal page instead of
    # blocking the default requests User-Agent.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/108.0.0.0 Safari/537.36 Edg/108.0.1462.54',
        'accept-language': 'zh-CN,zh;q=0.9'
    }
    print("--> 正在获取网站信息")
    try:
        # Timeout added: the original call could hang forever on a stalled
        # connection (requests has no default timeout).
        response = requests.get(url, headers=headers, timeout=10)
    except requests.RequestException:
        print("获取网站信息失败！")
        return None
    if response.status_code == 200:
        return response.text  # page HTML source
    print("获取网站信息失败！")
    return None  # explicit, instead of the original implicit None

if __name__ == '__main__':
    # Product list produced by the earlier ID-scraping step.
    data = pd.read_csv("../IDs/Display.csv")
    ids = np.array(data["ID"])
    picPath = np.array(data['PicPath'])
    names = np.array(data['Name'])
    prices = np.array(data['Price'])

    # `with` guarantees the output file is closed (and flushed) even if a
    # request or parse step raises mid-run; the original leaked the handle
    # on any exception.
    with open('DisplayInfo.csv', 'w', newline='', encoding='utf-8') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(
            ['Name', 'ID', 'PID', 'Platform', 'Price', 'Shape', 'Size', 'Resolution', 'RefreshRate', 'Panel',
             'ColorGamut', 'Curvature'])

        # zip over the parallel columns replaces range(len(...)) indexing and
        # the dead `i += 1` the original carried inside its for loop.
        for prod_id, pic, name, price in zip(ids, picPath, names, prices):
            search_url = 'https://detail.zol.com.cn/1428/' + str(prod_id) + '/param.shtml'
            html = get_html(search_url)
            if html is None:
                # get_html already reported the failure; skip this product
                # instead of crashing BeautifulSoup(None, ...).
                continue
            soup = BeautifulSoup(html, 'lxml')
            beginList = soup.find_all('th')
            afterList = soup.find_all('td', class_='hover-edit-param')

            case = Display(name, prod_id, pic, 'null', price,
                           'null', 'null', 'null', 'null', 'null', 'null', 'null')
            # zip stops at the shorter list, avoiding the IndexError the
            # original risked when th/td counts differ on a page.
            for begin_cell, after_cell in zip(beginList, afterList):
                case.tranTo(begin_cell.span.get_text(), after_cell.span.get_text())

            writer.writerow(
                [case.Name, case.ID, case.PID, case.Platform, case.Price, case.Shape, case.Size, case.Resolution,
                 case.RefreshRate, case.Panel, case.ColorGamut, case.Curvature])
