from time import sleep

import requests
from lxml import etree
import pandas as pd

headers = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,/;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "Accept-Encoding": "gzip, deflate, br",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
    "Cache-Control": "max-age=0",
    "Connection": "keep-alive",
    "Cookie": "_xmLog=h5&ebc617d5-bd2c-44ed-959f-522524ba0e2d&process.env.sdkVersion; xm-page-viewid=ximalaya-web; impl=www.ximalaya.com.login; x_xmly_traffic=utm_source%253A%2526utm_medium%253A%2526utm_campaign%253A%2526utm_content%253A%2526utm_term%253A%2526utm_from%253A; x-ats=ACM0MDEwNWEwMWJkZTE5NmUzJbLOfRFFC5x4bXdlYl93d3c; Hm_lvt_4a7d8ec50cfd6af753c4f8aee3425070=1700897322; Hm_lpvt_4a7d8ec50cfd6af753c4f8aee3425070=1700897322",
    "Host": "www.ximalaya.com",
    "Referer": "https://www.baidu.com/",
    "Sec-Ch-Ua": '"Chromium";v="116", "Not)A;Brand";v="24", "Microsoft Edge";v="116"',
    "Sec-Ch-Ua-Mobile": "?0",
    "Sec-Ch-Ua-Platform": "Windows",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-User": "?1",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 Edg/116.0.1938.69"
}

# Superseded: these per-category ranking URLs are now discovered dynamically
# by req_urls_list(); the list is kept for reference only.
# urls = [
#     'https://www.ximalaya.com/top/',
#     'https://www.ximalaya.com/top/1/100149/',
#     'https://www.ximalaya.com/top/1/100150/',
#     'https://www.ximalaya.com/top/1/100007/',
#     'https://www.ximalaya.com/top/1/100271/',
#     'https://www.ximalaya.com/top/1/100297/',
#     'https://www.ximalaya.com/top/1/100413/',
#     'https://www.ximalaya.com/top/1/100490/',
#     'https://www.ximalaya.com/top/1/100326/',
#     'https://www.ximalaya.com/top/1/100325/',
#     'https://www.ximalaya.com/top/1/100151/',
#     'https://www.ximalaya.com/top/1/100411/'
# ]

# Accumulates one row per album across all ranking pages; handle_info()
# appends to it and save_info() flushes it to CSV at the end of the run.
info_list = []


def main():
    """Discover every ranking page linked from the top page, scrape each
    one, and persist all collected album rows to a CSV file."""
    # Renamed local from the original "ranK_types" (inconsistent casing typo)
    # to match the snake_case used everywhere else in this file.
    urls, rank_types = req_urls_list()
    req_data(urls, rank_types)


def req_urls_list():
    """Scrape the ranking index page and collect the per-category ranking links.

    Returns:
        tuple: (urls, rank_types) — absolute ranking-page URLs and the
        matching category display names, parallel lists of equal length.

    Raises:
        requests.HTTPError: if the index page responds with an error status.
    """
    url = 'https://www.ximalaya.com/top/'
    # timeout added so a stalled connection cannot hang the whole script;
    # raise_for_status fails fast instead of parsing an error page.
    response = requests.get(url, headers=headers, timeout=10)
    response.raise_for_status()
    html = response.content.decode()
    html_doc = etree.HTML(html)
    # Anchor elements of the category tab bar on the ranking index page.
    url_list = html_doc.xpath('/html/body/div/main/div[1]/div[2]/div/div[2]/div/div/div[1]//a')
    # hrefs are site-relative, so prepend the host to get absolute URLs.
    urls = ['https://www.ximalaya.com' + i.xpath('./@href')[0] for i in url_list]
    rank_types = [i.xpath('./text()')[0] for i in url_list]
    return urls, rank_types


def req_data(urls, rank_types):
    """Fetch each ranking page, extract its album entries and ranks, pass
    them to handle_info(), and finally persist everything via save_info().

    Args:
        urls: absolute URLs of the per-category ranking pages.
        rank_types: category display names, parallel to ``urls``.

    Raises:
        requests.HTTPError: if any ranking page responds with an error status.
    """
    for url, rank_type in zip(urls, rank_types):
        # timeout added so one stalled page cannot hang the whole crawl;
        # raise_for_status fails fast instead of parsing an error page.
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        html = response.content.decode()
        html_doc = etree.HTML(html)
        # One container div per album entry on the ranking page.
        div = html_doc.xpath(
            '/html/body/div/main/div[1]/div[2]/div/div[2]/div/div/div[2]//div[@class="album-item _Sq"]')
        # Second child div under the link holds the textual album details.
        info_div = [i.xpath('./a/div[2]')[0] for i in div]
        rank_list = [i.xpath('./span[@class="album-index _Sq"]/text()')[0] for i in div]
        handle_info(info_div, rank_list, rank_type)
        sleep(1)  # be polite: throttle to roughly one page per second
    save_info()


def handle_info(info_div, rank_list, rank_type):
    """Extract the album fields from each info element and append one row
    per album to the module-level ``info_list``.

    Args:
        info_div: lxml elements, one per album, holding the textual details.
        rank_list: rank strings, parallel to ``info_div``.
        rank_type: category name recorded with every row.
    """
    # Field extraction paths, in the same order as the CSV columns
    # (title, category, author, album_count, play_count, description).
    field_xpaths = (
        './div[@class="title _Sq"]/text()',
        './span[@class="user-category"]/span/text()',
        './span[@class="user-name mgl-20"]/span/text()',
        './span[@class="user-albumcount mgl-20"]/span/text()',
        './span[@class="user-playcount mgl-20"]/span/text()',
        './div[@class="description _Sq"]/text()',
    )
    for element, rank in zip(info_div, rank_list):
        row = [element.xpath(xp)[0] for xp in field_xpaths]
        row.extend([rank, rank_type])
        info_list.append(row)


def save_info():
    """Write all rows accumulated in ``info_list`` to ../static/data/info.csv."""
    from pathlib import Path  # local import keeps the file's top-level deps unchanged

    out_path = Path('../static/data/info.csv')
    # Create the output directory if missing so to_csv cannot fail with
    # FileNotFoundError on a fresh checkout.
    out_path.parent.mkdir(parents=True, exist_ok=True)
    df = pd.DataFrame(info_list,
                      columns=['title', 'category', 'author', 'album_count', 'play_count', 'description', 'rank',
                               'rank_type'])
    df.to_csv(out_path, index=False)


# Run the scraper only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
