import requests
from pyquery import PyQuery as pq
import time
import pandas as pd
import os

# Session cookies captured from a real browser visit to m.hua.com.
# NOTE(review): these include a server session id and GA identifiers and will
# expire — refresh them from the browser when requests start failing.
cookies = {
  "MEIQIA_TRACK_ID": "2VEGdRIfjvSrBzuyLBjoyirt6SL",
  "MEIQIA_VISIT_ID": "2VEGdRAcwDLpxJTgGU5EHH8RiSX",
  "DeliverAreaCodeVal": "",
  "AGL_USER_ID": "331fd302-22d4-4781-8e79-db8d1b85ac9f",
  "Hm_lvt_ae896b60487020696d3df0e260c080bf": "1716205126",
  "_gcl_au": "1.1.430211628.1716205126",
  "CookiesHuaPid": "/gifts/-1077011",
  "_ga": "GA1.1.915308569.1694393911",
  "ASP.NET_SessionId": "z44bmvq0phvcq4p0xedavebd",
  "CookiesUnique": "ce12c73e-f203-43ec-8dc3-207de02d4bdf~2023/09/11~2024/05/26~0~0",
  "_ga_0H5T4WNBH4": "GS1.1.1716736344.7.1.1716738017.46.0.0",
}

# Request headers mimicking the site's own XHR calls from a mobile browser
# (mobile user-agent, XMLHttpRequest marker, referer of the listing page).
headers = {
  "accept": "*/*",
  "accept-language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7",
  "cache-control": "no-cache",
  # 'cookie': 'MEIQIA_TRACK_ID=2VEGdRIfjvSrBzuyLBjoyirt6SL; MEIQIA_VISIT_ID=2VEGdRAcwDLpxJTgGU5EHH8RiSX; DeliverAreaCodeVal=; AGL_USER_ID=331fd302-22d4-4781-8e79-db8d1b85ac9f; Hm_lvt_ae896b60487020696d3df0e260c080bf=1716205126; _gcl_au=1.1.430211628.1716205126; CookiesHuaPid=/gifts/-1077011; _ga=GA1.1.915308569.1694393911; ASP.NET_SessionId=z44bmvq0phvcq4p0xedavebd; CookiesUnique=ce12c73e-f203-43ec-8dc3-207de02d4bdf~2023/09/11~2024/05/26~0~0; _ga_0H5T4WNBH4=GS1.1.1716736344.7.1.1716738017.46.0.0',
  "pragma": "no-cache",
  "priority": "u=1, i",
  "referer": "https://m.hua.com/youqingxianhua/",
  "sec-fetch-dest": "empty",
  "sec-fetch-mode": "cors",
  "sec-fetch-site": "same-origin",
  "user-agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1",
  "x-requested-with": "XMLHttpRequest",
}

# Query-string parameters for the ListMore endpoint.
# pIndex is the page number and is overwritten by the main loop below;
# kwCode selects the "youqing" (friendship flowers) category.
params = {
  "r": "0",
  "pIndex": 0,
  "kwCode": "youqing",
}

# Accumulator for all scraped rows; wash() appends to it, save() writes it out.
result_df = pd.DataFrame()
# Base name (without extension) of the Excel file written by save().
fileName = "xianhua"


def fetch_data():
  """Fetch one page of the product list and feed it to wash().

  Reads the module-level ``params`` (in particular ``pIndex``, set by the
  caller), ``cookies`` and ``headers``. If parsing fails, the rows collected
  so far are flushed to disk via save() so a mid-run failure loses nothing.
  """
  response = requests.get(
    "https://m.hua.com/ProductList/ListMore",
    params=params,
    cookies=cookies,
    headers=headers,
    timeout=10,  # fix: without a timeout a stalled connection hangs forever
  )
  try:
    # Decode as UTF-8 (checked via document.charset in the browser console).
    wash(response.content.decode("utf-8"))
  except Exception:
    # fix: bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    # Best-effort: persist what we already scraped, then keep going.
    save()


def wash(res):
  """Parse a product-list HTML fragment and append its rows to result_df.

  Parameters:
    res: HTML text containing ``.product-item`` elements.

  Side effect: rebinds the module-level ``result_df`` with the new rows
  appended. (Row index values are not meaningful; save() writes with
  ``index=False``.)
  """
  global result_df
  doc = pq(res)
  rows = []
  for node in doc(".product-item"):
    item = pq(node)  # hoist: original re-wrapped the element for every field
    rows.append({
      "href": item(".navigation").attr("href"),
      "img_src": item(".product-item-pic>img").attr("data-original"),
      "name": item(".product-item-info-name").text(),
      "tags": [pq(t).text() for t in item(".product-item-info-tags span")],
      "price": item(".product-item-info-price").text()[1:],   # drop currency symbol
      "sales": item(".product-item-info-sales").text()[3:-1],  # strip label/suffix text
    })
  if rows:
    # fix: one concat per page instead of one per row — the original
    # re-copied the whole accumulated frame for every single product.
    result_df = pd.concat([result_df, pd.DataFrame(rows)])


def save():
  """Write the accumulated result_df to output/<fileName>.xlsx.

  Column headers are replaced with Chinese labels (link, image URL, name,
  tags, price, sales volume) in the order the columns were created by wash().
  """
  out_dir = os.path.join(os.path.dirname(__file__), "output")
  # fix: the original crashed with FileNotFoundError when output/ was missing.
  os.makedirs(out_dir, exist_ok=True)
  path = os.path.join(out_dir, f"{fileName}.xlsx")
  result_df.to_excel(path, index=False, header=['跳转链接', '图片链接', '名称', '标签', '价格', '销售量'])  # write the combined results


if __name__ == "__main__":
  # Walk pages 1 through 19, pausing one second between requests to be
  # polite to the server, then write everything collected to disk.
  page = 1
  while page < 20:
    print(page)
    params["pIndex"] = page
    time.sleep(1)
    fetch_data()
    page += 1
  save()
