from tools import read_jscode
import requests
import time
from urllib.parse import urlencode
import os
from beeize.scraper import Scraper


scraper = Scraper()  # beeize Scraper runtime: supplies actor input and the push_data sink
_input = scraper.input

# NOTE(review): removed commented-out local-testing overrides that hardcoded a
# real-looking access token into the source; supply 'PDDAccessToken' and
# 'seach_id' through the scraper input instead. Never commit credentials.

PDDAccessToken = _input.get_string('PDDAccessToken')  # Pinduoduo web-session access token input
seach_id = _input.get_string('seach_id')  # search keyword input ("seach" typo is the input schema key)



# Pinduoduo (yangkeduo) mobile-web search API endpoint.
url = 'https://mobile.yangkeduo.com/proxy/api/search'

# Browser-like headers captured from a real mobile-web search session; the
# referer mirrors an actual search_result.html URL so the request looks organic.
# Values are copied verbatim — do not reformat, the server may fingerprint them.
headers = {
  "accept": "application/json, text/plain, */*",
  "accept-language": "zh-CN,zh;q=0.9",
  "cache-control": "no-cache",
  "pragma": "no-cache",
  "priority": "u=1, i",
  "referer": "https://mobile.yangkeduo.com/search_result.html?search_key=iphone15promax&search_type=goods&source=index&options=3&search_met_track=suggestion&refer_search_met_pos=0&refer_page_el_sn=99886&q_search=%7B%22pes_req_id%22%3A%221721475107777614%22%7D&refer_page_name=search_result&refer_page_id=10015_1721475097217_hbi3v2wv0e&refer_page_sn=10015",
  "sec-ch-ua-mobile": "?0",
  "sec-fetch-dest": "empty",
  "sec-fetch-mode": "cors",
  "sec-fetch-site": "same-origin",
  "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
}

# Session cookies sent with every search request. "njrpl" looks like a static
# device/session cookie copied from a browser session (presumably a
# fingerprint value — TODO confirm); PDDAccessToken authenticates the user.
cookies = {
  "njrpl": "EobCFqroEfAQsnSfZgO1jRVGhc8zqyKH",
  "PDDAccessToken": PDDAccessToken,
}
# Fix: the original printed the cookie dict verbatim, leaking the full access
# token into logs. Print a masked copy instead so runs remain debuggable
# without exposing the credential.
_masked = {
  k: (v[:6] + "..." if k == "PDDAccessToken" and v else v)
  for k, v in cookies.items()
}
print(_masked)

# Crawl search-result pages 1..14. Each iteration regenerates the
# anti_content anti-bot token and fetches a fresh proxy IP before hitting
# the search API, then pushes every goods_model record to the scraper output.
for page in range(1, 15):
  # anti_content must be freshly computed per request by executing the
  # project-supplied JS (main.js, function 'fn').
  anti_content = read_jscode('main.js', 'fn')
  print(anti_content)

  params = {
    "pdduid": "8424308498570",
    "item_ver": "lzqq",
    "coupon_price_flag": "1",
    "source": "index",
    "search_met": "suggestion",
    "track_data": "refer_page_id,10015_1721475097217_hbi3v2wv0e;refer_search_met_pos,0",
    "q_search": "{\"pes_req_id\":\"1721475107777614\"}",
    "list_id": "gokot9wswr",
    "sort": "default",
    "filter": "",
    "q": seach_id,
    "page": page,
    "is_new_query": "1",
    "size": "50",
    "flip": "0;0;0;0;01581703-8488-7df4-737f-5f9638dbecff;/20;0;0;38ccadb059b26aa217d4d658783d08eb",
    "anti_content": anti_content
  }

  # Fetch one short-lived proxy address (txt format, "ip:port").
  # Fixes: add a timeout so a dead proxy API cannot hang the run forever,
  # and strip() the body — the txt endpoint typically ends with a newline,
  # which would corrupt the proxy URL.
  proxy_res = requests.get(
    'http://bapi.51daili.com/unlimitedip/getip?linePoolIndex=1&packid=17&time=5&qty=1&port=1&format=txt&pid=5eb42df579a921a3be66fc7afe95d183&usertype=17&uid=41334',
    timeout=10)
  proxy_addr = proxy_res.text.strip()
  proxy = {
    "http": proxy_addr,
    "https": proxy_addr
  }
  print(proxy)

  response = requests.get(url, headers=headers, cookies=cookies,
                          params=params, proxies=proxy, timeout=15)
  # Parse the body once (the original called response.json() three times).
  data = response.json()
  print(data)
  # A ban/error response carries no 'items' key; default to an empty page
  # instead of crashing with KeyError so the remaining pages still run.
  items = data.get('items', [])
  time.sleep(2)  # throttle between pages to stay under rate limits
  for item in items:
    goods = item['item_data']['goods_model']
    print(goods)
    scraper.push_data(goods)  # emit one record per goods_model to the dataset



