#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :test.py
# @Time      :2023/7/25 
# @Author    :CL
# @email     :1037654919@qq.com

import time
import requests
from bs4 import   BeautifulSoup
import os
import random
# Local proxy endpoint (pigcha client); used by save_image() for image downloads.
proxies = {'http': '127.0.0.1:15732',
           'https': '127.0.0.1:15732'}
def get_proxy(port_min=24000, port_max=24400):
    """Build a requests-style ``proxies`` dict for the iinti proxy pool.

    A random port in ``[port_min, port_max]`` is chosen on each call;
    presumably the pool maps different ports to different upstream exits
    (rotating proxy) — TODO confirm with the provider docs.

    Args:
        port_min: Lowest proxy port to pick (inclusive). Defaults to the
            original hard-coded 24000.
        port_max: Highest proxy port to pick (inclusive). Defaults to the
            original hard-coded 24400.

    Returns:
        dict: ``{'http': ..., 'https': ...}`` suitable for requests'
        ``proxies=`` keyword.
    """
    # NOTE(review): credentials are hard-coded; consider loading them from
    # environment variables or a config file instead.
    port = random.randint(port_min, port_max)
    proxy_url = f'http://zheng123:zheng123@haproxy.iinti.cn:{port}'
    return {'http': proxy_url, 'https': proxy_url}


def get_pin(url):
    """GET ``url`` with retries, returning the response body or None.

    Makes up to 5 attempts, each with a 5-second timeout; after a
    non-200 response it sleeps 5 seconds before retrying.

    Args:
        url: Absolute URL to fetch.

    Returns:
        str | None: Response text on HTTP 200, else None once all
        attempts are exhausted.
    """
    print('begin:', url)
    for _ in range(5):
        try:
            response = requests.get(url, timeout=5)
            if response.status_code == 200:
                return response.text
            time.sleep(5)
        except requests.RequestException:
            # Network-level failure (timeout, DNS, reset): retry.
            # Was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit — narrowed to requests errors.
            pass
    return None

def get_pins(url):
    """Fetch one page of a board's pin listing from the huaban API.

    Sends a browser-mimicking GET with a captured session cookie.

    NOTE(review): the ``max`` param is a hard-coded pagination cursor and
    the ``sid`` cookie is a captured session token — both will go stale;
    confirm they are still valid before relying on this.

    Args:
        url: Board pins endpoint, e.g.
            ``https://api.huaban.com/boards/<id>/pins``.

    Returns:
        dict | None: Parsed JSON payload on HTTP 200, else None.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0",
        "Accept": "application/json, text/plain, */*",
        "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
        "Accept-Encoding": "gzip, deflate, br",
        "Referer": "https://huaban.com//boards/63835150",
        "Origin": "https://huaban.com",
        "Connection": "keep-alive",
        "Sec-Fetch-Dest": "empty",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Site": "same-site",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache"
    }
    cookies = {
        "user_device_id": "81a9c3e41f8b4dceb6ea3c987964d77c",
        "user_device_id_timestamp": "1690265509043",
        "Hm_lvt_d4a0e7c3cd16eb58a65472f40e7ee543": "1690265510",
        "Hm_lpvt_d4a0e7c3cd16eb58a65472f40e7ee543": "1690267737",
        "Hm_up_d4a0e7c3cd16eb58a65472f40e7ee543": "%7B%22version%22%3A%7B%22value%22%3A%222.0.0%22%2C%22scope%22%3A1%7D%2C%22has_plugin%22%3A%7B%22value%22%3A%220%22%2C%22scope%22%3A1%7D%7D",
        "_ga": "GA1.2.705138313.1690265516",
        "_gid": "GA1.2.391768333.1690265544",
        "sid": "s%3Av9kh7OyMn1HpsN1h6Eb2C0YP59DR_SAa.JfBf3LsTyiVzetl%2BjZnf1TEryrnH7PsRqDzXyZBihEw",
        "newbietask": "1",
        "registered": "registered",
        "uid": "37215891",
        "gd_id": "2024933926397944859",
        "_gat_UA-135559536-2": "1"
    }
    params = {
        "limit": "40",
        "max": "3362261801",
        "fields": "pins:PIN,board:BOARD_DETAIL,check"
    }
    # timeout added: the original call could hang forever, and every other
    # request in this file uses an explicit timeout.
    response = requests.get(url, headers=headers, cookies=cookies,
                            params=params, timeout=10)
    print(response)
    if response.status_code == 200:
        return response.json()
    return None

def get_board_id(keyword, page):
    """Search huaban boards by keyword and return one page of results.

    NOTE(review): the ``sid``/``acw_tc`` cookies are captured session
    tokens and will go stale; confirm before relying on this.

    Args:
        keyword: Search query string (e.g. '车').
        page: 1-based result page number.

    Returns:
        dict: Parsed JSON payload; callers read its ``'boards'`` key.

    Raises:
        requests.RequestException: On network failure or timeout.
        ValueError: If the response body is not valid JSON.
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0",
        "Accept": "application/json, text/plain, */*",
        "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
        "Accept-Encoding": "gzip, deflate, br",
        "Referer": "https://huaban.com/search?type=board&q=%E8%BD%A6",
        "Origin": "https://huaban.com",
        "Connection": "keep-alive",
        "Sec-Fetch-Dest": "empty",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Site": "same-site",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache"
    }
    cookies = {
        "user_device_id": "81a9c3e41f8b4dceb6ea3c987964d77c",
        "user_device_id_timestamp": "1690265509043",
        "Hm_lvt_d4a0e7c3cd16eb58a65472f40e7ee543": "1690265510",
        "Hm_lpvt_d4a0e7c3cd16eb58a65472f40e7ee543": "1690271438",
        "Hm_up_d4a0e7c3cd16eb58a65472f40e7ee543": "%7B%22version%22%3A%7B%22value%22%3A%222.0.0%22%2C%22scope%22%3A1%7D%2C%22has_plugin%22%3A%7B%22value%22%3A%220%22%2C%22scope%22%3A1%7D%7D",
        "_ga_50RYEM7F09": "GS1.1.1690338064.3.1.1690338076.0.0.0",
        "_ga": "GA1.2.705138313.1690265516",
        "_gid": "GA1.2.391768333.1690265544",
        "sid": "s%3Av9kh7OyMn1HpsN1h6Eb2C0YP59DR_SAa.JfBf3LsTyiVzetl%2BjZnf1TEryrnH7PsRqDzXyZBihEw",
        "newbietask": "1",
        "registered": "registered",
        "uid": "37215891",
        "gd_id": "2024933926397944859",
        "acw_tc": "0b32824216903374873624237ea17f6d71f4a8875c31ff58fcf5625d04c6f2",
        "_gat_UA-135559536-2": "1"
    }

    url = "https://api.huaban.com/search/boards"
    params = {
        "q": keyword,
        "sort": "all2",
        "page": page,
        "per_page": "20",
        "hide_other_count": "1",
        "fields": "boards:BOARD,facets,board_count"
    }
    # timeout added: the original call could hang forever, and every other
    # request in this file uses an explicit timeout.
    response = requests.get(url, headers=headers, cookies=cookies,
                            params=params, timeout=10)
    print(response)
    return response.json()

def parse_url(id):
    """Fetch a board's pin listing and report how many pins it holds.

    NOTE(review): the pin-collection loop was commented out upstream, so
    the returned list is currently always empty.

    Args:
        id: Huaban board id.

    Returns:
        list: Collected pin URLs (empty as currently written).
    """
    api_url = f"https://api.huaban.com/boards/{id}/pins"
    print('begin:', api_url)
    hreflist = []
    payload = get_pins(api_url)
    if payload:
        pins = payload['pins']
        _board = payload['board']  # fetched for parity with the original; unused
        print(len(pins))
    return hreflist
def save_image(name, url, PWD):
    """Download ``url`` to ``PWD + name``, skipping already-saved files.

    The request goes through the module-level ``proxies`` endpoint.

    Args:
        name: Target file name (callers pass the last URL path segment).
        url: Image URL to download.
        PWD: Destination path prefix; must end with a separator, since it
            is concatenated directly with ``name``.

    Returns:
        int | None: 1 on success, 0 on failure, None when the file
        already exists (nothing to do).
    """
    print('save image:', url)
    filename = PWD + name
    if os.path.isfile(filename):
        # Already downloaded on a previous run — don't re-fetch.
        print("文件存在：", filename)
        return
    try:
        response = requests.get(url, proxies=proxies, stream=True, timeout=5)
        with open(filename, 'wb') as fd:
            fd.write(response.content)
        # Dropped the original `requests.session().close()`: it created a
        # brand-new Session and immediately closed it, affecting nothing.
        return 1
    except Exception as e:
        print(url, '保存图片失败', e)
        return 0

def main():
    """Placeholder entry point (unused); the driver code lives under __main__."""

if __name__ == "__main__":
    # Search boards for the keyword, then walk each board's pin listing.
    search_result = get_board_id(keyword='车', page=1)

    boards = search_result['boards']
    print(len(boards))
    print(boards[0])
    for board in boards:
        board_id = board['board_id']
        print(board_id)
        hreflist = parse_url(board_id)
