#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :test.py
# @Time      :2023/7/12 
# @Author    :CL
# @email     :1037654919@qq.com


import time
import os
import requests
# Proxy endpoints routed through a local tunnel on port 15732
# (presumably a local VPN/proxy client — TODO confirm it is running before use).
# Shared by every requests.get() call in this module.
proxies={'http':'127.0.0.1:15732',
         'https':'127.0.0.1:15732'}

def parse_json_page(response):
    """Extract the fields of interest from an Adobe Stock search response.

    Args:
        response: an object with a ``.json()`` method (e.g. a
            ``requests.Response``) whose payload contains an ``items``
            mapping of item-id -> item dict.

    Returns:
        list[dict]: one dict per item, restricted to the selected fields.

    Raises:
        KeyError: if the payload has no ``items`` key or an item is
            missing one of the expected fields.
    """
    # The fields we copy out of each raw item, in output order.
    fields = (
        'content_id',
        'title',
        'content_thumb_extra_large_url',
        'comp_file_path',
        'media_type_label',
        'content_url',
    )
    items = response.json()['items']
    # The item ids (dict keys) are not needed, so iterate values only.
    return [{field: item[field] for field in fields} for item in items.values()]

def down_image(file, url):  # 下载图片方法
    """Download *url* to the local path *file*, retrying up to 3 times.

    Args:
        file: destination file path for the downloaded bytes.
        url: image URL to fetch (routed through the module-level proxies).

    Returns:
        int: 1 on success, 0 after all attempts fail (kept as ints for
        existing callers).
    """
    print("开始下载：", url)
    for _ in range(3):
        try:
            response = requests.get(url, proxies=proxies, timeout=10)
            # An HTTP error page is not image data — raise so we retry
            # instead of silently writing the error body to disk.
            response.raise_for_status()
            with open(file, 'wb') as fd:
                fd.write(response.content)
            return 1
        except (requests.RequestException, OSError):
            # Transient network/disk error: back off briefly, then retry.
            time.sleep(5)
    print("下载失败了", url)
    return 0


def get_json_page(kw, pageNo):
    """Fetch one page of Adobe Stock search results.

    Args:
        kw: search keyword string.
        pageNo: 1-based page number of the paginated search.

    Returns:
        requests.Response on HTTP 200, or None after 5 failed attempts.
    """
    headers = {
        "authority": "stock.adobe.com",
        "accept": "*/*",
        "x-requested-with": "XMLHttpRequest",
    }
    url = "https://stock.adobe.com/hk/Ajax/Search"
    # NOTE: keys/values must be the *decoded* parameter names — requests
    # URL-encodes them itself.  Pre-encoded keys like "filters%5B...]"
    # get double-encoded on the wire ("%255B"), so the server would see
    # the wrong parameter names and silently ignore the filters.
    params = {
        "filters[content_type:photo]": "1",
        "filters[content_type:illustration]": "1",
        "filters[content_type:zip_vector]": "1",
        "filters[content_type:video]": "1",
        "filters[content_type:template]": "1",
        "filters[content_type:3d]": "1",
        "filters[content_type:audio]": "0",
        "filters[include_stock_enterprise]": "0",
        "filters[is_editorial]": "0",
        "filters[free_collection]": "0",
        "filters[content_type:image]": "1",
        "k": kw,
        "order": "relevance",
        "safe_search": "1",
        "limit": "100",
        "search_page": pageNo,
        "get_facets": "0",
        "search_type": "pagination",
    }
    for _ in range(5):
        try:
            response = requests.get(url, proxies=proxies, headers=headers,
                                    params=params, timeout=10)
            if response.status_code == 200:
                return response
            # Non-200 (rate limit, server error): wait before retrying.
            time.sleep(5)
        except requests.RequestException:
            # Network failure is transient too — back off, don't hammer.
            time.sleep(5)
    return None


if __name__ == '__main__':
    response = get_json_page("concept car", 1)
    # get_json_page returns None when every attempt fails; guard so we
    # don't crash with AttributeError inside parse_json_page.
    if response is None:
        print("request failed after retries")
    else:
        results = parse_json_page(response)
        for result in results:
            print(result)



