import hashlib
import json
import random
import requests

# res_urls = set()
# urls = [f'https://httpbin.org/get?value{random.randint(1,100)}'for i  in range(10)]
# for url in urls:
#     if url not in res_urls:
#         res_urls.add(url)
#         res = requests.get(url)
#         print(res.json())
#     else:
#         print(f"该地址已经请求过了，不用再次请求")


#
# import hashlib
# import json
#
# data = {
#     "id": 1,
#     "value": 5,
#     "k1": "v1"
# }
# data2 = {
#     "id": 1,
#     "value": 5,
#     "k1": "v3"
#
# }
# data3 = {
#     "id": 1,
#     "value": 5,
#     "k1": "v3"
#
# }
# data4 = {
#     "id": 1,
#     "value": 5,
#     "k1": "v2"
#
# }
#
# print(hashlib.md5(json.dumps(data).encode()).hexdigest())
# print(hashlib.md5(json.dumps(data2).encode()).hexdigest())
# print(hashlib.md5(json.dumps(data3).encode()).hexdigest())
# print(hashlib.md5(json.dumps(data4).encode()).hexdigest())



# Deduplicate POST payloads: hash each payload dict and only send it to the
# server if an identical payload has not been sent before.
url = 'http://httpbin.org/post'  # plain string: no placeholders, f-prefix dropped

# Ten sample payloads; the small random range makes duplicates likely.
datas = [{"id": 101, "value": random.randint(1, 10)} for i in range(10)]

requests_data = set()  # MD5 hex digests of payloads already posted
for data in datas:
    # sort_keys=True makes the JSON serialization (and therefore the hash)
    # independent of dict key insertion order, so equal dicts always produce
    # the same digest — without it, dedup by hash is order-dependent.
    data_hash = hashlib.md5(
        json.dumps(data, sort_keys=True).encode('utf-8')
    ).hexdigest()
    if data_hash not in requests_data:
        requests_data.add(data_hash)
        res = requests.post(url, data=data)  # form-encoded body, as before
        print(res.json())
    else:
        print("有重复项")