import requests
import asyncio
import aiohttp
import threading
import concurrent.futures




# url = 'http://www.xinfadi.com.cn/getPriceData.html'
# for i in range(1,23812):
#     print(f'正在爬取第{i}页')
#     response = requests.post(url,data={"current":i})
#     items = response.json()['list']
#     for item in items:
#         print(item)


# #
# def make_data(page):
#     return {
#         "limit": 20,
#         "current": page,
#         "pubDateStartTime": "2023/01/01"          # 去掉空格
#     }
# CONCURRENCY = 3
# RATE = 1
#
# async def fetch(session, data, sem):
#     async with sem:                            # 并发闸
#         await asyncio.sleep(RATE)              # 限速
#         print(f'开始爬取第{data["current"]}页')
#         async with session.post(
#                 url='http://www.xinfadi.com.cn/getPriceData.html',  # 去掉空格
#                 data=data) as resp:
#             d = await resp.json()
#             print(d)
#             return d
#
# async def main():
#     sem = asyncio.Semaphore(CONCURRENCY)
#     async with aiohttp.ClientSession(
#             connector=aiohttp.TCPConnector(limit=CONCURRENCY, ssl=False),
#             timeout=aiohttp.ClientTimeout(total=15)) as session:
#
#         datas = [make_data(page) for page in range(1, 101)]
#         # 先跑 100 页
#         values = await asyncio.gather(*[fetch(session, d, sem) for d in datas])
#         for v in values:
#             print(v)
# if __name__ == '__main__':
#     asyncio.run(main())




# 多线程
# def make_data(page):
#     return {
#             "limit": 20,
#             "current": page,
#             "pubDateStartTime": "2023/01/01"          # 去掉空格
#         }
#
# def fetch(data):
#     print(f'子线程{data["current"]}启动')
#     res = requests.post(url='http://www.xinfadi.com.cn/getPriceData.html',data = data)
#     data = res.json()
#     print(data['list'])
#
# def main():
#     ls = []
#     datas = [make_data(page) for page in range(1, 101)]
#     ds = [datas[i:i + 20] for i in range(0, len(datas), 20)]
#     # print(len(ds))
#     for d in ds:
#         print(d)
#         t = threading.Thread(target=fetch, args=(d,))
#         t.start()
#         ls.append(t)
#     for l in ls:
#         l.join()
#
#
# if __name__ == '__main__':
#     main()


import concurrent.futures
import pymysql

# Module-level MySQL connection and cursor, shared by every thread-pool worker.
# NOTE(review): pymysql connections are not thread-safe — sharing one cursor
# across ThreadPoolExecutor workers can interleave protocol packets; verify,
# or give each worker (or each task) its own connection.
con = pymysql.connect(host='127.0.0.1', port=3306,  user='zzf',password='123456',database='lx')
cursor = con.cursor()



def make_data(page):
    """Build the POST form payload requesting one page of the price listing.

    Args:
        page: 1-based page number to request.

    Returns:
        dict with the fixed page size (20), the requested page number,
        and the earliest publication-date filter.
    """
    payload = dict(limit=20, current=page)
    payload["pubDateStartTime"] = "2023/01/01"
    return payload
datas = [make_data(page) for page in range(1, 20000)]

def f(data):
    """Fetch one page of price data and persist its rows into table ``jg``.

    Args:
        data: POST form payload (as produced by ``make_data``) identifying
            the page to fetch.

    Side effects:
        Prints each product name/low price, then inserts one row per product
        via the module-level ``cursor``/``con`` and commits once per page.

    NOTE(review): ``cursor``/``con`` are shared across worker threads without
    a lock and pymysql is not thread-safe — confirm this is acceptable, or
    use one connection per thread.
    """
    res = requests.post(
        url="http://www.xinfadi.com.cn/getPriceData.html",
        data=data,
        timeout=15,  # don't let a stalled response hang a worker forever
    )
    # Use a distinct name: the original rebound the `data` parameter here,
    # shadowing the request payload.
    rows = res.json()['list']
    for item in rows:
        print(item['prodName'], item['lowPrice'])
    # Batch the inserts and commit once per page instead of once per row.
    # Crucially, do NOT close the connection here: the original called
    # con.close() inside the loop, killing the shared connection after the
    # very first row and breaking every subsequent insert and worker.
    cursor.executemany(
        'INSERT INTO jg VALUES (%s, %s,%s)',
        [(item['prodName'], item['lowPrice'], 0) for item in rows],
    )
    con.commit()


# Fan the page requests out over a small thread pool (I/O-bound work, so
# threads overlap the network waits).
# NOTE(review): the `break` below stops after submitting only the FIRST
# page — presumably a debugging/testing leftover; remove it to scrape all
# pages (and consider rate-limiting before doing so).
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
    for data in datas:
        executor.submit(f, data)
        break



