import requests
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.date import DateTrigger
from apscheduler.triggers.interval import IntervalTrigger
from apscheduler.schedulers.asyncio import AsyncIOScheduler
# from .lib.request_handler import async_request_tool
from .models import *
import threading
import asyncio
import aiohttp

import logging

# Attach a default stderr handler to the root logger, then turn on verbose
# APScheduler output so job lifecycle events (added / executed / missed)
# are visible while experimenting with the schedulers below.
logging.basicConfig()
logging.getLogger('apscheduler').setLevel(logging.DEBUG)

async def test():
	"""Load ApiCase id=15 (not soft-deleted) and run it through async_request_tool."""
	# NOTE(review): this is a synchronous ORM call inside a coroutine — on
	# Django 3+ this raises SynchronousOnlyOperation; wrap it with
	# asgiref.sync.sync_to_async if that applies here. TODO confirm.
	case = ApiCase.objects.filter(id=15, deleted=False).first()
	# NOTE(review): the async_request_tool import is commented out at the top
	# of this file (from .lib.request_handler) — re-enable it before running.
	await async_request_tool(case)


# Bug fix: a bare `test()` only *creates* the coroutine object and never runs
# it ("RuntimeWarning: coroutine 'test' was never awaited"). asyncio.run()
# drives it to completion; the __main__ guard keeps the module import-safe.
if __name__ == "__main__":
	asyncio.run(test())
# def we():
# 	url = "http://jushangmei.com/"
#
# 	headers = {
# 		'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36'
# 	}
# 	print(f'当前请求响应状态: {requests.get(url).status_code}')
#
# # # job = scheduler.add_job(func=we, trigger='interval', job_id='request-id-3')
#
#
# # run_time = {
# # 	"seconds": 1,
# # }
#
# run_time = "*/1 * * * *"
#
# scheduler = BlockingScheduler()
# # job = scheduler.add_job(func=we, trigger=CronTrigger.from_crontab('*/1 * * * *'), job_id='request-id-1')
# # job = scheduler.add_job(func=we, trigger=DateTrigger('2021-6-2 10:55:02'), job_id='date-request-1')
# # job = scheduler.add_job(func=we, trigger=DateTrigger(), job_id='date-request-1')
# job = scheduler.add_job(func=we, trigger=IntervalTrigger(**run_time))
#
# scheduler.start()
#
# while True:
# 	print("执行while")

# import time
#
# t = round(time.time()*1000)
# print(t)
# print(t*1000)
# print(round(t*1000))


async def spider(params, headers):
	"""POST a login request to the test platform and print the raw response.

	Args:
		params: request payload — a query-string str (as used below) or dict,
			anything aiohttp accepts for `params`.
		headers: dict of HTTP headers to send.
	"""
	url = "http://test-jushangmei-platform-web.jushangmei.com/api/user/login"
	method = 'POST'

	print(f'当前线程是: {threading.current_thread().name}')

	# Bug fix: the session was previously created outside any context manager,
	# so an exception during the request skipped `await session.close()` and
	# leaked the connector. `async with` closes it on every path.
	async with aiohttp.ClientSession() as session:
		async with session.request(url=url, method=method, params=params, headers=headers) as res:
			# Read raw bytes and decode explicitly as UTF-8.
			print(str(await res.read(), encoding='utf-8'))
			print(res.request_info)

# Login payload for spider() above (form-encoded body string).
# NOTE(review): phone number and code are hard-coded credentials committed
# to source — move to environment variables / config before sharing.
params = 'phone=17611165762&code=2959'
headers = {
	# Matches the query-string-style `params` payload above.
	'Content-Type': 'application/x-www-form-urlencoded',
	# Browser-mimicking UA so the endpoint treats this like a normal client.
	'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36'
}
# asyncio.run(spider(params, headers))

# from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
# from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
# from apscheduler.executors.asyncio import AsyncIOExecutor
#
# jobstores = {
#     'default': SQLAlchemyJobStore(url='sqlite:///jobs.sqlite')
# }
# executors = {
#     'default': AsyncIOExecutor(),
#     # 'default': ThreadPoolExecutor(10),
#     'processpool': ProcessPoolExecutor(2)
# }
# job_defaults = {
#     'coalesce': False,
#     'max_instances': 3
# }
# scheduler = AsyncIOScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults)
#
# args = [params, headers]
#
# scheduler.start()
# scheduler.add_job(func=spider, trigger=CronTrigger.from_crontab('*/1 * * * *'), args=args)
#
# asyncio.get_event_loop().run_forever()
#
# while True:
# 	pass
