import ast
from concurrent.futures.thread import ThreadPoolExecutor

from z_Userapp.MyRedis import MyRedis
from z_Userapp.MyRedisQueue import RedisQueue

from .models import *


# Synchronous consumption (example, kept for reference)
# for i in range(100):
#     print(q.pop())
# print(z.lrange())
# print(f'Took: {end - start} seconds')

# Asynchronous consumption
z = RedisQueue('queue')
r = MyRedis()
async def dojob():
    """Drain the 'queue' Redis queue and apply username updates.

    Each queue item is expected to be the string form of a Python list
    literal ``[user_id, username]`` (see the producer examples in this
    file). For every item:

    * if the username is already taken, record status 400 in the 'lz' hash;
    * otherwise update the user's username and record status 200.

    NOTE(review): declared ``async`` but contains no ``await`` — the Redis
    and ORM calls are all blocking, so this coroutine runs synchronously;
    confirm whether async is actually intended here.
    """
    key = 'lz'  # Redis hash holding per-item result codes (loop-invariant)
    while True:
        raw = z.pop()
        if not raw:  # queue drained (falsy sentinel from pop)
            break
        # SECURITY: the original used eval() on queue data, which executes
        # arbitrary code; ast.literal_eval only parses Python literals and
        # is a safe drop-in for the list-literal payloads produced here.
        result = ast.literal_eval(raw)
        print(result)
        print(type(result))
        # .exists() issues a cheap EXISTS query instead of materializing
        # the whole queryset just for a truthiness check.
        if UserModel.objects.filter(username=result[1]).exists():
            # Username already taken -> mark as failed (HTTP-style 400).
            r.hset(key, result[0], 400)
        else:
            UserModel.objects.filter(id=result[0]).update(username=result[1])
            r.hset(key, result[0], 200)



# Producer example: [z.put('[1,2,3]') for i in range(100)]

# Start multiple threads
# for index in range(3):
#     thread=threading.Thread(target=dojob)  # pass the function itself; writing dojob() would call it immediately instead
#     thread.start()


# Thread pool
# with ThreadPoolExecutor(max_workers=5) as t:
#     [t.submit(dojob) for i in range(2)]


# if __name__ == '__main__':
#     # [z.put(i) for i in range(100)]
#     # z.put('example')
#     p = ThreadPoolExecutor(4)
#     for i in range(z.lz):
#         print("Run #{}".format(i))
#         p.submit(dojob)  # submit dojob asynchronously to a worker
    # p.close()  # close the pool; no further tasks may be submitted
    # p.join()  # block until every task in the pool has finished


# Coroutine
# import asyncio
# loop=asyncio.get_event_loop()
# res=loop.run_until_complete(dojob())
# loop.close()