import time
from queue import Queue
from queue import Empty
from threading import Lock
from threading import Thread

import requests

import getua
# Running count of successfully fetched pages. NOTE: shared and mutated by
# every worker thread (see MyThread.run / spider), so access is racy unless
# guarded by a lock.
cnt = 0

class MyThread(Thread):
    """Worker thread that drains the module-level ``url_queue``.

    Each worker repeatedly takes a Hupu news API URL from the shared queue,
    fetches it with a rotating User-Agent (``getua.get_ua()``), prints the
    topics of the page, and bumps the shared page counter ``cnt``.
    """

    # Shared lock guarding the module-level ``cnt`` counter: ``cnt += 1`` is
    # not atomic, and five workers mutate it concurrently.
    _cnt_lock = Lock()

    def __init__(self):
        Thread.__init__(self)

    def run(self):
        global cnt
        while True:
            # get_nowait()/Empty instead of `while not empty(): get()`:
            # with several workers, empty() can report False and another
            # thread can drain the queue before our get(), blocking forever.
            try:
                url = url_queue.get_nowait()
            except Empty:
                break

            header = {
                "User-Agent" : getua.get_ua()
            }

            resp = requests.get(url , headers = header)

            # Check the status BEFORE parsing: non-200 error pages are often
            # not JSON and resp.json() would raise, killing the worker.
            if resp.status_code != 200 :
                continue

            for d in resp.json().get('data') :
                print(f"tid: {d.get('tid')},topicUrl: {d.get('topicUrl')},\ncontent: {d.get('content')}")

            # Increment and report under the lock so pages are numbered
            # without duplicates or lost updates.
            with MyThread._cnt_lock:
                cnt += 1
                print("*" * 50 , f"成功获取第{cnt}页数据" , "*" * 50)
            time.sleep(0.2)

def spider():
    """Sequentially drain ``url_queue``: fetch each page and print its topics.

    Single-threaded counterpart of ``MyThread.run`` (usable as a plain
    ``Thread`` target). Reads the module globals ``url_queue`` and ``cnt``.
    """
    global cnt
    while True:
        # get_nowait()/Empty avoids the empty()-then-get() race: when the
        # function is run from several threads, empty() may be stale by the
        # time get() executes and get() would block forever.
        try:
            url = url_queue.get_nowait()
        except Empty:
            break

        header = {
            "User-Agent" : getua.get_ua()
        }

        resp = requests.get(url, headers=header)

        # Validate the response BEFORE .json(): non-200 error pages are
        # often HTML and would make resp.json() raise.
        if resp.status_code != 200:
            continue

        for d in resp.json().get('data'):
            print(f"tid: {d.get('tid')},topicUrl: {d.get('topicUrl')},\ncontent: {d.get('content')}")
        cnt += 1
        print("*"*50,f"成功获取第{cnt}页数据","*"*50)
        time.sleep(0.2)

if __name__ == '__main__':
    # Build the shared work queue: pages 1..9 of the Hupu news feed,
    # 50 items per page.
    url_queue = Queue()
    for i in range(1, 10):
        url = f"https://www.hupu.com/home/v1/news?pageNo={i}&pageSize=50"
        url_queue.put(url)

    # Fetching is I/O-bound, so five threads overlap the network waits.
    workers = [MyThread() for _ in range(5)]
    for t in workers:
        t.start()
    # Join explicitly so the script only finishes once every page has been
    # processed (previously the threads were started and never joined).
    for t in workers:
        t.join()

