# -*- coding: utf-8 -*-
import math
import os
import queue
import time

from bs4 import BeautifulSoup

import HTML
from get_task import get_start_url


# Main driver: continuously pull crawl tasks from the task store and
# breadth-first crawl each one, persisting every fetched page via HTML.save().
while True:
    _db_data = get_start_url()
    if _db_data is None:
        # No pending task: wait briefly instead of busy-spinning the CPU.
        time.sleep(1)
        continue

    print(_db_data)
    # Task record format is assumed to be "batch_id&start_url" — confirm
    # against get_start_url()'s producer.
    start_url_db = str(_db_data).split("&")
    batch_id = start_url_db[0]
    start_url = start_url_db[1]

    pool = set()        # every URL already seen (queued or crawled) — dedup set
    q = queue.Queue()   # BFS frontier of (url, depth) pairs
    pool.add(start_url)
    q.put((start_url, 1))

    # Crawl configuration (formerly interactive prompts):
    # stay on the start site; no limit on page count or BFS depth.
    flag_site = True
    flag_most = math.inf
    flag_depth = math.inf

    now = 0  # pages crawled so far for this task
    while not q.empty():
        link, depth = q.get()
        print('crawling:', link)
        html = HTML.save(link, start_url, batch_id)
        if html is None:
            # Fetch/save failed for this page; move on to the next one.
            continue
        soup = BeautifulSoup(html, 'html.parser', from_encoding='gb18030')
        for a in soup.find_all('a'):
            # Skip anchors without an href instead of relying on KeyError
            # as per-item control flow.
            url2 = a.get('href')
            if url2 is None:
                continue
            try:
                fl = HTML.full_link(link, url2, flag_site)
                if fl is None:
                    continue
                if fl not in pool and depth + 1 <= flag_depth:
                    pool.add(fl)
                    q.put((fl, depth + 1))
                    print('in queue:', fl)
            except Exception as e:
                # Best-effort: log and keep crawling the remaining links.
                print(e)
        now += 1
        print("当前已经采集数量" + str(now))
        if now >= flag_most:
            break
