import json
import time

import requests
from bs4 import BeautifulSoup

from www import headers
from www import get_info
from www.Insql import insert_db, table_info


def get_page(keyword, pageNumber, city_code):
    """Scrape 51job search-result pages and persist each job's details.

    For each page 1..pageNumber, fetches the search-result HTML, extracts the
    JSON payload embedded in ``window.__SEARCH_RESULT__``, then for every job
    entry fetches its detail page via ``get_info`` and stores the result with
    ``insert_db``.

    Args:
        keyword: Search keyword, already URL-encoded as 51job expects.
        pageNumber: Number of result pages to crawl (int or numeric string).
        city_code: 51job city code segment of the URL path.

    Returns:
        None. Side effects only (DB inserts, console output).
    """
    for page in range(1, int(pageNumber) + 1):
        url = (f'https://search.51job.com/list/{city_code},000000,0000,00,9,99,'
               f'{keyword},2,{page}.html')
        r = requests.get(url, headers=headers)
        # 51job serves GBK-encoded pages; decode explicitly before parsing.
        soup = BeautifulSoup(r.content.decode('gbk'), 'html5lib')
        # Carve the job list out of the inline `window.__SEARCH_RESULT__ = {...}`
        # script. The split markers must match the page source exactly.
        raw = soup.get_text().split(
            'window.__SEARCH_RESULT__ = {"top_ads":[],"auction_ads":[],"market_ads":[],"engine_search_result":'
        )[1].split(',"jobid_count":"')[0]
        # The payload is JSON: parse it with json.loads instead of eval(),
        # which would execute arbitrary code pulled from a remote page.
        jobs = json.loads(raw)
        for job in jobs:
            # Strip stray backslashes left by the escaped-JSON source
            # (e.g. "https:\/\/..."); str() guards a missing key -> 'None'.
            job_href = str(job.get('job_href')).replace('\\', '')
            try:
                info_dict = get_info(job_href)
                if info_dict:
                    insert_db(info_dict)
                    print('success')
            except Exception as e:
                # Best-effort crawl: log the failure and back off briefly
                # before moving on to the next job.
                print(e)
                time.sleep(5)