from fake_useragent import UserAgent

from crawler_core.ProxyPool.getproxy import get_proxy


class Property51job:
    """Static request configuration for crawling resumes on ehire.51job.com.

    Holds the endpoint URLs, HTTP header templates, request-parameter
    placeholders, and proxy settings used by the 51job crawler. All
    attributes are class-level constants; consumers read them directly
    (e.g. ``Property51job.search_url``).
    """

    # Endpoint that serves the resume search page (also used as Referer).
    search_url = "https://ehire.51job.com/Candidate/SearchResumeNew.aspx"

    # Resume-detail endpoint — intentionally left empty in this version;
    # presumably filled in elsewhere or at runtime (TODO: confirm).
    detail_url = ""

    # Shared fake_useragent instance: constructing UserAgent() loads its
    # browser database, so build it once instead of once per header dict.
    # NOTE: ``.random`` below is evaluated once at class-definition time,
    # so each header keeps a single fixed User-Agent string for the whole
    # process lifetime (same behavior as the original code).
    _ua = UserAgent()

    # Headers for POSTing the search form (form-encoded, keep-alive).
    search_header = {
        'Host': 'ehire.51job.com',
        'Connection': 'keep-alive',
        'Cache-Control': 'max-age=0',
        'Origin': 'https://ehire.51job.com',
        'Upgrade-Insecure-Requests': '1',
        'Content-Type': 'application/x-www-form-urlencoded',
        'User-Agent': _ua.random,
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Referer': 'https://ehire.51job.com/Candidate/SearchResumeNew.aspx',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9',
    }

    # Headers for fetching a resume-detail page (connection closed per request).
    detail_header = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip,deflate, sdch, br',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Cache-Control': 'max-age=0',
        'Connection': 'close',
        'Host': 'ehire.51job.com',
        'Referer': 'https://ehire.51job.com/Candidate/SearchResumeNew.aspx',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': _ua.random,
    }

    # Placeholders for the search/detail request parameters; populated by
    # the caller before each request.
    search_param = {}

    detail_param = {}

    # Proxy support is currently disabled. To re-enable, fetch one from the
    # pool (see the get_proxy import at the top of the file), e.g.:
    #   proxy = get_proxy().split()[0]
    #   proxies = {'http': 'http://' + proxy, 'https': 'https://' + proxy}
    proxies = {}
