from scrapy.cmdline import execute

def get_cookies(login_wait=60, cookie_file="lagou_cookies.txt"):
    """Open the Lagou job-list page in Chrome, pause for a manual login,
    then dump the session cookies to *cookie_file* as JSON.

    Parameters:
        login_wait: seconds to sleep so the user can log in by hand
                    in the opened browser window (default 60).
        cookie_file: path of the JSON file the cookies are written to
                     (default "lagou_cookies.txt").
    """
    import json
    from time import sleep
    from selenium import webdriver

    driver = webdriver.Chrome()
    try:
        driver.get('https://www.lagou.com/jobs/list_%s?labelWords=&fromSearch=true&suginput=' % "数据分析")
        # Manual step: the user logs in through the opened browser window.
        sleep(login_wait)
        cookies = driver.get_cookies()
    finally:
        # quit() (not close()) terminates the whole WebDriver session, so
        # the browser/driver processes are not leaked even if get()/sleep
        # raises. close() would only close the current window.
        driver.quit()

    # The with-block closes (and flushes) the file; no explicit flush needed.
    with open(cookie_file, "w", encoding="utf-8") as file:
        json.dump(cookies, file)

if __name__ == '__main__':
    # If cookies have not been saved yet, log in first and store them:
    # get_cookies()
    execute(argv="scrapy crawl lagouspider".split())
