#coding:utf-8


import requests
import json

from jsonpath import jsonpath


class LagouDepthSpider(object):
    """Depth-first spider for lagou.com job postings.

    Workflow: POST a keyword/city search to the positionAjax JSON
    endpoint, pull every ``positionId`` out of the response, then GET
    each position's detail page and save it as ``<positionId>.html``.
    """

    def __init__(self):
        # Python 2 ships raw_input; Python 3 renamed it to input.
        # Bind whichever exists so prompts work on both interpreters
        # (py2 behavior is unchanged: raw_input is still preferred).
        try:
            prompt = raw_input  # noqa: F821 -- Python 2 only
        except NameError:
            prompt = input

        # Headers for the JSON search POST. Cookie / Referer /
        # User-Agent are checked by lagou.com's anti-scraping layer.
        self.headers = {
                # NOTE: fixed misspelled key "Acceept" -> "Accept"
                "Accept": "application/json, text/javascript, */*; q=0.01",
                "Accept-Encoding": "gzip, deflate, br",
                "Accept-Language": "zh-CN,zh;q=0.9",
                "Connection": "keep-alive",
                "Content-Length": "26",
                "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
                "Cookie": "JSESSIONID=ABAAABAAADEAAFI0DD07E26EB9FB6F8E52B9CDBC703B9DE; _ga=GA1.2.754895021.1542433960; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1542433960; _gid=GA1.2.1536395690.1542433960; user_trace_token=20181117135239-fde4320a-ea2c-11e8-892b-5254005c3644; LGUID=20181117135239-fde435ce-ea2c-11e8-892b-5254005c3644; index_location_city=%E5%85%A8%E5%9B%BD; X_HTTP_TOKEN=784caa92b730af22c71c7e97b90c6558; sajssdk_2015_cross_new_user=1; sensorsdata2015jssdkcross=%7B%22distinct_id%22%3A%22167203edb74166-00ff6f92a0c4db-4313362-2073600-167203edb75bf9%22%2C%22%24device_id%22%3A%22167203edb74166-00ff6f92a0c4db-4313362-2073600-167203edb75bf9%22%2C%22props%22%3A%7B%22%24latest_traffic_source_type%22%3A%22%E7%9B%B4%E6%8E%A5%E6%B5%81%E9%87%8F%22%2C%22%24latest_referrer%22%3A%22%22%2C%22%24latest_referrer_host%22%3A%22%22%2C%22%24latest_search_keyword%22%3A%22%E6%9C%AA%E5%8F%96%E5%88%B0%E5%80%BC_%E7%9B%B4%E6%8E%A5%E6%89%93%E5%BC%80%22%7D%7D; TG-TRACK-CODE=search_code; SEARCH_ID=f98b8ef78b5b47cf966c7f2e098b8d1a; LGRID=20181117150411-fc29f4a8-ea36-11e8-892c-5254005c3644; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1542438252",
                "Host": "www.lagou.com",
                "Origin": "https://www.lagou.com",
                # Referer is also an anti-scraping check on lagou.com:
                # it must look like we navigated from the search page.
                "Referer": "https://www.lagou.com/jobs/list_python?",
                # User-Agent is the usual anti-scraping check.
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36",
                "X-Anit-Forge-Code": "0",
                "X-Anit-Forge-Token": "None",
                "X-Requested-With": "XMLHttpRequest"
                }

        # Headers for GETting a detail page (regular HTML navigation).
        self.headers_detail = {
                "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
                "Accept-Encoding": "gzip, deflate, br",
                "Accept-Language": "zh-CN,zh;q=0.9",
                "Cache-Control": "max-age=0",
                "Connection": "keep-alive",
                "Cookie": "_ga=GA1.2.754895021.1542433960; _gid=GA1.2.1536395690.1542433960; user_trace_token=20181117135239-fde4320a-ea2c-11e8-892b-5254005c3644; LGUID=20181117135239-fde435ce-ea2c-11e8-892b-5254005c3644; sensorsdata2015jssdkcross=%7B%22distinct_id%22%3A%22167203edb74166-00ff6f92a0c4db-4313362-2073600-167203edb75bf9%22%2C%22%24device_id%22%3A%22167203edb74166-00ff6f92a0c4db-4313362-2073600-167203edb75bf9%22%2C%22props%22%3A%7B%22%24latest_traffic_source_type%22%3A%22%E7%9B%B4%E6%8E%A5%E6%B5%81%E9%87%8F%22%2C%22%24latest_referrer%22%3A%22%22%2C%22%24latest_referrer_host%22%3A%22%22%2C%22%24latest_search_keyword%22%3A%22%E6%9C%AA%E5%8F%96%E5%88%B0%E5%80%BC_%E7%9B%B4%E6%8E%A5%E6%89%93%E5%BC%80%22%7D%7D; JSESSIONID=ABAAABAAAGFABEF87D62F9362CF97806C7A7C8A0E93003A; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1542433960,1542501516; index_location_city=%E6%B7%B1%E5%9C%B3; SEARCH_ID=ae6d083fe08742e9ab51340d0a785d3d; TG-TRACK-CODE=search_code; _gat=1; LGSID=20181118091838-e086c2a2-eacf-11e8-a52a-525400f775ce; PRE_UTM=; PRE_HOST=; PRE_SITE=https%3A%2F%2Fwww.lagou.com%2Fjobs%2Flist_python%3FlabelWords%3D%26fromSearch%3Dtrue%26suginput%3D; PRE_LAND=https%3A%2F%2Fwww.lagou.com%2Fjobs%2F5309421.html; LGRID=20181118091838-e086c42b-eacf-11e8-a52a-525400f775ce; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1542503920",
                "Host": "www.lagou.com",
                "Referer": "https://www.lagou.com/jobs/list_python?labelWords=&fromSearch=true&suginput=",
                "Upgrade-Insecure-Requests": "1",
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36"
                }

        self.base_url = "https://www.lagou.com/jobs/"
        self.post_url = self.base_url + "positionAjax.json?"

        # Query-string parameters for the search POST.
        self.params = {
                "needAddtionalResult": "false",
                "city": prompt("请输入需要抓取的城市: ")
                }
        self.page = 1
        # Form body for the search POST; "pn" is the page number,
        # "kd" the job keyword.
        self.form_data = {
                "first": "false",
                "pn": self.page,
                "kd": prompt("请输出需要爬取的职位: ")
                }
        self.item_list = []

    def send_request(self, url, params=None, form_data=None):
        """Send a request and return the requests.Response.

        POSTs the search endpoint (with ``self.headers``) when both
        *params* and *form_data* are supplied; otherwise GETs a detail
        page with ``self.headers_detail``.  Defaults are None rather
        than mutable {} literals.
        """
        print("[INFO]正在发送请求 {}".format(url))
        if params and form_data:
            return requests.post(url, headers=self.headers,
                                 params=params, data=form_data)
        return requests.get(url, headers=self.headers_detail)

    def parse_response(self, response):
        """Parse the search JSON and return the list of position ids.

        ``jsonpath`` returns False (not a list) when nothing matches,
        so normalize that to [] to keep the caller's loop safe.
        """
        json_str = response.json()
        position_id_list = jsonpath(json_str, "$..positionId")
        return position_id_list or []

    def parse_detail(self, response):
        """Return the raw body of a detail page.

        Real field extraction (company info / job description) is left
        as a TODO; callers currently just persist the raw bytes.
        """
        return response.content

    def save_data(self, html, filename):
        """Write raw page content to ``<filename>.html``.

        Opened in binary mode: ``response.content`` is bytes on
        Python 3 (and a plain str on Python 2, which "wb" also takes).
        """
        with open(str(filename) + ".html", "wb") as f:
            f.write(html)

    def main(self):
        """Run the crawl: search once, then fetch every detail page."""
        response = self.send_request(self.post_url, self.params, self.form_data)
        position_id_list = self.parse_response(response)
        for position_id in position_id_list:
            detail_url = self.base_url + str(position_id) + ".html"
            response = self.send_request(detail_url)
            html = self.parse_detail(response)
            # TODO: extract company/job fields from the page (xpath or
            # BeautifulSoup); for now the raw page is saved as-is.
            self.save_data(html, position_id)
            


if __name__ == "__main__":
    # Script entry point: build the spider (prompts for city/keyword)
    # and start crawling.
    spider = LagouDepthSpider()
    spider.main()


