# -*- coding: utf-8 -*-
import scrapy
from utils.cookies_get import make_cookie
import os


class ZhihuSpider(scrapy.Spider):
    """Fetch the Zhihu homepage with pre-built cookies and save the raw HTML.

    The cookies come from ``make_cookie`` (project helper); the page is
    written to ``save_file/zhihu_index.html`` next to this module for
    offline inspection.
    """
    name = 'zhihu'
    allowed_domains = ['www.zhihu.com']

    def start_requests(self):
        """Yield the initial request.

        Sends a browser-like User-Agent plus the cookies from
        ``make_cookie`` — presumably Zhihu rejects anonymous/"bot" hits
        without them (TODO confirm against the helper).
        """
        cookie = make_cookie()
        headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.119 Safari/537.36'
        }
        yield scrapy.Request(url="https://www.zhihu.com", headers=headers, cookies=cookie, callback=self.parse)

    def parse(self, response):
        """Persist the response HTML to ``save_file/zhihu_index.html``.

        Creates the output directory on first run instead of crashing
        with FileNotFoundError.
        """
        # Public API instead of the private ``response._body``:
        # ``response.text`` decodes using the response's declared encoding.
        html = response.text
        out_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'save_file')
        os.makedirs(out_dir, exist_ok=True)  # may not exist on a fresh checkout
        # Explicit UTF-8 so the saved file does not depend on the platform
        # default codec (Zhihu content is CJK-heavy).
        with open(os.path.join(out_dir, 'zhihu_index.html'), 'w', encoding='utf-8') as f:
            f.write(html)
