from urllib import parse
import re
import json
import mouse
import time

import scrapy
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys

class CnblogsSpider(scrapy.Spider):
    """Zhihu spider that bootstraps its session cookies via a real Chrome browser.

    Selenium drives Chrome to zhihu.com, the resulting cookies are persisted
    to disk (for reuse by later runs) and injected into the first Scrapy
    request so the crawl starts with an authenticated-looking session.
    """

    # NOTE(review): class is named CnblogsSpider but crawls zhihu — presumably
    # copied from a cnblogs project; consider renaming (kept for compatibility).
    name = 'zhihu'
    allowed_domains = ['www.zhihu.com']
    start_urls = ['https://www.zhihu.com/']

    def parse(self, response):
        """Placeholder callback — response handling is not implemented yet."""
        pass

    def start_requests(self):
        """Launch Chrome, harvest zhihu.com cookies, persist them, and yield
        the first request carrying those cookies.

        Yields:
            scrapy.Request: the seed request with the harvested cookie dict.
        """
        import pickle

        # NOTE(review): hard-coded Windows paths for the driver and the cookie
        # file — consider moving these into Scrapy settings.
        browser = webdriver.Chrome(
            executable_path='C:/Program Files (x86)/Google/Chrome/Application/chromedriver.exe')
        try:
            browser.get('https://www.zhihu.com/')
            cookies = browser.get_cookies()
        finally:
            # Fix: the original never closed the browser, leaking a Chrome /
            # ChromeDriver process on every run.
            browser.quit()

        # Persist the raw cookie list so a later run can reload the session.
        with open('D:/CnblogsSpider/CnblogsSpider/cookies/zhihu.cookie', 'wb') as fh:
            pickle.dump(cookies, fh)

        # Scrapy expects cookies as a flat {name: value} mapping.
        cookie_dict = {cookie['name']: cookie['value'] for cookie in cookies}
        yield scrapy.Request(url=self.start_urls[0], dont_filter=True,
                             cookies=cookie_dict)


