import os

import scrapy


class Github2Spider(scrapy.Spider):
    """Log in to GitHub by auto-submitting the login form, then save the
    post-login page to disk for inspection."""

    name = 'github2'
    allowed_domains = ['github.com']
    start_urls = ['https://github.com/login']

    def parse(self, response):
        """
        Automatically submit the GitHub login form.

        ``scrapy.FormRequest.from_response`` locates the <form> element in
        the response, merges ``formdata`` into the form's existing fields
        (including hidden fields such as the CSRF token), and POSTs the
        result to the form's ``action`` URL.

        :param response: response for the login page (``start_urls[0]``)
        :return: yields a FormRequest whose result is handled by
            :meth:`parse_login`
        """
        # SECURITY: credentials are hard-coded in source. Move them to
        # Scrapy settings or environment variables before sharing this code.
        yield scrapy.FormRequest.from_response(
            # 1. The response object; the form is parsed out of it.
            response,
            # 2. Locate the login form explicitly via XPath rather than
            #    relying on the first <form> in the page.
            formxpath="//div[@id='login']/form",
            # 3. Field values to merge into the form before submission.
            formdata={"login": "ZuoAndroid", "password": "lyp82nlf@.."},
            callback=self.parse_login,
        )

    def parse_login(self, response):
        """Persist the post-login HTML so the result can be checked offline.

        :param response: response received after the form submission
        """
        # Ensure the output directory exists; the original code assumed it
        # did and would raise FileNotFoundError on a fresh checkout.
        os.makedirs("file_html", exist_ok=True)
        with open("file_html/github2.html", "w", encoding="utf-8") as f:
            # response.text decodes with the encoding declared by the
            # response, instead of response.body.decode() assuming UTF-8.
            f.write(response.text)