import scrapy
import time


class GitSpider(scrapy.Spider):
    """Log in to GitHub and verify the session.

    Flow: GET /login -> harvest the hidden anti-CSRF form fields ->
    POST them to /session -> GET the profile page -> read the displayed
    user name to confirm the login succeeded.
    """

    name = 'git'
    allowed_domains = ['github.com']
    start_urls = ['http://github.com/login']

    def parse(self, response):
        """Collect the hidden login-form fields and submit the sign-in POST.

        GitHub's login form carries anti-CSRF / telemetry fields
        (``authenticity_token``, ``timestamp``, ``timestamp_secret`` and one
        text input with a per-request randomized name) that must be echoed
        back for the POST to be accepted.
        """
        authenticity_token = response.xpath(
            "//input[@name='authenticity_token']/@value").get()
        timestamp_secret = response.xpath(
            "//input[@name='timestamp_secret']/@value").get()
        # One text input has a randomized name and must be sent back empty;
        # the last //input[@type='text'] on the page is that field.
        text_input_names = response.xpath("//input[@type='text']/@name").getall()

        # Guard against layout changes: the original code crashed with
        # IndexError / sent None values when any of these were missing.
        if authenticity_token is None or timestamp_secret is None or not text_input_names:
            self.logger.error(
                "login form fields missing; GitHub page layout may have changed")
            return
        required_value = text_input_names[-1]

        # Millisecond timestamp, mirroring what the browser-side JS submits.
        timestamp = int(time.time() * 1000)

        # SECURITY: credentials are hard-coded; move them to spider settings
        # or environment variables before sharing or deploying this code.
        post_data = {
            "commit": "Sign in",
            "authenticity_token": authenticity_token,
            "login": "1354036246@qq.com",
            "password": "xyj379253",
            "trusted_device": "",
            "webauthn-support": "supported",
            "webauthn-iuvpaa-support": "unsupported",
            "return_to": "https://github.com/login",
            "allow_signup": "",
            "client_id": "",
            "integration": "",
            required_value: "",
            "timestamp": str(timestamp),
            "timestamp_secret": timestamp_secret,
        }
        # Debug-level so the plaintext password is not printed to stdout
        # on normal runs (the original used print()).
        self.logger.debug("login payload: %s", post_data)
        yield scrapy.FormRequest(
            url="https://github.com/session",
            formdata=post_data,
            callback=self.login,
        )

    def login(self, response):
        """Callback for the session POST: fetch the profile page to verify."""
        yield scrapy.Request(
            url='https://github.com/xiayujiang',
            callback=self.check_login,
        )

    def check_login(self, response):
        """Confirm login by reading the profile name from the page.

        The original called ``.strip()`` directly on ``extract_first()``,
        which raises AttributeError when the xpath finds nothing (i.e. when
        the login failed); handle that case explicitly instead.
        """
        name = response.xpath("//h1/span[last()]/text()").get()
        if name is None:
            self.logger.warning("profile name not found; login may have failed")
            return
        self.logger.info("logged in as %s", name.strip())