# -*- coding: utf-8 -*-
import scrapy

"""
直接使用cookie进行登录
"""
class Login1Spider(scrapy.Spider):
    """Log in to renren.com by attaching a pre-captured cookie jar.

    Instead of submitting the login form, this spider reuses cookies copied
    from an authenticated browser session and dumps the resulting start page
    to ``login1.html`` so the logged-in state can be verified by eye.
    """
    name = 'login1'
    allowed_domains = ['www.renren.com']
    start_urls = ['http://www.renren.com/']

    # Cookies captured from a logged-in browser session.
    # NOTE(review): these are account-specific secrets that expire; they
    # should be supplied via settings or the environment rather than being
    # committed to source control.
    cookies = {
        "anonymid": "jm4xl419ikasba",
        "_r01_": "1",
        "ln_uact": "13179143428",
        "ln_hurl": "http://head.xiaonei.com/photos/0/0/men_main.gif",
        "depovince": "BJ",
        "jebe_key": "924c7db1-9dce-4059-9362-a2ac816f2e25%7Cd034b05c7af3c5f5e3f972f08c475b85%7C1537106699402%7C1%7C1537772732392",
        "_de": "A134E0C6215C8CE132540B3EF7654E2A",
        "jebecookies": "df0e1ed5-e048-4f25-8e1a-0db361050c85|||||",
        "JSESSIONID": "abcABrzke5qVL44ozFmyw",
        "ick_login": "67c3084a-b27a-479a-9c5e-7034f8e17634",
        "p": "43cc9508044802a2f98e3f40fb7e005f6",
        "first_login_flag": "1",
        "t": "9373bc492e40e2d9f95020a28837d5776",
        "societyguester": "9373bc492e40e2d9f95020a28837d5776",
        "id": "968050096",
        "xnsid": "f3794fe8",
        "loginfrom": "syshome"
    }

    def start_requests(self):
        """Yield the start requests with the saved cookies attached."""
        for url in self.start_urls:
            # A plain GET Request suffices here: no form data is posted, so
            # the original FormRequest added nothing over a regular Request.
            yield scrapy.Request(url, cookies=self.cookies, callback=self.parse_page)

    def parse_page(self, response):
        """Write the fetched page to ``login1.html`` for manual inspection."""
        # response.text decodes using the encoding declared by the response
        # instead of unconditionally assuming UTF-8.
        with open("login1.html", "w", encoding='utf-8') as f:
            f.write(response.text)
