#!/usr/bin/env python
# -*- coding:utf-8 -*-
# author:sirian
# datetime:2018/11/6 13:20
# software: PyCharm

'''
Crawler that logs in to a page with a username and password,
automatically captures the session cookie,
and then accesses login-protected pages that have no token verification.

'''
import urllib
import urllib2
import cookielib

# Renren's login form endpoint (this form does no token validation).
login_url = "http://www.renren.com/PLogin.do"

# Route all traffic through an HTTP proxy.
proxy_handler = urllib2.ProxyHandler({"http" : "60.2.37.198:49565"})

# CookieJar collects cookies the server sets during login;
# HTTPCookieProcessor replays them on subsequent requests.
cookie_jar = cookielib.CookieJar()
cookie_processor = urllib2.HTTPCookieProcessor(cookie_jar)

# Custom opener carrying both the cookie jar and the proxy.
opener = urllib2.build_opener(cookie_processor, proxy_handler)

# Browser-like headers applied to every request made by this opener.
opener.addheaders = [
    ("Connection", "keep-alive"),
    ("User-Agent", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36"),
    ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"),
    ("Accept-Language", "zh-CN,zh;q=0.9"),
]

# NOTE(review): account credentials are hard-coded in source — they should be
# moved to a config file or environment variable, not committed to the repo.
form_body = urllib.urlencode({"email":"15010093097", "password":"51969863ly"})

# POST the credentials; the jar captures the session cookie from the response.
login_response = opener.open(urllib2.Request(login_url, data=form_body))

# With the session cookie now stored, fetch a page that requires login.
profile_response = opener.open("http://www.renren.com/410043129/profile")
print(profile_response.read())