# coding:utf-8
import urllib
import zlib
from urllib import request, parse
from http import cookiejar
from urllib.request import urlopen

from bs4 import BeautifulSoup
# Login URL (Chinese comment translated: "set the login url")
login_url = "http://58.215.207.22:8078/Authentication/Login.aspx?ReturnUrl=%2fRealtime%2fWastewater.aspx"
# Login helper class (Chinese comment translated: "create the login class")
class Login(object):
    """Simulates a form login against the monitoring site and keeps the
    session cookie so follow-up requests stay authenticated."""

    def __init__(self):
        # Credentials are empty until setLoginInfo() is called; login()
        # falls back to the original hard-coded account in that case.
        self.username = ''
        self.password = ''
        # Captcha / verification code
        self.rode = ''
        # Cookie jar + opener so the ASP.NET session id is stored and
        # replayed automatically on subsequent requests.
        self.cj = cookiejar.CookieJar()
        self.opener = request.build_opener(request.HTTPCookieProcessor(self.cj))
        request.install_opener(self.opener)

    def setLoginInfo(self, username, password, rode):
        """Store the user's login credentials.

        Bug fix: the parameter was misspelled ``passwoed`` while the body
        assigned ``password``, which raised NameError (or silently picked
        up an unrelated global) whenever this method was called.
        """
        self.username = username
        self.password = password
        self.rode = rode

    def login(self):
        """POST the login form and print whether the login succeeded.

        Success is detected by the presence of a ``#menu`` element in the
        returned page.
        """
        # ASP.NET postback fields captured from the login form; these are
        # page-state tokens, not credentials.
        login_data = {
            '__VIEWSTATE': '/wEPDwUJNjAyMTA3MzM4ZBgBBR5fX0NvbnRyb2xzUmVxdWlyZVBvc3RCYWNrS2V5X18WAQUKUmVtZW1iZXJNZUSAugu4z9Eo/fE44JvrW1zOsZXHo8IM2MTFR9I2PciI',
            '__VIEWSTATEGENERATOR': '2D4BD54E',
            '__EVENTVALIDATION': '/wEdAAUz8avRbmX2ut9ndYHcgIpPR1LBKX1P1xh290RQyTesRVwK8/1gnn25OldlRNyIedknHXI0rQ+dGiv2a31rP7kF3Kmo+ouB2mmE3Mg9Mibsbts5YMf1/xdOfJe7c2xu/jRLgHgHWraCh882WnCPkAMl',
            # Use the configured credentials; fall back to the original
            # hard-coded account when setLoginInfo() was never called,
            # preserving the old behavior.
            'UserName': self.username or 'zhoubei',
            'Password': self.password or '123456'
        }
        # Encode the form body ONCE so Content-Length can be computed from
        # the actual bytes. (The original called .encode() on the dict
        # itself, which raised AttributeError.)
        body = parse.urlencode(login_data).encode('GB2312')
        headers = {
            # str, not bytes — header values should be text.
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate, lzma',
            'Accept-Language': 'zh-CN,zh;q=0.8',
            'Cache-Control': 'max-age=0',
            'Connection': 'keep-alive',
            'Content-Length': str(len(body)),
            'Content-Type': 'application/x-www-form-urlencoded',
            'Cookie': 'cookieText',
            'Host': '222.217.19.16:3512',
            'Origin': 'http://222.217.19.16:3512',
            'Referer': 'http://222.217.19.16:3512/Site/LzsfySite/Default.aspx',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36 OPR/38.0.2220.41}'
        }
        req = request.Request(login_url, data=body, headers=headers)
        # Single request through the cookie-aware opener. (The original
        # issued the POST twice: once via urlopen and again via
        # self.opener.open, logging in twice.)
        response = self.opener.open(req)
        self.operate = response  # kept for backward compatibility
        raw = response.read()
        # Only gunzip when the server actually compressed the reply;
        # the original decompressed unconditionally and crashed on
        # identity-encoded responses.
        if response.headers.get('Content-Encoding') == 'gzip':
            raw = zlib.decompress(raw, 16 + zlib.MAX_WBITS)
        page = raw.decode('GB2312', errors='replace')
        # Parse the page TEXT. (The original passed the already-exhausted
        # response object to BeautifulSoup, which therefore saw nothing.)
        soup = BeautifulSoup(page, 'lxml')
        data = soup.select('#menu')
        if data:
            print(f'模拟登录成功!{data}')
        else:
            # The original fell through and printed the failure message
            # even after a successful login; 'else' fixes that.
            print('模拟登录失败!')

   # def skip(self, skip_url):
   #     """
   #     模拟登录成功后,跳转网页
   #     ----------------------------
   #     传递跳转网页的url
   #    """
   #     headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2859.0 Safari/537.36'}
   #     req = urllib2.Request(skip_url, headers=headers)
   #     response = urllib2.urlopen(req)
   #     soup = BeautifulSoup(response, 'lxml')
   #     # 获取跳转后网页的title
   #     print soup.title.string

if __name__ == "__main__":
    userlogin = Login()
    username = '***********'
    password = '***********'
    rode = '***********'
    # 执行模拟登录方法
    userlogin.login()
    # 执行模拟登录成功后网页跳转方法
    # userlogin.skip(skip_url = '***************')