"""

@Author : Lee Yucheng
@Contact : 2925168463@qq.com
@Project : GlidSky_Rank_JS
@File : RankOfJs.py
@Software : Visual Studio Code
"""

# NOTE (2020-02-14): author's marker that this script was failing as of this date.

import requests
import re
import time
import math
import hashlib
import json
from config import proxy
from bs4 import BeautifulSoup

def get_time():
    """Return the current Unix time rounded to the nearest whole second.

    Bug fix: the original body evaluated ``round(time.time())`` but never
    returned it, so every call yielded ``None`` — which was then
    interpolated into the ``Hm_lpvt`` cookie value as the literal string
    ``"None"``.
    """
    return round(time.time())

# --- Log in to glidedsky ---------------------------------------------------
data = {}
url_login = "http://glidedsky.com/login"
# Fetch the login page first to obtain the CSRF token and session cookies.
# Fix: added timeout=5 for consistency with every other request in this
# file — the original call could hang indefinitely on a stalled connection.
r = requests.get(url_login, timeout=5)

# The CSRF token is embedded in a <meta> tag on the login page.
data['_token'] = re.findall(r'<meta name="csrf-token" content="(.*?)">', r.text)[0]
# Username and password redacted.
data['email'] = '----------'
data['password'] = '--------'

cookies = r.cookies.get_dict()
# Rebuild the Cookie header by hand, splicing the freshly issued session
# cookies into a template captured from a browser session.
# NOTE(review): the hard-coded Hm_* / _ga analytics values are stale
# copies from that browser session — the server most likely ignores them.
HEADER = {'Cookie':'Hm_lvt_020fbaad6104bcddd1db12d6b78812f6=1565850527;_ga=GA1.2.438446912.1565850527; _gid=GA1.2.1529928071.1565850527; _gat_gtag_UA_75859356_3=1; Hm_lpvt_020fbaad6104bcddd1db12d6b78812f6=%s; footprints=%s; XSRF-TOKEN=%s; glidedsky_session=%s'%(get_time(), cookies['footprints'], cookies['XSRF-TOKEN'], cookies['glidedsky_session'])}

# Use a session so the login cookies persist for the crawl requests below.
s = requests.session()

res = s.post(url_login, headers=HEADER, data=data, timeout=5)

# The post-login page contains "爬虫" (crawler) only when authenticated.
if "爬虫" in res.text:
    print("登陆成功")
else:
    print("登陆失败")

# --- Crawl the API and sum the numbers -------------------------------------
# The API requires a signature: SHA-1 of a fixed secret prefix concatenated
# with the current Unix timestamp (mirroring the site's obfuscated JS).
sum_num = 0
page = 1
while True:
    if page == 2:
        # Only page 1 is fetched; raise this bound to crawl more pages.
        break
    t = math.floor(time.time())
    sha1 = hashlib.sha1()
    sha1.update(("Xr0Z-javascript-obfuscation-1%s" % t).encode("utf-8"))
    sign = sha1.hexdigest()
    data_url = "http://glidedsky.com/api/level/web/crawler-javascript-obfuscation-1/items?page={0}&t={1}&sign={2}".format(page, t, sign)
    print(data_url)
    response = s.get(data_url, headers=HEADER, timeout=5)
    # Fixes: use Response.json() instead of json.loads(response.text),
    # drop the dead `num_list = ""` pre-assignment (it was immediately
    # overwritten), and correct the `respones` typo in the local name.
    num_list = response.json()
    print(num_list)
    sum_num += sum(num_list["items"])
    print(page)
    page += 1

print(sum_num)
