'''
爱站平台
根据网站获取网站seo基础信息
Q群：170555357 
'''
import json
import re
import time
from multiprocessing import Pool

import redis
import requests
from bs4 import BeautifulSoup

from pskpackage.db import *
from pskpackage.ippool import *


# Shared Redis connection pool (db 1) used by both the dispatcher loop and
# the worker function; each worker builds its own client from this pool.
pool = redis.ConnectionPool(host='127.0.0.1', port=6379,  db=1)

def seo(id, url, redis_val):
    """Fetch basic SEO metrics for *url* from aizhan.com and store them in the DB.

    Parameters:
        id:        row id passed through to update_wapinfo_detail().
        url:       domain to look up.
        redis_val: the raw task string; pushed back onto the 'info' queue on failure.

    Returns False when a mandatory step fails (task re-queued); None on success.
    """
    r = redis.Redis(connection_pool=pool)

    def requeue():
        # Return the task to the queue so the dispatcher can retry it later.
        r.lpush('info', redis_val)

    # Fetch the query page to harvest the anti-scraping tokens (rn / cc)
    # embedded in its inline JavaScript; every REST call below requires them.
    try:
        page = requests.get("https://www.aizhan.com/cha/" + str(url), timeout=10)
    except requests.RequestException:
        requeue()
        return False

    soup = BeautifulSoup(page.text, "lxml")
    rn = re.findall('rn = (.+),cc', soup.text)
    cc = re.findall('cc = "(.+)",token', soup.text)
    if not rn or not cc:
        # Tokens missing (layout change or block page) -> cannot proceed.
        # The original indexed rn[0]/cc[0] unguarded and crashed here.
        requeue()
        return False

    token_qs = "&rn=" + rn[0] + "&cc=" + cc[0]

    def fetch(endpoint, extra="", unescape=False):
        """GET one rest.aizhan.com endpoint; return parsed JSON, or None on any failure."""
        api = ("http://rest.aizhan.com/" + endpoint
               + "?domain=" + str(url) + extra + token_qs)
        try:
            resp = requests.get(api, timeout=10)
        except requests.RequestException:
            # Original left these calls outside any try; a network error
            # killed the worker silently and the task was lost.
            return None
        if resp.status_code != 200:
            return None
        text = resp.text
        if unescape:
            # Some endpoints double-escape non-ASCII characters.
            text = text.encode('utf-8').decode('unicode_escape')
        return json.loads(text)

    # (result key, endpoint, extra query args, unicode-unescape?, mandatory?)
    # A mandatory endpoint aborts (and re-queues) the whole task on failure;
    # an optional one is simply skipped — matching the original flow.
    endpoints = [
        ('rank',        'baidurank/infos',     '',                        False, True),   # Baidu PC/mobile weight
        ('ipc',         'icp',                 '',                        True,  False),  # ICP record
        ('whois',       'whois',               '',                        True,  False),  # domain registration info
        ('speed',       'speed',               '',                        True,  False),  # page load speed
        ('dns',         'dns',                 '',                        True,  False),
        ('shoulu',      'shoulu/index',        '',                        True,  False),  # indexed-page counts
        ('fan_link',    'shoulu/link',         '',                        False, False),  # backlinks
        ('time_shoulu', 'shoulu/index-change', '',                        False, False),  # 24h / week / month deltas
        ('webpage',     'webpage',             '',                        False, True),   # basic page info
        ('words',       'baidurank/words',     '',                        True,  True),   # keyword rankings
        ('pc',          'baidurank/trend',     '&platform=pc&day=90',     False, True),   # 90-day PC trend
        ('mobile',      'baidurank/trend',     '&platform=mobile&day=90', False, True),   # 90-day mobile trend
    ]

    seo_info = {}
    for key, endpoint, extra, unescape, mandatory in endpoints:
        payload = fetch(endpoint, extra, unescape)
        if payload is None:
            if mandatory:
                # Bug fix: the original's 'mobile' branch re-queued the task
                # but fell through WITHOUT `return False`, so a half-filled
                # record was written to the DB and the task was retried too.
                requeue()
                return False
            continue
        if key == 'webpage':
            # Keep only the fields the stored record actually uses.
            # (Also drops the `encoding=` kwarg the original passed to
            # json.loads — it was removed in Python 3.9.)
            payload = {'html': payload['html'], 'keywords': payload['keywords']}
        seo_info[key] = payload

    update_wapinfo_detail(id, json.dumps([seo_info], ensure_ascii=False))
    print("更新成功")


if __name__=="__main__":
   
    r = redis.Redis(connection_pool=pool)
    p = Pool(2)  # 4核cpu
    while True:
        try:
            redis_val = r.rpop("info")
            if redis_val:
                redis_val = redis_val.decode()
                task = redis_val.split('^')
                # 如果搜索任务没有搜索标识，直接返回0
                p.apply_async(seo, args=(int(task[0]),task[1],redis_val))
        except:
            pass
    p.close()  # 关闭进程池,不在接收新的任务
    p.join()  # 等待子进程全部运行完成，执行后续操作
