# -*- coding: utf-8 -*-
# @Time    : 2019/2/26 12:32
# @Author  : Macher
# @File    : badips.py
# @Software: PyCharm Community Edition
# Collects IP blacklist information published by badips.com

import requests
import re
from bs4 import BeautifulSoup
import mysql.connector
import time
import threading
# MySQL connection settings for the target database.
# NOTE(review): credentials are hardcoded in source — fine for a throwaway
# script, but should be moved to a config file or environment variables.
config = {'host': '219.216.65.41',
          'user': 'root',
          'password': '123456',
          'port': 3306,
          'database': 'mydata',
          'charset': 'utf8'}

# Parameterized INSERT for one scraped record; placeholders match the keys of
# the `info` dict built in detailJudge (ip, category, score, country,
# reporttime, reportorigin).
sql_insert = "insert into badips(ip,category,score,country,reporttime,reportorigin)values( %(ip)s,%(category)s,%(score)s,%(country)s,%(reporttime)s,%(reportorigin)s)"

def sql():
    """Return the distinct set of IPs already stored in the badips table.

    Returns:
        list: unique ip strings (order unspecified, as with the original
        list(set(...)) round-trip).
    """
    conn = mysql.connector.connect(**config)
    try:
        cursor = conn.cursor()
        try:
            cursor.execute("SELECT ip FROM badips ")
            # each row is a 1-tuple; "".join flattens it to its string value
            unique_ips = {"".join(row) for row in cursor.fetchall()}
        finally:
            cursor.close()
    finally:
        # no commit needed for a read-only SELECT; just make sure the
        # connection is released even if the query raises
        conn.close()
    return list(unique_ips)

def insertMysql(record):
    """Insert one scraped record into the badips table.

    Args:
        record (dict): keys must match the named placeholders of
            ``sql_insert`` (ip, category, score, country, reporttime,
            reportorigin).
    """
    # NOTE: parameter renamed from `list` (shadowed the builtin); the only
    # caller passes it positionally, so the interface is unchanged.
    conn = mysql.connector.connect(**config)
    try:
        cursor = conn.cursor()
        try:
            cursor.execute(sql_insert, record)
            conn.commit()
        finally:
            cursor.close()
    finally:
        # ensure the connection is released even when execute/commit raises
        conn.close()

def getpage(url):
    print url
    headers = {'Accept': 'application/json, text/javascript, */*; q=0.01',
               'Accept-Encoding': 'gzip, deflate, br',
               'Accept-Language': 'zh-CN,zh;q=0.8',
               'Connection': 'keep-alive',
               'Content-Type': 'application/json; charset=UTF-8',
               'Cookie': 'MxVisitorUID=d797041e-bdc6-462c-a41c-4023d4d8fc74; _vis_opt_exp_312_exclude=1; _vwo_uuid=974491D5B9066661CAAE036DBFB5984E; _vis_opt_exp_335_combi=2; _vis_opt_s=2%7C; _ceg.s=ov46sb; _ceg.u=ov46sb; _gat=1; ismobile=false; _cio=aa8504a9-17c9-86fd-7dfb-eb0cbc5df5a4; _ga=GA1.2.636406067.1501461605; _gid=GA1.2.21041308.1507970087; ki_t=1501461610219%3B1507970091131%3B1507970111250%3B12%3B81; ki_r=; _vwo_uuid_v2=974491D5B9066661CAAE036DBFB5984E|782ec007ad63acefe56b46a7f7b32ad7; _mx_u={"UserId":"00000000-0000-0000-0000-000000000000","UserName":null,"FirstName":null,"IsAdmin":false,"IsPaidUser":false,"IsLoggedIn":false,"MxVisitorUid":"d797041e-bdc6-462c-a41c-4023d4d8fc74","AppUID":"2eab821e-fdad-4545-9730-d2ce7e6ccb3e"}; _gaexp=GAX1.2.jQWr1JWLTCOH_ZPj74b2mA.17514.1!3jEbJbKJQM6-_lqmJEDxKw.17517.2!fUhZDJh2SvymWf4W_nFisQ.17544.1; _mx_vtc=AB-177=Variation&AB-175=Variation&AB-197B=Variation&AB-166=Variation&AB-205=Control&VWO-Blocked=true&AB-226=Control&AB-240=Control&AB-216=Control&AB-229=variation&AB-230=Control',
               'Host': 'mxtoolbox.com',
               'MasterTempAuthorization': '2eab821e-fdad-4545-9730-d2ce7e6ccb3e',
               'Origin': 'https://mxtoolbox.com',
               'Referer': 'https://mxtoolbox.com/SuperTool.aspx?action=blacklist%3a98.126.12.45&run=toolpage',
               'X-Requested-With': 'XMLHttpRequest',
               'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'}
    proxies = {
        "http": None,
        "https": None,
    }
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36'}
    res = requests.get(url, headers=headers, proxies=proxies,timeout = 15 )
    # print(res.text.encode('GBK', 'ignore').decode('GBk'))
    return res.text

def getIPurl(pagesource):
    """Extract per-IP detail-page URLs from a badips listing page.

    Args:
        pagesource (str): HTML of a https://www.badips.com/info/<n> page.

    Returns:
        list: href values (newlines stripped) of every ``a.badips`` link in
        ``div#content`` whose last path segment is a dotted-quad IPv4 address.
    """
    # compiled once instead of re.match-ing the pattern on every link
    ipv4_pattern = re.compile(
        r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
        r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$")
    soup = BeautifulSoup(pagesource, 'lxml')
    content = soup.find('div', id='content')
    links = content.find_all('a', class_='badips')
    return [a['href'].replace('\n', '')
            for a in links
            if ipv4_pattern.match(a['href'].split('/')[-1])]



def detailJudge(pagesource):
    """Parse one IP detail page and insert the extracted record into MySQL.

    Args:
        pagesource (str): HTML of a badips per-IP detail page.

    Side effects:
        Prints the parsed record and writes one row via insertMysql().
    """
    info = {'ip': '', 'category': '', 'score': '', 'country': '',
            'reporttime': '', 'reportorigin': ''}
    soup = BeautifulSoup(pagesource, 'lxml')
    overview = soup.find('div', class_='overview-info')
    info['ip'] = overview.find('b').text
    a = overview.find('a', class_='badips')
    info['category'] = a.text
    # score sits two siblings after the category link in the page layout
    info['score'] = a.next_sibling.next_sibling.text
    info['country'] = soup.find('div', id='content').find('img')['alt']
    try:
        cells = soup.find_all('tr')[1].find_all('td')
        info['reporttime'] = cells[0].text
        info['reportorigin'] = cells[1].text
    except (IndexError, AttributeError):
        # some detail pages have no report table; leave those fields empty
        # (narrowed from a bare except that swallowed everything)
        pass
    print(info)
    insertMysql(info)

def writeRecord(record):
    """Append a failed detail-page URL to record.txt for later retry.

    Args:
        record: value to log; converted with str() and written as one line.
    """
    # `with` guarantees the handle is closed even if the write raises
    # (the original leaked the handle on error); write() replaces the
    # misused writelines(str) — identical output.
    with open("record.txt", 'a') as fp:
        fp.write(str(record) + '\n')
    print("updata record")
    print "updata record"

def writeloge(record):
    """Append a finished listing-page number to page.txt (progress log).

    Args:
        record: value to log; converted with str() and written as one line.
    """
    # `with` guarantees the handle is closed even if the write raises
    # (the original leaked the handle on error); write() replaces the
    # misused writelines(str) — identical output.
    with open("page.txt", 'a') as fp:
        fp.write(str(record) + '\n')
    print("updata page")

def writeurl(record):
    """Append a failed listing-page URL to url.txt for later retry.

    Args:
        record: value to log; converted with str() and written as one line.
    """
    # `with` guarantees the handle is closed even if the write raises
    # (the original leaked the handle on error); write() replaces the
    # misused writelines(str) — identical output.
    with open("url.txt", 'a') as fp:
        fp.write(str(record) + '\n')
    print("updata url")

def thread():
    """Launch 7 worker threads, each scraping its own 2971-page slice.

    Worker i receives offset i*2971; run() adds the base page number.
    Starts and joins are staggered by 2s to avoid hammering the site.
    """
    workers = []
    for i in range(0, 7):
        # args must be a sequence of positional arguments; a 1-tuple is the
        # idiomatic form (the original's ([x]) was a list masquerading as
        # extra parentheses)
        workers.append(threading.Thread(target=run, args=(i * 2971,)))
    for w in workers:
        w.start()
        time.sleep(2)
    for w in workers:
        w.join()
        time.sleep(2)

def run(*t):
    """Scrape one slice of badips listing pages and store every IP found.

    Args:
        *t: a single positional offset (int); the slice covers pages
            offset+798 .. offset+798+2970 inclusive.

    Side effects:
        Inserts rows via detailJudge/insertMysql, logs failed detail URLs to
        record.txt, failed listing URLs to url.txt, and finished page numbers
        to page.txt.
    """
    page = t[0] + 798
    pageEnd = page + 2971
    for pagenum in range(page, pageEnd):
        print('page %s' % pagenum)
        url = 'https://www.badips.com/info/' + str(pagenum)
        # narrowed from bare except: a bare clause also swallowed
        # KeyboardInterrupt/SystemExit, making the scraper unkillable
        try:
            source = getpage(url)
            iplist = getIPurl(source)
            while iplist:
                detail_url = iplist.pop()
                print('last %s' % len(iplist))
                try:
                    s = getpage(detail_url)
                    detailJudge(s)
                except Exception:
                    # one bad detail page shouldn't sink the listing page;
                    # remember it for a retry pass
                    writeRecord(detail_url)
                    continue
        except Exception:
            # the listing page itself failed; remember it and move on
            writeurl(url)
            continue
        writeloge(pagenum)
# run()
if __name__ == '__main__':
    # guard the entry point so importing this module doesn't start scraping
    thread()
# writeRecord('1dfsdfe23')
# insertMysql({'category': u'qmail-smtp', 'reporttime': u'June 16 2017, 23:41', 'reportorigin': u'Germany', 'ip': u'201.240.222.80', 'score': u'0', 'country': 'Peru'})