# -*- coding: utf-8 -*-
# @Time    : 2019/3/1 15:19
# @Author  : Macher
# @File    : IBM_judge.py
# @Software: PyCharm Community Edition

# coding=utf-8
#python 3.5
import os
import psutil
from selenium import webdriver
from bs4 import BeautifulSoup
from selenium.webdriver.support.wait import WebDriverWait
import time
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import pymysql
# Parameterized UPDATE executed by sql(); the two %s placeholders are filled
# by pymysql with (ibm_value, ip).
# BUG FIX: the original string had a stray comma before WHERE
# ("set ibm =%s,where ip = %s"), which is invalid SQL and made every
# cursor.execute() of this statement fail.
sql_update = "update experiment_date set ibm = %s where ip = %s"

def sql(tup):
    """Persist one scrape result into experiment_date.

    tup -- 2-tuple matching the %s placeholders of sql_update: (ibm_value, ip).

    The original version leaked the cursor and connection whenever
    execute() raised; try/finally guarantees cleanup, and the commit now
    happens while the cursor/connection are known to be open.
    """
    db = pymysql.connect("219.216.65.41", "root", "123456", "mydata")
    try:
        cursor = db.cursor()
        try:
            cursor.execute(sql_update, tup)
            db.commit()  # commit before teardown, not after cursor.close()
        finally:
            cursor.close()
    finally:
        db.close()  # always release the connection, even on error

def getip():
    """Return the distinct IPs still to be checked, as a list.

    Selects the ip column from experiment_date rows where the multirbl
    column is truthy (the original query relies on MySQL's implicit
    truthiness — kept byte-for-byte). Duplicates are removed; order is
    not significant.

    Fixes over the original: cursor/connection were leaked if the query
    raised, and a SELECT needs no commit.
    """
    db = pymysql.connect("219.216.65.41", "root", "123456", "mydata")
    try:
        cursor = db.cursor()
        try:
            cursor.execute("select  ip from experiment_date where multirbl  ")
            # first column of every row, de-duplicated via a set comprehension
            return list({row[0] for row in cursor.fetchall()})
        finally:
            cursor.close()
    finally:
        db.close()

def reConnect():
    """Restart the helper executable to force a fresh network connection.

    Kills any running instance of the helper process, waits briefly so it
    can release its resources, then launches it again (Windows-only:
    os.startfile).
    """
    print("reconnecting.....")
    exe_name = 'db93b1cb6e42db089b843413fb4bf3e4.exe'
    for proc in psutil.process_iter():
        if proc.name() == exe_name:
            proc.kill()
    time.sleep(5)  # let the killed process fully exit before relaunching
    os.startfile(r'C:\Users\mxf\AppData\Local\db93b1cb6e42db089b843413fb4bf3e4\db93b1cb6e42db089b843413fb4bf3e4.exe')
    print("reconnect done")

def getpage(iplist):
    """Scrape the IBM X-Force report page for every IP in iplist.

    iplist -- list of IP strings; consumed destructively via pop().

    Every page source is handed to judgepage() for parsing/persistence.
    On any load failure the Chrome driver is recycled and the same IP is
    retried. Every ~30 iterations a reconnect slot exists (reConnect()
    currently disabled, matching the original).

    Fixes over the original: the unused `headers` dict is removed;
    EC.title_is(driver.title) compared the live title to itself and so
    never actually waited — replaced with a wait for a non-empty title;
    the bare `except:` (which swallowed KeyboardInterrupt) is narrowed to
    `except Exception`; the driver is now quit on normal exit instead of
    being leaked.
    """
    driver = webdriver.Chrome()
    times = 0
    try:
        while iplist:
            times += 1
            if times >= 30:
                # Periodic reconnect point; this iteration intentionally
                # processes no IP (original behavior).
                times = 0
                # reConnect()
            else:
                ip = iplist.pop()
                print(len(iplist))
                while True:
                    try:
                        driver.get('https://exchange.xforce.ibmcloud.com/ip/' + str(ip))
                        # wait up to 20s (polling every 0.5s) for the page
                        # title to be populated
                        WebDriverWait(driver, 20, 0.5).until(lambda d: d.title != '')
                        judgepage(driver.page_source.encode('GBK', 'ignore').decode('GBK'), ip)
                        break
                    except Exception:
                        # Recycle the browser on any failure, retry this IP.
                        driver.quit()
                        driver = webdriver.Chrome()
    finally:
        driver.quit()


def judgepage(pagesource, ip):
    """Parse one X-Force IP report page and persist the extracted fields.

    pagesource -- full HTML of the report page (already decoded to str).
    ip         -- the IP the page describes; used as the UPDATE key.

    Every field is best-effort: a missing element yields '' instead of
    aborting the whole page (the original used fourteen copy-pasted bare
    `try/except: print()` blocks; collapsed here into one helper with the
    narrower `except Exception`). Only detail_categories and ip are
    written to the database, as in the original.
    """
    def _text(getter):
        # Run one extraction; any missing element just yields ''.
        try:
            return getter()
        except Exception:
            return ''

    soup = BeautifulSoup(pagesource, 'lxml')
    whois = soup.find(id='whois')
    detailsline = soup.find(class_='detailsline')

    dict1 = {
        'whois_update': _text(lambda: whois.find(attrs={'ng-if': 'model.updatedDate'}).find('div').text),
        'whois_organization': _text(lambda: whois.find(attrs={'ng-if': 'model.contact && model.contact[0].organization'}).find('div').text),
        'whois_country': _text(lambda: whois.find(attrs={'ng-if': 'model.contact && model.contact[0].country'}).find('div').text),
        'whois_registrarName': _text(lambda: whois.find(attrs={'ng-if': 'model.contact && model.contact[0].name'}).find('div').text),
        'whois_ontactEmail': _text(lambda: whois.find(attrs={'ng-if': 'model.contactEmail'}).find('div').text),
        'detail_categories': _text(lambda: detailsline.find(id='categories').find('ul').text),
        'detail_hosted': _text(lambda: detailsline.find(id='hosted').find_all('a')[1].text),
        'detail_actions': _text(lambda: detailsline.find(id='actions').find('td').text),
        'detail_risks': _text(lambda: detailsline.find(id='risks').find('td').text),
        'detail_country_of_ip': _text(lambda: detailsline.find(id='country-of-ip').find('td').text),
        'detail_asn': _text(lambda: detailsline.find(id='asn').find('td').text),
    }

    def _dns():
        # DNS table: '&&'-joined hostname link text, skipping malformed rows.
        parts = []
        for tbody in soup.find(id='dnsTable').find_all('tbody'):
            try:
                parts.append('&&' + tbody.find_all('td')[1].find('a').text)
            except Exception:
                continue
        return ''.join(parts)

    def _group():
        # Subnet table: '&&'-joined link text of the second cell of each
        # data row (header row skipped).
        parts = []
        for tr1 in soup.find(id='subnetsTable').find_all('tr')[1:]:
            parts.append('&&' + tr1.find_all('td')[1].find('a').text)
        return ''.join(parts)

    dict1['dns'] = _text(_dns)
    dict1['group'] = _text(_group)
    dict1['check'] = 'done'
    print(dict1)

    # Plain tuple; the original built it as tuple({value}) + tuple({ip}),
    # which only worked by accident of single-element-set semantics.
    tup = (dict1['detail_categories'], ip)
    print(tup)
    sql(tup)

# getpage('https://exchange.xforce.ibmcloud.com/ip/103.235.46.39')
if __name__ == '__main__':
    # Entry point: pull the outstanding IPs from the DB, then scrape each one.
    getpage(getip())