import os
import requests
from UA_info import UA_list
import random
from bs4 import BeautifulSoup
from bs4.element import SoupStrainer
import time


class HostsSpider:
    """Resolve the IP address of each hostname listed in ``Github Address.txt``
    via ipaddress.com and append ``IP  hostname`` lines to a local ``hosts``
    file (hosts-file format)."""

    # Seconds to wait for a single HTTP request. Fix: without a timeout,
    # requests.get() can block forever on an unresponsive server.
    REQUEST_TIMEOUT = 10

    def __init__(self):
        # URL template; {} is filled with the hostname being resolved.
        self.url = 'https://ipaddress.com/website/{}'
        # Hostname currently being processed (set by run() before each get()).
        self.address = None

    def get(self):
        """Fetch the ipaddress.com page for ``self.address`` and parse it.

        Raises requests.HTTPError on a non-2xx response.
        """
        # Rotate the User-Agent per request to look less like a bot.
        headers = {'User-Agent': random.choice(UA_list)}
        response = requests.get(
            url=self.url.format(self.address),
            headers=headers,
            timeout=self.REQUEST_TIMEOUT,
        )
        # Fix: fail loudly on HTTP errors instead of parsing an error page.
        response.raise_for_status()
        self.parse(response.text)

    def parse(self, html):
        """Extract the first IP address from *html* and save it."""
        # Restrict parsing to the <ul class="comma-separated"> fragment
        # that holds the IP list — faster than parsing the whole page.
        only_ul_tag = SoupStrainer('ul', class_='comma-separated')
        soup = BeautifulSoup(html, 'lxml', parse_only=only_ul_tag)
        li = soup.find('li')
        # Fix: find() returns None when the fragment is missing (unknown
        # host or layout change); the original crashed with AttributeError.
        if li is None:
            print(f'未找到{self.address}的IP')
            return
        self.save(li.get_text())

    def save(self, host):
        """Append one ``IP  hostname`` line to the local ``hosts`` file.

        The IP column is left-padded with spaces to 15 characters so the
        hostname column lines up (a no-op for longer strings, matching the
        original behavior).
        """
        with open('hosts', 'a', newline='', encoding='utf-8') as file:
            file.write(f'{host.ljust(15)} {self.address}\n')

    def run(self):
        """Resolve every hostname listed in ``Github Address.txt``."""
        with open('Github Address.txt', 'r') as file:
            # Iterate lazily instead of readlines(); strip() also removes
            # any trailing \r from Windows-edited files.
            for line in file:
                self.address = line.strip()
                if not self.address:
                    continue  # fix: skip blank lines (avoided a bogus request)
                print(f'正在进行{self.address}...')
                self.get()


if __name__ == '__main__':
    started = time.time()
    # Start from a clean slate: drop any hosts file left by a previous run
    # (save() opens in append mode, so stale entries would accumulate).
    if os.path.exists('hosts'):
        os.remove('hosts')
    HostsSpider().run()
    elapsed = time.time() - started
    print(f'所用时间：{elapsed}')
