import requests
import hashlib
import base64
from bs4 import BeautifulSoup
from multiprocessing.dummy import Pool


class http_request:
    """Tiny client that POSTs a domain to a local service and returns
    the raw response body."""

    def __init__(self, port="9000"):
        # The service is assumed to run on localhost; only the port varies.
        self.url = "http://localhost:" + port

    def getwebbody(self, domain):
        """POST *domain* to the service and return the raw body as bytes.

        Returns None when the request fails (connection refused,
        timeout, ...); the error is printed rather than raised.
        """
        # The service expects the domain as both key and value of the
        # form field. An earlier base64+md5 encoding scheme was dropped.
        payload = {domain: domain}
        try:
            # 30 s timeout so a hung service cannot block the caller forever.
            return requests.post(self.url, data=payload, timeout=30).content
        except requests.RequestException as e:
            # Catch only requests errors: the original `except BaseException`
            # also swallowed KeyboardInterrupt and SystemExit.
            print(e)
            return None


if __name__ == "__main__":
    # Ad-hoc smoke test: fetch one page through the local service on
    # port 9123 and dump the raw response body.
    port = "9123"
    cur = http_request(port)

    def test(domain):
        """Fetch *domain* through the shared http_request client."""
        return cur.getwebbody(domain)

    html_data = test("https://www.ele.me/shop/160998694/rate")
    print(html_data, type(html_data))
    # NOTE(review): the original file parsed html_data with BeautifulSoup
    # (find_all on span.shoprate-itemratinglist-name) *after* an
    # unconditional exit(), so that code could never run; the unreachable
    # block, the unused domain_list, and the commented-out thread-pool
    # fan-out were removed. The script now simply ends here, which is
    # behaviorally identical to the old exit() call (exit code 0).