import socket
from urllib.parse import urlparse
import time

def get_html_block(url):
    """Fetch *url* over a plain blocking socket and print the raw HTTP response.

    Sends a minimal HTTP/1.1 GET with ``Connection: close`` so the server
    closes the connection once the body is complete; ``recv`` then returns
    ``b""`` and the read loop terminates.
    """
    u = urlparse(url)
    host = u.hostname          # BUG FIX: netloc may be "host:port"; connect() needs the bare host
    port = u.port or 80        # honor an explicit port in the URL, default to 80
    path = u.path or "/"       # BUG FIX: a bare URL like http://host has an empty path
    # Blocking socket: each connect/send/recv waits until it completes.
    # ``with`` closes the socket even on error (original leaked it).
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
        client.connect((host, port))
        client.send(f"GET {path} HTTP/1.1\r\nHost:{host}\r\nConnection:close\r\n\r\n".encode("utf-8"))
        html = b""
        while True:
            data = client.recv(1024)
            if not data:
                break  # peer closed the connection -> response is complete
            html += data
    print(html.decode("utf-8"))

def get_html(url):
    """Fetch *url* with a non-blocking socket, busy-polling until done.

    Demonstrates non-blocking I/O without an event loop: every operation
    that would block raises, and we simply retry in a spin loop.  Prints
    the response body (everything after the header/body separator).
    """
    u = urlparse(url)
    host = u.hostname          # BUG FIX: netloc may include ":port"; connect() needs the bare host
    port = u.port or 80        # honor an explicit port, default 80
    path = u.path or "/"       # BUG FIX: bare http://host URLs have an empty path

    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client.setblocking(False)
    # A non-blocking connect() returns immediately; the in-progress
    # handshake is reported via BlockingIOError, which we swallow.
    try:
        client.connect((host, port))
    except BlockingIOError:
        pass

    request = f"GET {path} HTTP/1.1\r\nHost:{u.hostname}\r\nConnection:close\r\n\r\n"
    # Spin until the connection is established: send() raises OSError
    # (e.g. ENOTCONN) while the TCP handshake is still pending.
    while True:
        try:
            client.send(request.encode("utf-8"))
            break
        except OSError:
            pass

    html = b""
    while True:
        try:
            data = client.recv(1024)
        except BlockingIOError:
            continue  # nothing to read yet; keep polling
        if not data:
            break     # peer closed the connection -> response complete
        html += data
    client.close()  # FIX: original leaked the socket

    # Print only the body: everything after the first blank line.
    print(html.decode("utf-8").split("\r\n\r\n")[1])

from selectors import DefaultSelector,EVENT_READ,EVENT_WRITE

# Module-level event-loop state shared by Fetcher and loop().
selector = DefaultSelector()  # OS-appropriate I/O multiplexer (epoll/kqueue/select)
urls = []  # URLs still in flight; emptied as each fetch completes
gstop = False  # set True by Fetcher.readable() when the last URL finishes
class Fetcher:
    """One in-flight HTTP GET driven by the module-level ``selector``.

    ``get_html`` starts a non-blocking connect and registers ``sendable``
    for writability; ``sendable`` issues the request and re-registers for
    readability; ``readable`` accumulates the response until the server
    closes the connection, then prints the body and — when the last URL
    completes — flags the event loop to stop via ``gstop``.
    """

    def sendable(self, key):
        """Socket became writable (connect finished): send the request."""
        print("sendable----------------------")
        selector.unregister(key.fd)
        self.client.send(f"GET {self.path} HTTP/1.1\r\nHost:{self.host}\r\nConnection:close\r\n\r\n".encode("utf-8"))
        # From now on we only care about incoming data.
        selector.register(self.client.fileno(), EVENT_READ, self.readable)

    def readable(self, key):
        """Socket became readable: accumulate data; b'' means peer closed."""
        data = self.client.recv(1024)
        print(f"readable-----{len(data)}-----")
        if data:
            self.html += data
        else:
            selector.unregister(key.fd)
            # The body is everything after the first blank line of the response.
            print(self.html.decode("utf-8").split("\r\n\r\n")[1])
            self.client.close()
            global urls
            urls.remove(self.url)
            if not urls:
                global gstop
                gstop = True  # last URL done: tell loop() to exit

    def rwable(self, key):
        # Debug helper: dump the selector key (not used by the main flow).
        print(key)

    def get_html(self, url):
        """Start the asynchronous fetch of *url*."""
        self.url = url
        self.html = b""

        u = urlparse(url)
        self.host = u.netloc  # netloc (may include :port) is correct for the Host header
        self.path = u.path
        if self.path == "":
            self.path = "/"

        self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Non-blocking: connect() returns immediately; completion is
        # signalled by the socket becoming writable (EVENT_WRITE below).
        self.client.setblocking(False)
        try:
            # BUG FIX: connect to hostname/port, not the raw netloc —
            # netloc may contain "host:port", which getaddrinfo rejects.
            self.client.connect((u.hostname, u.port or 80))
        except BlockingIOError:
            pass
        selector.register(self.client.fileno(), EVENT_WRITE, data=self.sendable)

def loop():
    """Dispatch selector events until all fetches flag completion via ``gstop``.

    Each ready key carries its callback in ``key.data`` (set at register
    time by Fetcher); the loop simply invokes it with the key.
    """
    while not gstop:
        for key, _mask in selector.select():
            key.data(key)
if __name__ == '__main__':
    # Start every fetch, run the event loop, then report the elapsed time.
    started_at = time.time()
    for i in range(1):
        target = f"http://shop.projectsedu.com/goods/{i+1}"
        urls.append(target)
        Fetcher().get_html(target)
    loop()
    print(time.time() - started_at)