# coding: utf-8
# %%
# from logformat import logger
import requests
from requests.cookies import RequestsCookieJar
import time
import random
import re

# %%


def _parse_url(
    url, headers, method="GET", timeout=10, data=None, proxies=None, encodeing="GBK"
):
    """Issue a single HTTP request and return the decoded response body.

    url: target address
    headers: request headers dict
    method: "POST" sends ``data``; anything else performs a GET
    timeout: seconds before the request is aborted (default 10)
    data: POST payload (unused for GET)
    proxies: requests-style proxy mapping; None (default) means no proxy.
        (Was a mutable default ``{}`` shared across calls — fixed.)
    encodeing: charset used to decode the body; the misspelled name is
        kept for backward compatibility with existing callers.

    Returns the decoded text on HTTP 200, otherwise None.
    Side effect: passes the response cookies to get_cookie(), which
    rebuilds the module-global jar ``co``.
    """
    if proxies is None:
        proxies = {}
    # Close the session deterministically instead of leaking its
    # underlying connection pool (original never closed it).
    with requests.session() as sess:
        if method == "POST":
            response = sess.post(
                url,
                headers=headers,
                timeout=timeout,
                data=data,
                proxies=proxies,
            )
        else:
            response = sess.get(
                url, headers=headers, timeout=timeout, proxies=proxies
            )

    # Capture server-set cookies into the module-global jar.
    get_cookie(response.cookies)
    print(
        "<< STATUS CODE : {} >>".format(response.status_code).center(60, " "),
        flush=True,
    )
    if response.status_code == 200:
        return response.content.decode(encoding=encodeing)
    return None


# %%
# %%
def parse_url(
    url, headers, method="GET", timeout=5, data=None, proxies=None, encodeing="GBK"
):
    """Fetch *url* via _parse_url, automatically retrying twice on failure.

    url: request address
    headers: e.g. {"user-agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:71.0) Gecko/20100101 Firefox/71.0"}
    method: default "GET"
    data: POST payload, default None
    timeout: default 5
    proxies: proxy mapping (use high-anonymity proxies), default None
        (was a mutable default ``{}`` — fixed)
    encodeing: charset for decoding, default "GBK"

    Returns the decoded body, or None if all three attempts fail.
    """
    if proxies is None:
        proxies = {}
    # The original docstring promised two automatic retries but never
    # performed them; 1 initial attempt + 2 retries = 3 tries total.
    for _attempt in range(3):
        try:
            return _parse_url(url, headers, method, timeout, data, proxies, encodeing)
        # `except Exception` instead of a bare `except:` so that
        # KeyboardInterrupt / SystemExit still propagate.
        except Exception:
            continue
    return None


# %%


def get_cookie(cookie_jar):
    """Rebuild the module-global jar ``co`` from the cookies in *cookie_jar*.

    cookie_jar: an iterable of Cookie objects (e.g. response.cookies).

    Side effect: rebinds the global ``co`` to a fresh RequestsCookieJar.
    Returns the new jar for convenience (callers may ignore it).
    """
    global co
    co = RequestsCookieJar()
    for cookie in cookie_jar:
        # Read the Cookie object's attributes directly.  The original
        # regex-parsed str(cookie) ("<Cookie name=value for domain/path>"),
        # which broke on values containing spaces, "=", or " for ", and
        # folded the path fragment into the domain.
        co.set(cookie.name, cookie.value, domain=cookie.domain)
    return co

# %%


def sleeper(mini, maxi, gaps):
    """Sleep for a random duration and return the seconds slept.

    mini: minimum (in units of 1/gaps seconds)
    maxi: maximum (in units of 1/gaps seconds)
    gaps: resolution divisor; t = random.randint(mini, maxi) / gaps

    Bug fix: the original overwrote ``maxi`` with ``mini * gaps``,
    silently ignoring the caller's maximum; it now honours the formula
    stated in the docstring.  (The no-op ``mini = mini`` was dropped.)
    """
    t = random.randint(mini, maxi) / gaps
    print("sleep {}s".format(t).center(60, " "))
    time.sleep(t)
    return t


# %%
if __name__ == "__main__":

    # Demo: download a balance-sheet CSV and save it to disk.
    proxy = {}
    # proxy = {"https": "http://115.231.5.230:44524"}
    header = {
        "user-agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:71.0) Gecko/20100101 Firefox/71.0"
    }

    url = "http://quotes.money.163.com/service/zcfzb_601398.html"
    # url = 'http://123.123.123.html'

    a = ""

    for i in range(5):
        # A failed fetch returns None, not "".  The original checked
        # `a == ""`, so None slipped into the else-branch and crashed
        # on f.write(None); test falsiness instead so failures retry.
        if not a:
            a = _parse_url(url, headers=header, encodeing="GBK", timeout=2)
        else:
            print(a)
            with open("1zcfzb_600009.csv", "w") as f:
                f.write(a)
            time.sleep(5)

# %%
