import random
from loguru import logger
import consul
import requests
import json


class ProxyClass:
    """Client that discovers the "proxyman" service through Consul and
    fetches short-lived HTTP proxies from it."""

    def getServiceIp(self, env_dict):
        """Pick a random healthy instance of the configured service.

        Args:
            env_dict: config mapping with keys "consul_host", "consul_port",
                "data_dc" (service name) and "fiona_dc" (Consul datacenter).

        Returns:
            "ip:port" of one healthy instance, or None when none is available.
        """
        con = consul.Consul(env_dict["consul_host"], env_dict["consul_port"])
        # health.service() returns an (index, entries) tuple.
        data = con.health.service(env_dict["data_dc"], dc=env_dict["fiona_dc"])
        if len(data) < 2:
            return None
        ipList = [ip for ip in (self.getNodeIp(item) for item in data[1])
                  if ip is not None]
        if not ipList:
            return None
        # random.choice replaces the manual randint(0, len-1) indexing.
        return random.choice(ipList)

    def getNodeIp(self, data):
        """Extract "ip:port" from one Consul health entry.

        Returns None unless every health check reports "passing".  Falls
        back to the node address when the service has no address of its own.
        """
        if any(check.get("Status") != 'passing' for check in data.get("Checks")):
            return None
        service = data.get("Service")
        ip = service.get("Address")
        if not ip:  # covers both None and "" like the original two-part test
            ip = data.get("Node").get("Address")
        return f'{ip}:{service.get("Port")}'

    def get_proxyman_ip(self, env_dict):
        """Convenience alias for getServiceIp()."""
        return self.getServiceIp(env_dict)

    def get_proxies(self, env="dev", proxies_type='http', timeout=20):
        """Obtain one proxy from proxyman and shape it for `requests`.

        Args:
            env: "dev" selects the dev Consul config; anything else selects prod.
            proxies_type: currently unused -- the result always carries an
                "https" entry.  TODO(review): honor or drop this parameter.
            timeout: seconds the proxy should be held (holdSeconds).

        Returns:
            A requests-style proxies dict, or {} when anything fails.
        """
        dev_dict = {
            "consul_host": "dc1.u51-inc.com",
            "consul_port": 8500,
            "fiona_dc": "stable",
            "fiona_profile": "dev",
            "data_dc": "proxyman",
            "Authorization": "service 4ZHZDMGjMD5RFKyAeDiXZpPtIXS8y7TXPPvWCmN9YNs9CxZfbRQ0ruj8sH2LKM",
        }
        prod_dict = {
            "consul_host": "127.0.0.1",
            "consul_port": 8500,
            "fiona_dc": "xydc",
            "fiona_profile": "product",
            "data_dc": "proxyman",
            "Authorization": "service 4ZHZDMGjMD5RFKyAeDiXZpPtIXS8y7TXPPvWCmN9YNs9CxZfbRQ0ruj8sH2LKM",
        }
        env_dict = dev_dict if env == "dev" else prod_dict

        proxyman_ip = self.get_proxyman_ip(env_dict)

        level = 9
        url = (f"http://{proxyman_ip}/proxyman/api/v5/proxies"
               f"?level={level}&holdSeconds={timeout}")
        # production: /172.19.135.22:8080
        headers = {
            "Content-Type": "application/json",
            # Reuse the token from the env config instead of a second
            # hard-coded copy of the same literal.
            "Authorization": env_dict["Authorization"],
            "X-Consumer-ID": "u51.com",
        }
        try:
            # NOTE(review): verify=False disables TLS verification -- confirm
            # this is acceptable for the internal proxyman endpoint.
            result = requests.get(url, headers=headers, verify=False, timeout=6).json()
            proxies = {
                # "http": f"http://{result['ipaddr']}:{result['port']}",
                "https": f"https://{result['ipaddr']}:{result['port']}"
            }
        except Exception:
            # Use the module logger (loguru) instead of print, consistent with
            # the rest of the file; message text kept verbatim.
            logger.exception("获取代理出现异常")
            proxies = {}
        return proxies


pc = ProxyClass()


def get_list(page=3):
    """Fetch one page of the Dianping shop list (Shanghai, ch10/d1) and print
    the raw response body.

    Args:
        page: Zero-based page index; each page holds 20 entries.  Defaults to
            3 (start offset 60), matching the original hard-coded request.
    """
    headers = {
        "Connection": "keep-alive",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1",
        "Content-Type": "application/json",
        "Accept": "*/*",
        "Origin": "https://m.dianping.com",
        "Sec-Fetch-Site": "same-origin",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Dest": "empty",
        "Referer": "https://m.dianping.com/shanghai/ch10/d1",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8"
    }
    cookies = {"_lxsdk_s": "17e28eb9f70-99c-3a2-156%7C%7C60"}
    url = "https://m.dianping.com/isoapi/module"
    payload = {
        "pageEnName": "shopList",
        "moduleInfoList": [{
            "moduleName": "mapiSearch",
            "query": {
                "search": {
                    "start": page * 20,
                    "categoryId": "10",
                    "parentCategoryId": 10,
                    "locateCityid": 0,
                    "limit": 20,
                    "sortId": "0",
                    "cityId": 1,
                    "range": "-1",
                    "maptype": 0,
                    "keyword": ""
                }
            }
        }]
    }
    # NOTE(review): the original fetched a proxy via pc.get_proxies("dev") but
    # never passed it to the request (the `proxies=` kwarg was commented out),
    # so the dead fetch has been removed.
    response = requests.post(url, headers=headers, cookies=cookies,
                             data=json.dumps(payload))

    print(response.text)


def get_list_no_cookie_token(page, proxies):
    """POST the shop-list query for *page* through *proxies* and log the outcome.

    Args:
        page: Zero-based page index (start offset = page * 20).
        proxies: requests-style proxies mapping, e.g. {"https": "https://ip:port"}.

    Returns:
        The decoded JSON body on success, or None when the response body is
        not valid JSON (e.g. an anti-bot HTML page).

    Raises:
        requests.RequestException: propagated from the HTTP call itself,
            as in the original (callers wrap this in try/except).
    """
    headers = {
        "Connection": "keep-alive",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1",
        "Content-Type": "application/json",
        "Accept": "*/*",
        "Origin": "https://m.dianping.com",
        "Sec-Fetch-Site": "same-origin",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Dest": "empty",
        "Referer": "https://m.dianping.com/shanghai/ch10/d1",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8"
    }
    cookies = {
        "msource": "default",
        "default_ab": "shopList%3AA%3A5",
        "cityid": "1",
        "logan_custom_report": ""
    }
    url = "https://m.dianping.com/isoapi/module"
    payload = {
        "pageEnName": "shopList",
        "moduleInfoList": [{
            "moduleName": "mapiSearch",
            "query": {
                "search": {
                    "start": page * 20,
                    "categoryId": "10",
                    "parentCategoryId": 10,
                    "locateCityid": 0,
                    "limit": 20,
                    "sortId": "0",
                    "cityId": 1,
                    "range": "-1",
                    "maptype": 0,
                    "keyword": ""
                }
            }
        }]
    }
    response = requests.post(url, headers=headers, cookies=cookies,
                             data=json.dumps(payload), proxies=proxies,
                             timeout=10)
    try:
        # response.json() raises ValueError (json.JSONDecodeError) on a
        # non-JSON body; the original used a bare except here.
        res = response.json()
    except ValueError:
        logger.error(f"{proxies} > error")
        return None
    logger.info(res)
    logger.info(f"{proxies} > success")  # fixed "seccuss" typo
    return res


def get_page(proxies):
    """GET the Dianping mobile list page through *proxies*.

    Args:
        proxies: requests-style proxies mapping.

    Returns:
        The requests.Response object (the original discarded it; returning it
        is backward-compatible since callers ignored the None return).
    """
    headers = {
        "Connection": "keep-alive",
        "Pragma": "no-cache",
        "Cache-Control": "no-cache",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
        "Sec-Fetch-Site": "same-origin",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-User": "?1",
        "Sec-Fetch-Dest": "document",
        "Referer": "https://m.dianping.com/",
        "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8"
    }
    # NOTE(review): this cookies dict is built but was never passed to
    # requests.get in the original -- behavior preserved; confirm whether
    # `cookies=cookies` was meant to be sent.
    cookies = {
        # "_hc.v": "4610935c-c220-7685-1d38-b0883fa82764.1641365381",
        "msource": "default",
        "default_ab": "shopList%3AA%3A5",
        "cityid": "1",
        # "logan_session_token": "88oaae00l5aus5wlcw1c",
        "logan_custom_report": ""
    }
    url = "https://m.dianping.com/shanghai/ch10/d1"
    response = requests.get(url, headers=headers, proxies=proxies, timeout=10)
    return response


# for i in range(0, 50):
#     proxies = pc.get_proxies("prod")
#     try:
#         get_page(proxies)
#         get_list_no_cookie_token(i, proxies)
#     except:
#         logger.error(f"{proxies} > req_error")
if __name__ == "__main__":
    # Guarded entry point: importing this module no longer fires live
    # network requests.  Smoke-test: fetch a dev proxy and request page 1.
    proxies = pc.get_proxies("dev")
    get_list_no_cookie_token(1, proxies)