import json
import time

import requests

from internal import init
from internal.parse import parseHeader, parseUrlsPage, parseSteamInfo, parseFriendship

# Load the default HTTP headers shared by every request in this module.
# Uses a context manager so the file handle is closed even if parsing fails.
with open("../file/defaultHeader", "r", encoding="utf-8") as fp:
    headers = parseHeader(fp.read())


# Search Steam community users.
def searchSteamUser(params):
    """Query the Steam community AJAX search endpoint.

    params: pre-encoded query-string fragment appended to the search URL.
    Returns a tuple (steamUrls, totalCount):
      steamUrls  -- profile URLs parsed from the result HTML
      totalCount -- server-reported total number of matches
    On any request or JSON-decode failure, logs the error and returns ([], 0).
    """
    url = 'https://steamcommunity.com/search/SearchCommunityAjax?' + params
    try:
        res = requests.get(url, headers=headers)
        res.raise_for_status()
        # res.json() decodes using the response encoding; ValueError covers
        # json.JSONDecodeError, which previously escaped the bare handler.
        jsonData = res.json()
    except (requests.RequestException, ValueError):
        print(f"搜索出错{params}")
        return [], 0
    # The "html" field holds the rendered result page; the page total from
    # parseUrlsPage is not used here (the caller paginates via totalCount).
    steamUrls, _pageTotal = parseUrlsPage(jsonData["html"])
    return steamUrls, jsonData["search_result_count"]


# Fetch Steam profile information.
def getSteamInfo(steamUrls, page):
    """Download and parse each profile page in *steamUrls*.

    steamUrls -- iterable of Steam profile URLs
    page      -- current search-page number, used only for progress output
    Returns a list of parsed profile-info objects (from parseSteamInfo).
    Failing URLs are logged and skipped; a request delay is applied after
    every URL regardless of outcome, to rate-limit the crawl.
    """
    steamUserInfoList = []
    total = len(steamUrls)  # hoisted: invariant across the loop
    # start=1 fixes the off-by-one progress display (was 0/N .. N-1/N).
    for i, steamUrl in enumerate(steamUrls, start=1):
        print(f"第 {page} 页搜索进度 {i}/{total} ")
        try:
            res = requests.get(steamUrl, headers=headers)
            steamUserInfoList.append(parseSteamInfo(res.text, steamUrl))
        # Exception (not bare except) keeps the best-effort behaviour but no
        # longer swallows KeyboardInterrupt/SystemExit.
        except Exception:
            print(f"获取 {steamUrl} 信息出错")
        time.sleep(init.defaultReqDelay)
    return steamUserInfoList


# Analyse mutual friends.
def analyseFriendship(friendUrl):
    """Find which friends-of-friends also appear in the user's own friend list.

    friendUrl -- list whose first element is the user's ".../friends/" URL
                 (an empty list short-circuits to ([], [])).
    Returns (friendNamesKeyPool, multiFriendPool):
      friendNamesKeyPool -- the user's direct friend names
      multiFriendPool    -- strings "<profileUrl> know <name>" for every
                            friend-of-friend name matching a direct friend
    Network/parse failures for an individual friend are logged and skipped.
    """
    if not friendUrl:
        return [], []
    friendUrl = friendUrl[0]
    try:
        res = requests.get(friendUrl, headers=headers)
    except requests.RequestException:
        print(f"获取 {friendUrl} 朋友信息出错")
        return [], []
    friendNames, friendUrls = parseFriendship(res.text)
    friendNamesKeyPool = friendNames
    # Set for O(1) membership tests; the list itself is still returned.
    knownNames = set(friendNamesKeyPool)
    multiFriendPool = []
    # Distinct names per level: the original reassigned friendNames/friendUrls
    # inside this loop, shadowing the outer lists it was iterating over.
    for profileUrl in friendUrls:
        friendsPage = profileUrl + "/friends/"
        try:
            res = requests.get(friendsPage, headers=headers)
            subNames, _subUrls = parseFriendship(res.text)
            for name in subNames:
                if name in knownNames:
                    # profileUrl is the bare profile address (no /friends/),
                    # matching the original's in-place strip of the suffix.
                    multiFriendPool.append(f"{profileUrl} know {name}")
        except Exception:
            print(f"获取 {friendsPage} 朋友信息出错")
        time.sleep(init.defaultReqDelay)
    return friendNamesKeyPool, multiFriendPool

# def getBeforeName(url):
#     url += "ajaxaliases/"
#     res = requests.get(url, headers=headers)
#     res.encoding = "utf-8"
#     return res.text
