import requests
import json
import os
import re
import time

# Number of consecutive failed connection attempts; incremented and reset
# by useUrlGetData().  (A `global` statement at module level is a no-op,
# so a plain assignment is all that is needed here.)
connectCount = 0


def getEcharts(chartID):
    """Download the chart definition for *chartID* from makeapie.com and
    save it as website/pages/<chartID>.js, along with any data files and
    images the chart source references.

    Returns True on success (or when the file already exists locally),
    False when the page could not be fetched.
    """
    target = os.path.join('website', 'pages', chartID + '.js')

    # BUG FIX: the original tested os.path.exists() against the remote
    # URL (always False on the local filesystem); the intended check is
    # whether the chart was already saved locally.  It also left
    # listInfoObject/file undefined for later lines when the branch was
    # skipped, which would have raised NameError.
    if os.path.exists(target):
        return True

    pageInfo = useUrlGetData('https://www.makeapie.com/chart/get/' + chartID)
    if pageInfo == '':
        return False

    listInfoObject = json.loads(pageInfo.text)
    code = listInfoObject['data']['code']

    # Download referenced data files
    getExtensionData(code, 'json')
    # Download referenced images
    getExtensionData(code, 'jpg')
    getExtensionData(code, 'png')

    # Strip carriage returns so the saved file uses Unix line endings.
    # `with` guarantees the handle is closed even if write() fails.
    with open(target, 'w', encoding='utf-8') as file:
        file.write(code.replace('\r', ''))
    return True


def getExtensionData(text, ext):
    """Find asset references of the form ``data...<ext>`` inside *text*
    (a chart's JavaScript source) and download each one into
    website/data/, skipping files that are already present.

    text -- chart source code to scan
    ext  -- extension to look for ('json', 'jpg' or 'png')
    """
    for asset in re.findall(r'data[\S]*' + ext, text):

        # Skip inline base64 images: they also start with "data" but are
        # far longer than any real asset path.
        if len(asset) > 120:
            continue

        filename = asset.split('/')[-1]
        target = os.path.join('website', 'data', filename)
        if os.path.exists(target):
            continue  # already downloaded on a previous run

        data = useUrlGetData('https://www.makeapie.com/asset/get/s/' + asset)
        if data == '':
            continue  # fetch failed after retries; move on

        # `with` closes the handle even if write() raises.
        if ext == 'json':
            with open(target, 'w', encoding='utf-8') as file:
                file.write(data.text)
        elif ext in ('jpg', 'png'):
            with open(target, 'wb') as file:
                file.write(data.content)

        writeLog('------数据资源：' + asset)

def createDir(path):
    """Create the directory *path*; do nothing if it already exists."""
    try:
        os.mkdir(path)
    except FileExistsError:
        pass


# Fetch a URL with retries, to work around intermittent connection timeouts
def useUrlGetData(url):
    """GET *url* with a 10-second timeout, retrying up to 3 times with a
    5-second pause between attempts.

    Returns the requests.Response on success, or the empty string ''
    once the retry budget is exhausted.  Retry state is kept in the
    module-level connectCount counter.
    """
    global connectCount
    try:
        pageInfo = requests.get(url, timeout=10)
    # Narrowed from a bare `except:` so KeyboardInterrupt etc. still
    # propagate; only network-level failures are retried.
    except requests.exceptions.RequestException:
        connectCount += 1
        # BUG FIX: the original concatenated the int counter directly
        # into the message, raising TypeError inside the handler and
        # killing the retry logic.
        writeLog('------连接失败，重试：' + str(connectCount) + '次')
        if connectCount > 3:
            connectCount = 0
            return ''
        time.sleep(5)
        return useUrlGetData(url)
    else:
        connectCount = 0
        return pageInfo


def writeLog(info):
    """Echo *info* to stdout and append it to website/log.txt."""
    with open(os.path.join('website', 'log.txt'), 'a', encoding='utf-8') as log_file:
        print(info)
        print(info, file=log_file)


def writeList(info):
    """Append *info* as a single line to website/list.txt."""
    # Local renamed from `list`, which shadowed the builtin.
    with open(os.path.join('website', 'list.txt'), 'a', encoding='utf-8') as list_file:
        print(info, file=list_file)
