import oss2 #oss2
import pathlib
import os
import hashlib
import base64
from itertools import islice
import sys
from datetime import datetime
from progressbar import * # progressbar
import time
sys.path.append("..")
from oss_.secret import *

# Local filesystem layout: Factorio keeps saves/mods under %APPDATA% on Windows.
rootPath=pathlib.PurePath(__file__).parent
appdata=pathlib.PurePath(os.getenv("APPDATA"))  # NOTE(review): None off-Windows — PurePath(None) would raise here
savePath=appdata/"Factorio/saves"
modPath=appdata/"Factorio/mods"
keyRoot="Factorio/"  # key prefix for every object stored in the bucket
ignoreList=['_autosave1.zip','_autosave2.zip','_autosave3.zip','_autosave4.zip']  # rotating autosaves, never synced
widgets = [Percentage(), ' ', Bar('#'),' ', Timer(),  ' ', ETA(), ' ', FileTransferSpeed()]  # progressbar layout
pbar=None  # module-level progress bar, (re)created per transfer by upFile/multipartUpload/getfile

# An Aliyun primary-account AccessKey grants access to every API and is high risk.
# It is strongly recommended to create and use a RAM sub-account for API access and
# day-to-day operations: https://ram.console.aliyun.com
auth = oss2.Auth(ak, sk)
# Endpoint: fill in per your actual region (value below is Chengdu).
bucket = oss2.Bucket(auth, 'http://oss-cn-chengdu.aliyuncs.com', 'zthubo')
def dirList(path):
    """Iterate the objects directly under *path* in the bucket (one level deep)."""
    listing = oss2.ObjectIteratorV2(bucket, prefix=path, delimiter='/')
    return listing
def getMd5(file_name, block_size=64 * 1024):
    """Compute the MD5 of a file, reading it in chunks.

    :param file_name: path of the file to hash
    :param block_size: chunk size used while reading, default 64 KB
    :return: base64-encoded raw MD5 digest (bytes)
    """
    digest = hashlib.md5()
    with open(file_name, 'rb') as fh:
        # iter() with a b'' sentinel stops at EOF.
        for chunk in iter(lambda: fh.read(block_size), b''):
            digest.update(chunk)
    return base64.b64encode(digest.digest())

def calculate_file_crc64(file_name, block_size=64 * 1024, init_crc=0):
    """Compute the CRC64 (ECMA) checksum of a file.
    :param file_name: path of the file to check
    :param block_size: chunk size used while reading, default 64KB
    :param init_crc: initial CRC value to fold the data into
    :return int CRC64 of the file contents (comparable to the
        x-oss-hash-crc64ecma response header)
    """
    with open(file_name, 'rb') as f:
        crc64 = oss2.utils.Crc64(init_crc)
        while True:
            data = f.read(block_size)
            if not data:
                break
            crc64.update(data)
            
    return crc64.crc
def calculate_file_blake(file_name, block_size=64 * 1024):
    """Compute the BLAKE2b digest of a file, reading it in chunks.

    :param file_name: path of the file to hash
    :param block_size: chunk size used while reading, default 64 KB
    :return: base64-encoded raw BLAKE2b digest (bytes)
    """
    hasher = hashlib.blake2b()
    with open(file_name, 'rb') as fh:
        # iter() with a b'' sentinel stops at EOF.
        for chunk in iter(lambda: fh.read(block_size), b''):
            hasher.update(chunk)
    return base64.b64encode(hasher.digest())

def getMd5_(file_name, block_size=64 * 1024):
    """Compute the MD5 of a file and return it as a hex string.

    Same as getMd5() but hex-encoded instead of base64.
    :param file_name: path of the file to hash
    :param block_size: chunk size used while reading, default 64 KB
    :return: hexadecimal MD5 digest (str)
    """
    hasher = hashlib.md5()
    with open(file_name, 'rb') as fh:
        for chunk in iter(lambda: fh.read(block_size), b''):
            hasher.update(chunk)
    return hasher.hexdigest()
def calculate_data_md5(data):
    """Compute the MD5 of in-memory data.

    :param data: bytes to hash
    :return: base64-encoded raw MD5 digest (bytes), suitable for a
        Content-MD5 header
    """
    return base64.b64encode(hashlib.md5(data).digest())
def main():
    """Debug helper: list every object under "Factorio/" and dump its metadata."""
    for obj in dirList("Factorio/"):
        print(obj.key)
        if obj.is_prefix():  # obj is a directory (common prefix)
            print('directory: ' + obj.key)
        else:                # obj is a regular object (file)
            print('file: ' + obj.key)
            simplifiedmeta =bucket.head_object(obj.key)
            print(simplifiedmeta.headers)
        # NOTE(review): hashes the same fixed local file on every iteration —
        # looks like leftover debug code; confirm before relying on main().
        print(getMd5(rootPath/"A4.zip"))

def up():
    """Upload every local save file to OSS, skipping unchanged ones.

    A file is considered unchanged when its local CRC64 equals the remote
    object's x-oss-hash-crc64ecma header; otherwise it is re-uploaded via
    the multipart API.
    """
    for ifile in pathlib.Path(savePath).iterdir():
        if ifile.name not in ignoreList:
            key=keyRoot+ifile.name
            crc_=str(calculate_file_crc64(ifile))  # str: header value compares as text
            if bucket.object_exists(key):
                fiedmeta =bucket.head_object(key)
                kcrc=fiedmeta.headers.get('x-oss-hash-crc64ecma')
                # Debug dump of every response header.
                for v in fiedmeta.headers:
                    print(v,fiedmeta.headers[v])
                if kcrc==crc_:
                    # BLAKE comparison is disabled; CRC64 match alone skips the upload.
                    Kbalke=fiedmeta.headers.get('x-oss-meta-blake')
                    #if Kbalke == calculate_file_blake(ifile).decode() :
                    print("文件{0}相同".format(key))
                    continue
            print("正在上传{}".format(key))
            multipartUpload(ifile,key)
            print(calculate_file_crc64(ifile))
            #upFile(ifile,key)

def upFile(file,key):
    """Upload *file* to OSS as object *key* in a single put, with a progress bar.

    Stores the file's BLAKE2b digest in the x-oss-meta-blake user metadata
    header so sync() can later compare contents without downloading.
    :param file: local path (pathlib); .name is used only for display
    :param key: destination object key
    """
    global pbar
    blake=calculate_file_blake(file)
    print("上传文件 {}".format(file.name))
    with open(file, 'rb') as fileobj:
        pbar = ProgressBar(widgets=widgets, maxval=os.path.getsize(file)).start()  
        bucket.put_object(key, fileobj,progress_callback=percentage,headers={'x-oss-meta-blake': blake.decode()})
        pbar.finish()
    print("\n文件 {} 上传完成".format(file.name))

def percentage(consumed_bytes, total_bytes):
    """oss2 progress callback: advance the module-level progress bar.

    *total_bytes* is unused; the bar's maxval was set by the caller.
    """
    global pbar
    pbar.update(int(consumed_bytes))


def mpercentage(consumed_bytes, total_bytes):
    """Multipart-upload progress callback.

    *consumed_bytes* is per-part, so the module-level *offset* (bytes of
    parts already sent) is added to show overall file progress.
    """
    global pbar, offset
    pbar.update(offset + int(consumed_bytes))


def multipartUpload(file,key):
    """Upload *file* to OSS as object *key* via the multipart API.

    Each part carries a Content-MD5 header so the server verifies it on
    receipt. After completion, x-oss-meta-blake is set on the object so
    sync() can compare content later. The module-level *offset* global is
    shared with mpercentage() for overall progress reporting.
    :param file: local path of the file to upload
    :param key: destination object key
    """
    global pbar
    global offset
    total_size = os.path.getsize(file)
    # determine_part_size picks a part size from the total and a preferred size.
    part_size = oss2.determine_part_size(total_size, preferred_size=2 * 1024 * 1024)
    parts = []
    blake=calculate_file_blake(file)
    upload_id = bucket.init_multipart_upload(key).upload_id
    # progress_callback=percentage
    pbar = ProgressBar(widgets=widgets, maxval=total_size).start()  
    with open(file, 'rb') as fileobj:
        part_number = 1
        offset = 0
        while offset < total_size:
            num_to_upload = min(part_size, total_size - offset)
            datas=fileobj.read(num_to_upload)
            md5Check = calculate_data_md5(datas)
            result = bucket.upload_part(key, upload_id, part_number,
                                        datas,progress_callback=mpercentage,headers={'Content-MD5': md5Check.decode()})
            parts.append(oss2.models.PartInfo(part_number, result.etag))
            offset += num_to_upload
            part_number += 1
    
    
        bucket.complete_multipart_upload(key, upload_id, parts)
        bucket.update_object_meta(key, headers={'x-oss-meta-blake': blake.decode()})
        #bucket.complete_multipart_upload(key, upload_id, parts)

    pbar.finish()
def getfile(file,key,meta=None):
    """Download object *key* from the bucket into local path *file*.

    :param file: local destination path (pathlib); .name used for display
    :param key: OSS object key to fetch
    :param meta: optional pre-fetched head_object result; fetched here
        when None to size the progress bar from Content-Length
    """
    global pbar
    print("下载文件 {}".format(file.name))
    if meta is None:  # fixed: identity check for None, not == (PEP 8)
        meta = bucket.head_object(key)
    pbar = ProgressBar(widgets=widgets, maxval=int(meta.headers.get('Content-Length'))).start()
    bucket.get_object_to_file(key, file, progress_callback=percentage)
    print("\n文件 {} 下载完成".format(file.name))
    
def sync():
    """Two-way sync between the local save directory and the OSS bucket.

    Builds the union of local and remote file names, then for each name:
    - missing locally: prompt to download / ignore / delete remote,
    - missing remotely: prompt to upload / ignore,
    - present on both sides: compare BLAKE2b digests and, when they
      differ, transfer in the direction of the newer timestamp.
    A capital answer (D/I/DEL/U) applies to all remaining files.
    """
    alllist=[]
    c=None  # sticky user choice; capitals mean "apply to everything"
    for ifile in pathlib.Path(savePath).iterdir():
        alllist.append(ifile.name)

    for obj in oss2.ObjectIteratorV2(bucket, prefix = keyRoot, delimiter = '/'):
        if not obj.is_prefix():
            # Strip the key prefix by position. The previous
            # obj.key.lstrip(keyRoot) removed any leading characters from the
            # SET "Factorio/", mangling names like "train.zip" -> "n.zip".
            name = obj.key[len(keyRoot):]
            if name not in alllist:
                alllist.append(name)
    # The "Factorio/" directory placeholder object reduces to "".
    # Guarded so list.remove cannot raise ValueError when it is absent.
    if "" in alllist:
        alllist.remove("")

    for i in alllist:
        if i in ignoreList:
            continue
        filePath=savePath/i
        rPath=keyRoot+i
        if not pathlib.Path(filePath).exists():
            if c in [None,"i",'d','del']:
                c=input("本地无文件: {0} \n下载d 忽略i 删除云端文件del  全部下载D 全部忽略I 全部删除DEL\n".format(filePath.name))
            if c=='d' or c=="D":
                getfile(filePath,rPath)
            if c=="del" or c=="DEL":
                bucket.delete_object(rPath)
                print("删除文件:{0}".format(filePath.name))
            if c=="i" or c=="I":
                print('忽略文件: {0}'.format(filePath.name))
        else:
            Ldate=os.path.getmtime(filePath)
            if bucket.object_exists(rPath):
                fiedmeta = bucket.head_object(rPath)
                Rblake=fiedmeta.headers.get('x-oss-meta-blake')
                Rdate=time.mktime(time.strptime(fiedmeta.headers.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z"))
                Lblake=calculate_file_blake(filePath).decode()
                if(Lblake==Rblake):
                    print("文件{0}一致".format(i))
                    continue
                else:
                    # +28800 s (8 h): Last-Modified is GMT while getmtime is
                    # local time — NOTE(review): assumes UTC+8; confirm intent.
                    if(Ldate >= Rdate+28800):
                        upFile(filePath,rPath)
                    else:
                        getfile(filePath,rPath,fiedmeta)
            else:
                if c in [None,'u','i']:
                    c=input("云端无文件: {0} \n上传u 忽略i  全部上传U 全部忽略I \n".format(filePath.name))
                if c=="U" or c=="u":
                    upFile(filePath,rPath)
                elif c=="i" or c=="I":
                    # Previously an unconditional else printed "ignored" even
                    # right after uploading; only report when actually chosen.
                    print('忽略文件: {0}'.format(filePath.name))


if __name__ =="__main__":
    # Script entry point: run a full two-way sync, then wait for Enter before
    # the console window closes.
    sync()
    c=input("同步完成！")
