#coding=utf-8

# Standard library
import base64
import hashlib
import io
import logging
import os
import pathlib
import sqlite3
import sys
import threading
import time
import uuid
from hashlib import md5 as hash_
from urllib import request

# Third-party
import requests
from progressbar import *

# Baidu BOS SDK: client configuration and security credentials
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.auth.bce_credentials import BceCredentials
from baidubce import exception
from baidubce.services import bos
from baidubce.services.bos import canned_acl
from baidubce.services.bos.bos_client import BosClient
from baidubce.services.bos import storage_class

# Local module providing bos_host, access_key_id, secret_access_key
from secret import *

logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s  %(message)s #%(levelname)s#%(process)d_%(module)s_%(funcName)s')

# Build the client configuration from the credentials imported from secret.py
config = BceClientConfiguration(credentials=BceCredentials(access_key_id, secret_access_key), endpoint = bos_host)
    


# Create the shared BOS client used by all upload/download helpers below
bos_client = BosClient(config)

'''

response = bos_client.list_objects('zthubo')
for object in response.contents:
    print (object.key)
'''
#tgf.start()


# Target BOS bucket for all operations
bucket_name='zthubo'
'''
object_key='Autosave-2.rws'
filePath="d:/temp/temp.tmp"
'''
# root: current working directory (unused below, kept for compatibility)
# save_root: local RimWorld save directory that gets synced
# remote_root: key prefix under which saves are stored in the bucket
root=pathlib.Path.cwd()
save_root =pathlib.Path.home( ) / 'AppData/LocalLow/Ludeon Studios/RimWorld by Ludeon Studios/'
remote_root="gamesave/rimworld/"

# Progress-bar widget layout shared by uploads and downloads
widgets = [Percentage(), ' ', Bar('#'),' ', Timer(),  ' ', ETA(), ' ', FileTransferSpeed()]
# Module-level shared state: active progress bar, sqlite handle/cursor,
# and the user's remembered answer for conflict prompts (see checkLocalFile)
pbar=None
db=None
cur=None
o=None
# File names that are never synced (rolling autosaves); .log files are
# also skipped by the upload/download helpers
ignoreFile=['Autosave-1.rws','Autosave-2.rws','Autosave-3.rws','Autosave-4.rws','Autosave-5.rws']
def cbk(a, b, c):
    """Report hook for urllib.request.urlretrieve.

    a: number of blocks transferred so far.
    b: size of one block in bytes.
    c: total size of the remote file in bytes.

    Updates the module-level progress bar, clamping to the total and
    finishing it once the transferred amount exceeds the file size.
    """
    global pbar
    downloaded = a * b
    if downloaded > c:
        pbar.update(c)
        pbar.finish()
    else:
        pbar.update(downloaded)


def getFile(object_key,filePath,title=None):
    """Download one object from BOS to a local path with a progress bar.

    object_key: key of the object in the bucket.
    filePath: destination path (str or pathlib.Path — callers pass both).
    title: optional display name shown instead of the key.

    Returns True on success, False when the file is in the ignore list or
    the user chooses to skip after a failure; exits the process if the
    user aborts.
    """
    global pbar
    global ignoreFile
    filePath = pathlib.Path(filePath)  # normalize so .name/.suffix work for str callers
    if (filePath.name in ignoreFile or filePath.suffix == ".log") :
        print("忽略文件：",filePath.name)
        return False
    try:
        url = bos_client.generate_pre_signed_url(bucket_name, object_key,)
        print(url.decode('utf-8'))

        # Probe only for the content length; close immediately so the
        # streaming connection is not leaked (the actual download is done
        # by urlretrieve below).
        r = requests.get(url, stream=True)
        try:
            total_size = int(r.headers['content-length'])
        finally:
            r.close()

        if(title==None) :
            print("正在下载:",object_key)
        else:
            print("正在下载:",title)
        pbar = ProgressBar(widgets=widgets, maxval=total_size).start()
        dirpath = filePath.parent
        if not dirpath.is_dir():
            dirpath.mkdir(parents=True)
        request.urlretrieve(url.decode('utf-8'),filePath,cbk)
        return True
    except Exception:
        print("\n")
        o=input("下载失败,r重试,i忽略,a中止")
        if(o=="r" or o == "R"):
            return getFile(object_key,filePath,title)  # propagate the retry's result
        elif(o=="i" or o=="I"):
            return False
        else:
            sys.exit()
def putFile(file_name,object_key):
    """Upload one local save file to BOS via multipart upload.

    file_name: pathlib.Path of the local file (callers pass Path objects).
    object_key: destination key; backslashes are normalized to '/'.

    The file's base64 MD5 is stored in user metadata under "md5" so later
    sync runs can detect changes. On success the upload is recorded in the
    remote log via upFileLog. Returns False when the file is skipped,
    empty, or a part upload fails.
    """
    global ignoreFile
    if (file_name.name in ignoreFile or file_name.suffix == ".log") :
        print("忽略文件：",file_name.name)
        return False
    file_name=str(file_name)
    object_key=str(object_key).replace("\\","/")
    # left_size counts the bytes still to be uploaded
    left_size = os.path.getsize(file_name)
    # Multipart upload rejects empty bodies; fall back to a plain put
    if(left_size==0):
        bos_client.put_object_from_file(bucket_name,object_key,file_name,user_metadata ={"md5":getBase64_md5(file_name)})
        return False
    upload_id = bos_client.initiate_multipart_upload(bucket_name, object_key,content_type=None).upload_id

    # offset: start position of the current part in the file
    offset = 0
    part_number = 1
    part_list = []
    print("正在上传: " , object_key)
    pbar = ProgressBar(widgets=widgets, maxval=left_size).start()
    # 'with' guarantees the handle is closed even on the abort path
    with open(file_name, "rb") as file:
        while left_size > 0:
            # 1 MB per part (note: the original comment claimed 5 MB)
            part_size = 1024*1024
            if left_size < part_size:
                part_size = left_size
            # Read the part locally only to compute its MD5; the SDK
            # re-reads the file itself from (file_name, offset).
            data = file.read(part_size)
            hash = hash_()
            hash.update(data)
            content_md5 = base64.standard_b64encode(hash.digest())
            try:
                response = bos_client.upload_part_from_file(
                    bucket_name, object_key, upload_id, part_number, part_size, file_name, offset,part_md5=content_md5)
            except exception.BceHttpClientError as e:
                logging.error(e)
                logging.error("文件上传失败！")
                bos_client.abort_multipart_upload(bucket_name, object_key, upload_id)
                pbar.finish()
                return False

            left_size -= part_size
            offset += part_size
            part_list.append({
                "partNumber": part_number,
                "eTag": response.metadata.etag
            })

            part_number += 1
            pbar.update(offset)

    bos_client.complete_multipart_upload(bucket_name, object_key, upload_id, part_list,user_metadata ={"md5":getBase64_md5(file_name)})
    pbar.finish()
    upFileLog(file_name)

def getHash(filepath):
    """Return the hex MD5 digest of *filepath*, read in 32 MB chunks.

    Returns the sentinel string "fail!" if the file cannot be opened
    (kept for compatibility with the original behavior) and None if
    *filepath* is not a regular file.
    """
    if not os.path.isfile(filepath):
        logging.error("error")
        return None
    try:
        file = open(filepath, "rb")
    except OSError:
        logging.error("can't open file!")
        return "fail!"
    digest = hash_()
    chunk_size = 1024*1024*32
    with file:  # guarantees close even if a read fails
        while True:
            data = file.read(chunk_size)
            if not data:
                break
            digest.update(data)
    return digest.hexdigest()

def getBase64_md5(file_name):
    """Return the base64-encoded MD5 digest (bytes) of a file's contents.

    This is the value stored in BOS user_metadata["md5"] by putFile and
    compared against by the sync checks. Reads in 8 KB chunks.
    """
    buf_size = 8192
    md5 = hash_()
    # 'with' closes the handle; the original leaked the open file object
    with open(file_name, 'rb') as fp:
        while True:
            buf = fp.read(buf_size)
            if not buf:
                break
            md5.update(buf)
    return base64.standard_b64encode(md5.digest())

def getBase64Md5Str(str):
    """Return the base64-encoded MD5 digest (bytes) of a UTF-8 string."""
    digest = hash_(str.encode("utf-8")).digest()
    return base64.standard_b64encode(digest)

def getShortId():
    """Generate a short 8-character id from a random UUID.

    The 32 hex digits of a uuid4 are consumed in eight 4-digit groups;
    each group's value mod 62 selects one character from the base-62
    alphabet (a-z, 0-9, A-Z).
    """
    alphabet = "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    hex_digits = uuid.uuid4().hex  # same as str(uuid4()) with dashes removed
    chars = []
    for group_start in range(0, 32, 4):
        value = int(hex_digits[group_start:group_start + 4], 16)
        chars.append(alphabet[value % 62])
    return "".join(chars)

def upFileLog(file_name,append_key=None):
    """Append one upload record to the remote HTML log object.

    file_name: local path of the file that was just uploaded.
    append_key: key of the log object; defaults to the current log
        stored under the local config key "remoteLog".

    Each record line is "<id>,<timestamp>,<base64-md5>,<relative path><br>\\n".
    When the current log exceeds 1 MB it is rotated: the entry goes into a
    fresh log object and a link to that object is appended to the old one.
    """
    ID=getShortId()
    if(append_key==None):
        append_key=localConfig("remoteLog")
    try:
        response = bos_client.get_object_meta_data(bucket_name, append_key)
    except :
        # Log object does not exist yet: seed it with the HTML header,
        # then re-read its metadata to get the append offset.
        bos_client.append_object_from_string(bucket_name=bucket_name, 
        key=append_key,
        data="<!DOCTYPE html><meta http-equiv=\"content-type\" content=\"text/html;charset=utf-8\"><style>body{font-family:\"Consolas\"}</style><br>\n",
        content_type = 'text/html')
        response = bos_client.get_object_meta_data(bucket_name, append_key)
    if(int(response.metadata.content_length)>1024*1024):
        # Rotate: write this record into a new randomly-named log, then
        # terminate the old log with a link to the new one and re-sync
        # the local db so it follows the rotation.
        fileNext="sys/{file}.html".format(file=getShortId())
        fileEnd="<a href=\"/{fileNext}\">{fileNext}</a>".format(fileNext=fileNext)
        upFileLog(file_name,fileNext)
        bos_client.append_object_from_string(bucket_name=bucket_name,
                                        key=append_key,
                                        data=fileEnd,
                                        offset=response.metadata.content_length,
                                        content_type = 'text/html')
        getFileLog()
        return None


    
    # Record line; field widths must stay in sync with the fixed-offset
    # parser in getFileLog (id=8 chars, timestamp=19, md5=24).
    data=(
        str(ID)+","+
        str(time.strftime("%Y/%m/%d %H:%M:%S,",time.localtime()))+
        str(getBase64_md5(file_name).decode("utf-8"))+","+
        getRelativePath(file_name)+
        '<br>\n')
    bos_client.append_object_from_string(bucket_name=bucket_name,
                                        key=append_key,
                                        data=data,
                                        offset=response.metadata.content_length,
                                        content_type = 'text/html')

def getRelativePath(path):
    """Return *path* relative to save_root, using forward slashes.

    Used both as the sqlite 'file' column value and (with remote_root
    prepended) as the BOS object key.
    """
    relative = pathlib.Path(path).relative_to(save_root)
    return str(relative).replace("\\", "/")
'''
file=r"C:/Users/zthubo/AppData/LocalLow/Ludeon Studios/RimWorld by Ludeon Studios/Saves/阿特贝伦.rws"
upFileLog(file)
'''

'''
getFile('Autosave-1.rws',"d:/temp/123/123/1/Autosave-1.rws")
getFile(object_key,filePath)

print(getHash(os.path.expanduser('~/AppData/LocalLow/Ludeon Studios/RimWorld by Ludeon Studios/Saves/Autosave-2.rws')))
print(getHash(filePath))
putFile(os.path.expanduser('~/AppData/LocalLow/Ludeon Studios/RimWorld by Ludeon Studios/Saves/Autosave-3.rws'),"Saves/Autosave-3.rws")





        #logging.debug(getHash(filePath))
'''
def getLocalList():
    """Recursively collect every file under save_root as a pathlib.Path."""
    found = []
    for dir_path, _subdirs, names in os.walk(save_root, True):
        found.extend(pathlib.Path(dir_path, name) for name in names)
    return found

def initRemoteLog():
    """Upload every local save file that is missing or stale on BOS.

    NOTE(review): the original body referenced undefined names
    (append_key, file_name, md5) and would have raised NameError on the
    first iteration; this reconstruction follows the conventions of the
    sibling helpers (remote key = remote_root + relative path, md5 stored
    as decoded base64 in object user metadata).
    """
    localList = getLocalList()
    for f in localList:
        object_key = remote_root + getRelativePath(f)
        local_md5 = getBase64_md5(f).decode("utf-8")
        try:
            metadata = bos_client.get_object_meta_data(bucket_name, object_key)
        except Exception:
            # Object absent (or metadata fetch failed): upload it
            putFile(f, object_key)
        else:
            if metadata.metadata.bce_meta_md5 != local_md5:
                putFile(f, object_key)

def initLocaldb():
    """Open (or create) gamesave.db and bootstrap the schema on first run.

    First run is detected by the absence of the 'config' table; the
    schema consists of the filelog table + index, the config key/value
    table, and the 'local' view selecting the newest row per file.
    """
    global db
    global cur
    db = sqlite3.connect("gamesave.db", check_same_thread=False)
    cur = db.cursor()
    cur.execute("select name from sqlite_master where type='table' and name='config' order by name")
    if cur.fetchall():
        return
    ddl_statements = (
        r'CREATE TABLE "filelog" (  "ID" text NOT NULL,  "file" TEXT,  "md5" TEXT,  "time" TEXT,  PRIMARY KEY ("ID"));',
        r'CREATE INDEX "f" ON "filelog" (  "file" ASC,  "time" DESC );',
        r'CREATE TABLE "main"."config" (  "k" TEXT,  "v" TEXT , PRIMARY KEY ("k"));',
        r'CREATE VIEW "main"."local" AS select a.* from filelog a ,(select MAX(time) tim,file FROM filelog GROUP BY file) b where a.file = b.file and a.time = b.tim;',
    )
    for ddl in ddl_statements:
        db.execute(ddl)
    localConfig("remoteLog","sys/rimworld.html")
    localConfig("remoteLogSize","0")
    getFileLog()

def getFileLog():
    """Incrementally pull the remote HTML log and replay it into sqlite.

    Fetches only the bytes past the locally recorded offset
    ("remoteLogSize"), parses each "<br>\\n"-separated record at fixed
    offsets (id=0:8, time=9:28, md5=29:53, file=54:), inserts it into
    filelog, and follows log-rotation links recursively.

    Returns False when there is nothing new to fetch.
    """
    print("正在获取更新记录...")
    object_key = localConfig("remoteLog")
    leftSize = localConfig("remoteLogSize")
    response = bos_client.get_object_meta_data(bucket_name, object_key)
    content_length = int(response.metadata.content_length)
    if int(leftSize) > content_length:
        # Remote log shrank (rotated/replaced): re-read from the start
        leftSize = 0
    elif int(leftSize) == content_length:
        # BUGFIX: compare as ints — the original compared the string from
        # localConfig against the metadata value, so equality never held
        # and an empty range was re-fetched every run.
        return False
    byte_range = [int(leftSize), content_length]
    data = bos_client.get_object_as_string(bucket_name, object_key, range=byte_range)
    for entry in data.decode("utf-8").split("<br>\n"):
        logging.debug(entry)
        if entry[0:8] == "<!DOCTYP":
            pass  # HTML header line of a fresh log
        elif entry[0:8] == "<a href=":
            # Rotation link: switch to the next log object and recurse
            nextFile = entry[10:27]
            logging.debug(nextFile)
            localConfig("remoteLog", nextFile)
            localConfig("remoteLogSize", "0")
            getFileLog()
        elif len(entry) > 0:
            # Fixed-width record written by upFileLog
            entry_id = entry[0:8]
            stamp = entry[9:28]
            digest = entry[29:53]
            fname = entry[54:]
            sql = "INSERT INTO filelog(\"ID\", \"file\", \"md5\", \"time\") VALUES ('{id}', '{file}', '{md5}', '{time}')".format(id=entry_id, file=fname, md5=digest, time=stamp)
            logging.debug(sql)
            executeSql(sql)

    localConfig("remoteLogSize", response.metadata.content_length)




def localConfig(k,v=None):
    """Read or write a value in the local sqlite 'config' table.

    k: configuration key.
    v: when None (default), look up k and return the stored value, or
       None if absent; otherwise upsert k -> v and return v.
    """
    global db  
    global cur
    # Escape single quotes so keys/values cannot break the SQL literal
    # (executeSql takes only a string, so parameter binding is unavailable)
    k_sql = str(k).replace("'", "''")
    sql = "select * from config where k='{k}'".format(k=k_sql)
    logging.debug(sql)
    response = executeSql(sql)
    response = response.fetchone()
    if v is None:
        if response is None:
            return None
        else:
            return response[1]  # row is (k, v)
    else:
        v_sql = str(v).replace("'", "''")
        sql = "REPLACE INTO config(\"k\", \"v\") VALUES ('{k}', '{v}')".format(v=v_sql, k=k_sql)
        logging.debug(sql)
        executeSql(sql)
        return v

def executeSql(sql):
    """Execute one SQL statement on the shared module-level cursor.

    Commits and returns the cursor on success. On a database error, logs
    the failing statement with the traceback, rolls back, and returns
    None (callers must handle a None result).
    """
    global db
    global cur
    try:
        cur.execute(sql)
    except sqlite3.Error:
        # Narrowed from a bare except; logging.exception records the
        # actual error, which logging.error(sql) alone did not.
        logging.exception(sql)
        db.rollback()
        return None
    db.commit()
    return cur

def checkLocalFile(re=1):
    """Reconcile every local save file against the sqlite 'local' view.

    re=1: on an md5 mismatch, prompt the user once (answer is remembered
    in the module global o) to upload, download, or ignore.
    re=2: on a mismatch, upload unconditionally.
    Files with no db record are always uploaded. Ends by re-syncing the
    remote log.
    """
    global o
    global ignoreFile
    for save_file in getLocalList():
        if save_file.name in ignoreFile or save_file.suffix == ".log":
            continue
        rel = getRelativePath(save_file)
        row = executeSql("select * from local where file='{file}'".format(file=rel)).fetchone()
        if row is None:
            putFile(save_file, remote_root + rel)
            continue
        local_md5 = getBase64_md5(save_file)
        print(row[2], rel)
        if row[2] == local_md5.decode("utf-8"):
            print(rel)
        elif re == 1:
            print(rel, "文件异常！")
            if o is None:
                o = input("U上传文件，D下载文件,I忽略文件")
            if o in ('u', 'U'):
                putFile(save_file, remote_root + rel)
            elif o in ('d', 'D'):
                getFile(remote_root + rel, save_file, rel)
        elif re == 2:
            putFile(save_file, remote_root + rel)
    getFileLog()

def checkRemoteFile():
    """Download every logged file that is missing locally or whose
    on-disk md5 differs from the value recorded in the 'local' view."""
    rows = executeSql("select * from local ").fetchall()
    for row in rows:
        print(row[1])

        local_path = save_root / row[1]
        print(local_path)
        if not os.path.isfile(local_path):
            getFile(remote_root + row[1], local_path, row[1])
        elif row[2] != getBase64_md5(local_path).decode("utf-8"):
            getFile(remote_root + row[1], local_path, row[1])
if __name__ == '__main__':
    # One-shot sync: open the local db (replaying the remote log on first
    # run), reconcile local files with prompts, then download anything
    # missing or stale.
    initLocaldb()
    checkLocalFile(1)
    checkRemoteFile()
    i=input("文件已下载")
    if(i=="e" or i=="E"):
        # NOTE(review): 'sys' is never imported at the top of this file,
        # so this line would raise NameError — add 'import sys'.
        sys.exit()
    else:
        # Second pass uploads any remaining mismatches without prompting
        checkLocalFile(2)
    i=input("文件已上传")
     

    
    
