import paramiko
import sys
from io import BytesIO
import sqlite3
import paramiko.client
from tqdm import tqdm
import keyboard
from threading import Thread

# pip install paramiko
from hashlib import blake2b as hash_
# Running BLAKE2b digest fed by the SFTP download callback below.
# NOTE(review): the name shadows the builtin `hash`.
hash = hash_()
# Abort signal: set by the keyboard-listener thread, read by progress_bar().
flag=False

# private_key = paramiko.RSAKey.from_private_key_file('sshprv')
# Connect to host "hb" as user "hb" using a local Ed25519 private key.
ssh=paramiko.SSHClient()
private_key = paramiko.Ed25519Key.from_private_key_file("private.txt")
# Auto-accept unknown host keys (convenient, but skips host verification).
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy)
ssh.connect(hostname="hb",username="hb",pkey=private_key)
sftp=ssh.open_sftp()
# In-memory buffer; only referenced by the commented-out sqlite snippet below.
sqliteio=BytesIO()


def keyWait():
    """Block until the user presses 'c', then raise the global abort flag.

    Runs in a daemon-style background thread; progress_bar() polls
    ``flag`` and closes the SFTP channel once it is set.
    """
    global flag
    keyboard.wait("c")
    # Carriage return plus padding overwrites the tqdm progress line.
    padding = " " * 80
    print("\r exit thread", padding)
    flag = True
# Start the background listener so pressing 'c' can abort the transfer.
t=Thread(target=keyWait)
t.start()

def progress_bar(transferred, remote_file_size, suffix=''):
    """Paramiko getfo() callback: hash and count each received chunk.

    Relies on module globals created elsewhere in the file: `f` (the
    BytesIO getfo() writes into), `hash` (running BLAKE2b digest) and
    `downloadTqdm` (progress bar). The `transferred`, `remote_file_size`
    and `suffix` parameters are ignored. When `flag` has been set by the
    key-listener thread, the SFTP channel is closed so the in-flight
    getfo() aborts with an exception handled by the caller's try/except.
    """
    global flag
    global sftp
    # Fold the bytes received since the previous callback into the digest...
    hash.update(f.getvalue())
    downloadTqdm.update(len(f.getvalue()))
    # ...then empty the buffer so the next callback sees only new bytes.
    f.seek(0)
    f.truncate()
    if(flag):
        sftp.close()


def db_progress(status, remaining, total):
    """sqlite3 backup() progress callback: report how many pages are done."""
    copied = total - remaining
    print(f'Copied {copied} of {total} pages...')

def getFileSize(sftp, path):
    """Return the size in bytes of the remote file at *path* via sftp.stat()."""
    return sftp.stat(path).st_size
def downloadFileChunk(sftp,path,start_offset,end_offset):
    """Download bytes [start_offset, end_offset) of a remote file.

    Args:
        sftp: an open paramiko SFTPClient (or compatible object).
        path: remote file path.
        start_offset: first byte to read (inclusive).
        end_offset: end of the range (exclusive).

    Returns:
        A BytesIO holding the downloaded chunk.

    Fix: the original never closed the remote file handle, leaking one
    SFTP handle per chunk; the `with` block guarantees it is closed even
    if the read raises.
    """
    with sftp.open(path, 'rb') as remoteFile:
        remoteFile.seek(start_offset)
        return BytesIO(remoteFile.read(end_offset - start_offset))
# NOTE(review): dead reference code kept as a module-level string literal;
# it is evaluated and discarded at import time. Consider deleting it.
'''
ssh = paramiko.SSHClient()
ssh._transport = transport
stdin, stdout, stderr = ssh.exec_command('df -h')
# 获取命令结果
res,err = stdout.read(),stderr.read()
# 三元运算
result = res if res else err
print(result.decode())
# 关闭连接
transport.close()

print("size:",getFileSize(sftp,'/home/hb/share/t/blake2.log.db'))
# 将myblog.sql 下载到本地 myblog.sql文件中
sftp.getfo('/home/hb/share/t/blake2.log.db', sqliteio,callback=progress_bar)
with open('test.db', 'wb') as file:
    file.write(sqliteio.getvalue())  
    hash.update(sqliteio.getvalue())
    print("1",hash.hexdigest().upper())

fdb = sqlite3.connect('test.db')
db =  db = sqlite3.connect(":memory:", check_same_thread=False)
fdb.backup(db)
fdb.close()
# db.eserialize('main', byte_stream.read())
cur = db.cursor()
result = cur.execute(
    "select * from sqlite_master where type = 'table' and name = 'list'")
row = result.fetchall()
print(row)
hash.update(sqliteio.getvalue())
print(hash.hexdigest().upper())
# myblog.sql 上传至服务器 /root/myblog.sql
# sftp.put('myblog.sql', '/root/myblog.sql')
'''
# Remote paths: a SQLite DB (unused in the live code) and the file hashed below.
dbpath="/home/hb/share/D/bakup/blake2.log.db"
path_='/home/hb/share/Downloads/WeChat_20221222174407.mp4'
# Buffer getfo() streams into; drained and hashed by progress_bar() above.
f=BytesIO()
fileSize=getFileSize(sftp,path_)
downloadTqdm=tqdm(total=fileSize,ncols=80,ascii=True,unit='B', unit_scale=True, unit_divisor=1024)
try:
    # progress_bar hashes each chunk; it closes sftp once `flag` is set,
    # which makes getfo() raise and lands us in the except branch.
    sftp.getfo(path_, f,callback=progress_bar)
except :
    # NOTE(review): bare except also swallows unrelated errors (including
    # KeyboardInterrupt); catching paramiko.SSHException/OSError is safer.
    print("sftp closed")
    sftp=ssh.open_sftp()

# Digest of whatever was downloaded before completion/abort.
print("sftp:",hash.hexdigest().upper())

def sftp_hash(path,blockSize):
    """Hash a remote file with BLAKE2b by downloading it in fixed-size chunks.

    Uses the module-level `sftp` client plus the getFileSize() and
    downloadFileChunk() helpers; shows chunk progress with tqdm.

    Args:
        path: remote file path.
        blockSize: chunk size in bytes (must be > 0).

    Returns:
        The uppercase BLAKE2b hex digest (also printed, as before).

    Fixes vs. original: the local digest no longer shadows the builtin
    (and module global) `hash`, and the digest is returned so callers
    can compare it instead of scraping stdout.
    """
    digest = hash_()
    fileSize = getFileSize(sftp=sftp, path=path)
    # Ceiling division: last block may be shorter than blockSize.
    numBlocks = (fileSize + blockSize - 1) // blockSize
    for i in tqdm(range(numBlocks)):
        startOffset = i * blockSize
        endOffset = min((i + 1) * blockSize, fileSize)
        chunk = downloadFileChunk(sftp, path, startOffset, endOffset)
        digest.update(chunk.getvalue())
    result = digest.hexdigest().upper()
    print(result)
    return result

# Re-hash the same file chunk-by-chunk (1 MiB blocks) for comparison
# against the callback-based digest, then disconnect.
sftp_hash(path_,1024*1024)
ssh.close()

