#!/bin/env python
# -*- coding: utf-8 -*-
"""
使用百度云盘同步模型文件
pip install wget bypy beautifulsoup4 lxml huggingface_hub
"""
from pydantic import BaseModel, Field
from typing import List, Optional, Tuple, Dict
import requests
import os
import argparse
import re
from bs4 import BeautifulSoup
from huggingface_hub import hf_hub_url,HfApi
from huggingface_hub.constants import DEFAULT_REVISION, HUGGINGFACE_CO_URL_HOME,REPO_TYPE_MODEL
from huggingface_hub.utils import build_hf_headers,filter_repo_objects
from bypy import ByPy
from tqdm import tqdm
import hashlib

# Shared ByPy client used by the upload/download helpers below.
# NOTE(review): presumably requires prior `bypy` OAuth authorization on this
# machine — confirm before running unattended.
bp = ByPy()


def download(url: str, filepath: str, headers: Optional[Dict[str, str]] = None) -> str:
    """Download a URL to a local file, resuming a partial download if present.

    Args:
        url(`str`): URL of the file to download.
        filepath(`str`): Target directory or file path. When a directory is
            given, the file name is taken from the last URL path segment.
        headers(`dict`, optional): Extra HTTP headers (e.g. authorization);
            a ``Range`` header is added automatically when resuming.

    Returns:
        `str`: Path of the downloaded local file.

    Example:
    ```py
        >>> download("http://www.baidu.com", "/tmp/123")
        /tmp/123
    ```
    """
    resume_byte_pos: Optional[int] = None
    headers = dict(headers) if headers else {}
    # HEAD first to learn the remote size, so we can detect completed or
    # partially-downloaded local files.
    r = requests.head(url, headers=headers, allow_redirects=True)
    remote_file_size = int(r.headers.get('content-length', 0))

    if os.path.isdir(filepath):
        filepath = os.path.join(filepath, url.split('/')[-1])
    if os.path.exists(filepath):
        # Existing file: either already complete, or resume from its tail.
        file_size = os.stat(filepath).st_size
        if file_size >= remote_file_size:
            print(f"{filepath} have already downloaded!")
            return filepath
        resume_byte_pos = file_size
    else:
        parent = os.path.dirname(filepath)
        if parent and not os.path.exists(parent):
            os.makedirs(parent, exist_ok=True)

    if resume_byte_pos:
        headers['Range'] = f'bytes={resume_byte_pos}-'
    # BUG FIX: the original passed only the Range header (or None) here,
    # silently dropping any caller-supplied headers such as the HF auth token.
    r = requests.get(url, stream=True, headers=headers, allow_redirects=True)

    block_size = 1024
    initial_pos = resume_byte_pos if resume_byte_pos else 0
    mode = 'ab' if resume_byte_pos else 'wb'
    with open(filepath, mode) as f:
        pbar = tqdm(total=remote_file_size, unit='B', unit_scale=True,
                    unit_divisor=1024, desc=filepath, initial=initial_pos,
                    ascii=True, miniters=1)
        with pbar:
            for chunk in r.iter_content(32 * block_size):
                f.write(chunk)
                pbar.update(len(chunk))
    return filepath


class ModelRepo(BaseModel):
    """A huggingface repository mirrored to a personal Baidu netdisk.

    Files are downloaded from huggingface (optionally authenticated via
    ``token``), sha256-verified against the checksum scraped from the file's
    blob page, then uploaded through the module-level ``bp`` client.
    """

    # BUG FIX: Optional fields need an explicit default — under pydantic v2
    # they would otherwise be *required*, breaking `ModelRepo(repoid=...)`.
    token: Optional[str] = Field(default=None, description="token for user of huggingface")
    repoid: str = Field(description="repo id for huggingface")
    repo_type: str = Field(default=REPO_TYPE_MODEL, description="repo type for huggingface")
    revision: str = Field(default=DEFAULT_REVISION, description="revision for huggingface")
    local_dir: Optional[str] = Field(default=None, description="default local path to store all repos")

    @property
    def local_storage(self) -> str:
        """Local download directory: ``local_dir`` or ``<cwd>/<flattened repoid>``."""
        return self.local_dir or os.path.join(os.getcwd(), self._remote_name)

    @property
    def _remote_name(self) -> str:
        """Repo id flattened to a single path component ('/' -> '-')."""
        return self.repoid.replace("/", "-")

    def get_all_files(self) -> List[str]:
        """Return the repo-relative name of every file in the repo."""
        repo_info = HfApi().repo_info(
            repo_id=self.repoid,
            repo_type=self.repo_type,
            revision=self.revision,
            token=self.token,
        )
        assert repo_info.sha is not None, "repo info returned from server must have a revision sha."
        return list(filter_repo_objects(items=[f.rfilename for f in repo_info.siblings]))

    def get_remote_sha256(self, file: str) -> Optional[str]:
        """Scrape the SHA256 checksum of ``file`` from its huggingface blob page.

        Returns None when no checksum is exposed on the page.
        """
        detail_url = HUGGINGFACE_CO_URL_HOME + self.repoid + "/blob/" + self.revision + "/" + file
        response = requests.get(detail_url)
        soup = BeautifulSoup(response.text, 'lxml')
        for ul in soup.find_all("ul"):
            if 'SHA256' not in ul.text:
                continue
            # BUG FIX: guard the regex — the original called .group(1) on a
            # possibly-None match and would raise AttributeError.
            match = re.search(r'SHA256:\s*([^\s]+)', ul.text)
            if match:
                return match.group(1)
        return None

    def get_local_sha256(self, file: str) -> str:
        """Compute the sha256 hex digest of a local file, streamed in 64 KiB blocks."""
        BLOCKSIZE = 65536
        sha256_hash = hashlib.sha256()
        with open(file, 'rb') as f:
            while file_buffer := f.read(BLOCKSIZE):
                sha256_hash.update(file_buffer)
        return sha256_hash.hexdigest()

    def download(self, url: str, storage_dir: Optional[str] = None, verbose: bool = False) -> Tuple[bool, str]:
        """Download ``url`` into ``storage_dir`` (default: ``local_storage``).

        Returns:
            (True, local_path) — BUG FIX: the original returned (True, None)
            for an already-present file, violating the declared Tuple[bool, str].
        """
        storage_dir = storage_dir or self.local_storage
        local_filename = os.path.join(storage_dir, url.split('/')[-1])
        if os.path.exists(local_filename):
            return True, local_filename
        headers = build_hf_headers(token=self.token)
        location = download(url, filepath=local_filename, headers=headers)
        if verbose:
            print(f"download complete:{location}")
        return True, location

    def upload(self, filepath: str) -> bool:
        """Upload a local file to ``<flattened repoid>/<basename>`` on Baidu netdisk."""
        remote_path = self._remote_name + "/" + os.path.basename(filepath)
        return bp.upload(filepath, remote_path) == 0

    def sync(self, filename: str, storage_dir: Optional[str] = None, remain: bool = False) -> bool:
        """Download one repo file, verify its sha256, then upload it to Baidu netdisk.

        Args:
            filename: repo-relative file name.
            storage_dir: local staging directory (default: ``local_storage``).
            remain: keep the local copy after a successful upload.

        Raises:
            ValueError: if the downloaded file's sha256 does not match the
                checksum published on huggingface.
        """
        storage_dir = storage_dir or self.local_storage
        local_file_path = os.path.join(storage_dir, filename)
        headers = build_hf_headers(token=self.token)
        url = hf_hub_url(repo_id=self.repoid,
                         filename=filename,
                         repo_type=self.repo_type,
                         revision=self.revision)
        location = download(url, filepath=local_file_path, headers=headers)
        # Verify the checksum when the blob page exposes one.
        remote_sha = self.get_remote_sha256(filename)
        if remote_sha:
            local_sha = self.get_local_sha256(location)
            # Raise instead of assert so the check survives `python -O`.
            if local_sha != remote_sha:
                raise ValueError("download file sha256 is not match remote")
            # BUG FIX: the message used a "(unknown)" placeholder for the file name.
            print(f"{filename} sha256:{local_sha} check success")

        print(f"download complete:{location}")
        success = self.upload(location)
        if success:
            print(f"upload complete:{location}")
            if not remain:
                # Free local disk space once the file is safely mirrored.
                os.remove(location)

        return success

    def sync_all(self, remain: bool = False):
        """Run ``sync`` for every file in the repo."""
        for f in self.get_all_files():
            self.sync(f, remain=remain)

    def download_from_baidu(self, filename: str, storage_dir: Optional[str] = None) -> bool:
        """Download a single mirrored file from Baidu netdisk into ``storage_dir``."""
        remote_path = self._remote_name + "/" + os.path.basename(filename)
        storage_dir = storage_dir or self.local_storage
        local_path = os.path.join(storage_dir, filename)
        return bp.downfile(remote_path, local_path) == 0

    def download_from_baidu_all(self, storage_dir: Optional[str] = None):
        """Sync the whole mirrored repo directory down from Baidu netdisk."""
        storage_dir = storage_dir or self.local_storage
        return bp.syncdown(self._remote_name, storage_dir) == 0


def sync(args, argv):
    """Handle the `sync` sub-command: mirror one file or the whole repo to Baidu netdisk."""
    print(f"begin sync model:{args.repoid}")
    repo = ModelRepo(repoid=args.repoid, token=args.token)
    if args.file is None:
        repo.sync_all(remain=args.remain)
    else:
        repo.sync(filename=args.file, remain=args.remain)
    print("sync complete")


def download_baidu(args, argv):
    """Handle the `download` sub-command: fetch one file or the whole repo from Baidu netdisk."""
    print(f"begin download model:{args.repoid} from baidu netdisk")
    repo = ModelRepo(repoid=args.repoid)
    if args.file is None:
        repo.download_from_baidu_all()
    else:
        repo.download_from_baidu(filename=args.file)
    print("download complete")


def main():
    """CLI entry point: parse arguments and dispatch to the chosen sub-command.

    Returns the sub-command's result, or 1 when no sub-command was given
    (after printing help).
    """
    parser = argparse.ArgumentParser(
        description='同步huggingface的模型到个人百度网盘')
    parser.add_argument('-i', '--repoid', dest='repoid', type=str, help='id of huggingface repo')
    parser.add_argument('-f', '--file', dest='file', type=str, required=False,
                        help='filename of huggingface repo if not given sync all')
    # BUG FIX: --token was defined only on the `sync` subparser, so the
    # documented invocation `sync.py -t TOKEN -i REPOID sync` failed. Defining
    # it on the main parser too (default None) accepts the flag before the
    # sub-command; a value parsed here is preserved because subparsers only
    # apply defaults for attributes missing from the namespace.
    parser.add_argument('-t', '--token', dest='token', type=str, default=None,
                        help='token of huggingface')
    sub_parsers = parser.add_subparsers(help='sub commands')
    parser_sync = sub_parsers.add_parser('sync', help="sync models from huggingface to baidu netdisk")
    parser_sync.set_defaults(func=sync)
    parser_sync.add_argument('-t', '--token', dest='token', type=str, help='token of huggingface')
    parser_sync.add_argument('--remain', action="store_true", default=False,
                             help='if remain the download file in local dir')

    parser_download = sub_parsers.add_parser('download', help="download models from baidudisk")
    parser_download.set_defaults(func=download_baidu)

    # parse_known_args tolerates extra arguments and forwards them to the handler.
    args, argv = parser.parse_known_args()
    if hasattr(args, 'func'):
        return args.func(args, argv)
    else:
        parser.print_help()
        return 1


if __name__ == '__main__':
    # The bare string below is the original (Chinese) usage note, kept as-is:
    #   python sync.py -t TOKEN -i REPOID sync      -- mirror a huggingface repo to Baidu netdisk
    #   python sync.py -i REPOID download           -- fetch a mirrored repo from Baidu netdisk to local disk
    # For direct huggingface downloads see
    # https://huggingface.co/docs/hub/models-downloading
    """
    Example:
    python sync.py -t TOKEN -i REPOID sync
    同步huggingface模型到个人百度网盘中。

    python sync.py -i REPOID download
    下载个人百度网盘中huggingface模型到本地文件系统中。

    如果需要直接下载huggingface模型到本地文件系统中，请参考https://huggingface.co/docs/hub/models-downloading 
    """
    main()
