import sys
import io
import os
import json
import re
import logging
import time
from datetime import datetime
import pytz
import pyperclip
from webdav3.client import Client
import hashlib
import frontmatter
from send2trash import send2trash

# Shared logging / timezone / filename configuration used by every sync routine.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y/%m/%d %H:%M:%S')
shanghai_zone = pytz.timezone('Asia/Shanghai')  # local timezone used for all date conversions
conf_file = "inboxConf.json"  # per-directory sync configuration file
temp_data_file = "tempData.json"  # scratch file used when uploading to WebDAV
inBox_data_file = "inBoxData.json"  # cached copy of the remote note database
assets_title = "InboxAssets"  # heading that separates note text from the assets section in markdown

def is_card_id(s):
    """Return True when *s* looks like an auto-generated note name, e.g. 'Card123'."""
    # Case-insensitive: 'card7' counts as well as 'Card7'
    return re.match(r'^Card\d+$', s, re.IGNORECASE) is not None

def contains_any_tag(input_string, tag_string):
  """Return True if *input_string* contains any of the comma-separated tags.

  Args:
      input_string (str): text to search (typically note content)
      tag_string (str): comma-separated tag list, e.g. "#skip,#draft"

  Returns:
      bool: True when at least one non-empty tag occurs in *input_string*
  """
  for tag in tag_string.split(','):
    tag = tag.strip()
    # Skip empty entries (e.g. from a trailing comma): an empty string is a
    # substring of everything and would wrongly match every note.
    if tag and tag in input_string:
      return True
  return False

def safeFilename(filename, replace='-'):
    """
    Replace characters that are illegal in filenames with *replace*.

    Args:
        filename (str): original filename
        replace (str, optional): replacement character. Defaults to '-'.

    Returns:
        str: sanitized filename safe on Windows/macOS/Linux
    """
    # Raw string avoids the invalid '\:' escape of the original pattern
    # (a SyntaxWarning on modern Python); matches / \ : * ? " < > |
    return re.sub(r'[/\\:*?"<>|]', replace, filename)

def utc_to_local_date(utc_time_str, source_format, target_format):
    """
    Convert a UTC time string to a Shanghai-local time string.

    Args:
        utc_time_str (str): UTC time string
        source_format (str): strptime format of the input
        target_format (str): strftime format of the output

    Returns:
        str: the same instant rendered in the Asia/Shanghai timezone
    """
    # Parse as naive, attach the UTC zone, then shift to Shanghai
    parsed = datetime.strptime(utc_time_str, source_format)
    aware_utc = pytz.timezone('UTC').localize(parsed)
    return aware_utc.astimezone(shanghai_zone).strftime(target_format)

def get_now_utc_date(target_format):
    """
    Format the current moment as a UTC time string.

    Args:
        target_format (str): strftime format for the output

    Returns:
        str: current time expressed in UTC
    """
    # Take "now" in the Shanghai zone, then express it in UTC
    local_now = datetime.now(shanghai_zone)
    return local_now.astimezone(pytz.utc).strftime(target_format)

def local_to_utc_date(local_time_str, source_format, target_format):
    """
    Convert a Shanghai-local time string to a UTC time string.

    Args:
        local_time_str (str): local (Asia/Shanghai) time string
        source_format (str): strptime format of the input
        target_format (str): strftime format of the output

    Returns:
        str: the same instant rendered in UTC
    """
    # Parse as naive, attach the Shanghai zone, then shift to UTC
    naive = datetime.strptime(local_time_str, source_format)
    aware_local = shanghai_zone.localize(naive)
    return aware_local.astimezone(pytz.utc).strftime(target_format)

def compute_md5(input_string):
    """
    Compute the MD5 hex digest of a string.

    Args:
        input_string (str): input text (UTF-8 encoded before hashing)

    Returns:
        str: 32-character lowercase hexadecimal MD5 digest
    """
    return hashlib.md5(input_string.encode()).hexdigest()

def read_file(path):
    """
    Read a text file as UTF-8.

    Args:
        path (str): file path

    Returns:
        str: file contents, or "" when the file is missing or unreadable
    """
    content = ""  # avoid shadowing the builtin `str` like the original did
    try:
        with open(path, 'r', encoding='utf-8') as fh:
            content = fh.read()
    except (OSError, UnicodeDecodeError):
        # Missing/unreadable files are treated as "no data" by all callers;
        # narrowed from a bare except so programming errors still surface.
        pass
    return content

def map_put(map, key):
    """
    Return map[key], first inserting an empty dict there if the key is absent.

    Args:
        map (dict): target dictionary
        key: lookup key

    Returns:
        dict: the existing value, or a freshly inserted empty dict
    """
    # setdefault does the lookup-or-insert in a single step
    return map.setdefault(key, {})

def init_webdav_client(conf):
    """
    Build a WebDAV client from the configuration.

    Args:
        conf (dict): configuration with settings.webdav_server / user_name / password

    Returns:
        Client: a configured webdav3 client instance
    """
    settings = conf.get("settings", {})
    # disable_check skips the initial server capability probe
    options = {
        "webdav_hostname": settings.get("webdav_server", ""),
        "webdav_login": settings.get("user_name", ""),
        "webdav_password": settings.get("password", ""),
        "disable_check": True
    }
    return Client(options)

def get_remote_update_time(conf, webdav_client):
    """
    Fetch the remote note file's last-modified time.

    Args:
        conf (dict): configuration containing settings.remote_path
        webdav_client: WebDAV client instance

    Returns:
        int: last-modified time as a millisecond timestamp (0 when unavailable)
    """
    remote_path = conf.get("settings", {}).get("remote_path", "")
    info = webdav_client.info(remote_path)
    modified_str = info.get("modified", "")
    if not modified_str:
        return 0
    # WebDAV reports an RFC-1123 style date, e.g. "Mon, 01 Jan 2024 12:00:00 GMT"
    parsed = datetime.strptime(modified_str, "%a, %d %b %Y %H:%M:%S GMT")
    return int(parsed.timestamp() * 1000)

def write_conf(conf, conf_dir):
    """
    Persist the configuration dict to the JSON config file.

    Args:
        conf (dict): configuration data
        conf_dir (str): directory holding the config file
    """
    conf_path = os.path.join(conf_dir, conf_file)
    conf_str_new = json.dumps(conf, indent=4, ensure_ascii=False)
    # Context manager guarantees the handle is closed and data flushed even
    # on error — the original `open(...).write(...)` leaked the handle.
    with open(conf_path, 'w', encoding='utf-8') as f:
        f.write(conf_str_new)
    return

def get_remote_data(conf_dir):
    """
    Read the cached remote note data from the local JSON file.

    Args:
        conf_dir (str): configuration directory path

    Returns:
        list: dicts with the full information of each non-removed note

    Raises:
        ValueError: when the local data file does not exist
    """
    # Read the cached JSON note data
    path = os.path.join(conf_dir, inBox_data_file)
    post = read_file(path)
    if not post:
        raise ValueError("本地数据不存在，请重新下载")
    data = json.loads(post)
    remote_data = []
    blockList = data.get("mBlockList")
    for block in blockList:
        # Skip notes flagged as deleted on the remote side
        isRemoved = block.get("isRemoved", False)
        if isRemoved:
            continue
        block_data = {
            "blockId": str(block.get("blockId", "")),  # note ID
            "title": block.get("title", ""),  # title
            "content": block.get("content", ""),  # body text
            "parent_id": str(block.get("parentId", 0)) if block.get("parentId", 0) else "",  # parent note ID
            "images": "",  # image URLs (newline separated)
            "attachs": "",  # attachment URLs (newline separated)
            "extras": "",  # audio recording URLs (newline separated)
            "published": "",  # creation time
            "updated": ""  # last update time
        }
        
        # Images
        image_content = ""
        # Attachments
        attach_content = ""
        if block.get("imageJson", ""):
            # imageJson holds both images and attachments, tagged by resourceType;
            # prefer the remote URL, fall back to the local src path
            images = json.loads(block.get("imageJson"))
            for image in images:
                if image.get("resourceType", "") == "image":
                    image_url = image.get("remoteUrl") if image.get("remoteUrl", "") else image.get("src", "")
                    image_content = image_content + image_url + "\n"
                elif image.get("resourceType", "") == "attach":
                    attach_url = image.get("remoteUrl") if image.get("remoteUrl", "") else image.get("src", "")
                    attach_content = attach_content + attach_url + "\n"
        block_data["images"] = image_content.strip()
        block_data["attachs"] = attach_content.strip()
        
        # Audio recordings
        extra_content = ""
        if block.get("extra", ""):
            extras = json.loads(block.get("extra"))
            for extra_key, extra_value in extras.items():
                # Each value is expected to be a list of recording entries;
                # anything with a different shape is silently ignored
                try:
                    for extra_item in extra_value:
                        extra_url = extra_item.get("pathRemote") if extra_item.get("pathRemote", "") else extra_item.get("pathLocal", "")
                        extra_content = extra_content + extra_url + "\n"
                except:
                    pass
        block_data["extras"] = extra_content.strip()
        
        # Convert the UTC timestamps to the local (Shanghai) display format
        block_data["published"] = utc_to_local_date(block.get("published", ""), "%b %d, %Y %H:%M:%S", "%Y/%m/%d %H:%M:%S")
        block_data["updated"] = utc_to_local_date(block.get("updated", ""), "%b %d, %Y %H:%M:%S", "%Y/%m/%d %H:%M:%S")
        
        remote_data.append(block_data)
    return remote_data

def write_to_markdown(local_dir, remote_data, conf):
    """
    Write the remote note data into local markdown files.

    Markdown files whose note no longer exists remotely are moved to the
    trash. Per-note md5 hashes stored in *conf* are used to skip unchanged
    notes.

    Args:
        local_dir (str): local notes directory
        remote_data (list): remote note dicts (see get_remote_data)
        conf (dict): configuration; updated in place with new md5 values
    """
    # Index the existing markdown files; anything still in this map after
    # the loop has no remote counterpart and gets trashed at the end
    file_map = {}
    files = os.listdir(local_dir)
    for file in files:
        file_path = os.path.join(local_dir, file)
        if os.path.isfile(file_path) and file.endswith(".md"):
            file_map[file_path] = file

    # Tags that mark a note as excluded from syncing
    skip_tags = conf.get("settings", {}).get("skip_tags", "").strip()

    # Process each remote note
    for block in remote_data:
        md_id = block["blockId"].strip()
        title = block["title"].strip()
        content = block["content"].strip()
        parent_id = block["parent_id"].strip()
        images = block["images"].strip()
        attachs = block["attachs"].strip()
        extras = block["extras"].strip()
        published_time = block["published"].strip()
        updated_time = block["updated"].strip()

        # Untitled notes get an auto-generated "Card<id>" filename
        md_name = "Card" + md_id
        if title:
            md_name = safeFilename(title)
        if not md_name:
            continue

        md_path = os.path.join(local_dir, md_name + ".md")
        # Still exists remotely — remove it from the pending-delete map
        if md_path in file_map:
            del file_map[md_path]
        
        # Skip notes carrying an excluded tag
        if skip_tags and contains_any_tag(content, skip_tags):
          continue
        
        # Hash name + content + resources to detect changes
        curr_md5 = compute_md5(md_name + "||" + content + "||" + images + "||" + attachs + "||" + extras)
        last_md5 = conf.get("blocks", {}).get(md_id, {}).get("md5", "")
        
        # Unchanged since the last sync — nothing to write
        if curr_md5 and curr_md5 == last_md5:
            continue

        # Build the markdown body
        md_content = content

        # Append the assets section (images / attachments / recordings / parent link)
        assets_content = []
        if images or attachs or extras or parent_id:
            assets_content.append(f"# {assets_title}\n")
            
            # Images
            if images:
                for image_url in images.split('\n'):
                    if image_url.strip():
                        assets_content.append(f"![image.png]({image_url.strip()})")

            # Attachments
            if attachs:
                assets_content.append("\n> 附件:")
                for attach_url in attachs.split('\n'):
                    if attach_url.strip():
                        assets_content.append(f"> [{os.path.basename(attach_url.strip())}]({attach_url.strip()})")  

            # Audio recordings
            if extras:
                assets_content.append("\n> 录音:")
                for extra_url in extras.split('\n'):
                    if extra_url.strip():
                        assets_content.append(f"> [{os.path.basename(extra_url.strip())}]({extra_url.strip()})")

            # Link to the parent note
            if parent_id:
                assets_content.append(f"\n> 父笔记:: [[Card{parent_id}]]")

            # Append the assets section after the main content
            md_content = md_content.rstrip() + "\n\n" + "\n".join(assets_content)

        # Reuse the existing file's frontmatter when the file already exists
        post = None
        try:
            with open(md_path, 'r', encoding='utf-8') as f:
                post = frontmatter.load(f)
        except:
            pass
        if post: 
            post.content = md_content.strip()
            post["inbox_id"] = md_id
            post["inbox_published"] = published_time
            post["inbox_updated"] = updated_time
        else:
            # Create a fresh Post object
            post = frontmatter.Post(
                md_content.strip(),
                inbox_id = md_id,
                inbox_published = published_time,
                inbox_updated = updated_time
            )

        # Write out the markdown file
        with open(md_path, 'w', encoding='utf-8') as f:
            f.write(frontmatter.dumps(post))

        # Record the new md5
        map_put(map_put(conf, "blocks"), md_id)["md5"] = curr_md5

    # Trash files whose notes no longer exist remotely
    for del_path in file_map.keys():
        send2trash(del_path)

def get_local_data(local_dir, conf):
    """
    Read note data back from the local markdown files.

    Args:
        local_dir (str): local notes directory
        conf (dict): configuration data

    Returns:
        tuple: (create_data, update_data) — a list of notes without a remote
            ID (to be created), and a dict of blockId -> note data for
            already-synced notes
    """
    create_data = []
    update_data = {}
    
    # Tags that mark a note as excluded from syncing
    skip_tags = conf.get("settings", {}).get("skip_tags", "").strip()
    # Collect every markdown file
    md_files = [f for f in os.listdir(local_dir) if f.endswith('.md')]
    for md_file in md_files:
        md_path = os.path.join(local_dir, md_file)
        try:
            with open(md_path, 'r', encoding='utf-8') as f:
                post = frontmatter.load(f)
        except:
            continue
            
        # Extract frontmatter metadata
        md_id = str(post.get("inbox_id", ""))
        title = md_file[:-3]
        # Auto-generated "Card<id>" filenames carry no real title
        if is_card_id(title): 
            title = ''
        content = post.content.strip()
        published_time = post.get("inbox_published", "")
        updated_time = post.get("inbox_updated", "")

        # No timestamps in the frontmatter — fall back to filesystem times
        if not published_time or not updated_time:
            try:
                # File creation / modification times
                # NOTE(review): st_ctime is inode-change time on Unix, not
                # creation time — accepted here as an approximation
                file_stat = os.stat(md_path)
                # Convert the timestamps straight to UTC time strings
                if not published_time:
                    published_time = datetime.fromtimestamp(file_stat.st_ctime, shanghai_zone).astimezone(pytz.utc).strftime("%b %d, %Y %H:%M:%S")
                if not updated_time:
                    updated_time = datetime.fromtimestamp(file_stat.st_mtime, shanghai_zone).astimezone(pytz.utc).strftime("%b %d, %Y %H:%M:%S")
            except:
                pass
        else:
            try:
                # Convert the local-time strings to the remote UTC format
                published_time = local_to_utc_date(published_time, "%Y/%m/%d %H:%M:%S", "%b %d, %Y %H:%M:%S")
                updated_time = local_to_utc_date(updated_time, "%Y/%m/%d %H:%M:%S", "%b %d, %Y %H:%M:%S")
            except:
                pass

        # Split the note text from the generated assets section
        content_parts = content.split(f'\n# {assets_title}')
        note_content = content_parts[0].strip()
            
        # Assemble the note data
        block_data = {
            "blockId": md_id,
            "title": title,
            "content": note_content,  # note text only, assets section excluded
            "published": published_time,
            "updated": updated_time,
            "path": md_path,
        }

        # Mark notes carrying an excluded tag so they are not pushed
        if skip_tags and contains_any_tag(note_content, skip_tags):
          block_data["isSkip"] = True
        
        if md_id:
            update_data[md_id] = block_data
        else:
            create_data.append(block_data)
            
    return create_data, update_data

def write_to_remote(create_data, update_data, conf_dir, conf, do_delete=True):
    """
    Merge local note data into the remote JSON structure.

    Args:
        create_data (list): notes to create remotely (no blockId yet); each
            dict is updated in place with the assigned ID and timestamps
        update_data (dict): blockId -> note data for existing notes
        conf_dir (str): configuration directory path
        conf (dict): configuration; cached md5 records are cleared for
            deleted/changed notes
        do_delete (bool, optional): mark remote notes missing locally as
            removed. Defaults to True.

    Returns:
        dict: the full JSON data structure ready for upload

    Raises:
        ValueError: when the local data file does not exist
    """
    # Read the cached JSON note data
    path = os.path.join(conf_dir, inBox_data_file)
    post = read_file(path)
    if not post:
        raise ValueError("本地数据不存在，请重新下载")
    # Tags that mark a note as excluded from syncing
    skip_tags = conf.get("settings", {}).get("skip_tags", "").strip()
    max_id = 0
    now_str = get_now_utc_date("%b %d, %Y %H:%M:%S")
    data = json.loads(post)
    data["mUpdateTime"] = int(datetime.now().timestamp() * 1000)
    blockList = data.get("mBlockList")
    for block in blockList:
        block_id = block.get("blockId", 0)
        remote_title = block.get("title", "").strip()
        remote_content = block.get("content", "").strip()
        # Track the highest existing ID so new notes get unique IDs
        max_id = max(max_id, int(block_id))
        # Leave remotely tagged-as-skipped notes untouched
        if skip_tags and contains_any_tag(remote_content, skip_tags):
            continue
        update_block = update_data.get(str(block_id))
        if not update_block:
            if do_delete:
                block["isRemoved"] = True  # deletion: flag, don't physically drop
                block["updated"] = now_str
                # Invalidate the cached md5
                map_put(map_put(conf, "blocks"), str(block_id))["md5"] = ""
        else:
            # Locally tagged-as-skipped notes are not pushed
            if update_block.get("isSkip", False):
                continue
            # Update: push title/content changes
            update_title = str(update_block["title"]).strip()
            update_content = str(update_block["content"]).strip()
            is_update = False
            if update_title != remote_title:
                block["title"] = update_title
                is_update = True
            if update_content != remote_content:
                block["content"] = update_content
                is_update = True
            if is_update:
                block["updated"] = now_str
                # Invalidate the cached md5
                map_put(map_put(conf, "blocks"), str(block_id))["md5"] = ""
    # Creation: append brand-new notes
    for new_block in create_data:
        max_id = max_id + 1
        
        # Locally tagged-as-skipped notes are not pushed
        if new_block.get("isSkip", False):
          continue
        new_block_data = {}
        new_block_data["blockId"] = max_id
        new_block_data["title"] = str(new_block["title"]).strip()
        new_block_data["content"] = str(new_block["content"]).strip()
        
        # Creation / update timestamps (fall back to "now" for published)
        published_time = new_block["published"]
        new_block_data["published"] = published_time if published_time else now_str
        new_block_data["updated"] = now_str

        # Reflect the assigned ID and local-format times back into the
        # caller's dict so the markdown file can be stamped afterwards
        new_block["blockId"] = str(max_id)
        new_block["title"] = new_block_data["title"]
        new_block["content"] = new_block_data["content"]
        new_block["published"] = utc_to_local_date(new_block_data.get("published", ""), "%b %d, %Y %H:%M:%S", "%Y/%m/%d %H:%M:%S")
        new_block["updated"] = utc_to_local_date(new_block_data.get("updated", ""), "%b %d, %Y %H:%M:%S", "%Y/%m/%d %H:%M:%S")
        
        blockList.insert(0, new_block_data)
    return data

def update_create_markdown(local_dir, create_data):
    """
    Write newly created notes back to their local markdown files, stamping
    each with the ID and timestamps assigned by the remote side.

    Args:
        local_dir (str): local notes directory
        create_data (list): note dicts that were just created remotely
    """
    for block in create_data:
        md_id = block["blockId"]
        if not md_id:
            # Never got a remote ID (e.g. the note was skipped) — nothing to stamp
            continue

        md_path = block["path"]
        if not md_path:
            # Quick-add notes have no file yet; derive a name from the ID
            md_path = os.path.join(local_dir, "Card" + md_id + ".md")

        body = block["content"].strip()

        # Reuse the existing file's frontmatter when the file already exists
        post = None
        try:
            with open(md_path, 'r', encoding='utf-8') as fh:
                post = frontmatter.load(fh)
        except:
            pass

        if post:
            post.content = body
        else:
            post = frontmatter.Post(body)
        # Record the remote identity and timestamps in the frontmatter
        post["inbox_id"] = md_id
        post["inbox_published"] = block["published"]
        post["inbox_updated"] = block["updated"]

        with open(md_path, 'w', encoding='utf-8') as fh:
            fh.write(frontmatter.dumps(post))

def upload_data_to_webdav(upload_data, conf, local_data_path, webdav_client, do_remove=True):
    """
    Serialize *upload_data* to a local file and upload it to the WebDAV server.

    Args:
        upload_data (dict): data to upload
        conf (dict): configuration containing settings.remote_path
        local_data_path (str): local temp file path used for the upload
        webdav_client: WebDAV client instance
        do_remove (bool, optional): delete the temp file afterwards. Defaults to True.
    """
    upload_data_str = json.dumps(upload_data, ensure_ascii=False)
    remote_path = conf.get("settings", {}).get("remote_path", "")
    # Context manager guarantees the temp file is flushed and closed before
    # the upload reads it — the original `open(...).write(...)` leaked the handle.
    with open(local_data_path, 'w', encoding='utf-8') as f:
        f.write(upload_data_str)
    webdav_client.upload_sync(remote_path=remote_path, local_path=local_data_path)
    if do_remove:
        os.remove(local_data_path)
    return

def download_from_webdav(conf, conf_dir, local_dir, webdav_client, do_markdown=True):
    """
    Download the note data from the WebDAV server.

    Args:
        conf (dict): configuration data
        conf_dir (str): configuration directory path
        local_dir (str): local notes directory
        webdav_client: WebDAV client instance
        do_markdown (bool, optional): also render the notes to markdown. Defaults to True.
    """
    remote_path = conf.get("settings", {}).get("remote_path", "")
    data_path = os.path.join(conf_dir, inBox_data_file)
    download_update = conf.get("settings", {}).get("download_update", 0)
    curr_update = get_remote_update_time(conf, webdav_client)
    # 0 = local cache is current, -1 = remote is newer
    update_stat = 0 if curr_update <= download_update else -1
    # Fetch only when the cache is missing or stale
    if not os.path.exists(data_path) or update_stat < 0:
        webdav_client.download_sync(remote_path=remote_path, local_path=data_path)
    if do_markdown:
        remote_data = get_remote_data(conf_dir)
        write_to_markdown(local_dir, remote_data, conf)
    # Reset the upload marker; record the new download time when refreshed
    map_put(conf, "settings")["upload_update"] = 0
    if update_stat < 0:
        map_put(conf, "settings")["download_update"] = curr_update
    write_conf(conf, conf_dir)
    return

def upload_to_webdav(conf, conf_dir, local_dir, webdav_client):
    """
    Upload the local note data to the WebDAV server.

    Args:
        conf (dict): configuration data
        conf_dir (str): configuration directory path
        local_dir (str): local notes directory
        webdav_client: WebDAV client instance

    Raises:
        ValueError: when the remote has newer data or the local cache is missing
    """
    upload_update = conf.get("settings", {}).get("upload_update", 0)
    download_update = conf.get("settings", {}).get("download_update", 0)
    curr_update = get_remote_update_time(conf, webdav_client)
    # Refuse to upload when the remote changed after both our last upload
    # and our last download — uploading would overwrite unseen remote edits
    upload_update_stat = 0 if curr_update <= upload_update and upload_update > 0 else -1
    download_update_stat = 0 if curr_update <= download_update else -1
    if upload_update_stat < 0 and download_update_stat < 0:
        raise ValueError("远程有新数据，请先下载更新")
    data_path = os.path.join(conf_dir, inBox_data_file)
    if not os.path.exists(data_path):
        raise ValueError("本地数据不存在，请重新下载")
    create_data, update_data = get_local_data(local_dir, conf)
    upload_data = write_to_remote(create_data, update_data, conf_dir, conf, True)
    temp_data_path = os.path.join(conf_dir, temp_data_file)
    upload_data_to_webdav(upload_data, conf, temp_data_path, webdav_client)
    # Stamp the newly created notes' IDs back into their markdown files
    update_create_markdown(local_dir, create_data)
    # Brief pause so the server's modified time reflects the finished upload
    time.sleep(0.5)
    new_update = get_remote_update_time(conf, webdav_client)
    map_put(conf, "settings")["upload_update"] = new_update
    write_conf(conf, conf_dir)
    return

def quick_add_to_webdav(conf, conf_dir, local_dir, note_content, webdav_client):
    """
    Quickly add a single note (e.g. from the clipboard) to the WebDAV server.

    Args:
        conf (dict): configuration data
        conf_dir (str): configuration directory path
        local_dir (str): local notes directory
        note_content (str): note text to add
        webdav_client: WebDAV client instance
    """
    if not note_content or not note_content.strip():
        return
    # Refresh the local JSON cache first (without regenerating markdown)
    download_from_webdav(conf, conf_dir, local_dir, webdav_client, False)
    new_block = {
        "blockId": "",
        "title": "",
        "content": note_content.strip(),
        "published": "",
        "updated": "",
        "path": ""
    }
    create_data = []
    create_data.append(new_block)
    # Merge the new note without deleting anything remotely
    upload_data = write_to_remote(create_data, {}, conf_dir, conf, False)
    data_path = os.path.join(conf_dir, inBox_data_file)
    upload_data_to_webdav(upload_data, conf, data_path, webdav_client, False)
    update_create_markdown(local_dir, create_data)
    # Brief pause so the server's modified time reflects the finished upload
    time.sleep(0.5)
    new_update = get_remote_update_time(conf, webdav_client)
    map_put(conf, "settings")["download_update"] = new_update
    write_conf(conf, conf_dir)
    return

def reset_sync_record(conf, conf_dir):
    """
    Reset the sync bookkeeping: zero both update timestamps and drop all
    cached per-note md5 records so the next sync re-processes everything.

    Args:
        conf (dict): configuration data
        conf_dir (str): configuration directory path
    """
    # Zero both update markers
    settings = map_put(conf, "settings")
    settings["upload_update"] = 0
    settings["download_update"] = 0

    # Drop every cached note md5
    if "blocks" in conf:
        conf["blocks"] = {}

    # Persist the cleared state
    write_conf(conf, conf_dir)
    return

if __name__ == '__main__':
    # Switch stdout encoding so Chinese log text renders on GBK consoles
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='gb18030')
    if len(sys.argv) < 2:
        sys.exit()
    do_type = sys.argv[1].strip()
    # conf_dir is optional — the original unconditionally read sys.argv[2]
    # and crashed with IndexError when only the action was given
    conf_dir = sys.argv[2].strip() if len(sys.argv) > 2 else ""
    
    if not conf_dir:
        # Default to the directory containing this script
        conf_dir = os.path.dirname(os.path.realpath(__file__))
    
    logging.info("InboxSync：任务开始......")
    # Load the configuration file from the config directory
    conf_path = os.path.join(conf_dir, conf_file)
    conf_str = read_file(conf_path)
    if not conf_str:
        logging.error("InboxSync：未检测到配置文件，退出......")
        # The original's bare `exit` was a no-op expression and fell through
        sys.exit()
    try:
        conf = json.loads(conf_str)
        # local_dir comes from the config, falling back to conf_dir
        local_dir = conf.get("settings", {}).get("local_dir", conf_dir)
        webdav_client = init_webdav_client(conf)
        if do_type == "download":
            download_from_webdav(conf, conf_dir, local_dir, webdav_client)
        elif do_type == "upload":
            upload_to_webdav(conf, conf_dir, local_dir, webdav_client)
        elif do_type == "add":
            content = pyperclip.paste()
            if len(content) == 0:
                logging.error("InboxSync：未检测到笔记内容，退出......")
                sys.exit()
            quick_add_to_webdav(conf, conf_dir, local_dir, content, webdav_client)
        elif do_type == "reset":
            reset_sync_record(conf, conf_dir)
            logging.info("InboxSync：同步记录已重置")
    except Exception as ex:
        # Surface the error on the clipboard so the launcher (e.g. Quicker)
        # can show it, then log the full traceback
        pyperclip.copy("InboxSyncError:" + str(ex))
        logging.exception("InboxSync：任务错误")
    logging.info("InboxSync：任务结束......")