# @FileName  : main.py
# @Time      : 2025/2/6 7:18
# @Author    : LuZhaoHui
# @Software  : PyCharm

from tool import *

def downLoadModel(model):
    """Download a large model from ModelScope into the local model directory.

    Args:
        model: ModelScope model ID, e.g. 'Qwen/Qwen2.5-7B-Instruct'.
            Also used as the subdirectory name under AIMODELDIR.

    Returns:
        The local filesystem path the snapshot was downloaded to.
    """
    # Target directory: <AIMODELDIR>/<model-id> (AIMODELDIR comes from tool)
    target_directory = f'{AIMODELDIR}/{model}'
    # snapshot_download fetches the full model snapshot into local_dir
    download_path = snapshot_download(model, local_dir=target_directory)
    return download_path

if __name__ == '__main__':
    # AIMODELDIR is provided by `from tool import *`; abort if unconfigured.
    # PEP 8: compare against None with `is`, not `==`.
    if AIMODELDIR is None:
        print('system error')
        # `raise SystemExit` is always available; bare `exit()` depends on
        # the `site` module being loaded and is meant for interactive use.
        raise SystemExit(-1)
    print('ocr-tools test')
    # --- Experiment log: model IDs previously tried. Uncomment a `model =`
    # --- line plus the following `downLoadModel(model)` call to re-download.
    # model = 'MiniMaxAI/MiniMax-VL-01'
    # model = 'MiniMaxAI/MiniMax-Text-01'
    # model = 'AI-ModelScope/MiniMax-Text-01'
    # model = 'AI-ModelScope/MiniMax-VL-01'

    # deepseek-vl2
    # model = "deepseek-ai/deepseek-vl2"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-8bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-6bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-4bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-3bit"
    # downLoadModel(model)
    # model = "deepseek-ai/deepseek-vl2-tiny"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-tiny-8bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-tiny-6bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-tiny-4bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-tiny-3bit"
    # downLoadModel(model)
    # model = "deepseek-ai/deepseek-vl2-small"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-small-8bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-small-6bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-small-4bit"
    # downLoadModel(model)
    # model = "mlx-community/deepseek-vl2-small-3bit"
    # downLoadModel(model)

    # Qwen2.5
    # model = 'Qwen/Qwen2.5-7B-Instruct'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-7B-Instruct-8bit'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-7B-Instruct-4bit'
    # downLoadModel(model)

    # model = 'mlx-community/Qwen2.5-7B-Instruct-3bit'
    # downLoadModel(model)
    # model = 'Qwen/Qwen2.5-3B-Instruct'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-3B-Instruct-8bit'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-3B-Instruct-4bit'
    # downLoadModel(model)

    # Qwen2.5-VL
    # model = 'Qwen/Qwen2.5-VL-72B-Instruct'
    # model = 'mlx-community/Qwen2.5-VL-72B-Instruct-8bit'
    # model = 'mlx-community/Qwen2.5-VL-72B-Instruct-6bit'
    # model = 'mlx-community/Qwen2.5-VL-72B-Instruct-4bit'
    # model = 'mlx-community/Qwen2.5-VL-72B-Instruct-3bit'
    # downLoadModel(model)

    # model = 'Qwen/Qwen2.5-VL-7B-Instruct'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-VL-7B-Instruct-8bit'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-VL-7B-Instruct-6bit'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-VL-7B-Instruct-3bit'
    # downLoadModel(model)
    # model = 'Qwen/Qwen2.5-VL-3B-Instruct'
    # downLoadModel(model)

    # model = 'mlx-community/Qwen2.5-VL-3B-Instruct-8bit'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-VL-3B-Instruct-6bit'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-VL-3B-Instruct-4bit'
    # downLoadModel(model)
    # model = 'mlx-community/Qwen2.5-VL-3B-Instruct-3bit'
    # downLoadModel(model)

    # loadUseModel(model)
    # modelscopeConvertOllama(model)
    # modelTask(model)
    # model = 'bartowski/Qwen2-VL-7B-Instruct-GGUF'
    # downLoadModel(model)
    # model = 'lmstudio-community/Qwen2-VL-2B-Instruct-GGUF'

    # downLoadModel(model)
    # modelscopeConvertOllama(model)
    # userPrompt = "你好,我是小白"
    # userPrompt = "你好,我是小白,想知道如何学习siliconFlow的API调用"
    # result = test1(userPrompt)
    # Print the result
    # if result:
    # Assumed response format: {"choices": [{"message": {"content": "..."}}]}
    # generated_text = result["choices"][0]["message"]["content"]
    # print("生成的回复：\n", generated_text)
    # saveLog(result, log='api.txt')
    # else:
    #     print("请求未成功。")
