# [Delete this line after you have edited the parameters] Choose ONE of the options below, delete the other options as well as this line, then run docker-compose up (typical commands are sketched below)
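#
# A minimal usage sketch (assuming the classic docker-compose v1 CLI; with Docker Compose v2 use `docker compose` instead):
#   docker-compose up            # start in the foreground
#   docker-compose up -d         # or start in the background
#   docker-compose logs -f       # follow the container logs
#   docker-compose down          # stop and remove the container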

## ===================================================
## [Option 1] If you do NOT need to run local models (remote-only services such as chatgpt and newbing)
## ===================================================
version: '3'
services:
  gpt_academic_nolocalllms:
    image: ghcr.io/binary-husky/gpt_academic_nolocal:master # (Auto Built by Dockerfile: docs/GithubAction+NoLocal)
    environment:
      # See `config.py` for all available configuration options
      API_KEY:                  '    sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx                                            '
      USE_PROXY:                '    True                                                                                           '
      proxies:                  '    { "http": "socks5h://localhost:10880", "https": "socks5h://localhost:10880", }                 '
      LLM_MODEL:                '    gpt-3.5-turbo                                                                                  '
      AVAIL_LLM_MODELS:         '    ["gpt-3.5-turbo", "api2d-gpt-3.5-turbo", "gpt-4", "api2d-gpt-4", "newbing"]                    '
      WEB_PORT:                 '    22303                                                                                          '
      ADD_WAIFU:                '    True                                                                                           '
      # THEME:                    '    Chuanhu-Small-and-Beautiful                                                                    '
      # DEFAULT_WORKER_NUM:       '    10                                                                                             '
      # AUTHENTICATION:           '    [("username", "passwd"), ("username2", "passwd2")]                                             '

    # Share the host's network stack
    network_mode: "host"

    # Pull the latest code without using the proxy network
    command: >
      bash -c "python3 -u main.py"
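    # Optional sketch (not part of the original file): to keep generated files across container
    # restarts you could mount a host directory. The container path below is an assumption;
    # check the image's working directory before using it.
    # volumes:
    #   - ./gpt_log:/gpt/gpt_academic/gpt_log   # hypothetical path, verify inside the image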


### ===================================================
### [Option 2] If you need to run local models such as ChatGLM + Qwen + MOSS
### ===================================================
version: '3'
services:
  gpt_academic_with_chatglm:
    image: ghcr.io/binary-husky/gpt_academic_chatglm_moss:master  # (Auto Built by Dockerfile: docs/Dockerfile+ChatGLM)
    environment:
      # See `config.py` for all available configuration options
      API_KEY:                  '    sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx                                            '
      USE_PROXY:                '    True                                                                                           '
      proxies:                  '    { "http": "socks5h://localhost:10880", "https": "socks5h://localhost:10880", }                 '
      LLM_MODEL:                '    gpt-3.5-turbo                                                                                  '
      AVAIL_LLM_MODELS:         '    ["chatglm", "qwen", "moss", "gpt-3.5-turbo", "gpt-4", "newbing"]                               '
      LOCAL_MODEL_DEVICE:       '    cuda                                                                                           '
      DEFAULT_WORKER_NUM:       '    10                                                                                             '
      WEB_PORT:                 '    12303                                                                                          '
      ADD_WAIFU:                '    True                                                                                           '
      # AUTHENTICATION:           '    [("username", "passwd"), ("username2", "passwd2")]                                             '

    # GPU access; nvidia0 refers to GPU #0
    runtime: nvidia
    devices:
      - /dev/nvidia0:/dev/nvidia0
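    # Alternative sketch (assuming a Docker Compose version that supports the Compose-spec GPU
    # syntax and the NVIDIA Container Toolkit installed): instead of `runtime: nvidia` plus
    # `devices`, GPUs can be requested declaratively. Keep only one of the two approaches.
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - driver: nvidia
    #           count: 1
    #           capabilities: [gpu]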
      
    # Share the host's network stack
    network_mode: "host"
    command: >
      bash -c "python3 -u main.py"

    # P.S. Extra dependencies can be installed conveniently by slightly tweaking the command, for example:
    # command: >
    #   bash -c "pip install -r request_llm/requirements_qwen.txt && python3 -u main.py"

### ===================================================
### [Option 3] If you need to run ChatGPT + LLAMA + PanGu + RWKV local models
### ===================================================
version: '3'
services:
  gpt_academic_with_rwkv:
    image: ghcr.io/binary-husky/gpt_academic_jittorllms:master
    environment:
      # See `config.py` for all available configuration options
      API_KEY:                  '    sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,fkxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  '
      USE_PROXY:                '    True                                                                                           '
      proxies:                  '    { "http": "socks5h://localhost:10880", "https": "socks5h://localhost:10880", }                 '
      LLM_MODEL:                '    gpt-3.5-turbo                                                                                  '
      AVAIL_LLM_MODELS:         '    ["gpt-3.5-turbo", "newbing", "jittorllms_rwkv", "jittorllms_pangualpha", "jittorllms_llama"]   '
      LOCAL_MODEL_DEVICE:       '    cuda                                                                                           '
      DEFAULT_WORKER_NUM:       '    10                                                                                             '
      WEB_PORT:                 '    12305                                                                                          '
      ADD_WAIFU:                '    True                                                                                           '
      # AUTHENTICATION:           '    [("username", "passwd"), ("username2", "passwd2")]                                             '

    # GPU access; nvidia0 refers to GPU #0
    runtime: nvidia
    devices:
      - /dev/nvidia0:/dev/nvidia0
      
    # Share the host's network stack
    network_mode: "host"

    # Pull the latest code without using the proxy network
    command: >
      python3 -u main.py


## ===================================================
## [Option 4] ChatGPT + Latex
## ===================================================
version: '3'
services:
  gpt_academic_with_latex:
    image: ghcr.io/binary-husky/gpt_academic_with_latex:master  # (Auto Built by Dockerfile: docs/GithubAction+NoLocal+Latex)
    environment:
      # See `config.py` for all available configuration options
      API_KEY:                  '    sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx                              '
      USE_PROXY:                '    True                                                                             '
      proxies:                  '    { "http": "socks5h://localhost:10880", "https": "socks5h://localhost:10880", }   '
      LLM_MODEL:                '    gpt-3.5-turbo                                                                    '
      AVAIL_LLM_MODELS:         '    ["gpt-3.5-turbo", "gpt-4"]                                                       '
      LOCAL_MODEL_DEVICE:       '    cuda                                                                             '
      DEFAULT_WORKER_NUM:       '    10                                                                               '
      WEB_PORT:                 '    12303                                                                            '

    # Share the host's network stack
    network_mode: "host"

    # Pull the latest code without using the proxy network
    command: >
      bash -c "python3 -u main.py"


## ===================================================
## [Option 5] ChatGPT + voice assistant (please read docs/use_audio.md first)
## ===================================================
version: '3'
services:
  gpt_academic_with_audio:
    image: ghcr.io/binary-husky/gpt_academic_audio_assistant:master
    environment:
      # See `config.py` for all available configuration options
      API_KEY:                  '    fk195831-IdP0Pb3W6DCMUIbQwVX6MsSiyxwqybyS                        '
      USE_PROXY:                '    False                                                            '
      proxies:                  '    None                                                             '
      LLM_MODEL:                '    gpt-3.5-turbo                                                    '
      AVAIL_LLM_MODELS:         '    ["gpt-3.5-turbo", "gpt-4"]                                       '
      ENABLE_AUDIO:             '    True                                                             '
      LOCAL_MODEL_DEVICE:       '    cuda                                                             '
      DEFAULT_WORKER_NUM:       '    20                                                               '
      WEB_PORT:                 '    12343                                                            '
      ADD_WAIFU:                '    True                                                             '
      THEME:                    '    Chuanhu-Small-and-Beautiful                                      '
      ALIYUN_APPKEY:            '    RoP1ZrM84DnAFkZK                                                 '
      ALIYUN_TOKEN:             '    f37f30e0f9934c34a992f6f64f7eba4f                                 '
      # (no need to fill in) ALIYUN_ACCESSKEY:         '    LTAI5q6BrFUzoRXVGUWnekh1                                         '
      # (no need to fill in) ALIYUN_SECRET:            '    eHmI20AVWIaQZ0CiTD2bGQVsaP9i68                                   '

    # Share the host's network stack
    network_mode: "host"

    # Pull the latest code without using the proxy network
    command: >
      bash -c "python3 -u main.py"