datastx committed on
Commit
a83266c
Parent: a752a5e
Files changed (7)
  1. .gitignore +2 -0
  2. Dockerfile +25 -0
  3. Makefile.venv +272 -0
  4. app.py +70 -0
  5. download_model.sh +13 -0
  6. makefile +20 -0
  7. requirements.txt +5 -0
.gitignore ADDED
@@ -0,0 +1,2 @@
+ .venv
+ .env
Dockerfile ADDED
@@ -0,0 +1,25 @@
+ # Use an official Python runtime as a base image
+ FROM python:3.9-slim
+
+ # Set the working directory in the container to /app
+ WORKDIR /app
+
+ # Copy the current directory contents into the container at /app
+ ADD . /app
+
+ # Install system dependencies (aria2, curl, gcc, Rust toolchain) and the Python packages from requirements.txt
+ RUN apt-get update && \
+     apt-get install -y aria2 curl gcc python3-dev && \
+     curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y && \
+     . $HOME/.cargo/env && \
+     pip install --upgrade pip && \
+     pip install --no-cache-dir -r requirements.txt
+
+ # Download the model weights and tokenizer files at build time
+ RUN bash download_model.sh
+
+ # Make port 80 available to the world outside this container
+ EXPOSE 80
+
+ # Run app.py when the container launches
+ CMD ["python", "app.py"]
Makefile.venv ADDED
@@ -0,0 +1,272 @@
+ #
+ # SEAMLESSLY MANAGE PYTHON VIRTUAL ENVIRONMENT WITH A MAKEFILE
+ #
+ # https://github.com/sio/Makefile.venv v2023.04.17
+ #
+ #
+ # Insert `include Makefile.venv` at the bottom of your Makefile to enable these
+ # rules.
+ #
+ # When writing your Makefile use '$(VENV)/python' to refer to the Python
+ # interpreter within virtual environment and '$(VENV)/executablename' for any
+ # other executable in venv.
+ #
+ # This Makefile provides the following targets:
+ #     venv
+ #         Use this as a dependency for any target that requires virtual
+ #         environment to be created and configured
+ #     python, ipython
+ #         Use these to launch interactive Python shell within virtual environment
+ #     shell, bash, zsh
+ #         Launch interactive command line shell. "shell" target launches the
+ #         default shell Makefile executes its rules in (usually /bin/sh).
+ #         "bash" and "zsh" can be used to refer to the specific desired shell.
+ #     show-venv
+ #         Show versions of Python and pip, and the path to the virtual environment
+ #     clean-venv
+ #         Remove virtual environment
+ #     $(VENV)/executable_name
+ #         Install `executable_name` with pip. Only packages with names matching
+ #         the name of the corresponding executable are supported.
+ #         Use this as a lightweight mechanism for development dependencies
+ #         tracking. E.g. for one-off tools that are not required in every
+ #         developer's environment, therefore are not included into
+ #         requirements.txt or setup.py.
+ #         Note:
+ #             Rules using such target or dependency MUST be defined below
+ #             `include` directive to make use of correct $(VENV) value.
+ #         Example:
+ #             codestyle: $(VENV)/pyflakes
+ #                 $(VENV)/pyflakes .
+ #         See `ipython` target below for another example.
+ #
+ # This Makefile can be configured via following variables:
+ #     PY
+ #         Command name for system Python interpreter. It is used only initially to
+ #         create the virtual environment
+ #         Default: python3
+ #     REQUIREMENTS_TXT
+ #         Space separated list of paths to requirements.txt files.
+ #         Paths are resolved relative to current working directory.
+ #         Default: requirements.txt
+ #
+ #         Non-existent files are treated as hard dependencies,
+ #         recipes for creating such files must be provided by the main Makefile.
+ #         Providing empty value (REQUIREMENTS_TXT=) turns off processing of
+ #         requirements.txt even when the file exists.
+ #     SETUP_PY, SETUP_CFG, PYPROJECT_TOML, VENV_LOCAL_PACKAGE
+ #         Space separated list of paths to files that contain build instructions
+ #         for local Python packages. Corresponding packages will be installed
+ #         into venv in editable mode along with all their dependencies.
+ #         Default: setup.py setup.cfg pyproject.toml (whichever present)
+ #
+ #         Non-existent and empty values are treated in the same way as for REQUIREMENTS_TXT.
+ #     WORKDIR
+ #         Parent directory for the virtual environment.
+ #         Default: current working directory.
+ #     VENVDIR
+ #         Python virtual environment directory.
+ #         Default: $(WORKDIR)/.venv
+ #
+ # This Makefile was written for GNU Make and may not work with other make
+ # implementations.
+ #
+ #
+ # Copyright (c) 2019-2023 Vitaly Potyarkin
+ #
+ # Licensed under the Apache License, Version 2.0
+ # <http://www.apache.org/licenses/LICENSE-2.0>
+ #
+
+
+ #
+ # Configuration variables
+ #
+
+ WORKDIR?=.
+ VENVDIR?=$(WORKDIR)/.venv
+ REQUIREMENTS_TXT?=$(wildcard requirements.txt)  # Multiple paths are supported (space separated)
+ SETUP_PY?=$(wildcard setup.py)  # Multiple paths are supported (space separated)
+ SETUP_CFG?=$(foreach s,$(SETUP_PY),$(wildcard $(patsubst %setup.py,%setup.cfg,$(s))))
+ PYPROJECT_TOML?=$(wildcard pyproject.toml)
+ VENV_LOCAL_PACKAGE?=$(SETUP_PY) $(SETUP_CFG) $(PYPROJECT_TOML)
+ MARKER=.initialized-with-Makefile.venv
+
+
+ #
+ # Python interpreter detection
+ #
+
+ _PY_AUTODETECT_MSG=Detected Python interpreter: $(PY). Use PY environment variable to override
+
+ ifeq (ok,$(shell test -e /dev/null 2>&1 && echo ok))
+ NULL_STDERR=2>/dev/null
+ else
+ NULL_STDERR=2>NUL
+ endif
+
+ ifndef PY
+ _PY_OPTION:=python3
+ ifeq (ok,$(shell $(_PY_OPTION) -c "print('ok')" $(NULL_STDERR)))
+ PY=$(_PY_OPTION)
+ endif
+ endif
+
+ ifndef PY
+ _PY_OPTION:=$(VENVDIR)/bin/python
+ ifeq (ok,$(shell $(_PY_OPTION) -c "print('ok')" $(NULL_STDERR)))
+ PY=$(_PY_OPTION)
+ $(info $(_PY_AUTODETECT_MSG))
+ endif
+ endif
+
+ ifndef PY
+ _PY_OPTION:=$(subst /,\,$(VENVDIR)/Scripts/python)
+ ifeq (ok,$(shell $(_PY_OPTION) -c "print('ok')" $(NULL_STDERR)))
+ PY=$(_PY_OPTION)
+ $(info $(_PY_AUTODETECT_MSG))
+ endif
+ endif
+
+ ifndef PY
+ _PY_OPTION:=py -3
+ ifeq (ok,$(shell $(_PY_OPTION) -c "print('ok')" $(NULL_STDERR)))
+ PY=$(_PY_OPTION)
+ $(info $(_PY_AUTODETECT_MSG))
+ endif
+ endif
+
+ ifndef PY
+ _PY_OPTION:=python
+ ifeq (ok,$(shell $(_PY_OPTION) -c "print('ok')" $(NULL_STDERR)))
+ PY=$(_PY_OPTION)
+ $(info $(_PY_AUTODETECT_MSG))
+ endif
+ endif
+
+ ifndef PY
+ define _PY_AUTODETECT_ERR
+ Could not detect Python interpreter automatically.
+ Please specify path to interpreter via PY environment variable.
+ endef
+ $(error $(_PY_AUTODETECT_ERR))
+ endif
+
+
+ #
+ # Internal variable resolution
+ #
+
+ VENV=$(VENVDIR)/bin
+ EXE=
+ # Detect windows
+ ifeq (win32,$(shell $(PY) -c "import __future__, sys; print(sys.platform)"))
+ VENV=$(VENVDIR)/Scripts
+ EXE=.exe
+ endif
+
+ touch=touch $(1)
+ ifeq (,$(shell command -v touch $(NULL_STDERR)))
+ # https://ss64.com/nt/touch.html
+ touch=type nul >> $(subst /,\,$(1)) && copy /y /b $(subst /,\,$(1))+,, $(subst /,\,$(1))
+ endif
+
+ RM?=rm -f
+ ifeq (,$(shell command -v $(firstword $(RM)) $(NULL_STDERR)))
+ RMDIR:=rd /s /q
+ else
+ RMDIR:=$(RM) -r
+ endif
+
+
+ #
+ # Virtual environment
+ #
+
+ .PHONY: venv
+ venv: $(VENV)/$(MARKER)
+
+ .PHONY: clean-venv
+ clean-venv:
+ 	-$(RMDIR) "$(VENVDIR)"
+
+ .PHONY: show-venv
+ show-venv: venv
+ 	@$(VENV)/python -c "import sys; print('Python ' + sys.version.replace('\n',''))"
+ 	@$(VENV)/pip --version
+ 	@echo venv: $(VENVDIR)
+
+ .PHONY: debug-venv
+ debug-venv:
+ 	@echo "PATH (Shell)=$$PATH"
+ 	@$(MAKE) --version
+ 	$(info PATH (GNU Make)="$(PATH)")
+ 	$(info SHELL="$(SHELL)")
+ 	$(info PY="$(PY)")
+ 	$(info REQUIREMENTS_TXT="$(REQUIREMENTS_TXT)")
+ 	$(info VENV_LOCAL_PACKAGE="$(VENV_LOCAL_PACKAGE)")
+ 	$(info VENVDIR="$(VENVDIR)")
+ 	$(info VENVDEPENDS="$(VENVDEPENDS)")
+ 	$(info WORKDIR="$(WORKDIR)")
+
+
+ #
+ # Dependencies
+ #
+
+ ifneq ($(strip $(REQUIREMENTS_TXT)),)
+ VENVDEPENDS+=$(REQUIREMENTS_TXT)
+ endif
+
+ ifneq ($(strip $(VENV_LOCAL_PACKAGE)),)
+ VENVDEPENDS+=$(VENV_LOCAL_PACKAGE)
+ endif
+
+ $(VENV):
+ 	$(PY) -m venv $(VENVDIR)
+ 	$(VENV)/python -m pip install --upgrade pip setuptools wheel
+
+ $(VENV)/$(MARKER): $(VENVDEPENDS) | $(VENV)
+ ifneq ($(strip $(REQUIREMENTS_TXT)),)
+ 	$(VENV)/pip install $(foreach path,$(REQUIREMENTS_TXT),-r $(path))
+ endif
+ ifneq ($(strip $(VENV_LOCAL_PACKAGE)),)
+ 	$(VENV)/pip install $(foreach path,$(sort $(VENV_LOCAL_PACKAGE)),-e $(dir $(path)))
+ endif
+ 	$(call touch,$(VENV)/$(MARKER))
+
+
+ #
+ # Interactive shells
+ #
+
+ .PHONY: python
+ python: venv
+ 	exec $(VENV)/python
+
+ .PHONY: ipython
+ ipython: $(VENV)/ipython
+ 	exec $(VENV)/ipython
+
+ .PHONY: shell
+ shell: venv
+ 	. $(VENV)/activate && exec $(notdir $(SHELL))
+
+ .PHONY: bash zsh
+ bash zsh: venv
+ 	. $(VENV)/activate && exec $@
+
+
+ #
+ # Commandline tools (wildcard rule, executable name must match package name)
+ #
+
+ ifneq ($(EXE),)
+ $(VENV)/%: $(VENV)/%$(EXE) ;
+ .PHONY: $(VENV)/%
+ .PRECIOUS: $(VENV)/%$(EXE)
+ endif
+
+ $(VENV)/%$(EXE): $(VENV)/$(MARKER)
+ 	$(VENV)/pip install --upgrade $*
+ 	$(call touch,$@)
app.py ADDED
@@ -0,0 +1,70 @@
+ import os
+ import gradio as gr
+ import transformers
+ from torch import bfloat16
+ from threading import Thread
+ from gradio.themes.utils.colors import Color
+
+ # Download model and tokenizer files
+ os.system('bash download_model.sh')
+
+ model_id = "/app/medllama2_7b"
+ tokenizer = transformers.AutoTokenizer.from_pretrained(model_id)
+ model_config = transformers.AutoConfig.from_pretrained(model_id)
+
+ bnb_config = transformers.BitsAndBytesConfig(
+     load_in_4bit=True,
+     bnb_4bit_quant_type='nf4',
+     bnb_4bit_use_double_quant=True,
+     bnb_4bit_compute_dtype=bfloat16
+ )
+ model = transformers.AutoModelForCausalLM.from_pretrained(
+     model_id,
+     trust_remote_code=True,
+     config=model_config,
+     quantization_config=bnb_config,
+     device_map='auto'
+ )
+
+ prompts = ["You are a helpful AI Doctor."]
+
+ def prompt_build(system_prompt, user_inp, hist):
+     prompt = f"""### System:\n{system_prompt}\n\n"""
+
+     for pair in hist:
+         prompt += f"""### User:\n{pair[0]}\n\n### Assistant:\n{pair[1]}\n\n"""
+
+     prompt += f"""### User:\n{user_inp}\n\n### Assistant:"""
+     return prompt
+
+ def chat(user_input, history, system_prompt):
+
+     prompt = prompt_build(system_prompt, user_input, history)
+     model_inputs = tokenizer([prompt], return_tensors="pt").to("cuda")
+
+     streamer = transformers.TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
+
+     generate_kwargs = dict(
+         model_inputs,
+         streamer=streamer,
+         max_length=2048,
+         do_sample=True,
+         top_p=0.95,
+         temperature=0.8,
+         top_k=50
+     )
+     t = Thread(target=model.generate, kwargs=generate_kwargs)
+     t.start()
+
+     model_output = ""
+     for new_text in streamer:
+         model_output += new_text
+         yield model_output
+     return model_output
+
+ if __name__ == "__main__":
+     with gr.Blocks() as demo:
+         dropdown = gr.Dropdown(choices=prompts, label="Type your own or select a system prompt", value="You are a helpful AI Doctor.", allow_custom_value=True)
+         chatbot = gr.ChatInterface(fn=chat, additional_inputs=[dropdown])
+
+     demo.queue(api_open=False).launch(show_api=False, share=True)
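
For readers unfamiliar with this instruction format, the sketch below traces what prompt_build() from app.py above returns for a short invented exchange (the history pair and the user message are made up purely for illustration):

    # Hypothetical conversation used only to show the template shape
    history = [("I have a headache.", "How long has it lasted?")]
    print(prompt_build("You are a helpful AI Doctor.", "About two days.", history))
    # Output:
    # ### System:
    # You are a helpful AI Doctor.
    #
    # ### User:
    # I have a headache.
    #
    # ### Assistant:
    # How long has it lasted?
    #
    # ### User:
    # About two days.
    #
    # ### Assistant:

The chat() function builds this prompt, then streams tokens back to the Gradio UI via TextIteratorStreamer while model.generate runs in a background thread.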
download_model.sh ADDED
@@ -0,0 +1,13 @@
+ #!/bin/bash
+
+ mkdir -p /app/medllama2_7b
+
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/resolve/main/model-00001-of-00002.safetensors -d /app/medllama2_7b -o model-00001-of-00002.safetensors
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/resolve/main/model-00002-of-00002.safetensors -d /app/medllama2_7b -o model-00002-of-00002.safetensors
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/model.safetensors.index.json -d /app/medllama2_7b -o model.safetensors.index.json
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/special_tokens_map.json -d /app/medllama2_7b -o special_tokens_map.json
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/tokenizer.json -d /app/medllama2_7b -o tokenizer.json
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/tokenizer_config.json -d /app/medllama2_7b -o tokenizer_config.json
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/config.json -d /app/medllama2_7b -o config.json
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/raw/main/generation_config.json -d /app/medllama2_7b -o generation_config.json
+ aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://huggingface.co/4bit/medllama2_7b_s/resolve/main/tokenizer.model -d /app/medllama2_7b -o tokenizer.model
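
As an aside (not part of this commit), the same files could also be fetched with the huggingface_hub Python client instead of aria2c; a minimal sketch, assuming the huggingface_hub package is installed:

    # Sketch only: pull the whole 4bit/medllama2_7b_s snapshot into the model directory
    from huggingface_hub import snapshot_download

    snapshot_download(repo_id="4bit/medllama2_7b_s", local_dir="/app/medllama2_7b")

The script above instead downloads each file explicitly with aria2c (-c resumes partial downloads, -x 16 -s 16 opens 16 connections/splits, -k 1M sets the piece size).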
makefile ADDED
@@ -0,0 +1,20 @@
+
+
+ PYTHON_BINARY = $(VENV)/python
+ STREAMLIT_BINARY = $(VENV)/streamlit
+
+
+ re-venv: clean-venv venv
+
+ run: venv
+ 	$(PYTHON_BINARY) app.py
+ 	$(STREAMLIT_BINARY) run app.py
+
+ build:
+ 	docker build -t medllama2_7b .
+
+ run-docker:
+ 	docker run -p 4000:80 medllama2_7b
+
+
+ include Makefile.venv
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ gradio
+ transformers
+ accelerate
+ bitsandbytes
+ peft