Dean committed on
Commit
9cd8f4a
โ€ข
1 Parent(s): cdbc70e

Transition to MLWorkspace docker and setup makefile with environment commands

Browse files
.gitignore ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ .vscode/
2
+ .DS_Store
3
+ .idea/
4
+ .ipynb_checkpoints/
5
+ .workspace/
Dockerfile DELETED
@@ -1,9 +0,0 @@
1
- FROM pytorch/pytorch
2
-
3
- RUN apt-get update && apt-get install -y software-properties-common && apt-get update
4
- RUN add-apt-repository -y ppa:git-core/ppa && apt-get update && apt-get install -y git libglib2.0-dev
5
-
6
- COPY requirements.txt ./
7
- RUN pip install -r requirements.txt
8
-
9
- RUN pip install jupyterlab
 
 
 
 
 
 
 
 
 
 
Makefile ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #################################################################################
2
+ # GLOBALS #
3
+ #################################################################################
4
+
5
+ PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
6
+ PROJECT_NAME = savta_depth
7
+ PYTHON_INTERPRETER = python3
8
+
9
+ ifeq (,$(shell which conda))
10
+ HAS_CONDA=False
11
+ else
12
+ HAS_CONDA=True
13
+ endif
14
+
15
+ #################################################################################
16
+ # COMMANDS #
17
+ #################################################################################
18
+
19
+ env:
20
+ ifeq (True,$(HAS_CONDA))
21
+ @echo ">>> Detected conda, creating conda environment."
22
+ conda create --name $(PROJECT_NAME) python=3
23
+ @echo ">>> New conda env created. Activate with:\nconda activate $(PROJECT_NAME)"
24
+ else
25
+ @echo ">>> No conda detected, creating venv environment."
26
+ $(PYTHON_INTERPRETER) -m venv env
27
+ @echo ">>> New virtual env created. Activate with:\nsource env/bin/activate ."
28
+ endif
29
+
30
+ requirements:
31
+ @echo ">>> Installing requirements. Make sure your virtual environment is activated."
32
+ $(PYTHON_INTERPRETER) -m pip install -U pip setuptools wheel
33
+ $(PYTHON_INTERPRETER) -m pip install -r requirements.txt
Notebooks/SavtaDepth_sanity_check.ipynb CHANGED
The diff for this file is too large to render. See raw diff
 
README.md CHANGED
@@ -14,20 +14,40 @@ If you'd like to take part, please follow the guide.
14
  * Next, clone the repository you just forked by typing the following command in your terminal:
15
  ```bash
16
  $ git clone https://dagshub.com/<your-dagshub-username>/SavtaDepth.git
17
- $ dvc checkout #use this to get the data, models etc
18
  ```
19
- * To get your environment up and running docker is the best way to go.
20
- We created a dockerfile that has all you need in it and will install all requirements in the 'requirements.txt' file as well as run a jupyter lab instance.
21
- * Just open the terminal in your project directory and type `docker build "savta_depth_dev" ."
22
- * After the docker image is created run the following commands:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  ```bash
24
- $ chmod +x run_dev_env.sh
25
- $ ./run_dev_env.sh
26
  ```
27
- * Open localhost:8888 and you are good to go
28
- * After you are finished your modification, don't forget to push your code to DAGsHub, and your dvc managed files to your dvc remote. In order to setup a dvc remote please refer to [this guide](https://dagshub.com/docs/getting-started/set-up-remote-storage-for-data-and-models/).
29
- * Create a Pull Request on DAGsHub!
30
- * ๐Ÿถ
 
 
 
 
 
 
 
 
31
  ### TODO:
32
  - [ ] Web UI
33
  - [ ] Testing various datasets as basis for training
 
14
  * Next, clone the repository you just forked by typing the following command in your terminal:
15
  ```bash
16
  $ git clone https://dagshub.com/<your-dagshub-username>/SavtaDepth.git
 
17
  ```
18
+ * To get your environment up and running, Docker is the best way to go. We use an instance of [MLWorkspace](https://github.com/ml-tooling/ml-workspace).
19
+ * You can just run the following commands to get it started.
20
+
21
+ ```bash
22
+ $ chmod +x run_dev_env.sh
23
+ $ ./run_dev_env.sh
24
+ ```
25
+
26
+ * Open localhost:8080 to see the workspace you have created. You will be asked for a token – enter `dagshub_savta`
27
+ * In the top right you have a menu called `Open Tool`. Click that button and choose terminal (alternatively open VSCode and open terminal there) and type in the following commands to install a virtualenv and dependencies:
28
+
29
+ ```bash
30
+ $ make env
31
+ $ conda activate savta_depth
32
+ $ make requirements
33
+ ```
34
+ * Pull the dvc files to your workspace by typing:
35
+
36
  ```bash
37
+ $ dvc checkout #use this to get the data, models etc
 
38
  ```
39
+
40
+ * After you have finished your modifications, make sure to do the following:
41
+ * Freeze your virtualenv by typing in the terminal:
42
+
43
+ ```bash
44
+ pip freeze > requirements.txt
45
+ ```
46
+
47
+ * Push your code to DAGsHub, and your dvc managed files to your dvc remote. In order to set up a dvc remote please refer to [this guide](https://dagshub.com/docs/getting-started/set-up-remote-storage-for-data-and-models/).
48
+ * Create a Pull Request on DAGsHub!
49
+ * ๐Ÿถ
50
+
51
  ### TODO:
52
  - [ ] Web UI
53
  - [ ] Testing various datasets as basis for training
requirements.txt CHANGED
@@ -0,0 +1 @@
 
 
1
+ certifi==2020.6.20
run_dev_env.sh CHANGED
@@ -1,10 +1,7 @@
1
- docker run --rm -p 8888:8888 \
2
- --ipc=host \
3
- --volume="$PWD:/workspace" \
4
- savta_depth_dev jupyter lab \
5
- --ip=0.0.0.0 \
6
- --port=8888 \
7
- --allow-root \
8
- --no-browser \
9
- --NotebookApp.token='' \
10
- --NotebookApp.password=''
 
1
+ docker run -d \
2
+ -p 8080:8080 \
3
+ --name "ml-workspace" -v "${PWD}:/workspace" \
4
+ --env AUTHENTICATE_VIA_JUPYTER="dagshub_savta" \
5
+ --shm-size 512m \
6
+ --restart always \
7
+ mltooling/ml-workspace:latest
 
 
 
src/code/__pycache__/make_dataset.cpython-37.pyc ADDED
Binary file (143 Bytes). View file
 
src/code/make_dataset.py ADDED
@@ -0,0 +1 @@
 
 
1
+ print('hello world')