# syntax=docker/dockerfile:1
# Build image for the tensorrt_inference application: copies sources into the
# TensorRT runtime base image and compiles the CMake project.
FROM bewithmeallmylife/tensorrt-inference-runtime:cuda10.2-cudnn8-trt7-devel-ubuntu18.04
# Alternative base (CUDA 11 / cuDNN 8 / TensorRT 8):
#FROM bewithmeallmylife/tensorrt-inference-runtime:cuda11-cudnn8-trt8-ubuntu18.04

WORKDIR /app/tensorrt_inference

# COPY, not ADD, for plain local files/directories (hadolint DL3020 —
# ADD's extra tar/URL behaviors are not needed here).
COPY code code
COPY configs configs
COPY depends depends
COPY example example
COPY project project
COPY python python
COPY samples samples
COPY weights weights

# Out-of-source CMake build. WORKDIR replaces `cd` inside RUN (hadolint
# DL3003) and creates the build directory implicitly, so no `mkdir` needed.
# `-j"$(nproc)"` bounds parallelism to available cores (a bare `-j` spawns
# unlimited jobs and can exhaust memory).
WORKDIR /app/tensorrt_inference/project/build
RUN cmake .. && make -j"$(nproc)"

# Restore the application root as the container's working directory.
WORKDIR /app/tensorrt_inference

#sudo docker build -t='bewithmeallmylife/tensorrt-inference-app:cuda10.2-cudnn8-trt7-devel-ubuntu18.04' .
#sudo docker build -t='bewithmeallmylife/tensorrt-inference-app:cuda11-cudnn8-trt8-ubuntu18.04' .
#sudo docker run --net=host  --gpus '"device=1"' --privileged  -it -v /dev/video0:/dev/video0 -v /tmp/.X11-unix:/tmp/.X11-unix -e DISPLAY=$DISPLAY -d  bewithmeallmylife/tensorrt-inference-app:cuda10.2-cudnn8-trt7-devel-ubuntu18.04
