# syntax=docker/dockerfile:1
# Container image for a Julia application.
# Build: installs OS packages, copies the app, resolves Julia deps via
# env_preparation.jl, then removes the temp copy of the source tree.
# Alternative bases kept for reference:
# FROM nvidia/cuda:12.2.0-devel-ubuntu20.04
# FROM debian:latest
FROM julia:1.11

# ---------------------------------------------- 100 ---------------------------------------------

# Install required APT packages.
# DEBIAN_FRONTEND is set inline (not via ENV) so it does not leak into the
# runtime environment; update+install share one layer and the apt lists are
# removed in the same layer so they never persist in the image.
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        build-essential \
        busybox \
        cargo \
        curl \
        ffmpeg \
        g++ \
        gcc \
        git \
        iputils-ping \
        libicu-dev \
        libmosquitto-dev \
        libsm6 \
        libssl-dev \
        libxext6 \
        nano \
        net-tools \
        postgresql-client \
        procps \
        software-properties-common \
        tar \
        unixodbc \
        unixodbc-dev \
        unzip \
        wget \
        zip \
        # python3 \
        # python3-pip \
        # python-is-python3 \
    && rm -rf /var/lib/apt/lists/*

# # For webapp frontend
# RUN apt-get update && apt-get install -y nginx
# COPY nginx.conf /etc/nginx/nginx.conf
# # Copy your static website files to the Nginx HTML directory
# COPY . /usr/share/nginx/html

# # install nodejs https://deb.nodesource.com/
# RUN curl -sL https://deb.nodesource.com/setup_20.x | bash - && \
#     apt-get update && apt-get install -y nodejs

# Set up the app directory layout in a single layer:
#   /appfolder/mountvolume  - mount point for runtime data
#   /appfolder/app          - application home (env_preparation.jl lives here)
#   /appfolder/app/temp     - transient copy of the build context, removed below
RUN mkdir -p /appfolder/mountvolume /appfolder/app/temp
COPY . /appfolder/app/temp
RUN mv /appfolder/app/temp/env_preparation.jl /appfolder/app

# install Conda as primary python environment with specified python version
WORKDIR /appfolder/app/temp
# RUN PATH="${HOME}/conda/bin:${PATH}" \
# #   && wget -O Miniforge3.sh https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh \
#     && wget -O Miniforge3.sh https://github.com/conda-forge/miniforge/releases/latest/download/Mambaforge-Linux-x86_64.sh \
#     && bash Miniforge3.sh -b -p "${HOME}/conda" \
#     && rm -f Miniforge3.sh \
#     && echo "Running $(conda --version)" \
#     && conda init bash \
#     && . /root/.bashrc \
#     && conda update conda -y \
#     && conda install python=3.10 -y

# # install pip into conda's base env
# RUN PATH="${HOME}/conda/bin:${PATH}" \
#     && conda install pip \
#     && pip install --trusted-host pypi.python.org -r required_python_packages.txt \
# #   && CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS -DLLAMA_AVX2=OFF -DLLAMA_F16C=OFF -DLLAMA_FMA=OFF" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir
#     && CMAKE_ARGS="-DLLAMA_CUBLAS=on -DLLAMA_AVX2=OFF -DLLAMA_F16C=OFF -DLLAMA_FMA=OFF" FORCE_CMAKE=1 pip install llama-cpp-python==0.2.77 --no-cache-dir
# # https://github.com/abetlen/llama-cpp-python/issues/412 old CPU cause CUBLAS compile problem

# # to be able to use CMD python -m at the last line
# ENV PATH=/root/conda/bin:$PATH

# # install powershell
# WORKDIR /appfolder/app/temp
# RUN wget https://github.com/PowerShell/PowerShell/releases/download/v7.4.5/powershell_7.4.5-1.deb_amd64.deb
# RUN dpkg -i powershell_7.4.5-1.deb_amd64.deb
# RUN apt-get install -f

# using juliaup
# RUN curl -fsSL https://install.julialang.org | sh -s -- -y
# SHELL ["/bin/bash", "--login", "-c"]

# Install Julia packages for the app, then drop the temp source copy.
# NOTE(review): the rm runs in a later layer, so the copied sources still
# exist in the COPY layer above — acceptable here, but a multi-stage build
# would remove them from the final image entirely.
WORKDIR /appfolder/app
RUN julia env_preparation.jl
RUN rm -r /appfolder/app/temp

# Make port 80 available to the world outside this container. If I use
# --publish at docker run command, there is no need to use EXPOSE
# EXPOSE 1883

# Run app when the container launches.
# Exec (JSON) form so julia runs as PID 1 and receives SIGTERM from
# `docker stop` directly instead of being wrapped in `/bin/sh -c`.
CMD ["julia", "-t", "auto", "--project", "-e", "include(\"main.jl\");"]
# CMD ["nginx", "-g", "daemon off;"]
# CMD ["sleep", "infinity"]