@@ -81,17 +81,23 @@ RUN sed -i "s/httpredir.debian.org/debian.uchicago.edu/" /etc/apt/sources.list &
 # b/128333086: Set PROJ_DATA to point to the proj4 cartographic library.
 ENV PROJ_DATA=/opt/conda/share/proj
 
+# Install micromamba, setup channels, and replace conda with micromamba
+ENV MAMBA_ROOT_PREFIX=/opt/conda
+RUN curl -L "https://micro.mamba.pm/install.sh" -o /tmp/micromamba-install.sh \
+    && bash /tmp/micromamba-install.sh \
+    && rm /tmp/micromamba-install.sh \
+    && mv ~/.local/bin/micromamba /usr/bin/micromamba \
+    && (!(which conda) || cp /usr/bin/micromamba $(which conda)) \
+    && micromamba config append channels nvidia \
+    && micromamba config append channels rapidsai \
+    && micromamba config append channels conda-forge \
+    && micromamba config set channel_priority flexible \
+    && python -m nb_conda_kernels.install --disable
+
 # Install conda packages not available on pip.
 # When using pip in a conda environment, conda commands should be run first and then
 # the remaining pip commands: https://www.anaconda.com/using-pip-in-a-conda-environment/
-RUN conda config --add channels nvidia && \
-    conda config --add channels rapidsai && \
-    conda config --set solver libmamba && \
-    # b/299991198: remove curl/libcurl install once DLVM base image includes version >= 7.86
-    conda install -c conda-forge mamba curl libcurl && \
-    # Base image channel order: conda-forge (highest priority), defaults.
-    # End state: rapidsai (highest priority), nvidia, conda-forge, defaults.
-    mamba install -y mkl cartopy imagemagick pyproj "shapely<2" && \
+RUN micromamba install -y mkl cartopy imagemagick pyproj "shapely<2" && \
     rm -rf /opt/conda/lib/python3.10/site-packages/pyproj/proj_dir/ && \
     /tmp/clean-layer.sh
 
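The `(!(which conda) || cp /usr/bin/micromamba $(which conda))` step above is a shell short-circuit: the copy runs only when a conda binary is already on PATH, so micromamba ends up shadowing it. A minimal standalone equivalent of that one step, using the same paths as the RUN line:

    if which conda > /dev/null; then
        # conda is present: overwrite its binary so `conda ...` invocations resolve to micromamba
        cp /usr/bin/micromamba "$(which conda)"
    fi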
@@ -100,8 +106,7 @@ RUN conda config --add channels nvidia && \
 # b/341938540: uninstall grpc-cpp to allow >=v24.4 cudf and cuml to be installed.
 {{ if eq .Accelerator "gpu" }}
 RUN pip uninstall -y pyarrow && \
-    mamba remove -y --force grpc-cpp && \
-    mamba install -y -c conda-forge spacy cudf>=24.4 cuml>=24.4 cupy cuda-version=$CUDA_MAJOR_VERSION.$CUDA_MINOR_VERSION && \
+    micromamba install -vvvy spacy "cudf>=24.4" "cuml>=24.4" cupy cuda-version=$CUDA_MAJOR_VERSION.$CUDA_MINOR_VERSION && \
     /tmp/clean-layer.sh
 {{ else }}
 RUN pip install spacy && \
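One detail in this hunk: the version specifiers are now quoted ("cudf>=24.4", "cuml>=24.4"). Unquoted, the shell treats `>=` as an output redirection, so the constraint never reaches the installer, which is presumably part of why the quotes were added. A quick illustration in plain shell, with no packages involved:

    # Unquoted: `>` redirects, so only the bare name is passed as an argument
    echo cudf>=24.4 && cat ./=24.4   # prints nothing, then shows 'cudf' from the file named '=24.4'
    # Quoted: the full spec survives as a single argument
    echo "cudf>=24.4"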
@@ -114,7 +119,7 @@ RUN pip install spacy && \
 COPY --from=torch_whl /tmp/whl/*.whl /tmp/torch/
 # b/356397043: We are currently using cuda 12.3,
 # but magma-cuda121 is the latest compatible version
-RUN mamba install -y -c pytorch magma-cuda121 && \
+RUN micromamba install -y -c pytorch magma-cuda121 && \
     pip install /tmp/torch/*.whl && \
     sudo apt -y install libsox-dev && \
     rm -rf /tmp/torch && \
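The comment above records that CUDA 12.3 is in use while magma-cuda121 is the newest compatible build. When CUDA is next bumped, the pytorch channel can be queried for newer builds; a hedged sketch, assuming the installed micromamba exposes the `search` subcommand (older builds spell it `micromamba repoquery search`):

    # List magma-cuda builds published on the pytorch channel
    micromamba search -c pytorch "magma-cuda*"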
@@ -507,7 +512,7 @@ RUN pip install wandb \
     pip install --no-dependencies fastai fastdownload && \
     # b/343971718: remove duplicate aiohttp installs, and reinstall it
     rm -rf /opt/conda/lib/python3.10/site-packages/aiohttp* && \
-    mamba install --force-reinstall -y aiohttp && \
+    micromamba install --force-reinstall -y aiohttp && \
     /tmp/clean-layer.sh
 
 # Download base easyocr models.
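For the duplicate-aiohttp cleanup above, the before/after state can be inspected directly in the image: duplicates show up as multiple aiohttp metadata entries under site-packages, and after the force-reinstall a single copy installed by micromamba should remain. A hedged check using the same path as the RUN line:

    # More than one aiohttp* entry here indicates duplicate installs
    ls -d /opt/conda/lib/python3.10/site-packages/aiohttp*
    # After the rebuild, pip should report exactly one installed version
    pip show aiohttp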