Skip to content

Commit f21d9e4

Browse files
authored
update: Bump awscli version and constrain spyder on conda (#153)
* Update awscli version and constrain spyder on conda
1 parent 6dd8cae commit f21d9e4

File tree

5 files changed

+19
-14
lines changed

5 files changed

+19
-14
lines changed

docker/1.3.1/py2/Dockerfile.cpu

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,6 @@ RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-la
6161
&& /opt/conda/bin/conda install -y -c anaconda \
6262
python=$PYTHON_VERSION \
6363
numpy==1.16.4 \
64-
scipy==1.2.1 \
6564
ipython==5.8.0 \
6665
mkl==2019.4 \
6766
mkl-include==2019.4 \
@@ -70,12 +69,12 @@ RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-la
7069
/opt/conda/bin/conda clean -ya
7170

7271
RUN conda install -c conda-forge \
73-
awscli==1.16.296 \
72+
awscli==1.17.7 \
7473
opencv==4.0.1 \
7574
&& conda install -y \
7675
scikit-learn==0.20.3 \
7776
pandas==0.24.2 \
78-
pillow==6.2.1 \
77+
Pillow==6.2.0 \
7978
h5py==2.9.0 \
8079
requests==2.22.0 \
8180
&& conda clean -ya \
@@ -90,6 +89,7 @@ COPY sagemaker_pytorch_container-1.3.1-py2.py3-none-any.whl /sagemaker_pytorch_c
9089
# The following section uninstalls torch and torchvision before installing the
9190
# custom versions from an S3 bucket. This will need to be removed in the future
9291
RUN pip install --no-cache-dir \
92+
scipy==1.2.2 \
9393
/sagemaker_pytorch_container-1.3.1-py2.py3-none-any.whl \
9494
&& pip uninstall -y torch \
9595
&& pip install -U --no-cache-dir https://pytorch-aws.s3.amazonaws.com/pytorch-1.3.1/py2/cpu/torch-1.3.1-cp27-cp27mu-manylinux1_x86_64.whl \

docker/1.3.1/py2/Dockerfile.gpu

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,6 @@ RUN ompi_info --parsable --all | grep mpi_built_with_cuda_support:value \
8989
&& /opt/conda/bin/conda install -y anaconda \
9090
python=$PYTHON_VERSION \
9191
numpy==1.16.4 \
92-
scipy==1.2.1 \
9392
ipython==5.8.0 \
9493
mkl==2019.4 \
9594
mkl-include==2019.4 \
@@ -100,14 +99,19 @@ RUN ompi_info --parsable --all | grep mpi_built_with_cuda_support:value \
10099

101100
RUN conda install -c pytorch magma-cuda101==2.5.1 \
102101
&& conda install -c conda-forge \
103-
awscli==1.16.296 \
102+
awscli==1.17.7 \
104103
opencv==4.0.1 \
105104
&& conda install -y \
106105
scikit-learn==0.20.3 \
107106
pandas==0.24.2 \
108-
pillow==6.2.1 \
107+
Pillow==6.2.0 \
109108
h5py==2.9.0 \
110109
requests==2.22.0 \
110+
&& pip install -U \
111+
scipy==1.2.2 \
112+
"spyder<4.0" \
113+
argparse \
114+
&& pip uninstall -y QDarkStyle \
111115
&& conda clean -ya
112116

113117
# The following section uninstalls torch and torchvision before installing the

docker/1.3.1/py3/Dockerfile.cpu

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -69,20 +69,19 @@ RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-la
6969
&& /opt/conda/bin/conda install -y -c anaconda \
7070
python=$PYTHON_VERSION \
7171
numpy==1.16.4 \
72-
scipy==1.3.0 \
7372
ipython==7.10.1 \
7473
mkl==2019.4 \
7574
mkl-include==2019.4 \
7675
cython==0.29.12 \
7776
typing==3.6.4 \
7877
"pyopenssl>=17.5.0" \
7978
&& conda install -c conda-forge \
80-
awscli==1.16.314 \
79+
awscli==1.17.7 \
8180
opencv==4.0.1 \
8281
&& conda install -y \
8382
scikit-learn==0.21.2 \
8483
pandas==0.25.0 \
85-
pillow==6.2.1 \
84+
Pillow==6.2.0 \
8685
h5py==2.9.0 \
8786
requests==2.22.0 \
8887
&& conda install -c dglteam -y dgl==0.4.1 \
@@ -93,7 +92,9 @@ RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-la
9392
# custom versions from an S3 bucket. This will need to be removed in the future
9493
RUN pip install --upgrade pip --trusted-host pypi.org --trusted-host files.pythonhosted.org \
9594
&& ln -s /opt/conda/bin/pip /usr/local/bin/pip3 \
96-
&& pip install --no-cache-dir fastai==1.0.59 \
95+
&& pip install --no-cache-dir -U \
96+
fastai==1.0.59 \
97+
scipy==1.2.2 \
9798
smdebug==0.5.0.post0 \
9899
sagemaker-experiments==0.1.3 \
99100
/sagemaker_pytorch_container-1.3.1-py2.py3-none-any.whl \

docker/1.3.1/py3/Dockerfile.gpu

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,6 @@ RUN ompi_info --parsable --all | grep mpi_built_with_cuda_support:value \
8383
&& /opt/conda/bin/conda install -y -c anaconda \
8484
python=$PYTHON_VERSION \
8585
numpy==1.16.4 \
86-
scipy==1.3.0 \
8786
ipython==7.10.1 \
8887
mkl==2019.4 \
8988
mkl-include==2019.4 \
@@ -96,11 +95,11 @@ RUN ompi_info --parsable --all | grep mpi_built_with_cuda_support:value \
9695

9796
RUN conda install -c pytorch magma-cuda101==2.5.1 \
9897
&& conda install -c conda-forge \
99-
awscli==1.16.314 \
98+
awscli==1.17.7 \
10099
opencv==4.0.1 \
101100
&& conda install -y scikit-learn==0.21.2 \
102101
pandas==0.25.0 \
103-
pillow==6.2.1 \
102+
Pillow==6.2.0 \
104103
h5py==2.9.0 \
105104
requests==2.22.0 \
106105
&& conda clean -ya
@@ -123,6 +122,7 @@ RUN pip install \
123122
--no-cache-dir smdebug==0.5.0.post0 \
124123
sagemaker-experiments==0.1.3 \
125124
--no-cache-dir fastai==1.0.59 \
125+
scipy==1.2.2 \
126126
&& pip install --no-cache-dir -U https://pytorch-aws.s3.amazonaws.com/pytorch-1.3.1/py3/gpu/torch-1.3.1-cp36-cp36m-manylinux1_x86_64.whl \
127127
&& pip uninstall -y torchvision \
128128
&& pip install --no-cache-dir -U \

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ def read(fname):
4646
'Programming Language :: Python :: 2.7',
4747
'Programming Language :: Python :: 3.6',
4848
],
49-
install_requires=['numpy==1.16.4', 'Pillow==6.2.0', 'retrying==1.3.3', 'sagemaker-containers==2.5.11',
49+
install_requires=['numpy==1.16.4', 'Pillow==6.2.0', 'retrying==1.3.3', 'sagemaker-containers>=2.6.2',
5050
'six==1.12.0', 'torch==1.3.1'],
5151
extras_require={
5252
'test': ['boto3==1.9.169', 'coverage==4.5.3', 'docker-compose==1.23.2', 'flake8==3.7.7', 'Flask==1.1.1',

0 commit comments

Comments (0)