Skip to content

Commit 23a718f

Browse files
committed
Update 'build.sh'
1 parent c1e1963 commit 23a718f

File tree

1 file changed

+9
-9
lines changed

1 file changed

+9
-9
lines changed

build.sh

Lines changed: 9 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -1,14 +1,14 @@
11
#!/bin/bash
22

3-
git lfs install
3+
(git lfs env) || git lfs install
44
git submodule update --init --recursive
55

66
# Default values will be used if not set
7-
BASE_IMAGE=${BASE_IMAGE:-nvcr.io/nvidia/tritonserver:24.11-py3-min}
8-
PYTORCH_IMAGE=${PYTORCH_IMAGE:-nvcr.io/nvidia/pytorch:24.11-py3}
9-
TRT_VERSION=${TRT_VERSION:-10.7.0.23}
10-
TRT_URL_x86=${TRT_URL_x86:-https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.7.0/tars/TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-12.6.tar.gz}
11-
TRT_URL_ARM=${TRT_URL_ARM:-https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.7.0/tars/TensorRT-${TRT_VERSION}.ubuntu-24.04.aarch64-gnu.cuda-12.6.tar.gz}
7+
BASE_IMAGE=${BASE_IMAGE:-nvcr.io/nvidia/tritonserver:25.03-py3-min}
8+
PYTORCH_IMAGE=${PYTORCH_IMAGE:-nvcr.io/nvidia/pytorch:25.03-py3}
9+
TRT_VERSION=${TRT_VERSION:-10.9.0.34}
10+
TRT_URL_x86=${TRT_URL_x86:-https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.9.0/tars/TensorRT-${TRT_VERSION}.Linux.x86_64-gnu.cuda-12.8.tar.gz}
11+
TRT_URL_ARM=${TRT_URL_ARM:-https://developer.nvidia.com/downloads/compute/machine-learning/tensorrt/10.9.0/tars/TensorRT-${TRT_VERSION}.ubuntu-24.04.aarch64-gnu.cuda-12.8.tar.gz}
1212

1313
# Build the TRT-LLM base image that has TRT-LLM installed and will be used as
1414
# the base image for building Triton server and TRT-LLM backend.
@@ -24,7 +24,7 @@ docker build -t trtllm_base \
2424
cd ../
2525
# Need to use the aligned version of the Triton server repository.
2626
# Refer to the support matrix for the aligned version: https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html
27-
TRITON_SERVER_REPO_TAG=${TRITON_SERVER_REPO_TAG:-r24.11}
27+
TRITON_SERVER_REPO_TAG=${TRITON_SERVER_REPO_TAG:-r25.05}
2828
git clone -b ${TRITON_SERVER_REPO_TAG} https://github.com/triton-inference-server/server.git
2929
cd server
3030

@@ -33,8 +33,8 @@ cd server
3333
# the tags of the TensorRT-LLM backend and Python backend repositories that will
3434
# be used to build the container.
3535
TRTLLM_BASE_IMAGE=${TRTLLM_BASE_IMAGE:-trtllm_base}
36-
TENSORRTLLM_BACKEND_REPO_TAG=${TENSORRTLLM_BACKEND_REPO_TAG:-v0.15.0}
37-
PYTHON_BACKEND_REPO_TAG=${PYTHON_BACKEND_REPO_TAG:-r24.11}
36+
TENSORRTLLM_BACKEND_REPO_TAG=${TENSORRTLLM_BACKEND_REPO_TAG:-v0.19.0}
37+
PYTHON_BACKEND_REPO_TAG=${PYTHON_BACKEND_REPO_TAG:-r25.05}
3838

3939
TRITON_GITHUB_ORGANIZATION=${TRITON_GITHUB_ORGANIZATION:-}
4040
if [ "$TRITON_GITHUB_ORGANIZATION" != "" ]

0 commit comments

Comments (0)