
Commit 2cd5522

mikekgfb authored and malfet committed
fix typo (#752)

* fix typo
* fix
* update
* fix
* fix
* build runner
* tab/spc
1 parent e660ab1 · commit 2cd5522

File tree

1 file changed: +13 -29 lines changed


.github/workflows/runner-cuda-dtype.yml

Lines changed: 13 additions & 29 deletions
@@ -1,4 +1,4 @@
-name: Run runner-aoti CUDA tests
+name: Run the aoti runner with CUDA using stories
 
 on:
   pull_request:
@@ -8,12 +8,14 @@ on:
   workflow_dispatch:
 
 jobs:
-  test-cuda:
+  test-runner-aot-cuda:
     uses: pytorch/test-infra/.github/workflows/linux_job.yml@main
     with:
       runner: linux.g5.4xlarge.nvidia.gpu
+      secrets-env: "HF_TOKEN_PERIODIC"
       gpu-arch-type: cuda
       gpu-arch-version: "12.1"
+      timeout: 60
       script: |
         echo "::group::Print machine info"
         uname -a
@@ -29,6 +31,7 @@ jobs:
         # Install requirements
 
         ./install_requirements.sh cuda
+        bash scripts/build_native.sh aoti
         pip3 list
         python3 -c 'import torch;print(f"torch: {torch.__version__, torch.version.git_version}")'
         echo "::endgroup::"
@@ -46,30 +49,11 @@ jobs:
         export MODEL_NAME=stories15M
         export MODEL_DIR=/tmp
 
-    - name: Install dependencies
-      run: |
-        ./install_requirements.sh
-        pip3 list
-        python3 -c 'import torch;print(f"torch: {torch.__version__, torch.version.git_version}")'
-        bash scripts/build_native.sh aoti
+        set -eou pipefail
+        export MODEL_DIR=${PWD}/checkpoints/stories15M
+        export PROMPT="Once upon a time in a land far away"
 
-    - name: Download checkpoint
-      run: |
-        mkdir -p checkpoints/stories15M
-        pushd checkpoints/stories15M
-        wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt
-        wget https://github.com/karpathy/llama2.c/raw/master/tokenizer.model
-        wget https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin
-        popd
-    - name: Run inference
-      run: |
-        set -eou pipefail
-
-
-        export MODEL_DIR=${PWD}/checkpoints/stories15M
-        export PROMPT="Once upon a time in a land far away"
-
-        for DTYPE in bfloat16; do
+        for DTYPE in bfloat16; do
           python torchchat.py generate --dtype ${DTYPE} --checkpoint-path ${MODEL_DIR}/stories15M.pt --temperature 0 --prompt "${PROMPT}" --device cuda
 
           python torchchat.py export --checkpoint-path ${MODEL_DIR}/stories15M.pt --output-dso-path /tmp/model.so
@@ -87,8 +71,8 @@ jobs:
           python generate.py --dtype ${DTYPE} --device cuda --checkpoint-path ${MODEL_PATH} --temperature 0 --dso-path ${MODEL_DIR}/${MODEL_NAME}.so > ./output_aoti
           cat ./output_aoti
 
-        done
+        done
 
-        echo "tests complete"
-        echo "******************************************"
-        echo "::endgroup::"
+        echo "tests complete"
+        echo "******************************************"
+        echo "::endgroup::"
