   PACKAGE_NAME: dpnp
   MODULE_NAME: dpnp
   CHANNELS: '-c dppy/label/dev -c intel -c defaults --override-channels'
+  VER_JSON_NAME: 'version.json'
+  VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
+  VER_SCRIPT2: "d = j['dpnp'][0]; print('='.join((d[s] for s in ('version', 'build'))))"
 
 jobs:
   build_linux:
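The two `VER_SCRIPT` fragments above are concatenated into a single `python -c` one-liner by the test jobs added further down: they parse the `version.json` written by `conda search --json` and print a `<version>=<build>` pin for the freshly built package. A minimal standalone sketch of the same logic, using an assumed, trimmed JSON shape rather than real `conda search` output:

```python
import json

# Assumed (trimmed) shape of `conda search dpnp --info --json` output:
# a dict keyed by package name, mapping to a list of build records.
sample = {"dpnp": [{"version": "0.11.0", "build": "py39h1234567_0"}]}

with open("version.json", "w") as f:
    json.dump(sample, f)

# Equivalent of VER_SCRIPT1 + VER_SCRIPT2:
f = open('version.json', 'r'); j = json.load(f); f.close()
d = j['dpnp'][0]
print('='.join(d[s] for s in ('version', 'build')))  # sample output: 0.11.0=py39h1234567_0
```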
@@ -49,6 +52,9 @@
           activate-environment: 'build'
           use-only-tar-bz2: true
 
+      - name: Install conda-build
+        run: conda install conda-build
+
       - name: Cache conda packages
         uses: actions/cache@v3
         env:
@@ -61,9 +67,6 @@
             ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
             ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
 
-      - name: Install conda-build
-        run: conda install conda-build
-
       - name: Build conda package
         run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe
         env:
@@ -138,9 +141,281 @@ jobs:
           name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
           path: ${{ env.conda-bld }}${{ env.PACKAGE_NAME }}-*.tar.bz2
 
-  upload_linux:
+  test_linux:
     needs: build_linux
 
+    runs-on: ubuntu-latest
+
+    defaults:
+      run:
+        shell: bash -l {0}
+
+    strategy:
+      matrix:
+        python: ['3.8', '3.9']
+        dpctl: ['0.13.0']
+        experimental: [false]
+
+    continue-on-error: ${{ matrix.experimental }}
+
+    env:
+      conda-pkgs: '/home/runner/conda_pkgs_dir/'
+      channel-path: '${{ github.workspace }}/channel/'
+      pkg-path-in-channel: '${{ github.workspace }}/channel/linux-64/'
+      extracted-pkg-path: '${{ github.workspace }}/pkg/'
+      tests-path: '${{ github.workspace }}/pkg/info/test/'
+      ver-json-path: '${{ github.workspace }}/version.json'
+
+    steps:
+      - name: Download artifact
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+          path: ${{ env.pkg-path-in-channel }}
+
+      - name: Extract package archive
+        run: |
+          mkdir -p ${{ env.extracted-pkg-path }}
+          tar -xvf ${{ env.pkg-path-in-channel }}/${{ env.PACKAGE_NAME }}-*.tar.bz2 -C ${{ env.extracted-pkg-path }}
+
+      - name: Setup miniconda
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          auto-update-conda: true
+          python-version: ${{ matrix.python }}
+          miniconda-version: 'latest'
+          activate-environment: 'test'
+
+      # Needed to be able to run conda index
+      - name: Install conda-build
+        run: conda install conda-build
+
+      - name: Create conda channel
+        run: conda index ${{ env.channel-path }}
+
+      - name: Test conda channel
+        run: |
+          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }}
+          cat ${{ env.ver-json-path }}
+
+      - name: Collect dependencies
+        run: |
+          export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")
+          echo PACKAGE_VERSION=${PACKAGE_VERSION}
+
+          conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
+          cat lockfile
+        env:
+          TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'
+
+      - name: Cache conda packages
+        uses: actions/cache@v3
+        env:
+          CACHE_NUMBER: 1  # Increase to reset cache
+        with:
+          path: ${{ env.conda-pkgs }}
+          key:
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
+          restore-keys: |
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
+
+      - name: Install dpnp
+        run: |
+          export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")
+          echo PACKAGE_VERSION=${PACKAGE_VERSION}
+
+          conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} dpctl=${{ matrix.dpctl }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
+        env:
+          TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'
+
+      - name: List installed packages
+        run: conda list
+
+      - name: Smoke test
+        run: python -c "import dpnp, dpctl; dpctl.lsplatform()"
+
+      # TODO: run the whole scope once the issues on CPU are resolved
+      - name: Run tests
+        run: python -m pytest -q -ra --disable-warnings -vv tests/test_arraycreation.py tests/test_dparray.py tests/test_mathematical.py
+        env:
+          SYCL_ENABLE_HOST_DEVICE: '1'
+        working-directory: ${{ env.tests-path }}
+
+  test_windows:
+    needs: build_windows
+
+    runs-on: windows-latest
+
+    defaults:
+      run:
+        shell: cmd /C CALL {0}
+
+    strategy:
+      matrix:
+        python: ['3.8', '3.9']
+        dpctl: ['0.13.0']
+        experimental: [false]
+
+    continue-on-error: ${{ matrix.experimental }}
+
+    env:
+      conda-pkgs: 'C:\Users\runneradmin\conda_pkgs_dir\'
+      channel-path: '${{ github.workspace }}\channel\'
+      pkg-path-in-channel: '${{ github.workspace }}\channel\win-64\'
+      extracted-pkg-path: '${{ github.workspace }}\pkg'
+      tests-path: '${{ github.workspace }}\pkg\info\test\'
+      ver-json-path: '${{ github.workspace }}\version.json'
+      active-env-name: 'test'
+      miniconda-lib-path: 'C:\Miniconda3\envs\test\Library\lib\'
+      miniconda-bin-path: 'C:\Miniconda3\envs\test\Library\bin\'
+
+    steps:
+      - name: Download artifact
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
+          path: ${{ env.pkg-path-in-channel }}
+
+      - name: Extract package archive
+        run: |
+          @echo on
+          mkdir -p ${{ env.extracted-pkg-path }}
+
+          set SEARCH_SCRIPT="DIR ${{ env.pkg-path-in-channel }} /s/b | FINDSTR /r "dpnp-.*\.tar\.bz2""
+          FOR /F "tokens=* USEBACKQ" %%F IN (`%SEARCH_SCRIPT%`) DO (
+            SET FULL_PACKAGE_PATH=%%F
+          )
+          echo FULL_PACKAGE_PATH: %FULL_PACKAGE_PATH%
+
+          python -c "import shutil; shutil.unpack_archive(r\"%FULL_PACKAGE_PATH%\", extract_dir=r\"${{ env.extracted-pkg-path }}\")"
+          dir ${{ env.extracted-pkg-path }}
+
+      - name: Setup miniconda
+        uses: conda-incubator/setup-miniconda@v2
+        with:
+          auto-update-conda: true
+          python-version: ${{ matrix.python }}
+          miniconda-version: 'latest'
+          activate-environment: ${{ env.active-env-name }}
+
+      # Needed to be able to run conda index
+      - name: Install conda-build
+        run: conda install conda-build
+
+      - name: Create conda channel
+        run: conda index ${{ env.channel-path }}
+
+      - name: Test conda channel
+        run: |
+          @echo on
+          conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }}
+
+      - name: Dump version.json
+        run: more ${{ env.ver-json-path }}
+
+      - name: Collect dependencies
+        run: |
+          @echo on
+          set "SCRIPT=${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}"
+          FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO (
+            SET PACKAGE_VERSION=%%F
+          )
+          echo PACKAGE_VERSION: %PACKAGE_VERSION%
+
+          conda install ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% dpctl=${{ matrix.dpctl }} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
+        env:
+          TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'
+
+      - name: Dump lockfile
+        run: more lockfile
+
+      - name: Cache conda packages
+        uses: actions/cache@v3
+        env:
+          CACHE_NUMBER: 1  # Increase to reset cache
+        with:
+          path: ${{ env.conda-pkgs }}
+          key:
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
+          restore-keys: |
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
+            ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
+
+      - name: Install opencl_rt
+        run: conda install opencl_rt -c intel --override-channels
+
+      - name: Install dpnp
+        run: |
+          @echo on
+          set "SCRIPT=${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}"
+          FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO (
+            SET PACKAGE_VERSION=%%F
+          )
+          echo PACKAGE_VERSION: %PACKAGE_VERSION%
+
+          conda install ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% dpctl=${{ matrix.dpctl }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
+        env:
+          TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'
+
+      - name: List installed packages
+        run: conda list
+
+      - name: Add library
+        shell: pwsh
+        run: |
+          # Make sure the below libraries exist
+          Get-Item -Path ${{ env.miniconda-bin-path }}\OpenCL.dll
+          Get-Item -Path ${{ env.miniconda-lib-path }}\intelocl64.dll
+
+          echo "OCL_ICD_FILENAMES=${{ env.miniconda-lib-path }}\intelocl64.dll" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+          try {$list = Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors | Select-Object -ExpandProperty Property } catch {$list=@()}
+
+          if ($list.count -eq 0) {
+            if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos)) {
+              New-Item -Path HKLM:\SOFTWARE\Khronos
+            }
+
+            if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos\OpenCL)) {
+              New-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL
+            }
+
+            if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors)) {
+              New-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors
+            }
+
+            New-ItemProperty -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors -Name ${{ env.miniconda-lib-path }}\intelocl64.dll -Value 0
+            try {$list = Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors | Select-Object -ExpandProperty Property } catch {$list=@()}
+            Write-Output $(Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors)
+
+            # Now copy OpenCL.dll into system folder
+            $system_ocl_icd_loader="C:\Windows\System32\OpenCL.dll"
+            $python_ocl_icd_loader="${{ env.miniconda-bin-path }}\OpenCL.dll"
+            Copy-Item -Path $python_ocl_icd_loader -Destination $system_ocl_icd_loader
+
+            if (Test-Path -Path $system_ocl_icd_loader) {
+              Write-Output "$system_ocl_icd_loader has been copied"
+              $acl = Get-Acl $system_ocl_icd_loader
+              Write-Output $acl
+            } else {
+              Write-Output "OCL-ICD-Loader was not copied"
+            }
+
+            # Variable assisting OpenCL CPU driver to find TBB DLLs which are not located where it expects them by default
+            echo "TBB_DLL_PATH=${{ env.miniconda-bin-path }}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+          }
+
+      - name: Smoke test
+        run: python -c "import dpnp, dpctl; dpctl.lsplatform()"
+
+      # TODO: run the whole scope once the issues on CPU are resolved
+      - name: Run tests
+        run: python -m pytest -q -ra --disable-warnings -vv tests\test_arraycreation.py tests\test_dparray.py tests\test_mathematical.py
+        working-directory: ${{ env.tests-path }}
+
+  upload_linux:
+    needs: test_linux
+
     if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}}
 
     runs-on: ubuntu-latest
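The `Add library` step above registers the Intel OpenCL CPU runtime with the Khronos ICD loader by creating a value named after the full path of `intelocl64.dll`, set to 0, under `HKLM:\SOFTWARE\Khronos\OpenCL\Vendors`, and then copies `OpenCL.dll` into `System32`. A minimal Python sketch of just the registry part (Windows-only, needs elevated rights; it mirrors the PowerShell above rather than replacing it):

```python
import winreg

# Same DLL the workflow points OCL_ICD_FILENAMES at (env.miniconda-lib-path).
dll_path = r"C:\Miniconda3\envs\test\Library\lib\intelocl64.dll"

# The ICD loader enumerates values under this key: each value name is the full
# path of a vendor ICD, and a DWORD value of 0 means the entry is enabled.
key = winreg.CreateKeyEx(
    winreg.HKEY_LOCAL_MACHINE,
    r"SOFTWARE\Khronos\OpenCL\Vendors",
    0,
    winreg.KEY_WRITE,
)
winreg.SetValueEx(key, dll_path, 0, winreg.REG_DWORD, 0)
winreg.CloseKey(key)
```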
@@ -176,7 +451,7 @@ jobs:
         run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2
 
   upload_windows:
-    needs: build_windows
+    needs: test_windows
 
     if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}}
 
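Both upload jobs keep the same `if:` guard. Since `&&` binds tighter than `||` in GitHub Actions expressions, the guard allows uploads from the master branch, from `release*` branches, and from pushed tags. A small Python sketch of the same predicate; the refs in the asserts are made-up examples:

```python
def should_upload(ref: str, event_name: str) -> bool:
    # master  OR  release branches  OR  (push AND tag ref)
    return (
        ref == "refs/heads/master"
        or ref.startswith("refs/heads/release")
        or (event_name == "push" and "refs/tags/" in ref)
    )

assert should_upload("refs/heads/master", "pull_request")
assert should_upload("refs/heads/release0.10", "push")
assert should_upload("refs/tags/0.10.1", "push")
assert not should_upload("refs/heads/feature-x", "push")
```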