
Commit d72f5f7

ci : add AMD runners and workflows (ggml-org#16249)
* ci : add AMD runners and workflows
* ci : move AMD jobs to separate workflow
* cont : fix paths
1 parent b77e6c1 commit d72f5f7
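
The new workflow declares workflow_dispatch, so besides running on pushes to master it can also be started by hand. A minimal sketch using the GitHub CLI, assuming gh is authenticated against the repository (the --ref value is illustrative, not part of the commit):

    # manually dispatch the new AMD CI workflow on the master branch
    gh workflow run build-amd.yml --ref master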

File tree

3 files changed: +53, -28 lines

.github/workflows/build-amd.yml

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+name: CI (AMD)
+
+on:
+  workflow_dispatch: # allows manual triggering
+  push:
+    branches:
+      - master
+    paths: [
+      '.github/workflows/build-amd.yml',
+      '**/CMakeLists.txt',
+      '**/.cmake',
+      '**/*.h',
+      '**/*.hpp',
+      '**/*.c',
+      '**/*.cpp',
+      '**/*.cu',
+      '**/*.cuh',
+      '**/*.comp'
+    ]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  ggml-ci-x64-amd-vulkan:
+    runs-on: [self-hosted, Linux, X64, AMD]
+
+    steps:
+      - name: Clone
+        id: checkout
+        uses: actions/checkout@v4
+
+      - name: Test
+        id: ggml-ci
+        run: |
+          vulkaninfo --summary
+          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+
+  ggml-ci-x64-amd-rocm:
+    runs-on: [self-hosted, Linux, X64, AMD]
+
+    steps:
+      - name: Clone
+        id: checkout
+        uses: actions/checkout@v4
+
+      - name: Test
+        id: ggml-ci
+        run: |
+          amd-smi static
+          GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
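
The concurrency stanza above reuses the pattern from the main build.yml workflow. Going by standard GitHub Actions expression semantics (a reading aid, not part of the commit): on pull_request events github.head_ref is set, so the group resolves to the workflow name plus the branch ref and cancel-in-progress replaces superseded runs; on push and workflow_dispatch events, the only triggers declared here, github.head_ref is empty, so the group falls back to the unique run id and no run cancels another.

    concurrency:
      # PR: head_ref is non-empty -> group = "<workflow>-<ref>", older runs for the same branch are cancelled
      # push / dispatch: head_ref is empty -> group = "<workflow>-<run_id>", every run is kept
      group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
      cancel-in-progress: true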

.github/workflows/build.yml

Lines changed: 0 additions & 28 deletions
@@ -1461,34 +1461,6 @@ jobs:
         run: |
           bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp

-#  ggml-ci-x64-amd-vulkan:
-#    runs-on: [self-hosted, Linux, X64, AMD]
-#
-#    steps:
-#      - name: Clone
-#        id: checkout
-#        uses: actions/checkout@v4
-#
-#      - name: Test
-#        id: ggml-ci
-#        run: |
-#          vulkaninfo --summary
-#          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-#
-#  ggml-ci-x64-amd-rocm:
-#    runs-on: [self-hosted, Linux, X64, AMD]
-#
-#    steps:
-#      - name: Clone
-#        id: checkout
-#        uses: actions/checkout@v4
-#
-#      - name: Test
-#        id: ggml-ci
-#        run: |
-#          amd-smi static
-#          GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
   ggml-ci-mac-metal:
     runs-on: [self-hosted, macOS, ARM64]

ci/run.sh

Lines changed: 1 addition & 0 deletions
@@ -114,6 +114,7 @@ if [ ! -z ${GG_BUILD_NO_SVE} ]; then
     # arm 9 and newer enables sve by default, adjust these flags depending on the cpu used
     CMAKE_EXTRA="${CMAKE_EXTRA} -DGGML_NATIVE=OFF -DGGML_CPU_ARM_ARCH=armv8.5-a+fp16+i8mm"
 fi
+
 ## helpers

 # download a file if it does not exist or if it is outdated
