Commit c525322

Merge pull request #7132 from MicroDev1/ci
CI: Schedule PR jobs based on commit specific changes
2 parents 866ff5b + 30f07fb commit c525322

3 files changed: +267 additions, -14 deletions
.github/workflows/build.yml

Lines changed: 19 additions & 10 deletions
@@ -125,20 +125,29 @@ jobs:
         [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static-raspbian s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-raspbian-${{ env.CP_VERSION }} --no-progress --region us-east-1
         [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-amd64-linux-${{ env.CP_VERSION }} --no-progress --region us-east-1
         [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static.exe s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-x64-windows-${{ env.CP_VERSION }}.exe --no-progress --region us-east-1
-    - name: "Get changes"
+    - name: Get last commit with checks
+      id: get-last-commit-with-checks
       if: github.event_name == 'pull_request'
-      uses: dorny/paths-filter@v2
-      id: filter
-      with:
-        list-files: json
-        filters: |
-          changed:
-            - '**'
-    - name: "Set matrix"
+      working-directory: tools
+      env:
+        REPO: ${{ github.repository }}
+        PULL: ${{ github.event.number }}
+        GITHUB_TOKEN: ${{ github.token }}
+        EXCLUDE_COMMIT: ${{ github.event.after }}
+      run: python3 -u ci_changes_per_commit.py
+    - name: Get changes
+      id: get-changes
+      if: github.event_name == 'pull_request'
+      uses: tj-actions/changed-files@v34
+      with:
+        json: "true"
+        base_sha: ${{ steps.get-last-commit-with-checks.outputs.commit }}
+    - name: Set matrix
       id: set-matrix
       working-directory: tools
       env:
-        CHANGED_FILES: ${{ steps.filter.outputs.changed_files }}
+        CHANGED_FILES: ${{ toJSON(steps.get-changes.outputs.all_changed_and_modified_files) }}
+        LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.checkruns }}
       run: python3 -u ci_set_matrix.py
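With these changes, the matrix step no longer consumes dorny/paths-filter output; it reads two environment variables, CHANGED_FILES (the changed paths from tj-actions/changed-files) and LAST_FAILED_JOBS (the failed-job map emitted by ci_changes_per_commit.py). A minimal sketch of roughly what ci_set_matrix.py receives, using made-up file and board names:

    # Illustrative only: hypothetical values showing the JSON shapes ci_set_matrix.py parses.
    import json
    import os

    # Changed paths, provided as JSON by tj-actions/changed-files (json: "true").
    os.environ["CHANGED_FILES"] = json.dumps(
        ["ports/atmel-samd/boards/metro_m0_express/mpconfigboard.h"]  # hypothetical path
    )
    # Matrix job name -> boards that failed on the last commit that had checks,
    # taken from the "checkruns" output of ci_changes_per_commit.py.
    os.environ["LAST_FAILED_JOBS"] = json.dumps({"build-arm": ["metro_m0_express"]})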

tools/ci_changes_per_commit.py

Lines changed: 227 additions & 0 deletions
@@ -0,0 +1,227 @@
+#! /usr/bin/env python3
+
+# SPDX-FileCopyrightText: 2021 microDev
+#
+# SPDX-License-Identifier: MIT
+
+# GraphQL Query
+
+QUERY_COMMITS = """
+query ($owner: String!, $name: String!, $pullNumber: Int!, $commitsPerPage: Int!, $beforeCommit: String) {
+  repository(owner: $owner, name: $name) {
+    pullRequest(number: $pullNumber) {
+      commits(last: $commitsPerPage, before: $beforeCommit) {
+        totalCount
+        pageInfo {
+          startCursor
+          hasPreviousPage
+        }
+        nodes {
+          commit {
+            checkSuites(first: 3) {
+              nodes {
+                conclusion
+                workflowRun {
+                  workflow {
+                    name
+                  }
+                }
+                id
+              }
+              totalCount
+            }
+            oid
+          }
+        }
+      }
+    }
+  }
+}
+"""
+
+QUERY_CHECKRUNS = """
+query ($checkSuiteID: ID!,
+       $afterFailedRun: String, $afterIncompleteRun: String,
+       $includeFailedRuns: Boolean!, $includeIncompleteRuns: Boolean!) {
+  node(id: $checkSuiteID) {
+    ... on CheckSuite {
+      failedRuns: checkRuns(
+        first: 100
+        after: $afterFailedRun
+        filterBy: {checkType: LATEST, conclusions: [ACTION_REQUIRED, TIMED_OUT, CANCELLED, FAILURE, NEUTRAL, STARTUP_FAILURE]}
+      ) @include(if: $includeFailedRuns) {
+        nodes {
+          name
+        }
+        pageInfo {
+          endCursor
+          hasNextPage
+        }
+      }
+      incompleteRuns: checkRuns(
+        first: 100
+        after: $afterIncompleteRun
+        filterBy: {checkType: LATEST, statuses: [QUEUED, IN_PROGRESS, WAITING, PENDING, REQUESTED]}
+      ) @include(if: $includeIncompleteRuns) {
+        nodes {
+          name
+        }
+        pageInfo {
+          endCursor
+          hasNextPage
+        }
+      }
+    }
+  }
+}
+"""
+
+
+import os
+import re
+import json
+import requests
+
+
+query_variables_commits = {
+    "owner": "",
+    "name": "",
+    "pullNumber": int(os.environ["PULL"]),
+    "commitsPerPage": 20,
+    "beforeCommit": None,
+}
+
+
+query_variables_checkruns = {
+    "checkSuiteID": "",
+    "afterFailedRun": None,
+    "afterIncompleteRun": None,
+    "includeFailedRuns": True,
+    "includeIncompleteRuns": True,
+}
+
+
+headers = {"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"}
+
+
+class Query:
+    def __init__(self, query, variables={}, headers={}):
+        self.query = query
+        self.variables = variables
+        self.headers = headers
+
+    def paginate(self, page_info, name):
+        has_page = (
+            page_info["hasNextPage"] if name.startswith("after") else page_info["hasPreviousPage"]
+        )
+        if has_page:
+            self.variables[name] = (
+                page_info["endCursor"] if name.startswith("after") else page_info["startCursor"]
+            )
+        return has_page
+
+    def fetch(self):
+        request = requests.post(
+            "https://api.github.com/graphql",
+            json={"query": self.query, "variables": self.variables},
+            headers=self.headers,
+        )
+        if request.status_code == 200:
+            return request.json()
+        else:
+            raise Exception("Query Failed: {}".format(request.status_code))
+
+
+def set_output(name, value):
+    if "GITHUB_OUTPUT" in os.environ:
+        with open(os.environ["GITHUB_OUTPUT"], "at") as f:
+            print(f"{name}={value}", file=f)
+    else:
+        print(f"Would set GitHub actions output {name} to '{value}'")
+
+
+def get_commit_and_checksuite(query_commits):
+    commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]
+
+    if commits["totalCount"] > 0:
+        for commit in reversed(commits["nodes"]):
+            commit = commit["commit"]
+            commit_sha = commit["oid"]
+            if commit_sha == os.environ["EXCLUDE_COMMIT"]:
+                continue
+            checksuites = commit["checkSuites"]
+            if checksuites["totalCount"] > 0:
+                for checksuite in checksuites["nodes"]:
+                    if checksuite["workflowRun"]["workflow"]["name"] == "Build CI":
+                        return [
+                            commit_sha,
+                            checksuite["id"] if checksuite["conclusion"] != "SUCCESS" else None,
+                        ]
+        else:
+            if query_commits.paginate(commits["pageInfo"], "beforeCommit"):
+                return get_commit_and_checksuite(query_commits)
+
+    return [None, None]
+
+
+def append_runs_to_list(runs, list):
+    regex_matrix = re.compile("^build-[^ ]+")
+    regex_board = re.compile("\([^ ]+\)$")
+    for run in runs["nodes"]:
+        name = run["name"]
+        res_matrix = regex_matrix.search(name)
+        if res_matrix:
+            matrix = res_matrix.group()
+            if matrix not in list:
+                list[matrix] = []
+            list[matrix].append(regex_board.search(name).group()[1:-1])
+
+
+def get_bad_checkruns(query_checkruns, list={}):
+    checkruns = query_checkruns.fetch()["data"]["node"]
+    run_types = ["failed", "incomplete"]
+    paginate = False
+
+    for run_type in run_types:
+        run_type_camel = run_type.capitalize() + "Run"
+        run_type = run_type + "Runs"
+
+        append_runs_to_list(checkruns[run_type], list)
+
+        if query_checkruns.paginate(checkruns[run_type]["pageInfo"], "after" + run_type_camel):
+            query_checkruns.variables["include" + run_type_camel] = True
+            paginate = True
+
+    return get_bad_checkruns(query_checkruns, list) if paginate else list
+
+
+def main():
+    query_commits = Query(QUERY_COMMITS, query_variables_commits, headers)
+    query_commits.variables["owner"], query_commits.variables["name"] = os.environ["REPO"].split(
+        "/"
+    )
+
+    commit, checksuite = get_commit_and_checksuite(query_commits)
+
+    if checksuite is None:
+        if commit is None:
+            print("No checkSuites found -> Abort")
+        else:
+            set_output("commit", commit)
+        quit()
+
+    query_checkruns = Query(QUERY_CHECKRUNS, query_variables_checkruns, headers)
+    query_checkruns.variables["checkSuiteID"] = checksuite
+
+    checkruns = get_bad_checkruns(query_checkruns)
+
+    if len(checkruns) == 0:
+        print("No checkRuns found -> Abort")
+        quit()
+
+    set_output("commit", commit)
+    set_output("checkruns", json.dumps(checkruns))
+
+
+if __name__ == "__main__":
+    main()
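The script reads everything it needs from the environment (REPO, PULL, GITHUB_TOKEN, EXCLUDE_COMMIT), so it can also be exercised outside the workflow for debugging. A rough local-run sketch; the repository, PR number, token, and SHA below are placeholders, not values taken from this commit:

    # Hypothetical local invocation of tools/ci_changes_per_commit.py.
    import os
    import subprocess

    env = dict(
        os.environ,
        REPO="adafruit/circuitpython",  # split into GraphQL owner/name inside the script
        PULL="1234",                    # pull request number to inspect (placeholder)
        GITHUB_TOKEN="<token>",         # used for the GraphQL Authorization header (placeholder)
        EXCLUDE_COMMIT="<head-sha>",    # PR head SHA to skip when looking for earlier checks (placeholder)
    )
    subprocess.run(["python3", "-u", "ci_changes_per_commit.py"], cwd="tools", env=env, check=True)

When GITHUB_OUTPUT is set, the commit and checkruns values are appended to that file; otherwise the set_output helper above just prints what it would have set.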

tools/ci_set_matrix.py

Lines changed: 21 additions & 4 deletions
@@ -25,7 +25,6 @@
 import os
 import sys
 import json
-import yaml
 import pathlib
 from concurrent.futures import ThreadPoolExecutor
 
@@ -62,14 +61,22 @@
 if len(sys.argv) > 1:
     print("Using files list on commandline")
     changed_files = sys.argv[1:]
+    last_failed_jobs = {}
 else:
     c = os.environ["CHANGED_FILES"]
     if c == "":
         print("CHANGED_FILES is in environment, but value is empty")
         changed_files = []
     else:
         print("Using files list in CHANGED_FILES")
-        changed_files = json.loads(os.environ["CHANGED_FILES"])
+        changed_files = json.loads(c)
+
+    j = os.environ["LAST_FAILED_JOBS"]
+    if j == "":
+        print("LAST_FAILED_JOBS is in environment, but value is empty")
+        last_failed_jobs = {}
+    else:
+        last_failed_jobs = json.loads(j)
 
 
 def set_output(name, value):
@@ -196,7 +203,7 @@ def get_settings(board):
     # Split boards by architecture.
     print("Building boards:")
     arch_to_boards = {"aarch": [], "arm": [], "riscv": [], "espressif": []}
-    for board in sorted(boards_to_build):
+    for board in boards_to_build:
         print(" ", board)
         port = board_to_port.get(board)
         # A board can appear due to its _deletion_ (rare)
@@ -208,10 +215,20 @@ def get_settings(board):
 
     # Set the step outputs for each architecture
     for arch in arch_to_boards:
+        # Append previous failed jobs
+        if f"build-{arch}" in last_failed_jobs:
+            failed_boards = last_failed_jobs[f"build-{arch}"]
+            for board in failed_boards:
+                if not board in arch_to_boards[arch]:
+                    arch_to_boards[arch].append(board)
+        # Set Output
         set_output(f"boards-{arch}", json.dumps(sorted(arch_to_boards[arch])))
 
 
 def set_docs_to_build(build_all):
+    if "build-doc" in last_failed_jobs:
+        build_all = True
+
     doc_match = build_all
     if not build_all:
         doc_pattern = re.compile(
@@ -224,7 +241,7 @@ def set_docs_to_build(build_all):
 
     # Set the step outputs
     print("Building docs:", doc_match)
-    set_output(f"build-doc", doc_match)
+    set_output("build-doc", doc_match)
 
 
 def check_changed_files():
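The net effect of the last_failed_jobs handling is that boards from matrix jobs that failed on the previous checked commit are queued again even when the new commit does not touch them, and a previously failed docs job forces build-doc back on. A small self-contained sketch of the per-architecture merge, with invented job and board names:

    # Stand-alone illustration of the merge added to the per-architecture output loop;
    # the job and board names are invented for the example.
    import json

    last_failed_jobs = {"build-arm": ["feather_m4_express"]}
    arch_to_boards = {"arm": ["metro_m0_express"], "espressif": []}

    for arch in arch_to_boards:
        # Re-queue boards from the previously failed job for this architecture, if any.
        for board in last_failed_jobs.get(f"build-{arch}", []):
            if board not in arch_to_boards[arch]:
                arch_to_boards[arch].append(board)
        print(f"boards-{arch}", json.dumps(sorted(arch_to_boards[arch])))
    # boards-arm ["feather_m4_express", "metro_m0_express"]
    # boards-espressif []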
