Commit fa9e257

Merge remote-tracking branch 'upstream/main' into libcxx/ranges/join_with

2 parents: de46d63 + 79a72c4

13,754 files changed: +1,434,900 / -555,743 lines

.ci/compute_projects.py

Lines changed: 20 additions & 9 deletions
@@ -57,6 +57,13 @@
     ".ci": {"llvm", "clang", "lld", "lldb"},
 }

+# This mapping describes runtimes that should be enabled for a specific project,
+# but not necessarily run for testing. The only case of this currently is lldb
+# which needs some runtimes enabled for tests.
+DEPENDENT_RUNTIMES_TO_BUILD = {"lldb": {"libcxx", "libcxxabi", "libunwind"}}
+
+# This mapping describes runtimes that should be tested when the key project is
+# touched.
 DEPENDENT_RUNTIMES_TO_TEST = {"clang": {"libcxx", "libcxxabi", "libunwind"}}

 EXCLUDE_LINUX = {
@@ -180,16 +187,20 @@ def _compute_project_check_targets(projects_to_test: Set[str]) -> Set[str]:
 def _compute_runtimes_to_test(projects_to_test: Set[str]) -> Set[str]:
     runtimes_to_test = set()
     for project_to_test in projects_to_test:
-        if project_to_test not in DEPENDENT_RUNTIMES_TO_TEST:
-            continue
-        runtimes_to_test.update(DEPENDENT_RUNTIMES_TO_TEST[project_to_test])
+        if project_to_test in DEPENDENT_RUNTIMES_TO_TEST:
+            runtimes_to_test.update(DEPENDENT_RUNTIMES_TO_TEST[project_to_test])
+        if project_to_test in DEPENDENT_RUNTIMES_TO_BUILD:
+            runtimes_to_test.update(DEPENDENT_RUNTIMES_TO_BUILD[project_to_test])
     return runtimes_to_test


-def _compute_runtime_check_targets(runtimes_to_test: Set[str]) -> Set[str]:
+def _compute_runtime_check_targets(projects_to_test: Set[str]) -> Set[str]:
     check_targets = set()
-    for runtime_to_test in runtimes_to_test:
-        check_targets.add(PROJECT_CHECK_TARGETS[runtime_to_test])
+    for project_to_test in projects_to_test:
+        if project_to_test not in DEPENDENT_RUNTIMES_TO_TEST:
+            continue
+        for runtime_to_test in DEPENDENT_RUNTIMES_TO_TEST[project_to_test]:
+            check_targets.add(PROJECT_CHECK_TARGETS[runtime_to_test])
     return check_targets


@@ -216,16 +227,16 @@ def get_env_variables(modified_files: list[str], platform: str) -> Set[str]:
     projects_to_test = _compute_projects_to_test(modified_projects, platform)
     projects_to_build = _compute_projects_to_build(projects_to_test)
     projects_check_targets = _compute_project_check_targets(projects_to_test)
-    runtimes_to_test = _compute_runtimes_to_test(projects_to_test)
-    runtimes_check_targets = _compute_runtime_check_targets(runtimes_to_test)
+    runtimes_to_build = _compute_runtimes_to_test(projects_to_test)
+    runtimes_check_targets = _compute_runtime_check_targets(projects_to_test)
     # We use a semicolon to separate the projects/runtimes as they get passed
     # to the CMake invocation and thus we need to use the CMake list separator
     # (;). We use spaces to separate the check targets as they end up getting
     # passed to ninja.
     return {
         "projects_to_build": ";".join(sorted(projects_to_build)),
         "project_check_targets": " ".join(sorted(projects_check_targets)),
-        "runtimes_to_build": ";".join(sorted(runtimes_to_test)),
+        "runtimes_to_build": ";".join(sorted(runtimes_to_build)),
         "runtimes_check_targets": " ".join(sorted(runtimes_check_targets)),
     }
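
Note: the net effect of this change is that touching lldb now pulls libcxx, libcxxabi and libunwind into runtimes_to_build without adding anything to runtimes_check_targets, because only DEPENDENT_RUNTIMES_TO_TEST feeds the check targets. A minimal standalone sketch of that behaviour (the two runtime mappings are copied from the diff; the trimmed-down PROJECT_CHECK_TARGETS and the helper names here are illustrative only, not the script's real definitions):

# Standalone sketch; the mappings mirror the diff, everything else is illustrative.
DEPENDENT_RUNTIMES_TO_BUILD = {"lldb": {"libcxx", "libcxxabi", "libunwind"}}
DEPENDENT_RUNTIMES_TO_TEST = {"clang": {"libcxx", "libcxxabi", "libunwind"}}
PROJECT_CHECK_TARGETS = {
    "libcxx": "check-cxx",
    "libcxxabi": "check-cxxabi",
    "libunwind": "check-unwind",
}


def runtimes_to_build_for(projects: set[str]) -> set[str]:
    # A runtime gets built if it is either tested or merely required by a project.
    built: set[str] = set()
    for project in projects:
        built |= DEPENDENT_RUNTIMES_TO_TEST.get(project, set())
        built |= DEPENDENT_RUNTIMES_TO_BUILD.get(project, set())
    return built


def runtime_check_targets_for(projects: set[str]) -> set[str]:
    # Only runtimes listed in DEPENDENT_RUNTIMES_TO_TEST produce check targets.
    return {
        PROJECT_CHECK_TARGETS[runtime]
        for project in projects
        for runtime in DEPENDENT_RUNTIMES_TO_TEST.get(project, set())
    }


print(runtimes_to_build_for({"lldb"}))       # {'libcxx', 'libcxxabi', 'libunwind'}
print(runtime_check_targets_for({"lldb"}))   # set(): built for lldb, but not run
print(runtime_check_targets_for({"clang"}))  # {'check-cxx', 'check-cxxabi', 'check-unwind'}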

.ci/compute_projects_test.py

Lines changed: 15 additions & 4 deletions
@@ -147,7 +147,7 @@ def test_flang(self):

     def test_invalid_subproject(self):
         env_variables = compute_projects.get_env_variables(
-            [".ci/compute_projects.py"], "Linux"
+            ["third-party/benchmark/CMakeLists.txt"], "Linux"
         )
         self.assertEqual(env_variables["projects_to_build"], "")
         self.assertEqual(env_variables["project_check_targets"], "")
@@ -163,7 +163,7 @@ def test_top_level_file(self):

     def test_exclude_runtiems_in_projects(self):
         env_variables = compute_projects.get_env_variables(
-            [".ci/compute_projects.py", "libcxx/CMakeLists.txt"], "Linux"
+            ["libcxx/CMakeLists.txt"], "Linux"
         )
         self.assertEqual(env_variables["projects_to_build"], "")
         self.assertEqual(env_variables["project_check_targets"], "")
@@ -192,10 +192,10 @@ def test_ci(self):
         env_variables = compute_projects.get_env_variables(
             [".ci/compute_projects.py"], "Linux"
         )
-        self.assertEqual(env_variables["projects_to_build"], "clang;lld;llvm;lldb")
+        self.assertEqual(env_variables["projects_to_build"], "clang;lld;lldb;llvm")
         self.assertEqual(
             env_variables["project_check_targets"],
-            "check-clang check-lld check-llvm check-lldb",
+            "check-clang check-lld check-lldb check-llvm",
         )
         self.assertEqual(
             env_variables["runtimes_to_build"], "libcxx;libcxxabi;libunwind"
@@ -205,6 +205,17 @@ def test_ci(self):
             "check-cxx check-cxxabi check-unwind",
         )

+    def test_lldb(self):
+        env_variables = compute_projects.get_env_variables(
+            ["lldb/CMakeLists.txt"], "Linux"
+        )
+        self.assertEqual(env_variables["projects_to_build"], "clang;lldb;llvm")
+        self.assertEqual(env_variables["project_check_targets"], "check-lldb")
+        self.assertEqual(
+            env_variables["runtimes_to_build"], "libcxx;libcxxabi;libunwind"
+        )
+        self.assertEqual(env_variables["runtimes_check_targets"], "")
+

 if __name__ == "__main__":
     unittest.main()
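
These are plain unittest cases, so the new test_lldb case can be run on its own. A hedged example of a local run, assuming the working directory is the monorepo's .ci/ directory so the compute_projects import resolves:

# Hypothetical runner; equivalent to `python3 -m unittest compute_projects_test -v`
# executed from inside .ci/.
import unittest

suite = unittest.defaultTestLoader.discover(".", pattern="compute_projects_test.py")
unittest.TextTestRunner(verbosity=2).run(suite)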

.ci/generate_test_report_lib.py

Lines changed: 14 additions & 1 deletion
@@ -92,7 +92,9 @@ def plural(num_tests):
             ]
         )
     elif failures:
-        report.extend(["", "## Failed Tests", "(click on a test name to see its output)"])
+        report.extend(
+            ["", "## Failed Tests", "(click on a test name to see its output)"]
+        )

         for testsuite_name, failures in failures.items():
             report.extend(["", f"### {testsuite_name}"])
@@ -120,6 +122,17 @@ def plural(num_tests):
             ]
         )

+    if failures or return_code != 0:
+        report.extend(
+            [
+                "",
+                "If these failures are unrelated to your changes (for example "
+                "tests are broken or flaky at HEAD), please open an issue at "
+                "https://github.com/llvm/llvm-project/issues and add the "
+                "`infrastructure` label.",
+            ]
+        )
+
     report = "\n".join(report)
     if len(report.encode("utf-8")) > size_limit:
         return generate_report(
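
The appended paragraph relies on Python's implicit concatenation of adjacent string literals, so the four source lines land in the report as a single Markdown line whenever there were failures or the build returned non-zero. A small sketch of how it renders; the surrounding report contents are made up for illustration, only the footer text and the condition mirror the diff:

failures = {"CI": [("CI/test_1", "output")]}  # illustrative failure data
return_code = 1

report = ["# Example Title", "", "* 1 test failed"]  # stand-in for the real report body
if failures or return_code != 0:
    report.extend(
        [
            "",
            # Adjacent string literals concatenate into one line of Markdown.
            "If these failures are unrelated to your changes (for example "
            "tests are broken or flaky at HEAD), please open an issue at "
            "https://github.com/llvm/llvm-project/issues and add the "
            "`infrastructure` label.",
        ]
    )
print("\n".join(report))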

.ci/generate_test_report_lib_test.py

Lines changed: 24 additions & 9 deletions
@@ -109,7 +109,9 @@ def test_no_failures_build_failed(self):

                    All tests passed but another part of the build **failed**.

-                    [Download](https://buildkite.com/organizations/organization_slug/pipelines/pipeline_slug/builds/build_number/jobs/job_id/download.txt) the build's log file to see the details."""
+                    [Download](https://buildkite.com/organizations/organization_slug/pipelines/pipeline_slug/builds/build_number/jobs/job_id/download.txt) the build's log file to see the details.
+
+                    If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label."""
                 ),
                 "error",
             ),
@@ -169,7 +171,9 @@ def test_report_single_file_single_testsuite(self):
                     ```
                     Other output goes here
                     ```
-                    </details>"""
+                    </details>
+
+                    If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label."""
                 ),
                 "error",
             ),
@@ -203,7 +207,9 @@ def test_report_single_file_single_testsuite(self):
                     ```
                     DEF/test_2 output goes here
                     ```
-                    </details>"""
+                    </details>
+
+                    If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label."""
                 ),
                 "error",
             )
@@ -311,7 +317,9 @@ def test_report_dont_list_failures(self):

                    * 1 test failed

-                    Failed tests and their output was too large to report. Download the build's log file to see the details."""
+                    Failed tests and their output was too large to report. Download the build's log file to see the details.
+
+                    If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label."""
                 ),
                 "error",
             ),
@@ -352,13 +360,16 @@ def test_report_dont_list_failures_link_to_log(self):

                    * 1 test failed

-                    Failed tests and their output was too large to report. [Download](https://buildkite.com/organizations/organization_slug/pipelines/pipeline_slug/builds/build_number/jobs/job_id/download.txt) the build's log file to see the details."""
+                    Failed tests and their output was too large to report. [Download](https://buildkite.com/organizations/organization_slug/pipelines/pipeline_slug/builds/build_number/jobs/job_id/download.txt) the build's log file to see the details.
+
+                    If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label."""
                 ),
                 "error",
             ),
         )

     def test_report_size_limit(self):
+        test_output = "f" * 1000
         self.assertEqual(
             generate_test_report_lib.generate_report(
                 "Foo",
@@ -371,14 +382,16 @@ def test_report_size_limit(self):
                             <testsuites time="0.02">
                             <testsuite name="Bar" tests="1" failures="1" skipped="0" time="0.02">
                             <testcase classname="Bar/test_1" name="test_1" time="0.02">
-                            <failure><![CDATA[Some long output goes here...]]></failure>
+                            <failure><![CDATA[{output}]]></failure>
                             </testcase>
                             </testsuite>
-                            </testsuites>"""
+                            </testsuites>""".format(
+                                output=test_output
+                            )
                         )
                     )
                 ],
-                size_limit=128,
+                size_limit=512,
             ),
             (
                 dedent(
@@ -387,7 +400,9 @@ def test_report_size_limit(self):

                    * 1 test failed

-                    Failed tests and their output was too large to report. Download the build's log file to see the details."""
+                    Failed tests and their output was too large to report. Download the build's log file to see the details.
+
+                    If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label."""
                 ),
                 "error",
             ),
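
The size-limit test now pads the failure output with 1000 characters and raises size_limit from 128 to 512, presumably because the reports grew once the footer is appended: the detailed report has to stay over the limit while the short fallback form stays under it. A standalone illustration of the decision; only the UTF-8 byte-size comparison mirrors the context lines shown in the generate_test_report_lib.py diff above, the strings are stand-ins:

def exceeds_limit(report: str, size_limit: int) -> bool:
    # Same check as in generate_test_report_lib.py: compare the encoded size.
    return len(report.encode("utf-8")) > size_limit


detailed = "# Foo\n\n" + "f" * 1000              # stand-in for the full report
fallback = "# Foo\n\n* 1 test failed\n\n..."     # stand-in for the short form
print(exceeds_limit(detailed, 512))   # True  -> regenerate without failure details
print(exceeds_limit(fallback, 512))   # False -> the short form is what gets posted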
