Commit b3bcc70

Merge remote-tracking branch 'origin/main' into perry/quad-non-tf-mode

2 parents: 4815fdc + fb974e8

2,817 files changed (+146895 / −65249 lines)


.ci/metrics/metrics.py

Lines changed: 95 additions & 15 deletions

@@ -12,7 +12,7 @@
     "https://influx-prod-13-prod-us-east-0.grafana.net/api/v1/push/influx/write"
 )
 GITHUB_PROJECT = "llvm/llvm-project"
-WORKFLOWS_TO_TRACK = ["Check code formatting", "LLVM Premerge Checks"]
+WORKFLOWS_TO_TRACK = ["LLVM Premerge Checks"]
 SCRAPE_INTERVAL_SECONDS = 5 * 60
 
 
@@ -26,7 +26,67 @@ class JobMetrics:
     workflow_id: int
 
 
-def get_metrics(github_repo: github.Repository, workflows_to_track: dict[str, int]):
+@dataclass
+class GaugeMetric:
+    name: str
+    value: int
+    time_ns: int
+
+
+def get_sampled_workflow_metrics(github_repo: github.Repository):
+    """Gets global statistics about the Github workflow queue.
+
+    Args:
+      github_repo: A github repo object to use to query the relevant information.
+
+    Returns:
+      Returns a list of GaugeMetric objects, containing the relevant metrics about
+      the workflow.
+    """
+
+    # Other states are available (pending, waiting, etc), but the meaning
+    # is not documented (See #70540).
+    # "queued" seems to be the info we want.
+    queued_workflow_count = len(
+        [
+            x
+            for x in github_repo.get_workflow_runs(status="queued")
+            if x.name in WORKFLOWS_TO_TRACK
+        ]
+    )
+    running_workflow_count = len(
+        [
+            x
+            for x in github_repo.get_workflow_runs(status="in_progress")
+            if x.name in WORKFLOWS_TO_TRACK
+        ]
+    )
+
+    workflow_metrics = []
+    workflow_metrics.append(
+        GaugeMetric(
+            "workflow_queue_size",
+            queued_workflow_count,
+            time.time_ns(),
+        )
+    )
+    workflow_metrics.append(
+        GaugeMetric(
+            "running_workflow_count",
+            running_workflow_count,
+            time.time_ns(),
+        )
+    )
+    # Always send a heartbeat metric so we can monitor whether this container is still able to log to Grafana.
+    workflow_metrics.append(
+        GaugeMetric("metrics_container_heartbeat", 1, time.time_ns())
+    )
+    return workflow_metrics
+
+
+def get_per_workflow_metrics(
+    github_repo: github.Repository, workflows_to_track: dict[str, int]
+):
     """Gets the metrics for specified Github workflows.
 
     This function takes in a list of workflows to track, and optionally the
@@ -43,14 +103,14 @@ def get_metrics(github_repo: github.Repository, workflows_to_track: dict[str, in
      Returns a list of JobMetrics objects, containing the relevant metrics about
      the workflow.
    """
-    workflow_runs = iter(github_repo.get_workflow_runs())
-
     workflow_metrics = []
 
     workflows_to_include = set(workflows_to_track.keys())
 
-    while len(workflows_to_include) > 0:
-        workflow_run = next(workflow_runs)
+    for workflow_run in iter(github_repo.get_workflow_runs()):
+        if len(workflows_to_include) == 0:
+            break
+
         if workflow_run.status != "completed":
             continue
 
@@ -139,12 +199,27 @@ def upload_metrics(workflow_metrics, metrics_userid, api_key):
      metrics_userid: The userid to use for the upload.
      api_key: The API key to use for the upload.
    """
+
+    if len(workflow_metrics) == 0:
+        print("No metrics found to upload.", file=sys.stderr)
+        return
+
     metrics_batch = []
     for workflow_metric in workflow_metrics:
-        workflow_formatted_name = workflow_metric.job_name.lower().replace(" ", "_")
-        metrics_batch.append(
-            f"{workflow_formatted_name} queue_time={workflow_metric.queue_time},run_time={workflow_metric.run_time},status={workflow_metric.status} {workflow_metric.created_at_ns}"
-        )
+        if isinstance(workflow_metric, GaugeMetric):
+            name = workflow_metric.name.lower().replace(" ", "_")
+            metrics_batch.append(
+                f"{name} value={workflow_metric.value} {workflow_metric.time_ns}"
+            )
+        elif isinstance(workflow_metric, JobMetrics):
+            name = workflow_metric.job_name.lower().replace(" ", "_")
+            metrics_batch.append(
+                f"{name} queue_time={workflow_metric.queue_time},run_time={workflow_metric.run_time},status={workflow_metric.status} {workflow_metric.created_at_ns}"
+            )
+        else:
+            raise ValueError(
+                f"Unsupported object type {type(workflow_metric)}: {str(workflow_metric)}"
+            )
 
     request_data = "\n".join(metrics_batch)
     response = requests.post(
@@ -176,16 +251,21 @@ def main():
     # Enter the main loop. Every five minutes we wake up and dump metrics for
     # the relevant jobs.
     while True:
-        current_metrics = get_metrics(github_repo, workflows_to_track)
-        if len(current_metrics) == 0:
-            print("No metrics found to upload.", file=sys.stderr)
-            continue
+        current_metrics = get_per_workflow_metrics(github_repo, workflows_to_track)
+        current_metrics += get_sampled_workflow_metrics(github_repo)
+        # Always send a heartbeat metric so we can monitor whether this container is still able to log to Grafana.
+        current_metrics.append(
+            GaugeMetric("metrics_container_heartbeat", 1, time.time_ns())
+        )
 
         upload_metrics(current_metrics, grafana_metrics_userid, grafana_api_key)
         print(f"Uploaded {len(current_metrics)} metrics", file=sys.stderr)
 
         for workflow_metric in reversed(current_metrics):
-            workflows_to_track[workflow_metric.job_name] = workflow_metric.workflow_id
+            if isinstance(workflow_metric, JobMetrics):
+                workflows_to_track[
+                    workflow_metric.job_name
+                ] = workflow_metric.workflow_id
 
         time.sleep(SCRAPE_INTERVAL_SECONDS)

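For reference, the records that upload_metrics batches above are InfluxDB line-protocol strings, one per metric, joined with newlines and POSTed to Grafana. The minimal sketch below reproduces only that formatting step so the two record shapes are easy to compare; it is an illustration rather than code from the commit, and the JobMetrics stand-in and the sample values are assumptions based on the fields referenced in the f-strings above.

# Minimal sketch (not from the commit): formats one GaugeMetric and one
# JobMetrics record the way upload_metrics does. GaugeMetric mirrors the
# dataclass added above; the JobMetrics fields are assumed from the f-string
# and may not match the real dataclass exactly.
import time
from dataclasses import dataclass


@dataclass
class GaugeMetric:
    name: str
    value: int
    time_ns: int


@dataclass
class JobMetrics:  # assumed shape, for illustration only
    job_name: str
    queue_time: int
    run_time: int
    status: int
    created_at_ns: int
    workflow_id: int


def format_metric(metric) -> str:
    """Render a single metric as an InfluxDB line-protocol record."""
    if isinstance(metric, GaugeMetric):
        name = metric.name.lower().replace(" ", "_")
        return f"{name} value={metric.value} {metric.time_ns}"
    if isinstance(metric, JobMetrics):
        name = metric.job_name.lower().replace(" ", "_")
        return (
            f"{name} queue_time={metric.queue_time},run_time={metric.run_time},"
            f"status={metric.status} {metric.created_at_ns}"
        )
    raise ValueError(f"Unsupported object type {type(metric)}")


if __name__ == "__main__":
    now = time.time_ns()
    batch = [
        format_metric(GaugeMetric("workflow_queue_size", 3, now)),
        format_metric(JobMetrics("LLVM Premerge Checks", 120, 900, 1, now, 42)),
    ]
    # upload_metrics joins the records with newlines and POSTs the batch.
    print("\n".join(batch))

Running the sketch prints one gauge record and one job record in the same shape the container uploads.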
.github/CODEOWNERS

Lines changed: 0 additions & 1 deletion

@@ -27,7 +27,6 @@
 /llvm/lib/Transforms/Scalar/MemCpyOptimizer.cpp @nikic
 /llvm/lib/Transforms/InstCombine/ @nikic
 
-/clang/include/clang/Sema/Sema.h @Endilll
 /clang/test/CXX/drs/ @Endilll
 /clang/www/cxx_dr_status.html @Endilll
 /clang/www/make_cxx_dr_status @Endilll

.github/workflows/build-ci-container.yml

Lines changed: 9 additions & 2 deletions

@@ -36,19 +36,22 @@ jobs:
           tag=`date +%s`
           container_name="ghcr.io/$GITHUB_REPOSITORY_OWNER/ci-ubuntu-22.04"
           echo "container-name=$container_name" >> $GITHUB_OUTPUT
+          echo "container-name-agent=$container_name-agent" >> $GITHUB_OUTPUT
           echo "container-name-tag=$container_name:$tag" >> $GITHUB_OUTPUT
+          echo "container-name-agent-tag=$container_name-agent:$tag" >> $GITHUB_OUTPUT
           echo "container-filename=$(echo $container_name:$tag | sed -e 's/\//-/g' -e 's/:/-/g').tar" >> $GITHUB_OUTPUT
       - name: Build container
         working-directory: ./.github/workflows/containers/github-action-ci/
         run: |
-          podman build -t ${{ steps.vars.outputs.container-name-tag }} .
+          podman build --target ci-container -t ${{ steps.vars.outputs.container-name-tag }} .
+          podman build --target ci-container-agent -t ${{ steps.vars.outputs.container-name-agent-tag }} .
 
       # Save the container so we have it in case the push fails. This also
       # allows us to separate the push step into a different job so we can
       # maintain minimal permissions while building the container.
       - name: Save container image
         run: |
-          podman save ${{ steps.vars.outputs.container-name-tag }} > ${{ steps.vars.outputs.container-filename }}
+          podman save ${{ steps.vars.outputs.container-name-tag }} ${{ steps.vars.outputs.container-name-agent-tag }} > ${{ steps.vars.outputs.container-filename }}
 
       - name: Upload container image
         uses: actions/upload-artifact@v4
@@ -86,3 +89,7 @@ jobs:
           podman login -u ${{ github.actor }} -p $GITHUB_TOKEN ghcr.io
           podman push ${{ needs.build-ci-container.outputs.container-name-tag }}
           podman push ${{ needs.build-ci-container.outputs.container-name }}:latest
+
+          podman tag ${{ needs.build-ci-container.outputs.container-name-agent-tag }} ${{ needs.build-ci-container.outputs.container-name-agent }}:latest
+          podman push ${{ needs.build-ci-container.outputs.container-name-agent-tag }}
+          podman push ${{ needs.build-ci-container.outputs.container-name-agent }}:latest

.github/workflows/containers/github-action-ci-windows/Dockerfile

Lines changed: 1 addition & 1 deletion

@@ -108,7 +108,7 @@ RUN choco install -y handle
 
 RUN pip3 install pywin32 buildbot-worker==2.8.4
 
-ARG RUNNER_VERSION=2.319.1
+ARG RUNNER_VERSION=2.321.0
 ENV RUNNER_VERSION=$RUNNER_VERSION
 
 RUN powershell -Command \

.github/workflows/containers/github-action-ci/Dockerfile

Lines changed: 14 additions & 2 deletions

@@ -13,7 +13,8 @@ RUN apt-get update && \
       ninja-build \
       python3 \
       git \
-      curl
+      curl \
+      zlib1g-dev
 
 RUN curl -O -L https://github.com/llvm/llvm-project/archive/refs/tags/llvmorg-$LLVM_VERSION.tar.gz && tar -xf llvmorg-$LLVM_VERSION.tar.gz
 
@@ -38,7 +39,7 @@ RUN cmake -B ./build -G Ninja ./llvm \
 
 RUN ninja -C ./build stage2-clang-bolt stage2-install-distribution && ninja -C ./build install-distribution
 
-FROM base
+FROM base as ci-container
 
 COPY --from=stage1-toolchain $LLVM_SYSROOT $LLVM_SYSROOT
 
@@ -91,4 +92,15 @@ RUN adduser gha sudo
 RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
 
 USER gha
+WORKDIR /home/gha
+
+FROM ci-container as ci-container-agent
+
+ENV GITHUB_RUNNER_VERSION=2.321.0
+
+RUN mkdir actions-runner && \
+    cd actions-runner && \
+    curl -O -L https://github.com/actions/runner/releases/download/v$GITHUB_RUNNER_VERSION/actions-runner-linux-x64-$GITHUB_RUNNER_VERSION.tar.gz && \
+    tar xzf ./actions-runner-linux-x64-$GITHUB_RUNNER_VERSION.tar.gz && \
+    rm ./actions-runner-linux-x64-$GITHUB_RUNNER_VERSION.tar.gz
 

.github/workflows/premerge.yaml

Lines changed: 2 additions & 0 deletions

@@ -30,6 +30,8 @@ jobs:
           fetch-depth: 2
       - name: Setup ccache
         uses: hendrikmuhs/[email protected]
+        with:
+          max-size: "2000M"
       - name: Build and Test
         # Mark the job as a success even if the step fails so that people do
         # not get notified while the new premerge pipeline is in an

bolt/CMakeLists.txt

Lines changed: 2 additions & 2 deletions

@@ -163,8 +163,8 @@ if (BOLT_ENABLE_RUNTIME)
   add_llvm_install_targets(install-bolt_rt
                            DEPENDS bolt_rt bolt
                            COMPONENT bolt)
-  set(LIBBOLT_RT_INSTR "${CMAKE_CURRENT_BINARY_DIR}/bolt_rt-bins/lib/libbolt_rt_instr.a")
-  set(LIBBOLT_RT_HUGIFY "${CMAKE_CURRENT_BINARY_DIR}/bolt_rt-bins/lib/libbolt_rt_hugify.a")
+  set(LIBBOLT_RT_INSTR "${CMAKE_CURRENT_BINARY_DIR}/bolt_rt-bins/lib${LLVM_LIBDIR_SUFFIX}/libbolt_rt_instr.a")
+  set(LIBBOLT_RT_HUGIFY "${CMAKE_CURRENT_BINARY_DIR}/bolt_rt-bins/lib${LLVM_LIBDIR_SUFFIX}/libbolt_rt_hugify.a")
 endif()
 
 find_program(GNU_LD_EXECUTABLE NAMES ${LLVM_DEFAULT_TARGET_TRIPLE}-ld.bfd ld.bfd DOC "GNU ld")

bolt/include/bolt/Core/BinaryContext.h

Lines changed: 6 additions & 0 deletions

@@ -1363,6 +1363,12 @@ class BinaryContext {
     if (std::optional<uint32_t> Size = MIB->getSize(Inst))
       return *Size;
 
+    if (MIB->isPseudo(Inst))
+      return 0;
+
+    if (std::optional<uint32_t> Size = MIB->getInstructionSize(Inst))
+      return *Size;
+
     if (!Emitter)
       Emitter = this->MCE.get();
     SmallString<256> Code;

bolt/include/bolt/Core/MCPlusBuilder.h

Lines changed: 5 additions & 0 deletions

@@ -1204,6 +1204,11 @@ class MCPlusBuilder {
   /// Get instruction size specified via annotation.
   std::optional<uint32_t> getSize(const MCInst &Inst) const;
 
+  /// Get target-specific instruction size.
+  virtual std::optional<uint32_t> getInstructionSize(const MCInst &Inst) const {
+    return std::nullopt;
+  }
+
   /// Set instruction size.
   void setSize(MCInst &Inst, uint32_t Size) const;

bolt/lib/Passes/Inliner.cpp

Lines changed: 2 additions & 2 deletions

@@ -310,13 +310,13 @@ Inliner::inlineCall(BinaryBasicBlock &CallerBB,
     if (MIB.isPseudo(Inst))
       continue;
 
-    MIB.stripAnnotations(Inst, /*KeepTC=*/BC.isX86());
+    MIB.stripAnnotations(Inst, /*KeepTC=*/BC.isX86() || BC.isAArch64());
 
     // Fix branch target. Strictly speaking, we don't have to do this as
     // targets of direct branches will be fixed later and don't matter
     // in the CFG state. However, disassembly may look misleading, and
     // hence we do the fixing.
-    if (MIB.isBranch(Inst)) {
+    if (MIB.isBranch(Inst) && !MIB.isTailCall(Inst)) {
       assert(!MIB.isIndirectBranch(Inst) &&
              "unexpected indirect branch in callee");
       const BinaryBasicBlock *TargetBB =

0 commit comments