
Commit bf45819

Upload macrobenchmark results to the metric service. (#2682)
1 parent 978bfbc commit bf45819

File tree

3 files changed: +39 -18 lines changed

- ci/fireci/fireci/uploader.py
- ci/fireci/fireciplugins/macrobenchmark.py
- ci/fireci/setup.py


ci/fireci/fireci/uploader.py

Lines changed: 11 additions & 7 deletions
@@ -19,13 +19,15 @@
 import subprocess
 import urllib.parse
 
+from . import prow_utils
+
 _logger = logging.getLogger('fireci.uploader')
 
 
-def post_report(test_report, metrics_service_url, access_token, note=''):
+def post_report(test_report, metrics_service_url, access_token, metric='reports', note=''):
   """Post a report to the metrics service backend."""
 
-  endpoint = _construct_request_endpoint(note)
+  endpoint = _construct_request_endpoint(metric, note)
   headers = {'Authorization': f'Bearer {access_token}', 'Content-Type': 'application/json'}
   data = json.dumps(test_report)
 
@@ -39,24 +41,26 @@ def post_report(test_report, metrics_service_url, access_token, note=''):
   _logger.info(f'Response: {result.text}')
 
 
-def _construct_request_endpoint(note):
+def _construct_request_endpoint(metric, note):
   repo_owner = os.getenv('REPO_OWNER')
   repo_name = os.getenv('REPO_NAME')
   branch = os.getenv('PULL_BASE_REF')
   pull_request = os.getenv('PULL_NUMBER')
 
   commit = _get_commit_hash('HEAD@{0}')
+  log = prow_utils.prow_job_log_link()
 
-  endpoint = f'/repos/{repo_owner}/{repo_name}/commits/{commit}/reports'
+  endpoint = f'/repos/{repo_owner}/{repo_name}/commits/{commit}/{metric}?log={log}'
   if pull_request:
-    base_commit = _get_commit_hash('HEAD@{1}')
-    endpoint += f'?pull_request={pull_request}&base_commit={base_commit}'
+    base_commit = os.getenv('PULL_BASE_SHA')
+    head_commit = os.getenv('PULL_PULL_SHA')
+    endpoint += f'&pull_request={pull_request}&base_commit={base_commit}&head_commit={head_commit}'
 
     commit_note = _get_prow_commit_note('HEAD@{0}')
     note += f'\n{commit_note}\n'
     endpoint += f'&note={urllib.parse.quote(note)}'
   else:
-    endpoint += f'&branch={branch}'
+    endpoint += f'&branch={branch}'
 
   return endpoint
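For reference, a minimal sketch of the endpoint the updated `_construct_request_endpoint` would build for a pull-request run. Every concrete value below (owner, repo, commit, SHAs, log link, note) is a made-up stand-in for the Prow environment variables and `prow_utils.prow_job_log_link()`; only the path and query structure comes from the diff above.

```python
import urllib.parse

# Hypothetical stand-ins for REPO_OWNER, REPO_NAME, PULL_* env vars and the
# Prow job log link; the structure of the URL is taken from the diff above.
repo_owner, repo_name = 'firebase', 'firebase-android-sdk'
commit = 'bf45819'
metric = 'macrobenchmark'
log = 'https://example.com/prow-job-log'       # assumed log link
pull_request = '2682'
base_commit, head_commit = 'abc123', 'def456'  # PULL_BASE_SHA / PULL_PULL_SHA stand-ins
note = 'sample note'

endpoint = f'/repos/{repo_owner}/{repo_name}/commits/{commit}/{metric}?log={log}'
if pull_request:
  endpoint += f'&pull_request={pull_request}&base_commit={base_commit}&head_commit={head_commit}'
  endpoint += f'&note={urllib.parse.quote(note)}'

print(endpoint)
# /repos/firebase/firebase-android-sdk/commits/bf45819/macrobenchmark?log=...
#   &pull_request=2682&base_commit=abc123&head_commit=def456&note=sample%20note
```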

ci/fireci/fireciplugins/macrobenchmark.py

Lines changed: 27 additions & 11 deletions
@@ -20,17 +20,19 @@
 import random
 import re
 import shutil
-import statistics
 import sys
 import uuid
 
 import click
+import numpy
 import pystache
 import yaml
 from google.cloud import storage
 
 from fireci import ci_command
 from fireci.dir_utils import chdir
+from fireci import prow_utils
+from fireci import uploader
 
 _logger = logging.getLogger('fireci.macrobenchmark')
 
@@ -55,9 +57,7 @@ async def _launch_macrobenchmark_test():
   runners = [MacrobenchmarkTest(k, v, artifact_versions) for k, v in config.items()]
   results = await asyncio.gather(*[x.run() for x in runners], return_exceptions=True)
 
-  if any(map(lambda x: isinstance(x, Exception), results)):
-    _logger.error(f'Exceptions: {[x for x in results if (isinstance(x, Exception))]}')
-    raise click.ClickException('Macrobenchmark test failed with above errors.')
+  await _post_processing(results)
 
   _logger.info('Macrobenchmark test finished.')
 
@@ -104,6 +104,23 @@ async def _copy_google_services():
   shutil.copyfile(src, dst)
 
 
+async def _post_processing(results):
+  # Upload successful measurements to the metric service
+  measurements = []
+  for result in results:
+    if not isinstance(result, Exception):
+      measurements.extend(result)
+
+  metrics_service_url = os.getenv('METRICS_SERVICE_URL')
+  access_token = prow_utils.gcloud_identity_token()
+  uploader.post_report(measurements, metrics_service_url, access_token, metric='macrobenchmark')
+
+  # Raise exceptions for failed measurements
+  if any(map(lambda x: isinstance(x, Exception), results)):
+    _logger.error(f'Exceptions: {[x for x in results if isinstance(x, Exception)]}')
+    raise click.ClickException('Macrobenchmark test failed with above errors.')
+
+
 class MacrobenchmarkTest:
   """Builds the test based on configurations and runs the test on FTL."""
   def __init__(
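The new `_post_processing` above relies on `asyncio.gather(..., return_exceptions=True)` returning a mix of measurement lists and exception objects. A small, self-contained sketch of that partitioning pattern, with made-up coroutines standing in for `MacrobenchmarkTest.run()`:

```python
import asyncio

async def _ok():
  # Stand-in for a successful MacrobenchmarkTest.run(): returns a list of measurements.
  return [{'name': 'startup', 'p50': 120.0, 'unit': 'ms'}]

async def _fail():
  # Stand-in for a run that raises.
  raise RuntimeError('FTL run failed')

async def main():
  results = await asyncio.gather(_ok(), _fail(), return_exceptions=True)

  # Successful runs contribute their measurements; exceptions are kept aside.
  measurements = []
  for result in results:
    if not isinstance(result, Exception):
      measurements.extend(result)

  errors = [x for x in results if isinstance(x, Exception)]
  print(measurements)  # [{'name': 'startup', 'p50': 120.0, 'unit': 'ms'}]
  print(errors)        # [RuntimeError('FTL run failed')]

asyncio.run(main())
```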
@@ -127,7 +144,7 @@ async def run(self):
     await self._create_benchmark_projects()
     await self._assemble_benchmark_apks()
     await self._execute_benchmark_tests()
-    await self._upload_benchmark_results()
+    return await self._aggregate_benchmark_results()
 
   async def _create_benchmark_projects(self):
     app_name = self.test_app_config['name']
@@ -205,7 +222,7 @@ async def _prepare_mustache_context(self):
 
     return mustache_context
 
-  async def _upload_benchmark_results(self):
+  async def _aggregate_benchmark_results(self):
     results = []
     blobs = self.gcs_client.list_blobs(self.test_results_bucket, prefix=self.test_results_dir)
     files = [x for x in blobs if re.search(r'artifacts/[^/]*\.json', x.name)]
@@ -222,14 +239,13 @@ async def _upload_benchmark_results(self):
           'name': f'{clazz}.{method}',
           'min': min(runs),
           'max': max(runs),
-          'mean': statistics.mean(runs),
-          'median': statistics.median(runs),
-          'stdev': statistics.stdev(runs),
+          'p50': numpy.percentile(runs, 50),
+          'p90': numpy.percentile(runs, 90),
+          'p99': numpy.percentile(runs, 99),
           'unit': 'ms',
         })
     self.logger.info(f'Benchmark results: {results}')
-
-    # TODO(yifany): upload to metric service once it is ready
+    return results
 
   async def _exec_subprocess(self, executable, args):
     command = " ".join([executable, *args])
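To illustrate the aggregation change, here is a rough sketch of the per-method summary after the switch from `statistics` mean/median/stdev to `numpy.percentile`. The run durations and the benchmark name are invented; the keys and units mirror the diff above.

```python
import numpy

# Invented run durations (ms) for a single benchmark method.
runs = [212.4, 198.7, 305.1, 221.9, 240.3, 199.5, 260.8, 230.0, 215.2, 450.6]

measurement = {
  'name': 'BenchmarkTest.startup',  # hypothetical {clazz}.{method}
  'min': min(runs),
  'max': max(runs),
  'p50': numpy.percentile(runs, 50),
  'p90': numpy.percentile(runs, 90),
  'p99': numpy.percentile(runs, 99),
  'unit': 'ms',
}
print(measurement)
```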

ci/fireci/setup.py

Lines changed: 1 addition & 0 deletions
@@ -27,6 +27,7 @@
   install_requires=[
     'click==7.0',
     'google-cloud-storage==1.38.0',
+    'numpy==1.19.5',
     'PyGithub==1.43.8',
     'pystache==0.5.4',
     'requests==2.23.0',
