
Commit d980b45

[unittest] Add option to allow disabling sharding in unittest
By default, googletest-based unit tests use sharding to speed up testing. However, when these unit tests are run through a wrapper program on a remote platform with a large round-trip time, sharding increases the time cost dramatically. This patch adds a "--disable-gtest-sharding" option to LLVM lit that disables sharding for googletest-based unit tests.
1 parent 9aa571f commit d980b45
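For context, lit's GoogleTest format normally shards each test binary by setting GoogleTest's standard GTEST_TOTAL_SHARDS and GTEST_SHARD_INDEX environment variables, so one binary becomes several lit tests; the new flag skips that and runs each binary once. A minimal sketch of the two modes, assuming a placeholder binary path ./FooTests (illustrative only, not lit's implementation):

import os
import subprocess

def run_sharded(binary="./FooTests", total_shards=4):
    # GoogleTest's own sharding protocol: every shard runs the same binary,
    # and the binary picks its subset of tests from these two variables.
    for shard_idx in range(total_shards):
        env = dict(os.environ)
        env["GTEST_TOTAL_SHARDS"] = str(total_shards)
        env["GTEST_SHARD_INDEX"] = str(shard_idx)
        subprocess.run([binary], env=env, check=False)

def run_unsharded(binary="./FooTests"):
    # What --disable-gtest-sharding amounts to: a single invocation with no
    # shard-selection variables in the environment.
    subprocess.run([binary], check=False)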

File tree: 7 files changed (+221, -26 lines)


llvm/utils/lit/lit/LitConfig.py

Lines changed: 2 additions & 0 deletions
@@ -37,6 +37,7 @@ def __init__(
         maxIndividualTestTime=0,
         parallelism_groups={},
         per_test_coverage=False,
+        disableGTestSharding=False,
     ):
         # The name of the test runner.
         self.progname = progname
@@ -87,6 +88,7 @@ def __init__(
         self.maxIndividualTestTime = maxIndividualTestTime
         self.parallelism_groups = parallelism_groups
         self.per_test_coverage = per_test_coverage
+        self.disableGTestSharding = bool(disableGTestSharding)

     @property
     def maxIndividualTestTime(self):
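A small detail in the constructor: the flag is normalized through bool(), so any truthy value (for example one forwarded from a site configuration) enables it. A tiny standalone sketch, with a hypothetical FakeLitConfig standing in for the real class:

# Hypothetical stand-in for LitConfig, showing only the flag normalization.
class FakeLitConfig:
    def __init__(self, disableGTestSharding=False):
        self.disableGTestSharding = bool(disableGTestSharding)

assert FakeLitConfig().disableGTestSharding is False
assert FakeLitConfig(disableGTestSharding="1").disableGTestSharding is True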

llvm/utils/lit/lit/cl_arguments.py

Lines changed: 6 additions & 0 deletions
@@ -118,6 +118,12 @@ def parse_args():
     )

     execution_group = parser.add_argument_group("Test Execution")
+    execution_group.add_argument(
+        "--disable-gtest-sharding",
+        dest="disableGTestSharding",
+        help="Disable sharding for GoogleTest format",
+        action="store_true",
+    )
     execution_group.add_argument(
         "--path",
         help="Additional paths to add to testing environment",

llvm/utils/lit/lit/formats/googletest.py

Lines changed: 60 additions & 26 deletions
@@ -68,24 +68,49 @@ def getTestsInDirectory(self, testSuite, path_in_suite, litConfig, localConfig):
                     self.seen_executables.add(execpath)
                 num_tests = self.get_num_tests(execpath, litConfig, localConfig)
                 if num_tests is not None:
-                    # Compute the number of shards.
-                    shard_size = init_shard_size
-                    nshard = int(math.ceil(num_tests / shard_size))
-                    while nshard < core_count and shard_size > 1:
-                        shard_size = shard_size // 2
+                    if not litConfig.disableGTestSharding:
+                        # Compute the number of shards.
+                        shard_size = init_shard_size
                         nshard = int(math.ceil(num_tests / shard_size))
-
-                    # Create one lit test for each shard.
-                    for idx in range(nshard):
-                        testPath = path_in_suite + (subdir, fn, str(idx), str(nshard))
+                        while nshard < core_count and shard_size > 1:
+                            shard_size = shard_size // 2
+                            nshard = int(math.ceil(num_tests / shard_size))
+
+                        # Create one lit test for each shard.
+                        for idx in range(nshard):
+                            testPath = path_in_suite + (
+                                subdir,
+                                fn,
+                                str(idx),
+                                str(nshard),
+                            )
+                            json_file = (
+                                "-".join(
+                                    [
+                                        execpath,
+                                        testSuite.config.name,
+                                        str(os.getpid()),
+                                        str(idx),
+                                        str(nshard),
+                                    ]
+                                )
+                                + ".json"
+                            )
+                            yield lit.Test.Test(
+                                testSuite,
+                                testPath,
+                                localConfig,
+                                file_path=execpath,
+                                gtest_json_file=json_file,
+                            )
+                    else:
+                        testPath = path_in_suite + (subdir, fn)
                         json_file = (
                             "-".join(
                                 [
                                     execpath,
                                     testSuite.config.name,
                                     str(os.getpid()),
-                                    str(idx),
-                                    str(nshard),
                                 ]
                             )
                             + ".json"
@@ -118,24 +143,33 @@ def execute(self, test, litConfig):
         if test.gtest_json_file is None:
             return lit.Test.FAIL, ""

-        testPath, testName = os.path.split(test.getSourcePath())
-        while not os.path.exists(testPath):
-            # Handle GTest parametrized and typed tests, whose name includes
-            # some '/'s.
-            testPath, namePrefix = os.path.split(testPath)
-            testName = namePrefix + "/" + testName
-
-        testName, total_shards = os.path.split(testName)
-        testName, shard_idx = os.path.split(testName)
+        testPath = test.getSourcePath()
         from lit.cl_arguments import TestOrder

         use_shuffle = TestOrder(litConfig.order) == TestOrder.RANDOM
-        shard_env = {
-            "GTEST_OUTPUT": "json:" + test.gtest_json_file,
-            "GTEST_SHUFFLE": "1" if use_shuffle else "0",
-            "GTEST_TOTAL_SHARDS": os.environ.get("GTEST_TOTAL_SHARDS", total_shards),
-            "GTEST_SHARD_INDEX": os.environ.get("GTEST_SHARD_INDEX", shard_idx),
-        }
+        if not litConfig.disableGTestSharding:
+            testPath, testName = os.path.split(test.getSourcePath())
+            while not os.path.exists(testPath):
+                # Handle GTest parametrized and typed tests, whose name includes
+                # some '/'s.
+                testPath, namePrefix = os.path.split(testPath)
+                testName = namePrefix + "/" + testName
+
+            testName, total_shards = os.path.split(testName)
+            testName, shard_idx = os.path.split(testName)
+            shard_env = {
+                "GTEST_OUTPUT": "json:" + test.gtest_json_file,
+                "GTEST_SHUFFLE": "1" if use_shuffle else "0",
+                "GTEST_TOTAL_SHARDS": os.environ.get(
+                    "GTEST_TOTAL_SHARDS", total_shards
+                ),
+                "GTEST_SHARD_INDEX": os.environ.get("GTEST_SHARD_INDEX", shard_idx),
+            }
+        else:
+            shard_env = {
+                "GTEST_OUTPUT": "json:" + test.gtest_json_file,
+                "GTEST_SHUFFLE": "1" if use_shuffle else "0",
+            }
         test.config.environment.update(shard_env)

         cmd = [testPath]
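The net effect in execute() shows up in the environment handed to the test binary: with sharding, each lit test carries its shard index and total; with --disable-gtest-sharding, only the JSON output path and the shuffle setting are set. A hedged illustration of the two environments (the file name and shard numbers are made-up example values, not from a real run):

# Example environments as built by execute(); values are illustrative only.
sharded_env = {
    "GTEST_OUTPUT": "json:FooTests-suite-1234-0-4.json",
    "GTEST_SHUFFLE": "0",
    "GTEST_TOTAL_SHARDS": "4",
    "GTEST_SHARD_INDEX": "0",
}
unsharded_env = {
    "GTEST_OUTPUT": "json:FooTests-suite-1234.json",
    "GTEST_SHUFFLE": "0",
}

# Only the shard-selection variables differ between the two modes.
assert set(sharded_env) - set(unsharded_env) == {
    "GTEST_TOTAL_SHARDS",
    "GTEST_SHARD_INDEX",
}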

llvm/utils/lit/lit/main.py

Lines changed: 1 addition & 0 deletions
@@ -41,6 +41,7 @@ def main(builtin_params={}):
         params=params,
         config_prefix=opts.configPrefix,
         per_test_coverage=opts.per_test_coverage,
+        disableGTestSharding=opts.disableGTestSharding,
     )

     discovered_tests = lit.discovery.find_tests_for_inputs(
Lines changed: 105 additions & 0 deletions
@@ -0,0 +1,105 @@
#!/usr/bin/env python

import os
import sys

if len(sys.argv) == 3 and sys.argv[1] == "--gtest_list_tests":
    if sys.argv[2] != "--gtest_filter=-*DISABLED_*":
        raise ValueError("unexpected argument: %s" % (sys.argv[2]))
    print(
        """\
FirstTest.
  subTestA
  subTestB
  subTestC
  subTestD
ParameterizedTest/0.
  subTest
ParameterizedTest/1.
  subTest"""
    )
    sys.exit(0)
elif len(sys.argv) != 1:
    # sharding and json output are specified using environment variables
    raise ValueError("unexpected argument: %r" % (" ".join(sys.argv[1:])))

for e in ["GTEST_OUTPUT"]:
    if e not in os.environ:
        raise ValueError("missing environment variables: " + e)

if not os.environ["GTEST_OUTPUT"].startswith("json:"):
    raise ValueError("must emit json output: " + os.environ["GTEST_OUTPUT"])

output = """\
{
"random_seed": 123,
"testsuites": [
    {
        "name": "FirstTest",
        "testsuite": [
            {
                "name": "subTestA",
                "result": "COMPLETED",
                "time": "0.001s"
            },
            {
                "name": "subTestB",
                "result": "COMPLETED",
                "time": "0.001s",
                "failures": [
                    {
                        "failure": "I am subTest B, I FAIL\\nAnd I have two lines of output",
                        "type": ""
                    }
                ]
            },
            {
                "name": "subTestC",
                "result": "SKIPPED",
                "time": "0.001s"
            },
            {
                "name": "subTestD",
                "result": "UNRESOLVED",
                "time": "0.001s"
            }
        ]
    },
    {
        "name": "ParameterizedTest/0",
        "testsuite": [
            {
                "name": "subTest",
                "result": "COMPLETED",
                "time": "0.001s"
            }
        ]
    },
    {
        "name": "ParameterizedTest/1",
        "testsuite": [
            {
                "name": "subTest",
                "result": "COMPLETED",
                "time": "0.001s"
            }
        ]
    }
]
}"""

dummy_output = """\
{
"testsuites": [
]
}"""

json_filename = os.environ["GTEST_OUTPUT"].split(":", 1)[1]
with open(json_filename, "w", encoding="utf-8") as f:
    print("[ RUN ] FirstTest.subTestB", flush=True)
    print("I am subTest B output", file=sys.stderr, flush=True)
    print("[ FAILED ] FirstTest.subTestB (8 ms)", flush=True)
    f.write(output)
    exit_code = 1

sys.exit(exit_code)
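The JSON written by this mock executable is what produces the summary the new test expects: subTestA and both parameterized subTests pass, subTestB fails, subTestC is skipped, and subTestD is unresolved. A small tally over that JSON, shown as an illustration of the mapping rather than lit's actual result parser:

import json

def tally(gtest_json_text):
    # Count results the way the expected summary groups them: a test with a
    # "failures" list is failed; otherwise its "result" field
    # (COMPLETED / SKIPPED / UNRESOLVED) decides the bucket.
    counts = {"Passed": 0, "Skipped": 0, "Unresolved": 0, "Failed": 0}
    for suite in json.loads(gtest_json_text)["testsuites"]:
        for test in suite["testsuite"]:
            if test.get("failures"):
                counts["Failed"] += 1
            elif test["result"] == "SKIPPED":
                counts["Skipped"] += 1
            elif test["result"] == "UNRESOLVED":
                counts["Unresolved"] += 1
            else:
                counts["Passed"] += 1
    return counts

# For the JSON above this yields Passed: 3, Skipped: 1, Unresolved: 1, Failed: 1,
# matching the CHECK lines in the test script below.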
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
import lit.formats

config.name = "googletest-no-sharding"
config.test_format = lit.formats.GoogleTest("DummySubDir", "Test")
Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
# Check the various features of the GoogleTest format.

# RUN: not %{lit} -v --disable-gtest-sharding --order=random %{inputs}/googletest-no-sharding > %t.out
# FIXME: Temporarily dump test output so we can debug failing tests on
# buildbots.
# RUN: cat %t.out
# RUN: FileCheck < %t.out %s
#
# END.

# CHECK: -- Testing:
# CHECK: FAIL: googletest-no-sharding :: [[PATH:[Dd]ummy[Ss]ub[Dd]ir/]][[FILE:OneTest\.py]]
# CHECK: *** TEST 'googletest-no-sharding :: [[PATH]][[FILE]]' FAILED ***
# CHECK-NEXT: Script(shard):
# CHECK-NEXT: --
# CHECK-NEXT: GTEST_OUTPUT=json:{{[^[:space:]]*}} GTEST_SHUFFLE=1 GTEST_RANDOM_SEED=123 {{.*}}[[FILE]]
# CHECK-NEXT: --
# CHECK-EMPTY:
# CHECK-NEXT: Script:
# CHECK-NEXT: --
# CHECK-NEXT: [[FILE]] --gtest_filter=FirstTest.subTestB
# CHECK-NEXT: --
# CHECK-NEXT: I am subTest B output
# CHECK-EMPTY:
# CHECK-NEXT: I am subTest B, I FAIL
# CHECK-NEXT: And I have two lines of output
# CHECK-EMPTY:
# CHECK: Script:
# CHECK-NEXT: --
# CHECK-NEXT: [[FILE]] --gtest_filter=FirstTest.subTestD
# CHECK-NEXT: --
# CHECK-NEXT: unresolved test result
# CHECK: ***
# CHECK: ***
# CHECK: Unresolved Tests (1):
# CHECK-NEXT: googletest-no-sharding :: FirstTest/subTestD
# CHECK: ***
# CHECK-NEXT: Failed Tests (1):
# CHECK-NEXT: googletest-no-sharding :: FirstTest/subTestB
# CHECK: Skipped{{ *}}: 1
# CHECK: Passed{{ *}}: 3
# CHECK: Unresolved{{ *}}: 1
# CHECK: Failed{{ *}}: 1
