
Commit f9fc240

Remove root project testing jar (#1439)
When executing integration test code, most integrations expect a jar to be provisioned that contains all test code, so that the processing frameworks can load it directly. Historically we supported this by packaging all integration test code into one big itest jar in the root project. This PR removes that shared central jar. Each project now builds and uses its own itest jar for its integration tests. These jars may repackage code from other projects' itest jars, but they are insulated within each project that uses them.
Parent: 0302dca · Commit: f9fc240

10 files changed (+109, -80 lines)

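To orient the reader before the per-file diffs: a condensed sketch, assembled from the changes below, of the pattern each subproject now follows. Every subproject registers its own itestJar task with the 'testing' classifier, and the downstream integrations (hive, pig, spark, storm) repackage the shared MapReduce classes into their project-local jar. Illustrative only; the names come from the diffs, and this is not a complete build script.

    TaskProvider<Task> itestJar = project.tasks.register('itestJar', Jar) { Jar jar ->
        jar.dependsOn(project.tasks.getByName('jar'))
        jar.getArchiveClassifier().set('testing')
        // Bundle this project's main, test, and itest classes into one jar
        jar.from(project.sourceSets.main.output)
        jar.from(project.sourceSets.test.output)
        jar.from(project.sourceSets.itest.output)
    }

    // In the hive, pig, spark, and storm builds: also repackage the mr classes
    itestJar {
        from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
            include "org/elasticsearch/hadoop/**"
        }
    }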

buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy

Lines changed: 36 additions & 64 deletions
@@ -37,7 +37,6 @@ import org.gradle.api.tasks.bundling.Jar
 import org.gradle.api.tasks.compile.JavaCompile
 import org.gradle.api.tasks.javadoc.Javadoc
 import org.gradle.api.tasks.testing.Test
-import org.gradle.api.tasks.testing.TestReport
 import org.gradle.external.javadoc.JavadocOutputLevel
 import org.gradle.external.javadoc.MinimalJavadocOptions
 import org.gradle.internal.jvm.Jvm

@@ -65,7 +64,6 @@ class BuildPlugin implements Plugin<Project> {
         configureEclipse(project)
         configureMaven(project)
         configureIntegrationTestTask(project)
-        configureTestReports(project)
         configurePrecommit(project)
         configureDependenciesInfo(project)
     }

@@ -304,12 +302,6 @@ class BuildPlugin implements Plugin<Project> {
         testImplementation("org.locationtech.spatial4j:spatial4j:0.6")
         testImplementation("com.vividsolutions:jts:1.13")

-        // TODO: Remove when we merge ITests to test dirs
-        itestCompile("org.apache.hadoop:hadoop-minikdc:${project.ext.minikdcVersion}") {
-            // For some reason, the dependencies that are pulled in with MiniKDC have multiple resource files
-            // that cause issues when they are loaded. We exclude the ldap schema data jar to get around this.
-            exclude group: "org.apache.directory.api", module: "api-ldap-schema-data"
-        }
         itestImplementation(project.sourceSets.main.output)
         itestImplementation(project.configurations.testImplementation)
         itestImplementation(project.configurations.provided)

@@ -590,72 +582,52 @@ class BuildPlugin implements Plugin<Project> {
      * @param project to be configured
      */
     private static void configureIntegrationTestTask(Project project) {
-        Jar hadoopTestingJar = project.rootProject.tasks.findByName('hadoopTestingJar') as Jar
-        if (hadoopTestingJar == null) {
-            // jar used for testing Hadoop remotely (es-hadoop + tests)
-            hadoopTestingJar = project.rootProject.tasks.create('hadoopTestingJar', Jar)
-            hadoopTestingJar.dependsOn(project.rootProject.tasks.getByName('jar'))
-            hadoopTestingJar.classifier = 'testing'
-            project.logger.info("Created Remote Testing Jar")
-        }
-
-        // Add this project's classes to the testing uber-jar
-        hadoopTestingJar.from(project.sourceSets.test.output)
-        hadoopTestingJar.from(project.sourceSets.main.output)
-        hadoopTestingJar.from(project.sourceSets.itest.output)
-
-        Test integrationTest = project.tasks.create('integrationTest', RestTestRunnerTask.class)
-        integrationTest.dependsOn(hadoopTestingJar)
-
-        integrationTest.testClassesDirs = project.sourceSets.itest.output.classesDirs
-        integrationTest.classpath = project.sourceSets.itest.runtimeClasspath
-        integrationTest.excludes = ["**/Abstract*.class"]
-
-        integrationTest.ignoreFailures = false
-
-        integrationTest.executable = "${project.ext.get('runtimeJavaHome')}/bin/java"
-        integrationTest.minHeapSize = "256m"
-        integrationTest.maxHeapSize = "2g"
-
-        integrationTest.testLogging {
-            displayGranularity 0
-            events "started", "failed" //, "standardOut", "standardError"
-            exceptionFormat "full"
-            showCauses true
-            showExceptions true
-            showStackTraces true
-            stackTraceFilters "groovy"
-            minGranularity 2
-            maxGranularity 2
-        }
-
-        integrationTest.reports.html.enabled = false
-
-        // Only add cluster settings if it's not the root project
-        if (project != project.rootProject) {
+        if (project != project.rootProject) {
+            TaskProvider<Task> itestJar = project.tasks.register('itestJar', Jar) { Jar itestJar ->
+                itestJar.dependsOn(project.tasks.getByName('jar'))
+                itestJar.getArchiveClassifier().set('testing')
+
+                // Add this project's classes to the testing uber-jar
+                itestJar.from(project.sourceSets.main.output)
+                itestJar.from(project.sourceSets.test.output)
+                itestJar.from(project.sourceSets.itest.output)
+            }
+
+            Test integrationTest = project.tasks.create('integrationTest', RestTestRunnerTask.class)
+            integrationTest.dependsOn(itestJar)
+
+            integrationTest.testClassesDirs = project.sourceSets.itest.output.classesDirs
+            integrationTest.classpath = project.sourceSets.itest.runtimeClasspath
+            integrationTest.excludes = ["**/Abstract*.class"]
+
+            integrationTest.ignoreFailures = false
+
+            integrationTest.executable = "${project.ext.get('runtimeJavaHome')}/bin/java"
+            integrationTest.minHeapSize = "256m"
+            integrationTest.maxHeapSize = "2g"
+
+            integrationTest.testLogging {
+                displayGranularity 0
+                events "started", "failed" //, "standardOut", "standardError"
+                exceptionFormat "full"
+                showCauses true
+                showExceptions true
+                showStackTraces true
+                stackTraceFilters "groovy"
+                minGranularity 2
+                maxGranularity 2
+            }
+
+            integrationTest.reports.html.enabled = false
+
+            // Only add cluster settings if it's not the root project
             project.logger.info "Configuring ${project.name} integrationTest task to use ES Fixture"
             // Create the cluster fixture around the integration test.
             // There's probably a more elegant way to do this in Gradle
             project.plugins.apply("es.hadoop.cluster")
         }
     }

-    /**
-     * Configure the root testReport task with the test tasks in this project to report on, creating the report task
-     * on root if it is not created yet.
-     * @param project to configure
-     */
-    private static void configureTestReports(Project project) {
-        TestReport testReport = project.rootProject.getTasks().findByName('testReport') as TestReport
-        if (testReport == null) {
-            // Create the task on root if it is not created yet.
-            testReport = project.rootProject.getTasks().create('testReport', TestReport.class)
-            testReport.setDestinationDir(project.rootProject.file("${project.rootProject.getBuildDir()}/reports/allTests"))
-        }
-        testReport.reportOn(project.getTasks().getByName('test'))
-        testReport.reportOn(project.getTasks().getByName('integrationTest'))
-    }
-
     /**
      * @param project that belongs to a git repo
      * @return the file containing the hash for the current branch
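Worth noting in the hunk above: itestJar is now registered lazily through tasks.register, so other build logic should go through the returned TaskProvider rather than realizing the task eagerly. A minimal sketch under that assumption (the printItestJar task is hypothetical, not part of this PR):

    def itestJarProvider = project.tasks.named('itestJar', Jar)
    project.tasks.register('printItestJar') {
        dependsOn(itestJarProvider)
        doLast {
            // archiveFile is the lazy provider for the jar's output location
            println itestJarProvider.get().archiveFile.get().asFile
        }
    }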

hive/build.gradle

Lines changed: 8 additions & 0 deletions
@@ -21,6 +21,14 @@ jar {
     }
 }

+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
     classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)

mr/build.gradle

Lines changed: 5 additions & 0 deletions
@@ -20,6 +20,11 @@ dependencies {
     testImplementation(project.ext.hadoopClient)
     testImplementation("io.netty:netty-all:4.0.29.Final")
     testImplementation("org.elasticsearch:securemock:1.2")
+    itestImplementation("org.apache.hadoop:hadoop-minikdc:${project.ext.minikdcVersion}") {
+        // For some reason, the dependencies that are pulled in with MiniKDC have multiple resource files
+        // that cause issues when they are loaded. We exclude the ldap schema data jar to get around this.
+        exclude group: "org.apache.directory.api", module: "api-ldap-schema-data"
+    }
 }

 String generatedResources = "$buildDir/generated-resources/main"

mr/src/itest/java/org/elasticsearch/hadoop/Provisioner.java

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ public abstract class Provisioner {
     // init ES-Hadoop JAR
     // expect the jar under build\libs
     try {
-        File folder = new File(".." + File.separator + "build" + File.separator + "libs" + File.separator).getCanonicalFile();
+        File folder = new File("build" + File.separator + "libs" + File.separator).getCanonicalFile();
         // find proper jar
         File[] files = folder.listFiles(new FileFilter() {
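With this path change, each project's Provisioner resolves the testing jar from its own build/libs directory rather than the root project's. The FileFilter body is elided in the diff; a self-contained sketch of the lookup, with a hypothetical name filter (matching on the 'testing' classifier is an assumption, not shown in the diff):

    import java.io.File;
    import java.io.FileFilter;

    class JarLookupSketch {
        static File findTestingJar() throws Exception {
            // Resolve relative to the current project, not the root project
            File folder = new File("build" + File.separator + "libs").getCanonicalFile();
            File[] files = folder.listFiles(new FileFilter() {
                @Override
                public boolean accept(File file) {
                    String name = file.getName();
                    // Hypothetical filter: the real matching logic is not in this diff
                    return name.endsWith(".jar") && name.contains("testing");
                }
            });
            return (files != null && files.length > 0) ? files[0] : null;
        }
    }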

pig/build.gradle

Lines changed: 8 additions & 0 deletions
@@ -21,6 +21,14 @@ jar {
     }
 }

+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
     classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)

qa/kerberos/build.gradle

Lines changed: 26 additions & 14 deletions
@@ -98,8 +98,8 @@ if (disableTests) {

     // Build uber storm jar for testing Storm remotely (es-hadoop + es-storm + qa tests)
     Jar qaKerberosStormJar = project.tasks.create('kerberosStormJar', Jar)
-    qaKerberosStormJar.dependsOn(project.rootProject.tasks.getByName('jar'))
-    qaKerberosStormJar.dependsOn(project.rootProject.tasks.getByName('hadoopTestingJar'))
+    qaKerberosStormJar.dependsOn(project(':elasticsearch-storm').tasks.getByName('jar'))
+    qaKerberosStormJar.dependsOn(project(':elasticsearch-storm').tasks.getByName('itestJar'))
     qaKerberosStormJar.classifier = 'storm-testing'

     // Add projects to the storm testing uber-jar

@@ -233,8 +233,14 @@ if (disableTests) {

     // Fixtures will be depending on the jar and test jar artifacts
     def jar = project.tasks.getByName('jar') as org.gradle.jvm.tasks.Jar
-    def testingJar = project.rootProject.tasks.findByName('hadoopTestingJar') as Jar
-
+    def kerberosItestJar = project.tasks.findByName('itestJar') as Jar
+    def mrJar = project(':elasticsearch-hadoop-mr').tasks.getByName('jar') as Jar
+    def mrItestJar = project(':elasticsearch-hadoop-mr').tasks.getByName('itestJar') as Jar
+    def hiveItestJar = project(':elasticsearch-hadoop-hive').tasks.getByName('itestJar') as Jar
+    def pigItestJar = project(':elasticsearch-hadoop-pig').tasks.getByName('itestJar') as Jar
+    def sparkItestJar = project(':elasticsearch-spark-20').tasks.getByName('itestJar') as Jar
+    def stormItestJar = project(':elasticsearch-storm').tasks.getByName('itestJar') as Jar
+
     // Need these for SSL items, test data, and scripts
     File resourceDir = project.sourceSets.main.resources.getSrcDirs().head()
     File mrItestResourceDir = project(":elasticsearch-hadoop-mr").sourceSets.itest.resources.getSrcDirs().head()

@@ -330,7 +336,7 @@ if (disableTests) {
     // Add the ES-Hadoop jar to the resource manager classpath so that it can load the token renewer implementation
     // for ES tokens. Otherwise, tokens may not be cancelled at the end of the job.
     s.role('resourcemanager') { RoleConfiguration r ->
-        r.addEnvironmentVariable('YARN_USER_CLASSPATH', testingJar.archivePath.toString())
+        r.addEnvironmentVariable('YARN_USER_CLASSPATH', mrJar.archivePath.toString())
         r.settingsFile('yarn-site.xml') { SettingsContainer.FileSettings f ->
             // Add settings specifically for ES Node to allow for cancelling the tokens
             f.addSetting('es.nodes', esAddress)

@@ -359,7 +365,13 @@ if (disableTests) {
         s.addSetting('es.nodes', esAddress)
     }
     config.addDependency(jar)
-    config.addDependency(testingJar)
+    config.addDependency(kerberosItestJar)
+    config.addDependency(mrJar)
+    config.addDependency(mrItestJar)
+    config.addDependency(hiveItestJar)
+    config.addDependency(pigItestJar)
+    config.addDependency(sparkItestJar)
+    config.addDependency(stormItestJar)

     // We need to create a tmp directory in hadoop before history server does, because history server will set permissions
     // wrong.

@@ -435,7 +447,7 @@ if (disableTests) {
     useCluster(testClusters.integTest)
     dependsOn(copyData, setupUsers)
     jobJar = jar.archivePath
-    libJars(testingJar.archivePath)
+    libJars(kerberosItestJar.archivePath, mrItestJar.archivePath)
     jobClass = 'org.elasticsearch.hadoop.qa.kerberos.mr.LoadToES'
     jobSettings([
         'es.resource': 'qa_kerberos_mr_data',

@@ -462,7 +474,7 @@ if (disableTests) {
     useCluster(testClusters.integTest)
     dependsOn(mrLoadData)
     jobJar = jar.archivePath
-    libJars(testingJar.archivePath)
+    libJars(kerberosItestJar.archivePath, mrItestJar.archivePath)
     jobClass = 'org.elasticsearch.hadoop.qa.kerberos.mr.ReadFromES'
     jobSettings([
         'es.resource': 'qa_kerberos_mr_data',

@@ -495,7 +507,7 @@ if (disableTests) {
     // principal = clientPrincipal + realm
     // keytab = clientKeytab.toString()
     jobJar = jar.archivePath
-    libJars(testingJar.archivePath)
+    libJars(kerberosItestJar.archivePath, sparkItestJar.archivePath)
     jobClass = 'org.elasticsearch.hadoop.qa.kerberos.spark.LoadToES'
     jobSettings([
         'spark.es.resource': 'qa_kerberos_spark_data',

@@ -523,7 +535,7 @@ if (disableTests) {
     // principal = clientPrincipal + realm
     // keytab = clientKeytab.toString()
     jobJar = jar.archivePath
-    libJars(testingJar.archivePath)
+    libJars(kerberosItestJar.archivePath, sparkItestJar.archivePath)
     jobClass = 'org.elasticsearch.hadoop.qa.kerberos.spark.ReadFromES'
     jobSettings([
         'spark.es.resource': 'qa_kerberos_spark_data',

@@ -565,7 +577,7 @@ if (disableTests) {
     dependsOn(jar, setupUsers, copyData, patchBeeline)
     hivePrincipal = hivePrincipalName + realm
     script = new File(resourceDir, 'hive/load_to_es.sql')
-    libJars(testingJar.archivePath)
+    libJars(kerberosItestJar.archivePath, hiveItestJar.archivePath)
     environmentVariables.putAll([
         'HADOOP_CLIENT_OPTS':
             "-Djava.security.krb5.conf=${krb5Conf.toString()} " +

@@ -582,7 +594,7 @@ if (disableTests) {
     dependsOn(hiveLoadData)
     hivePrincipal = hivePrincipalName + realm
     script = new File(resourceDir, 'hive/read_from_es.sql')
-    libJars(testingJar.archivePath)
+    libJars(kerberosItestJar.archivePath, hiveItestJar.archivePath)
     environmentVariables.putAll([
         'HADOOP_CLIENT_OPTS':
             "-Djava.security.krb5.conf=${krb5Conf.toString()} " +

@@ -602,7 +614,7 @@ if (disableTests) {
     useCluster(testClusters.integTest)
     dependsOn(jar, setupUsers, copyData)
     script = new File(resourceDir, 'pig/load_to_es.pig')
-    libJars(testingJar.archivePath)
+    libJars(kerberosItestJar.archivePath, pigItestJar.archivePath)
     environmentVariables.putAll([
         'PIG_OPTS': "-Djava.security.krb5.conf=${krb5Conf.toString()}"
     ])

@@ -614,7 +626,7 @@ if (disableTests) {
     useCluster(testClusters.integTest)
     dependsOn(pigLoadData)
     script = new File(resourceDir, 'pig/read_from_es.pig')
-    libJars(testingJar.archivePath)
+    libJars(kerberosItestJar.archivePath, pigItestJar.archivePath)
     environmentVariables.putAll([
         'PIG_OPTS': "-Djava.security.krb5.conf=${krb5Conf.toString()}"
     ])

spark/core/itest/java/org/elasticsearch/spark/integration/SparkUtils.java

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ public abstract class SparkUtils {
     // init ES-Hadoop JAR
     // expect the jar under build\libs
     try {
-        File folder = new File(".." + File.separator + ".." + File.separator + "build" + File.separator + "libs" + File.separator).getCanonicalFile();
+        File folder = new File("build" + File.separator + "libs" + File.separator).getCanonicalFile();
         System.out.println(folder.getAbsolutePath());
         // find proper jar
         File[] files = folder.listFiles(new FileFilter() {

spark/sql-13/build.gradle

Lines changed: 8 additions & 0 deletions
@@ -145,6 +145,14 @@ jar {
     }
 }

+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     dependsOn compileScala
     source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava

spark/sql-20/build.gradle

Lines changed: 8 additions & 0 deletions
@@ -159,6 +159,14 @@ jar {
     }
 }

+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     dependsOn compileScala
     source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava

storm/build.gradle

Lines changed: 8 additions & 0 deletions
@@ -20,6 +20,14 @@ jar {
     }
 }

+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
     classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
