
Commit 7c4f97d

Use subproject to generate distribution instead of root (#1463)
Create a dist project and use it to construct the zip/uberjar instead of the root project.
Add javadocSources and javadocElements configurations. The exports needed for javadoc generation differ from the exports needed for source jars: the javadoc set contains only Java code (including generated code), while the source-jar set contains the original code (Java and Scala, no generated Java).
Add Scala code to the regular exported sources configurations. In Scala projects that add extra source dirs, add the new sources to the correct configurations.
1 parent ca60bf1 commit 7c4f97d
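
For context, the javadoc export/import pairing described above relies on Gradle's attribute-based variant matching. The snippet below is a minimal sketch, not part of this commit: it condenses the producer and consumer halves that BuildPlugin.groovy wires up (omitting the LibraryElements attribute) so the matching is easier to see.

    // Producer side (each subproject): a consumable, non-resolvable configuration that
    // exposes javadoc inputs as outgoing artifacts.
    Configuration javadocElements = project.configurations.create('javadocElements')
    javadocElements.canBeConsumed = true
    javadocElements.canBeResolved = false
    javadocElements.attributes {
        attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'javadoc-source'))
    }

    // Consumer side (for example the dist project): a resolvable, non-consumable configuration
    // carrying the same 'javadoc-source' Usage attribute, so a dependency declared as
    // javadocSources(project(':elasticsearch-hadoop-mr')) resolves to that subproject's
    // javadocElements artifacts rather than its jar.
    Configuration javadocSources = project.configurations.create('javadocSources')
    javadocSources.canBeConsumed = false
    javadocSources.canBeResolved = true
    javadocSources.attributes {
        attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'javadoc-source'))
    }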

9 files changed: 220 additions, 4 deletions


buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy

Lines changed: 52 additions & 1 deletion
@@ -27,6 +27,7 @@ import org.gradle.api.plugins.JavaLibraryPlugin
 import org.gradle.api.plugins.JavaPlugin
 import org.gradle.api.plugins.MavenPlugin
 import org.gradle.api.plugins.MavenPluginConvention
+import org.gradle.api.plugins.scala.ScalaPlugin
 import org.gradle.api.tasks.SourceSet
 import org.gradle.api.tasks.SourceSetContainer
 import org.gradle.api.tasks.TaskProvider
@@ -104,6 +105,37 @@ class BuildPlugin implements Plugin<Project> {
                attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'java-source'))
                attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
            }
+
+           // Import javadoc sources
+           Configuration javadocSources = project.configurations.create("javadocSources")
+           javadocSources.canBeConsumed = false
+           javadocSources.canBeResolved = true
+           javadocSources.attributes {
+               // Changing USAGE is required when working with Scala projects, otherwise the source dirs get pulled
+               // into incremental compilation analysis.
+               attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'javadoc-source'))
+               attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
+           }
+
+           // Export source configuration
+           Configuration javadocElements = project.configurations.create("javadocElements")
+           javadocElements.canBeConsumed = true
+           javadocElements.canBeResolved = false
+           javadocElements.extendsFrom(sources)
+           javadocElements.attributes {
+               // Changing USAGE is required when working with Scala projects, otherwise the source dirs get pulled
+               // into incremental compilation analysis.
+               attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'javadoc-source'))
+               attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
+           }
+
+           // Export configuration for archives that should be in the distribution
+           Configuration distElements = project.configurations.create('distElements')
+           distElements.canBeConsumed = true
+           distElements.canBeResolved = false
+           distElements.attributes {
+               attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'packaging'))
+           }
        }
 
        if (project.path.startsWith(":qa")) {
@@ -209,9 +241,20 @@ class BuildPlugin implements Plugin<Project> {
        // TODO: Remove all root project distribution logic. It should exist in a separate dist project.
        if (project != project.rootProject) {
            SourceSet mainSourceSet = project.sourceSets.main
+
+           // Add java source to project's source elements and javadoc elements
            FileCollection javaSourceDirs = mainSourceSet.java.sourceDirectories
            javaSourceDirs.each { File srcDir ->
                project.getArtifacts().add('sourceElements', srcDir)
+               project.getArtifacts().add('javadocElements', srcDir)
+           }
+
+           // Add scala sources to source elements if that plugin is applied
+           project.getPlugins().withType(ScalaPlugin.class) {
+               FileCollection scalaSourceDirs = mainSourceSet.scala.sourceDirectories
+               scalaSourceDirs.each { File scalaSrcDir ->
+                   project.getArtifacts().add('sourceElements', scalaSrcDir)
+               }
            }
        }
 
@@ -245,6 +288,10 @@ class BuildPlugin implements Plugin<Project> {
            spec.expand(copyright: new Date().format('yyyy'), version: project.version)
        }
 
+       if (project != project.rootProject) {
+           project.getArtifacts().add('distElements', jar)
+       }
+
        // Jar up the sources of the project
        Jar sourcesJar = project.tasks.create('sourcesJar', Jar)
        sourcesJar.dependsOn(project.tasks.classes)
@@ -253,6 +300,7 @@ class BuildPlugin implements Plugin<Project> {
        // TODO: Remove when root project does not handle distribution
        if (project != project.rootProject) {
            sourcesJar.from(project.configurations.additionalSources)
+           project.getArtifacts().add('distElements', sourcesJar)
        }
 
        // Configure javadoc
@@ -267,7 +315,7 @@ class BuildPlugin implements Plugin<Project> {
        ]
        // TODO: Remove when root project does not handle distribution
        if (project != project.rootProject) {
-           javadoc.source = project.files(project.configurations.additionalSources)
+           javadoc.source += project.files(project.configurations.javadocSources)
        }
        // Set javadoc executable to runtime Java (1.8)
        javadoc.executable = new File(project.ext.runtimeJavaHome, 'bin/javadoc')
@@ -301,6 +349,9 @@ class BuildPlugin implements Plugin<Project> {
        Jar javadocJar = project.tasks.create('javadocJar', Jar)
        javadocJar.classifier = 'javadoc'
        javadocJar.from(project.tasks.javadoc)
+       if (project != project.rootProject) {
+           project.getArtifacts().add('distElements', javadocJar)
+       }
 
        // Task for creating ALL of a project's jars - Like assemble, but this includes the sourcesJar and javadocJar.
        Task pack = project.tasks.create('pack')

buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/IntegrationBuildPlugin.groovy

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@ class IntegrationBuildPlugin implements Plugin<Project> {
            // Configure root javadoc process to compile and consume this project's javadocs
            Javadoc rootJavadoc = project.rootProject.getTasks().getByName("javadoc") as Javadoc
            Javadoc subJavadoc = project.getTasks().getByName('javadoc') as Javadoc
-           rootJavadoc.setSource(subJavadoc.source)
+           rootJavadoc.source += subJavadoc.source
            rootJavadoc.classpath += project.files(project.sourceSets.main.compileClasspath)
        }
    }

dist/build.gradle

Lines changed: 130 additions & 0 deletions
@@ -0,0 +1,130 @@
+
+apply plugin: 'es.hadoop.build'
+
+description = "Elasticsearch for Apache Hadoop"
+project.archivesBaseName = 'elasticsearch-hadoop'
+
+configurations {
+    embedded {
+        canBeResolved = true
+        canBeConsumed = false
+        transitive = false
+    }
+    dist {
+        canBeResolved = true
+        canBeConsumed = false
+        attributes {
+            attribute(Usage.USAGE_ATTRIBUTE, objects.named(Usage, 'packaging'))
+        }
+    }
+}
+
+def distProjects = [":elasticsearch-hadoop-mr", ":elasticsearch-hadoop-hive", ":elasticsearch-hadoop-pig",
+                    ":elasticsearch-spark-20", ":elasticsearch-storm"]
+distProjects.each { distProject ->
+    dependencies {
+        // This is only going to pull in each project's regular jar to create the project-wide uberjar.
+        embedded(project(distProject))
+        // To squash Javadoc warnings.
+        compileOnly(project(distProject))
+        // This will pull all java sources (including generated) for the project-wide javadoc.
+        javadocSources(project(distProject))
+        // This will pull all non-generated sources for the project-wide source jar.
+        additionalSources(project(distProject))
+        // This will pull in the regular jar, javadoc jar, and source jar to be packaged in the distribution.
+        dist(project(distProject))
+    }
+}
+
+dependencies {
+    // For Uber pom (and Javadoc to a lesser extent)
+    implementation("commons-logging:commons-logging:1.1.1")
+    implementation("commons-httpclient:commons-httpclient:3.0.1")
+    implementation("commons-codec:commons-codec:1.4")
+    implementation("javax.xml.bind:jaxb-api:2.3.1")
+    implementation("org.apache.hive:hive-service:$hiveVersion") {
+        exclude module: "log4j-slf4j-impl"
+    }
+    implementation("org.apache.hive:hive-exec:$hiveVersion")
+    implementation("org.apache.hive:hive-metastore:$hiveVersion")
+    implementation("org.apache.pig:pig:$pigVersion:h2")
+    implementation("org.apache.spark:spark-core_${project.ext.scala211MajorVersion}:$spark20Version") {
+        exclude group: 'javax.servlet'
+        exclude group: 'org.apache.hadoop'
+    }
+    implementation("org.apache.spark:spark-yarn_${project.ext.scala211MajorVersion}:$spark20Version") {
+        exclude group: 'org.apache.hadoop'
+    }
+    implementation("org.apache.spark:spark-sql_${project.ext.scala211MajorVersion}:$spark20Version") {
+        exclude group: 'org.apache.hadoop'
+    }
+    implementation("org.apache.spark:spark-streaming_${project.ext.scala211MajorVersion}:$spark20Version") {
+        exclude group: 'org.apache.hadoop'
+    }
+    implementation("org.scala-lang:scala-library:$scala211Version")
+    implementation("org.scala-lang:scala-reflect:$scala211Version")
+    implementation("org.apache.storm:storm-core:$stormVersion") {
+        exclude module: "log4j-slf4j-impl"
+    }
+    implementation(project.ext.hadoopClient)
+    implementation("org.apache.hadoop:hadoop-common:${project.ext.hadoopVersion}")
+    implementation("org.apache.hadoop:hadoop-mapreduce-client-core:${project.ext.hadoopVersion}")
+    implementation("org.codehaus.jackson:jackson-mapper-asl:${project.ext.jacksonVersion}")
+    implementation("org.codehaus.jackson:jackson-core-asl:${project.ext.jacksonVersion}")
+    implementation("joda-time:joda-time:$jodaVersion")
+    compileOnly("org.apache.spark:spark-catalyst_${project.ext.scala211MajorVersion}:$spark20Version")
+}
+
+// Configure uber jar
+jar {
+    dependsOn(project.configurations.embedded)
+
+    manifest {
+        attributes['Implementation-Title'] = 'elasticsearch-hadoop'
+    }
+
+    from(project.configurations.embedded.collect { it.isDirectory() ? it : zipTree(it)}) {
+        include "org/elasticsearch/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+
+    // Each integration will be copying its entire jar contents into this master jar.
+    // There will be lots of duplicates since they all package up the core code inside of them.
+    duplicatesStrategy = DuplicatesStrategy.EXCLUDE
+}
+
+javadoc {
+    options {
+        header = "elasticsearch-hadoop"
+    }
+}
+
+// Name of the directory under the root of the zip file that will contain the zip contents
+String zipContentDir = "elasticsearch-hadoop-${project.version}"
+
+// Create a zip task for creating the distribution
+task('distZip', type: Zip) {
+    group = 'Distribution'
+    description = "Builds zip archive, containing all jars and docs, suitable for download page."
+
+    dependsOn(tasks.pack)
+
+    from(project.rootDir) {
+        include('README.md')
+        include('LICENSE.txt')
+        include('NOTICE.txt')
+        into(zipContentDir)
+    }
+
+    into("$zipContentDir/dist") {
+        from(project.configurations.dist)
+        from(tasks.jar)
+        from(tasks.javadocJar)
+        from(tasks.sourcesJar)
+    }
+}
+
+distribution {
+    dependsOn(distZip)
+}
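
Usage note (an assumption, not stated in the commit): with the dist project included via settings.gradle below, the distribution archive would presumably be produced by invoking the new task directly, for example

    ./gradlew :dist:distZip

The task depends on the dist project's own pack task and then copies in the artifacts resolved through the dist configuration, together with the uberjar, javadoc jar, and sources jar built above.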

hive/build.gradle

Lines changed: 1 addition & 0 deletions
@@ -37,6 +37,7 @@ dependencies {
    }
 
    additionalSources(project(":elasticsearch-hadoop-mr"))
+   javadocSources(project(":elasticsearch-hadoop-mr"))
 }
 
 jar {

pig/build.gradle

Lines changed: 1 addition & 0 deletions
@@ -35,6 +35,7 @@ dependencies {
    itestImplementation("dk.brics.automaton:automaton:1.11-8")
 
    additionalSources(project(":elasticsearch-hadoop-mr"))
+   javadocSources(project(":elasticsearch-hadoop-mr"))
 }
 
 jar {

settings.gradle

Lines changed: 2 additions & 0 deletions
@@ -18,6 +18,8 @@ project(":sql-20").name = "elasticsearch-spark-20"
 include 'storm'
 project(":storm").name = "elasticsearch-storm"
 
+include 'dist'
+
 include 'test'
 include 'test:shared'
 include 'test:fixtures'

spark/sql-13/build.gradle

Lines changed: 16 additions & 1 deletion
@@ -85,6 +85,20 @@ sourceSets {
    itest.resources.srcDirs += "$coreSrc/itest/resources"
 }
 
+def javaFilesOnly = { FileTreeElement spec ->
+   spec.file.name.endsWith('.java') || spec.isDirectory()
+}
+
+artifacts {
+   sourceElements(project.file("$coreSrc/main/scala"))
+   // Add java files from core source to javadocElements.
+   project.fileTree("$coreSrc/main/scala").include(javaFilesOnly).each {
+       javadocElements(it)
+   }
+   project.fileTree("src/main/scala").include(javaFilesOnly).each {
+       javadocElements(it)
+   }
+}
 
 // currently the outside project folders are transformed into linked resources however
 // Gradle only supports one so the project will be invalid as not all sources will be in there
@@ -169,11 +183,12 @@ dependencies {
    }
 
    additionalSources(project(":elasticsearch-hadoop-mr"))
+   javadocSources(project(":elasticsearch-hadoop-mr"))
 }
 
 // Export generated Java code from the genjavadoc compiler plugin
 artifacts {
-   sourceElements(project.file("$buildDir/generated/java")) {
+   javadocElements(project.file("$buildDir/generated/java")) {
        builtBy compileScala
    }
 }

spark/sql-20/build.gradle

Lines changed: 16 additions & 1 deletion
@@ -71,6 +71,20 @@ sourceSets {
    itest.resources.srcDirs += "$coreSrc/itest/resources"
 }
 
+def javaFilesOnly = { FileTreeElement spec ->
+   spec.file.name.endsWith('.java') || spec.isDirectory()
+}
+
+artifacts {
+   sourceElements(project.file("$coreSrc/main/scala"))
+   // Add java files from core source to javadocElements.
+   project.fileTree("$coreSrc/main/scala").include(javaFilesOnly).each {
+       javadocElements(it)
+   }
+   project.fileTree("src/main/scala").include(javaFilesOnly).each {
+       javadocElements(it)
+   }
+}
 
 // currently the outside project folders are transformed into linked resources however
 // Gradle only supports one so the project will be invalid as not all sources will be in there
@@ -165,11 +179,12 @@ dependencies {
    }
 
    additionalSources(project(":elasticsearch-hadoop-mr"))
+   javadocSources(project(":elasticsearch-hadoop-mr"))
 }
 
 // Export generated Java code from the genjavadoc compiler plugin
 artifacts {
-   sourceElements(project.file("$buildDir/generated/java")) {
+   javadocElements(project.file("$buildDir/generated/java")) {
        builtBy compileScala
    }
 }

storm/build.gradle

Lines changed: 1 addition & 0 deletions
@@ -33,6 +33,7 @@ dependencies {
    itestImplementation("com.twitter:carbonite:1.4.0")
 
    additionalSources(project(":elasticsearch-hadoop-mr"))
+   javadocSources(project(":elasticsearch-hadoop-mr"))
 }
 
 jar {
