Skip to content

Use subproject to generate distribution instead of root #1463

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Jun 8, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import org.gradle.api.plugins.JavaLibraryPlugin
import org.gradle.api.plugins.JavaPlugin
import org.gradle.api.plugins.MavenPlugin
import org.gradle.api.plugins.MavenPluginConvention
import org.gradle.api.plugins.scala.ScalaPlugin
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.SourceSetContainer
import org.gradle.api.tasks.TaskProvider
Expand Down Expand Up @@ -104,6 +105,37 @@ class BuildPlugin implements Plugin<Project> {
attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'java-source'))
attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
}

// Import javadoc sources
Configuration javadocSources = project.configurations.create("javadocSources")
javadocSources.canBeConsumed = false
javadocSources.canBeResolved = true
javadocSources.attributes {
// Changing USAGE is required when working with Scala projects, otherwise the source dirs get pulled
// into incremental compilation analysis.
attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'javadoc-source'))
attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
}

// Export source configuration
Configuration javadocElements = project.configurations.create("javadocElements")
javadocElements.canBeConsumed = true
javadocElements.canBeResolved = false
javadocElements.extendsFrom(sources)
javadocElements.attributes {
// Changing USAGE is required when working with Scala projects, otherwise the source dirs get pulled
// into incremental compilation analysis.
attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'javadoc-source'))
attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
}

// Export configuration for archives that should be in the distribution
Configuration distElements = project.configurations.create('distElements')
distElements.canBeConsumed = true
distElements.canBeResolved = false
distElements.attributes {
attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'packaging'))
}
}

if (project.path.startsWith(":qa")) {
Expand Down Expand Up @@ -209,9 +241,20 @@ class BuildPlugin implements Plugin<Project> {
// TODO: Remove all root project distribution logic. It should exist in a separate dist project.
if (project != project.rootProject) {
SourceSet mainSourceSet = project.sourceSets.main

// Add java source to project's source elements and javadoc elements
FileCollection javaSourceDirs = mainSourceSet.java.sourceDirectories
javaSourceDirs.each { File srcDir ->
project.getArtifacts().add('sourceElements', srcDir)
project.getArtifacts().add('javadocElements', srcDir)
}

// Add scala sources to source elements if that plugin is applied
project.getPlugins().withType(ScalaPlugin.class) {
FileCollection scalaSourceDirs = mainSourceSet.scala.sourceDirectories
scalaSourceDirs.each { File scalaSrcDir ->
project.getArtifacts().add('sourceElements', scalaSrcDir)
}
}
}

Expand Down Expand Up @@ -245,6 +288,10 @@ class BuildPlugin implements Plugin<Project> {
spec.expand(copyright: new Date().format('yyyy'), version: project.version)
}

if (project != project.rootProject) {
project.getArtifacts().add('distElements', jar)
}

// Jar up the sources of the project
Jar sourcesJar = project.tasks.create('sourcesJar', Jar)
sourcesJar.dependsOn(project.tasks.classes)
Expand All @@ -253,6 +300,7 @@ class BuildPlugin implements Plugin<Project> {
// TODO: Remove when root project does not handle distribution
if (project != project.rootProject) {
sourcesJar.from(project.configurations.additionalSources)
project.getArtifacts().add('distElements', sourcesJar)
}

// Configure javadoc
Expand All @@ -267,7 +315,7 @@ class BuildPlugin implements Plugin<Project> {
]
// TODO: Remove when root project does not handle distribution
if (project != project.rootProject) {
javadoc.source = project.files(project.configurations.additionalSources)
javadoc.source += project.files(project.configurations.javadocSources)
}
// Set javadoc executable to runtime Java (1.8)
javadoc.executable = new File(project.ext.runtimeJavaHome, 'bin/javadoc')
Expand Down Expand Up @@ -301,6 +349,9 @@ class BuildPlugin implements Plugin<Project> {
Jar javadocJar = project.tasks.create('javadocJar', Jar)
javadocJar.classifier = 'javadoc'
javadocJar.from(project.tasks.javadoc)
if (project != project.rootProject) {
project.getArtifacts().add('distElements', javadocJar)
}

// Task for creating ALL of a project's jars - Like assemble, but this includes the sourcesJar and javadocJar.
Task pack = project.tasks.create('pack')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ class IntegrationBuildPlugin implements Plugin<Project> {
// Configure root javadoc process to compile and consume this project's javadocs
Javadoc rootJavadoc = project.rootProject.getTasks().getByName("javadoc") as Javadoc
Javadoc subJavadoc = project.getTasks().getByName('javadoc') as Javadoc
rootJavadoc.setSource(subJavadoc.source)
rootJavadoc.source += subJavadoc.source
rootJavadoc.classpath += project.files(project.sourceSets.main.compileClasspath)
}
}
Expand Down
130 changes: 130 additions & 0 deletions dist/build.gradle
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@

apply plugin: 'es.hadoop.build'

description = "Elasticsearch for Apache Hadoop"
project.archivesBaseName = 'elasticsearch-hadoop'

configurations {
embedded {
canBeResolved = true
canBeConsumed = false
transitive = false
}
dist {
canBeResolved = true
canBeConsumed = false
attributes {
attribute(Usage.USAGE_ATTRIBUTE, objects.named(Usage, 'packaging'))
Copy link
Contributor

@mark-vieira mark-vieira Apr 14, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Brilliant. Seems we've finally nailed down these configuration attributes to work the way we want.

}
}
}

// Subprojects whose artifacts are folded into the single distribution project.
def distProjects = [":elasticsearch-hadoop-mr", ":elasticsearch-hadoop-hive", ":elasticsearch-hadoop-pig",
                    ":elasticsearch-spark-20", ":elasticsearch-storm"]

for (String distProject : distProjects) {
    dependencies {
        // Pulls in each project's regular jar only, to build the project-wide uberjar.
        embedded(project(distProject))
        // Keeps Javadoc from emitting warnings about unresolved references.
        compileOnly(project(distProject))
        // All java sources (including generated ones) for the project-wide javadoc.
        javadocSources(project(distProject))
        // All non-generated sources for the project-wide source jar.
        additionalSources(project(distProject))
        // The regular jar, javadoc jar, and source jar that get packaged in the distribution.
        dist(project(distProject))
    }
}

// Direct dependencies of the distribution project. These exist mainly so the
// uber pom lists the third-party libraries the bundled jars rely on, and so
// the project-wide Javadoc can resolve references into them.
dependencies {
    // For Uber pom (and Javadoc to a lesser extent)
    implementation("commons-logging:commons-logging:1.1.1")
    implementation("commons-httpclient:commons-httpclient:3.0.1")
    implementation("commons-codec:commons-codec:1.4")
    implementation("javax.xml.bind:jaxb-api:2.3.1")
    implementation("org.apache.hive:hive-service:$hiveVersion") {
        // NOTE(review): presumably excluded to avoid a conflicting slf4j binding — confirm
        exclude module: "log4j-slf4j-impl"
    }
    implementation("org.apache.hive:hive-exec:$hiveVersion")
    implementation("org.apache.hive:hive-metastore:$hiveVersion")
    // ':h2' is an artifact classifier on the pig dependency.
    implementation("org.apache.pig:pig:$pigVersion:h2")
    // Spark modules exclude their own Hadoop dependencies; the Hadoop client
    // used by the build is declared explicitly below.
    implementation("org.apache.spark:spark-core_${project.ext.scala211MajorVersion}:$spark20Version") {
        exclude group: 'javax.servlet'
        exclude group: 'org.apache.hadoop'
    }
    implementation("org.apache.spark:spark-yarn_${project.ext.scala211MajorVersion}:$spark20Version") {
        exclude group: 'org.apache.hadoop'
    }
    implementation("org.apache.spark:spark-sql_${project.ext.scala211MajorVersion}:$spark20Version") {
        exclude group: 'org.apache.hadoop'
    }
    implementation("org.apache.spark:spark-streaming_${project.ext.scala211MajorVersion}:$spark20Version") {
        exclude group: 'org.apache.hadoop'
    }
    implementation("org.scala-lang:scala-library:$scala211Version")
    implementation("org.scala-lang:scala-reflect:$scala211Version")
    implementation("org.apache.storm:storm-core:$stormVersion") {
        exclude module: "log4j-slf4j-impl"
    }
    implementation(project.ext.hadoopClient)
    implementation("org.apache.hadoop:hadoop-common:${project.ext.hadoopVersion}")
    implementation("org.apache.hadoop:hadoop-mapreduce-client-core:${project.ext.hadoopVersion}")
    implementation("org.codehaus.jackson:jackson-mapper-asl:${project.ext.jacksonVersion}")
    implementation("org.codehaus.jackson:jackson-core-asl:${project.ext.jacksonVersion}")
    implementation("joda-time:joda-time:$jodaVersion")
    // Compile-only: needed to resolve Spark internals during compilation/javadoc,
    // but not a runtime dependency of the distribution.
    compileOnly("org.apache.spark:spark-catalyst_${project.ext.scala211MajorVersion}:$spark20Version")
}

// Configure uber jar: merges the class files of every 'embedded' subproject jar
// into this project's single distribution jar.
jar {
    dependsOn(project.configurations.embedded)

    manifest {
        attributes['Implementation-Title'] = 'elasticsearch-hadoop'
    }

    // Unpack each embedded artifact (zipTree for jars, as-is for directories) and
    // copy only the project's own classes, build properties, and service descriptors.
    from(project.configurations.embedded.collect { it.isDirectory() ? it : zipTree(it)}) {
        include "org/elasticsearch/**"
        include "esh-build.properties"
        include "META-INF/services/*"
    }

    // Each integration will be copying its entire jar contents into this master jar.
    // There will be lots of duplicates since they all package up the core code inside of them.
    duplicatesStrategy = DuplicatesStrategy.EXCLUDE
}

// Brand the project-wide javadoc pages with the distribution name.
javadoc {
    options.header = "elasticsearch-hadoop"
}

// Name of the directory under the root of the zip file that will contain the zip contents
String zipContentDir = "elasticsearch-hadoop-${project.version}"

// Create a zip task for creating the distribution
task('distZip', type: Zip) {
    group = 'Distribution'
    description = "Builds zip archive, containing all jars and docs, suitable for download page."

    // 'pack' builds this project's jar, sourcesJar, and javadocJar before zipping.
    dependsOn(tasks.pack)

    // Top-level docs/license files go at the root of the versioned directory.
    from(project.rootDir) {
        include('README.md')
        include('LICENSE.txt')
        include('NOTICE.txt')
        into(zipContentDir)
    }

    // All subproject archives (resolved via the 'dist' configuration) plus this
    // project's own uberjar, javadoc jar, and sources jar land under <dir>/dist.
    into("$zipContentDir/dist") {
        from(project.configurations.dist)
        from(tasks.jar)
        from(tasks.javadocJar)
        from(tasks.sourcesJar)
    }
}

// The distribution lifecycle hook must wait for the zip archive to be assembled.
distribution {
    dependsOn(distZip)
}
1 change: 1 addition & 0 deletions hive/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ dependencies {
}

additionalSources(project(":elasticsearch-hadoop-mr"))
javadocSources(project(":elasticsearch-hadoop-mr"))
}

jar {
Expand Down
1 change: 1 addition & 0 deletions pig/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ dependencies {
itestImplementation("dk.brics.automaton:automaton:1.11-8")

additionalSources(project(":elasticsearch-hadoop-mr"))
javadocSources(project(":elasticsearch-hadoop-mr"))
}

jar {
Expand Down
2 changes: 2 additions & 0 deletions settings.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ project(":sql-20").name = "elasticsearch-spark-20"
include 'storm'
project(":storm").name = "elasticsearch-storm"

include 'dist'

include 'test'
include 'test:shared'
include 'test:fixtures'
Expand Down
17 changes: 16 additions & 1 deletion spark/sql-13/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,20 @@ sourceSets {
itest.resources.srcDirs += "$coreSrc/itest/resources"
}

// Spec that selects only .java sources; directories pass so the tree can still be walked.
def javaFilesOnly = { FileTreeElement spec ->
    spec.isDirectory() || spec.file.name.endsWith('.java')
}

// Expose this project's (and the shared core's) sources to consuming configurations.
artifacts {
    // The shared core scala source directory, for consumers of 'sourceElements'.
    sourceElements(project.file("$coreSrc/main/scala"))
    // Add java files from core source to javadocElements.
    project.fileTree("$coreSrc/main/scala").include(javaFilesOnly).each {
        javadocElements(it)
    }
    // Java files living alongside this project's own scala sources.
    project.fileTree("src/main/scala").include(javaFilesOnly).each {
        javadocElements(it)
    }
}

// currently the outside project folders are transformed into linked resources however
// Gradle only supports one so the project will be invalid as not all sources will be in there
Expand Down Expand Up @@ -169,11 +183,12 @@ dependencies {
}

additionalSources(project(":elasticsearch-hadoop-mr"))
javadocSources(project(":elasticsearch-hadoop-mr"))
}

// Export generated Java code from the genjavadoc compiler plugin
artifacts {
sourceElements(project.file("$buildDir/generated/java")) {
javadocElements(project.file("$buildDir/generated/java")) {
builtBy compileScala
}
}
Expand Down
17 changes: 16 additions & 1 deletion spark/sql-20/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,20 @@ sourceSets {
itest.resources.srcDirs += "$coreSrc/itest/resources"
}

// Spec that selects only .java sources; directories pass so the tree can still be walked.
def javaFilesOnly = { FileTreeElement spec ->
    spec.isDirectory() || spec.file.name.endsWith('.java')
}

// Expose this project's (and the shared core's) sources to consuming configurations.
artifacts {
    // The shared core scala source directory, for consumers of 'sourceElements'.
    sourceElements(project.file("$coreSrc/main/scala"))
    // Add java files from core source to javadocElements.
    project.fileTree("$coreSrc/main/scala").include(javaFilesOnly).each {
        javadocElements(it)
    }
    // Java files living alongside this project's own scala sources.
    project.fileTree("src/main/scala").include(javaFilesOnly).each {
        javadocElements(it)
    }
}

// currently the outside project folders are transformed into linked resources however
// Gradle only supports one so the project will be invalid as not all sources will be in there
Expand Down Expand Up @@ -165,11 +179,12 @@ dependencies {
}

additionalSources(project(":elasticsearch-hadoop-mr"))
javadocSources(project(":elasticsearch-hadoop-mr"))
}

// Export generated Java code from the genjavadoc compiler plugin
artifacts {
sourceElements(project.file("$buildDir/generated/java")) {
javadocElements(project.file("$buildDir/generated/java")) {
builtBy compileScala
}
}
Expand Down
1 change: 1 addition & 0 deletions storm/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ dependencies {
itestImplementation("com.twitter:carbonite:1.4.0")

additionalSources(project(":elasticsearch-hadoop-mr"))
javadocSources(project(":elasticsearch-hadoop-mr"))
}

jar {
Expand Down