Skip to content

Remove deprecated Gradle configurations #1427

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -273,6 +273,7 @@ class BuildPlugin implements Plugin<Project> {
}

project.configurations.compile.dependencies.all(disableTransitiveDeps)
project.configurations.implementation.dependencies.all(disableTransitiveDeps)
project.configurations.provided.dependencies.all(disableTransitiveDeps)
project.configurations.optional.dependencies.all(disableTransitiveDeps)
project.configurations.compileOnly.dependencies.all(disableTransitiveDeps)
Expand All @@ -289,31 +290,31 @@ class BuildPlugin implements Plugin<Project> {

// Detail all common dependencies
project.dependencies {
testCompile "junit:junit:${project.ext.junitVersion}"
testCompile "org.hamcrest:hamcrest-all:${project.ext.hamcrestVersion}"
testImplementation("junit:junit:${project.ext.junitVersion}")
testImplementation("org.hamcrest:hamcrest-all:${project.ext.hamcrestVersion}")

testCompile "joda-time:joda-time:2.8"
testImplementation("joda-time:joda-time:2.8")

testRuntime "org.slf4j:slf4j-log4j12:1.7.6"
testRuntime "org.apache.logging.log4j:log4j-api:${project.ext.log4jVersion}"
testRuntime "org.apache.logging.log4j:log4j-core:${project.ext.log4jVersion}"
testRuntime "org.apache.logging.log4j:log4j-1.2-api:${project.ext.log4jVersion}"
testRuntime "net.java.dev.jna:jna:4.2.2"
testCompile "org.codehaus.groovy:groovy:${project.ext.groovyVersion}:indy"
testRuntime "org.locationtech.spatial4j:spatial4j:0.6"
testRuntime "com.vividsolutions:jts:1.13"
testImplementation("org.slf4j:slf4j-log4j12:1.7.6")
testImplementation("org.apache.logging.log4j:log4j-api:${project.ext.log4jVersion}")
testImplementation("org.apache.logging.log4j:log4j-core:${project.ext.log4jVersion}")
testImplementation("org.apache.logging.log4j:log4j-1.2-api:${project.ext.log4jVersion}")
testImplementation("net.java.dev.jna:jna:4.2.2")
testImplementation("org.codehaus.groovy:groovy:${project.ext.groovyVersion}:indy")
testImplementation("org.locationtech.spatial4j:spatial4j:0.6")
testImplementation("com.vividsolutions:jts:1.13")

// TODO: Remove when we merge ITests to test dirs
itestCompile("org.apache.hadoop:hadoop-minikdc:${project.ext.minikdcVersion}") {
// For some reason, the dependencies that are pulled in with MiniKDC have multiple resource files
// that cause issues when they are loaded. We exclude the ldap schema data jar to get around this.
exclude group: "org.apache.directory.api", module: "api-ldap-schema-data"
}
itestCompile project.sourceSets.main.output
itestCompile project.configurations.testCompile
itestCompile project.configurations.provided
itestCompile project.sourceSets.test.output
itestRuntime project.configurations.testRuntime
itestImplementation(project.sourceSets.main.output)
itestImplementation(project.configurations.testImplementation)
itestImplementation(project.configurations.provided)
itestImplementation(project.sourceSets.test.output)
itestImplementation(project.configurations.testRuntimeClasspath)
}

// Deal with the messy conflicts out there
Expand Down Expand Up @@ -575,18 +576,6 @@ class BuildPlugin implements Plugin<Project> {
connection = 'scm:git:git://github.com/elastic/elasticsearch-hadoop'
developerConnection = 'scm:git:git://github.com/elastic/elasticsearch-hadoop'
}
developers {
developer {
id = 'jbaiera'
name = 'James Baiera'
email = '[email protected]'
}
developer {
id = 'costin'
name = 'Costin Leau'
email = '[email protected]'
}
}
}

groupId = "org.elasticsearch"
Expand Down Expand Up @@ -757,7 +746,7 @@ class BuildPlugin implements Plugin<Project> {

if (!project.path.startsWith(":qa")) {
TaskProvider<DependencyLicensesTask> dependencyLicenses = project.tasks.register('dependencyLicenses', DependencyLicensesTask.class) {
dependencies = project.configurations.runtime.fileCollection {
dependencies = project.configurations.runtimeClasspath.fileCollection {
!(it instanceof ProjectDependency)
}
mapping from: /hadoop-.*/, to: 'hadoop'
Expand All @@ -780,7 +769,7 @@ class BuildPlugin implements Plugin<Project> {
private static void configureDependenciesInfo(Project project) {
if (!project.path.startsWith(":qa")) {
project.tasks.register("dependenciesInfo", DependenciesInfoTask) { DependenciesInfoTask task ->
task.runtimeConfiguration = project.configurations.getByName(JavaPlugin.RUNTIME_CONFIGURATION_NAME)
task.runtimeConfiguration = project.configurations.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)
task.compileOnlyConfiguration = project.configurations.getByName(JavaPlugin.COMPILE_ONLY_CONFIGURATION_NAME)
// Create a property called mappings that points to the same mappings in the dependency licenses task.
task.getConventionMapping().map('mappings') {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ class IntegrationBuildPlugin implements Plugin<Project> {
// If this becomes a problem, we could see if there's a way to listen for new dependencies and add them
// to root at the same time.
project.afterEvaluate {
project.getConfigurations().getByName('compile').getAllDependencies()
project.getConfigurations().getByName('implementation').getAllDependencies()
.withType(ExternalDependency.class)
.each { Dependency dependency ->
// Convert the scope to optional on the root project - it will have every integration in it, and
Expand Down
8 changes: 4 additions & 4 deletions hive/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ dependencies {
provided(project(":elasticsearch-hadoop-mr"))
provided(project(path: ":elasticsearch-hadoop-mr", configuration:"compile"))

testCompile project(":elasticsearch-hadoop-mr").sourceSets.test.runtimeClasspath
itestCompile project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath
testImplementation(project(":elasticsearch-hadoop-mr").sourceSets.test.runtimeClasspath)
itestImplementation(project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath)
}

jar {
Expand All @@ -37,10 +37,10 @@ dependencies {
provided("org.apache.hive:hive-exec:$hiveVersion")
provided("org.apache.hive:hive-metastore:$hiveVersion")

itestRuntime("org.apache.hive:hive-service:$hiveVersion") {
itestImplementation("org.apache.hive:hive-service:$hiveVersion") {
exclude module: "log4j-slf4j-impl"
}
itestRuntime("org.apache.hive:hive-jdbc:$hiveVersion") {
itestImplementation("org.apache.hive:hive-jdbc:$hiveVersion") {
exclude module: "log4j-slf4j-impl"
}
}
6 changes: 3 additions & 3 deletions mr/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ dependencies {
provided("org.codehaus.jackson:jackson-mapper-asl:${project.ext.jacksonVersion}")
provided("org.codehaus.jackson:jackson-core-asl:${project.ext.jacksonVersion}")

testCompile(project.ext.hadoopClient)
testCompile "io.netty:netty-all:4.0.29.Final"
testCompile "org.elasticsearch:securemock:1.2"
testImplementation(project.ext.hadoopClient)
testImplementation("io.netty:netty-all:4.0.29.Final")
testImplementation("org.elasticsearch:securemock:1.2")
}

String generatedResources = "$buildDir/generated-resources/main"
Expand Down
14 changes: 7 additions & 7 deletions pig/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ dependencies {
provided(project(":elasticsearch-hadoop-mr"))
provided(project(path: ":elasticsearch-hadoop-mr", configuration:"compile"))

testCompile project(":elasticsearch-hadoop-mr").sourceSets.test.runtimeClasspath
itestCompile project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath
testImplementation(project(":elasticsearch-hadoop-mr").sourceSets.test.runtimeClasspath)
itestImplementation(project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath)
}

jar {
Expand All @@ -36,11 +36,11 @@ dependencies {
provided("org.apache.pig:pig:$pigVersion:$pigClassifier")
provided("joda-time:joda-time:$jodaVersion")

testRuntime("org.apache.pig:pig:$pigVersion:$pigClassifier")
testRuntime("joda-time:joda-time:$jodaVersion")
testImplementation("org.apache.pig:pig:$pigVersion:$pigClassifier")
testImplementation("joda-time:joda-time:$jodaVersion")

testRuntime "com.google.guava:guava:11.0"
testRuntime "jline:jline:0.9.94"
testImplementation("com.google.guava:guava:11.0")
testImplementation("jline:jline:0.9.94")

itestRuntime "dk.brics.automaton:automaton:1.11-8"
itestImplementation("dk.brics.automaton:automaton:1.11-8")
}
17 changes: 9 additions & 8 deletions qa/kerberos/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -48,22 +48,23 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
boolean localRepo = project.getProperties().containsKey("localRepo")

dependencies {
compile project(":elasticsearch-hadoop-mr")
compile project(":elasticsearch-storm")
implementation(project(":elasticsearch-hadoop-mr"))
implementation(project(":elasticsearch-storm"))

compile 'org.scala-lang:scala-library:2.11.8'
compile project(":elasticsearch-spark-20")
implementation('org.scala-lang:scala-library:2.11.12')
implementation('org.scala-lang:scala-reflect:2.11.12')
implementation(project(":elasticsearch-spark-20"))

compileOnly("com.fasterxml.jackson.module:jackson-module-scala_2.11:2.6.7.1")
compileOnly("com.fasterxml.jackson.core:jackson-annotations:2.6.7")
compileOnly("org.json4s:json4s-jackson_2.11:3.2.11")
compileOnly("org.slf4j:slf4j-api:1.7.6")

compile("org.apache.hadoop:hadoop-client:${HadoopClusterConfiguration.HADOOP.defaultVersion()}")
compile("org.apache.spark:spark-sql_2.11:$project.ext.spark20Version")
implementation("org.apache.hadoop:hadoop-client:${HadoopClusterConfiguration.HADOOP.defaultVersion()}")
implementation("org.apache.spark:spark-sql_2.11:$project.ext.spark20Version")

compile project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath
compile project(":elasticsearch-storm").sourceSets.itest.runtimeClasspath
implementation(project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath)
implementation(project(":elasticsearch-storm").sourceSets.itest.runtimeClasspath)

kdcFixture project(':test:fixtures:minikdc')

Expand Down
26 changes: 16 additions & 10 deletions spark/sql-13/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ description = "Elasticsearch Spark (for Spark 1.3-1.6)"

evaluationDependsOn(':elasticsearch-hadoop-mr')

apply plugin: 'java-library'
apply plugin: 'scala'
apply plugin: 'es.hadoop.build'
apply plugin: 'scala.variants'
Expand Down Expand Up @@ -88,10 +89,9 @@ eclipse {

dependencies {
provided(project(":elasticsearch-hadoop-mr"))
provided(project(path: ":elasticsearch-hadoop-mr", configuration:"compile"))

compile("org.scala-lang:scala-library:${project.ext.scalaVersion}")
compile("org.scala-lang:scala-reflect:${project.ext.scalaVersion}")
api("org.scala-lang:scala-library:${project.ext.scalaVersion}")
api("org.scala-lang:scala-reflect:${project.ext.scalaVersion}")

provided("org.apache.spark:spark-core_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'javax.servlet'
Expand Down Expand Up @@ -122,17 +122,17 @@ dependencies {
exclude group: 'org.apache.hadoop'
}

testCompile project(":elasticsearch-hadoop-mr").sourceSets.test.runtimeClasspath
itestCompile project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath
testImplementation(project(":elasticsearch-hadoop-mr").sourceSets.test.runtimeClasspath)
itestImplementation(project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath)

testCompile("org.apache.spark:spark-core_${project.ext.scalaMajorVersion}:$sparkVersion") {
testImplementation("org.apache.spark:spark-core_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'javax.servlet'
exclude group: 'org.apache.hadoop'
}
itestCompile("org.apache.spark:spark-streaming_${project.ext.scalaMajorVersion}:$sparkVersion") {
itestImplementation("org.apache.spark:spark-streaming_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'org.apache.hadoop'
}
testCompile("org.apache.spark:spark-sql_${project.ext.scalaMajorVersion}:$sparkVersion") {
testImplementation("org.apache.spark:spark-sql_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'org.apache.hadoop'
}
}
Expand Down Expand Up @@ -171,8 +171,14 @@ configurations.all { Configuration conf ->
}
}
}

conf.exclude group: "org.mortbay.jetty"
}
configurations {
testImplementation {
exclude group: "org.mortbay.jetty"
}
itestImplementation {
exclude group: "org.mortbay.jetty"
}
}

tasks.withType(ScalaCompile) {
Expand Down
22 changes: 11 additions & 11 deletions spark/sql-20/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ description = "Elasticsearch Spark (for Spark 2.X)"

evaluationDependsOn(':elasticsearch-hadoop-mr')

apply plugin: 'java-library'
apply plugin: 'scala'
apply plugin: 'es.hadoop.build.integration'
apply plugin: 'scala.variants'
Expand Down Expand Up @@ -93,10 +94,9 @@ eclipse {

dependencies {
provided(project(":elasticsearch-hadoop-mr"))
provided(project(path: ":elasticsearch-hadoop-mr", configuration:"compile"))

compile("org.scala-lang:scala-library:$scalaVersion")
compile("org.scala-lang:scala-reflect:$scalaVersion")
api("org.scala-lang:scala-library:$scalaVersion")
api("org.scala-lang:scala-reflect:$scalaVersion")

provided("org.apache.spark:spark-core_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'javax.servlet'
Expand Down Expand Up @@ -130,23 +130,23 @@ dependencies {
exclude group: 'org.apache.hadoop'
}

testCompile "org.elasticsearch:securemock:1.2"
testCompile(project(":elasticsearch-hadoop-mr").sourceSets.test.output)
testImplementation("org.elasticsearch:securemock:1.2")
testImplementation(project(":elasticsearch-hadoop-mr").sourceSets.test.output)

itestCompile(project(":elasticsearch-hadoop-mr").sourceSets.itest.output)
itestImplementation(project(":elasticsearch-hadoop-mr").sourceSets.itest.output)

testCompile(project.ext.hadoopClient)
testCompile("org.apache.spark:spark-core_${project.ext.scalaMajorVersion}:$sparkVersion") {
testImplementation(project.ext.hadoopClient)
testImplementation("org.apache.spark:spark-core_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'javax.servlet'
exclude group: 'org.apache.hadoop'
}
itestCompile("org.apache.spark:spark-yarn_${project.ext.scalaMajorVersion}:$sparkVersion") {
itestImplementation("org.apache.spark:spark-yarn_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'org.apache.hadoop'
}
testCompile("org.apache.spark:spark-sql_${project.ext.scalaMajorVersion}:$sparkVersion") {
testImplementation("org.apache.spark:spark-sql_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'org.apache.hadoop'
}
itestCompile("org.apache.spark:spark-streaming_${project.ext.scalaMajorVersion}:$sparkVersion") {
itestImplementation("org.apache.spark:spark-streaming_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'org.apache.hadoop'
}
}
Expand Down
9 changes: 4 additions & 5 deletions storm/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,9 @@ evaluationDependsOn(':elasticsearch-hadoop-mr')

dependencies {
provided(project(":elasticsearch-hadoop-mr"))
provided(project(path: ":elasticsearch-hadoop-mr", configuration:"compile"))

testCompile project(":elasticsearch-hadoop-mr").sourceSets.test.runtimeClasspath
itestCompile project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath
testImplementation(project(":elasticsearch-hadoop-mr").sourceSets.test.runtimeClasspath)
itestImplementation(project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath)
}

jar {
Expand All @@ -35,8 +34,8 @@ dependencies {
exclude module: "log4j-slf4j-impl"
}

itestCompile "com.google.guava:guava:16.0.1"
itestRuntime "com.twitter:carbonite:1.4.0"
itestImplementation("com.google.guava:guava:16.0.1")
itestImplementation("com.twitter:carbonite:1.4.0")
}

// add itest to Eclipse
Expand Down
2 changes: 1 addition & 1 deletion test/fixtures/minikdc/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ repositories {
}

dependencies {
compile("org.apache.hadoop:hadoop-minikdc:${project.ext.minikdcVersion}") {
implementation("org.apache.hadoop:hadoop-minikdc:${project.ext.minikdcVersion}") {
// For some reason, the dependencies that are pulled in with MiniKDC have multiple resource files
// that cause issues when they are loaded. We exclude the ldap schema data jar to get around this.
exclude group: "org.apache.directory.api", module: "api-ldap-schema-data"
Expand Down