Skip to content

Commit d596cc8

Browse files
authored
Upgrade to Gradle 6.4 (#1471)
Update build scripts for Gradle 6.4
1 parent 796286b commit d596cc8

File tree

6 files changed

+55
-22
lines changed

6 files changed

+55
-22
lines changed

gradle/wrapper/gradle-wrapper.jar

-8 Bytes
Binary file not shown.
gradle/wrapper/gradle-wrapper.properties — Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
distributionBase=GRADLE_USER_HOME
22
distributionPath=wrapper/dists
3-
distributionUrl=https\://services.gradle.org/distributions/gradle-6.3-all.zip
3+
distributionUrl=https\://services.gradle.org/distributions/gradle-6.4-all.zip
44
zipStoreBase=GRADLE_USER_HOME
55
zipStorePath=wrapper/dists

gradlew.bat

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,9 @@ if "%DIRNAME%" == "" set DIRNAME=.
2929
set APP_BASE_NAME=%~n0
3030
set APP_HOME=%DIRNAME%
3131

32+
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
33+
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34+
3235
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
3336
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
3437

qa/kerberos/build.gradle

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717
* under the License.
1818
*/
1919

20-
2120
import org.elasticsearch.gradle.test.AntFixture
2221
import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask
2322
import org.elasticsearch.hadoop.gradle.fixture.hadoop.HadoopFixturePlugin
@@ -48,6 +47,21 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
4847

4948
boolean localRepo = project.getProperties().containsKey("localRepo")
5049

50+
// Gradle's java library plugin adds a variant to each project that offers the classes dir as an artifact that can be
51+
// used in other projects instead of requiring a jar operation to happen. Kerberos depends on the spark integration to
52+
// compile some code, but due to how the current spark compilation works it's likely that the classes directory in the
53+
// spark projects do not exist anymore when the kerberos compilation happens during a distribution build.
54+
// TODO: Clean this up when we get to variants, they should persist their classes dirs in a sane way
55+
configurations {
56+
compileClasspath {
57+
beforeLocking {
58+
attributes {
59+
attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, LibraryElements.JAR))
60+
}
61+
}
62+
}
63+
}
64+
5165
dependencies {
5266
implementation(project(":elasticsearch-hadoop-mr"))
5367
implementation(project(":elasticsearch-storm"))

spark/sql-13/build.gradle

Lines changed: 18 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ apply plugin: 'scala.variants'
1010

1111
variants {
1212
defaultVersion '2.11.12'
13-
targetVersions '2.11.12'
13+
targetVersions '2.10.7', '2.11.12'
1414
}
1515

1616
println "Compiled using Scala ${project.ext.scalaMajorVersion} [${project.ext.scalaVersion}]"
@@ -24,9 +24,11 @@ configurations {
2424
implementation {
2525
extendsFrom project.configurations.embedded
2626
}
27-
scalaCompilerPlugin {
28-
defaultDependencies { dependencies ->
29-
dependencies.add(project.dependencies.create( "com.typesafe.genjavadoc:genjavadoc-plugin_${scalaVersion}:0.13"))
27+
if (project.ext.scalaMajorVersion != '2.10') {
28+
scalaCompilerPlugin {
29+
defaultDependencies { dependencies ->
30+
dependencies.add(project.dependencies.create("com.typesafe.genjavadoc:genjavadoc-plugin_${scalaVersion}:0.13"))
31+
}
3032
}
3133
}
3234
testImplementation {
@@ -56,6 +58,7 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
5658
}
5759

5860
compileScala {
61+
options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').canonicalPath
5962
configure(scalaCompileOptions.forkOptions) {
6063
memoryMaximumSize = '1g'
6164
jvmArgs = ['-XX:MaxPermSize=512m']
@@ -146,6 +149,9 @@ dependencies {
146149
implementation("com.google.code.findbugs:jsr305:2.0.1")
147150
implementation("org.json4s:json4s-ast_2.10:3.2.10")
148151
implementation("com.esotericsoftware.kryo:kryo:2.21")
152+
compileOnly("org.apache.hadoop:hadoop-annotations:${project.ext.hadoopVersion}")
153+
compileOnly("org.codehaus.jackson:jackson-core-asl:${project.ext.jacksonVersion}")
154+
compileOnly("org.codehaus.jackson:jackson-mapper-asl:${project.ext.jacksonVersion}")
149155
}
150156

151157
testImplementation(project(":test:shared"))
@@ -190,11 +196,13 @@ scaladoc {
190196
title = "${rootProject.description} ${version} API"
191197
}
192198

193-
tasks.withType(ScalaCompile) {
194-
scalaCompileOptions.with {
195-
additionalParameters = [
196-
"-Xplugin:" + configurations.scalaCompilerPlugin.asPath,
197-
"-P:genjavadoc:out=$buildDir/generated/java".toString()
198-
]
199+
if (project.ext.scalaMajorVersion != '2.10') {
200+
tasks.withType(ScalaCompile) {
201+
scalaCompileOptions.with {
202+
additionalParameters = [
203+
"-Xplugin:" + configurations.scalaCompilerPlugin.asPath,
204+
"-P:genjavadoc:out=$buildDir/generated/java".toString()
205+
]
206+
}
199207
}
200208
}

spark/sql-20/build.gradle

Lines changed: 18 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ apply plugin: 'scala.variants'
1010

1111
variants {
1212
defaultVersion '2.11.12'
13-
targetVersions '2.11.12'
13+
targetVersions '2.10.7', '2.11.12'
1414
}
1515

1616
configurations {
@@ -21,9 +21,11 @@ configurations {
2121
implementation {
2222
extendsFrom project.configurations.embedded
2323
}
24-
scalaCompilerPlugin {
25-
defaultDependencies { dependencies ->
26-
dependencies.add(project.dependencies.create( "com.typesafe.genjavadoc:genjavadoc-plugin_${scalaVersion}:0.13"))
24+
if (project.ext.scalaMajorVersion != '2.10') {
25+
scalaCompilerPlugin {
26+
defaultDependencies { dependencies ->
27+
dependencies.add(project.dependencies.create( "com.typesafe.genjavadoc:genjavadoc-plugin_${scalaVersion}:0.13"))
28+
}
2729
}
2830
}
2931
}
@@ -42,6 +44,7 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
4244
}
4345

4446
compileScala {
47+
options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').canonicalPath
4548
configure(scalaCompileOptions.forkOptions) {
4649
memoryMaximumSize = '1g'
4750
jvmArgs = ['-XX:MaxPermSize=512m']
@@ -137,6 +140,9 @@ dependencies {
137140
implementation("com.google.code.findbugs:jsr305:2.0.1")
138141
implementation("org.json4s:json4s-ast_2.10:3.2.10")
139142
implementation("com.esotericsoftware.kryo:kryo:2.21")
143+
compileOnly("org.apache.hadoop:hadoop-annotations:${project.ext.hadoopVersion}")
144+
compileOnly("org.codehaus.jackson:jackson-core-asl:${project.ext.jacksonVersion}")
145+
compileOnly("org.codehaus.jackson:jackson-mapper-asl:${project.ext.jacksonVersion}")
140146
}
141147

142148
testImplementation(project(":test:shared"))
@@ -186,11 +192,13 @@ scaladoc {
186192
title = "${rootProject.description} ${version} API"
187193
}
188194

189-
tasks.withType(ScalaCompile) {
190-
scalaCompileOptions.with {
191-
additionalParameters = [
192-
"-Xplugin:" + configurations.scalaCompilerPlugin.asPath,
193-
"-P:genjavadoc:out=$buildDir/generated/java".toString()
194-
]
195+
if (project.ext.scalaMajorVersion != '2.10') {
196+
tasks.withType(ScalaCompile) {
197+
scalaCompileOptions.with {
198+
additionalParameters = [
199+
"-Xplugin:" + configurations.scalaCompilerPlugin.asPath,
200+
"-P:genjavadoc:out=$buildDir/generated/java".toString()
201+
]
202+
}
195203
}
196204
}

0 commit comments

Comments (0)