Skip to content

Commit 9308a01

Browse files
authored
Backport java 17 build support to 7.17 (#1950)
This commit adds support for building on Java 17, while leaving support for building on Java 11 untouched.
1 parent 5d27e61 commit 9308a01

File tree

8 files changed

+54
-8
lines changed

8 files changed

+54
-8
lines changed

.ci/java-versions.properties

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,5 +6,5 @@
66
# build and test Elasticsearch for this branch. Valid Java versions
77
# are 'java' or 'openjdk' followed by the major release number.
88

9-
ESH_BUILD_JAVA=openjdk14
10-
ESH_RUNTIME_JAVA=java11
9+
ESH_BUILD_JAVA=openjdk17
10+
ESH_RUNTIME_JAVA=openjdk17

hive/build.gradle

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,13 @@ dependencies {
5858
javadocSources(project(":elasticsearch-hadoop-mr"))
5959
}
6060

61+
tasks.named("test").configure {
62+
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
63+
jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
64+
jvmArgs "--add-opens=java.base/java.net=ALL-UNNAMED" // Needed for org.apache.hadoop.hive.common.StringInternUtils
65+
}
66+
}
67+
6168
jar {
6269
dependsOn(project.configurations.embedded)
6370
from(project.configurations.embedded.collect { it.isDirectory() ? it : zipTree(it)}) {

pig/build.gradle

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,12 @@ dependencies {
4747
javadocSources(project(":elasticsearch-hadoop-mr"))
4848
}
4949

50+
tasks.named("test").configure {
51+
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
52+
jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
53+
}
54+
}
55+
5056
jar {
5157
dependsOn(project.configurations.embedded)
5258
from(project.configurations.embedded.collect { it.isDirectory() ? it : zipTree(it)}) {

spark/core/build.gradle

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@ sparkVariants {
2626
project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
2727
configure(compileScala.scalaCompileOptions.forkOptions) {
2828
memoryMaximumSize = '1g'
29-
jvmArgs = ['-XX:MaxPermSize=512m']
3029
}
3130
compileScala.scalaCompileOptions.additionalParameters = [
3231
"-feature",
@@ -144,8 +143,14 @@ configurations.matching{ it.name.contains('CompilerPlugin') == false }.all { Con
144143
conf.exclude group: "org.mortbay.jetty"
145144
}
146145

147-
tasks.named("test").configure {
148-
jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
146+
if (JavaVersion.current() >= JavaVersion.VERSION_17) {
147+
tasks.withType(Test) { Test task ->
148+
if (task.getName().startsWith("test"))
149+
task.configure {
150+
jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
151+
jvmArgs "--add-opens=java.base/java.nio=ALL-UNNAMED" // Needed for org.apache.spark.SparkConf, which indirectly uses java.nio.DirectByteBuffer
152+
}
153+
}
149154
}
150155

151156
// Set minimum compatibility and java home for compiler task

spark/sql-13/build.gradle

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,6 @@ sparkVariants {
3636
project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
3737
configure(compileScala.scalaCompileOptions.forkOptions) {
3838
memoryMaximumSize = '1g'
39-
jvmArgs = ['-XX:MaxPermSize=512m']
4039
}
4140
compileScala.scalaCompileOptions.additionalParameters = [
4241
"-feature",
@@ -176,6 +175,14 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
176175
task.options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').absolutePath
177176
}
178177

178+
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
179+
tasks.withType(Test) { Test task ->
180+
if (task.getName().startsWith("test"))
181+
task.configure {
182+
jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
183+
}}
184+
}
185+
179186
// Embed the embedded dependencies in the final jar after all configuration is complete
180187
sparkVariants {
181188
all { SparkVariantPlugin.SparkVariant variant ->

spark/sql-20/build.gradle

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,6 @@ sparkVariants {
3232
project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
3333
configure(compileScala.scalaCompileOptions.forkOptions) {
3434
memoryMaximumSize = '1g'
35-
jvmArgs = ['-XX:MaxPermSize=512m']
3635
}
3736
compileScala.scalaCompileOptions.additionalParameters = [
3837
"-feature",
@@ -196,6 +195,14 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
196195
task.options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').absolutePath
197196
}
198197

198+
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
199+
tasks.withType(Test) { Test task ->
200+
if (task.getName().startsWith("test"))
201+
task.configure {
202+
jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
203+
}}
204+
}
205+
199206
// Embed the embedded dependencies in the final jar after all configuration is complete
200207
sparkVariants {
201208
all { SparkVariantPlugin.SparkVariant variant ->

spark/sql-30/build.gradle

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@ sparkVariants {
3030
project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
3131
configure(compileScala.scalaCompileOptions.forkOptions) {
3232
memoryMaximumSize = '1g'
33-
jvmArgs = ['-XX:MaxPermSize=512m']
3433
}
3534
compileScala.scalaCompileOptions.additionalParameters = [
3635
"-feature",
@@ -179,6 +178,15 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
179178
task.options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').absolutePath
180179
}
181180

181+
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
182+
tasks.withType(Test) { Test task ->
183+
if (task.getName().startsWith("test"))
184+
task.configure {
185+
jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
186+
jvmArgs "--add-opens=java.base/java.nio=ALL-UNNAMED" // Needed for org.apache.spark.SparkConf, which indirectly uses java.nio.DirectByteBuffer
187+
}}
188+
}
189+
182190
// Embed the embedded dependencies in the final jar after all configuration is complete
183191
sparkVariants {
184192
all { SparkVariantPlugin.SparkVariant variant ->

test/shared/build.gradle

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,12 @@ configurations {
5858
}
5959
}
6060

61+
tasks.named("test").configure {
62+
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
63+
jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
64+
}
65+
}
66+
6167
dependencies {
6268
api(project(':elasticsearch-hadoop-mr'))
6369
api(project.ext.hadoopClient) {

0 commit comments

Comments (0)