8 files changed, +54 −8 lines changed
File 1 of 8:

@@ -6,5 +6,5 @@
 # build and test Elasticsearch for this branch. Valid Java versions
 # are 'java' or 'openjdk' followed by the major release number.

-ESH_BUILD_JAVA=openjdk14
-ESH_RUNTIME_JAVA=java11
+ESH_BUILD_JAVA=openjdk17
+ESH_RUNTIME_JAVA=openjdk17
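
This moves CI to building and running on OpenJDK 17, where previously the build used OpenJDK 14 and tests ran on Java 11. As a sketch of how a Gradle build might consume such a properties file — only the two property names come from the diff; the file path, parsing, and toolchain wiring below are assumptions for illustration, not this project's actual build logic:

    // Hypothetical: resolve the CI build JDK from the properties above.
    def props = new Properties()
    file('.ci/java-versions.properties').withInputStream { props.load(it) } // path assumed

    def buildMajor = (props.getProperty('ESH_BUILD_JAVA') =~ /\d+/)[0] as int // "openjdk17" -> 17

    java {
        toolchain {
            languageVersion = JavaLanguageVersion.of(buildMajor)
        }
    }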
File 2 of 8:

@@ -58,6 +58,13 @@ dependencies {
     javadocSources(project(":elasticsearch-hadoop-mr"))
 }

+tasks.named("test").configure {
+    if (JavaVersion.current() >= JavaVersion.VERSION_16) {
+        jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
+        jvmArgs "--add-opens=java.base/java.net=ALL-UNNAMED" // Needed for org.apache.hadoop.hive.common.StringInternUtils
+    }
+}
+
 jar {
     dependsOn(project.configurations.embedded)
     from(project.configurations.embedded.collect { it.isDirectory() ? it : zipTree(it)}) {
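
Background on these flags: JDK 16 enabled strong encapsulation of JDK internals by default (JEP 396) and JDK 17 removed the --illegal-access escape hatch (JEP 403), so reflective access into java.base packages fails with InaccessibleObjectException unless the package is explicitly opened to the caller's module. A minimal Groovy script reproducing the failure mode these flags work around — the field used here is just an illustration, not the actual access path in IOUtils or Hive's StringInternUtils:

    // On JDK 16+ this throws InaccessibleObjectException unless the JVM
    // is started with --add-opens=java.base/java.io=ALL-UNNAMED
    import java.lang.reflect.Field

    Field lock = Reader.getDeclaredField('lock') // a protected field inside java.io
    lock.setAccessible(true)                     // the call strong encapsulation rejects
    println "java.io is open to this script: $lock"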
File 3 of 8:

@@ -47,6 +47,12 @@ dependencies {
     javadocSources(project(":elasticsearch-hadoop-mr"))
 }

+tasks.named("test").configure {
+    if (JavaVersion.current() >= JavaVersion.VERSION_16) {
+        jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
+    }
+}
+
 jar {
     dependsOn(project.configurations.embedded)
     from(project.configurations.embedded.collect { it.isDirectory() ? it : zipTree(it)}) {
File 4 of 8:

@@ -26,7 +26,6 @@ sparkVariants {
     project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
         configure(compileScala.scalaCompileOptions.forkOptions) {
             memoryMaximumSize = '1g'
-            jvmArgs = ['-XX:MaxPermSize=512m']
         }
         compileScala.scalaCompileOptions.additionalParameters = [
            "-feature",

@@ -144,8 +143,14 @@ configurations.matching{ it.name.contains('CompilerPlugin') == false }.all { Con
     conf.exclude group: "org.mortbay.jetty"
 }

-tasks.named("test").configure {
-    jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
+if (JavaVersion.current() >= JavaVersion.VERSION_17) {
+    tasks.withType(Test) { Test task ->
+        if (task.getName().startsWith("test"))
+            task.configure {
+                jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
+                jvmArgs "--add-opens=java.base/java.nio=ALL-UNNAMED" // Needed for org.apache.spark.SparkConf, which indirectly uses java.nio.DirectByteBuffer
+            }
+    }
 }

 // Set minimum compatibility and java home for compiler task
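
Compared with the tasks.named("test") blocks in the earlier files, two things changed shape here: the gate is JavaVersion.VERSION_17 rather than VERSION_16, and the flags are applied to every Test task whose name starts with "test" instead of only the single test task, which also catches the per-variant test tasks that the sparkVariants plugin appears to register. An equivalent formulation using Gradle's lazy task-configuration API would be (a sketch, not the project's code):

    if (JavaVersion.current() >= JavaVersion.VERSION_17) {
        tasks.withType(Test).matching { it.name.startsWith('test') }.configureEach {
            // Same flags as the diff, without eagerly realizing every Test task
            jvmArgs '--add-opens=java.base/java.io=ALL-UNNAMED'
            jvmArgs '--add-opens=java.base/java.nio=ALL-UNNAMED'
        }
    }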
File 5 of 8:

@@ -36,7 +36,6 @@ sparkVariants {
     project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
         configure(compileScala.scalaCompileOptions.forkOptions) {
             memoryMaximumSize = '1g'
-            jvmArgs = ['-XX:MaxPermSize=512m']
         }
         compileScala.scalaCompileOptions.additionalParameters = [
            "-feature",

@@ -176,6 +175,14 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
     task.options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').absolutePath
 }

+if (JavaVersion.current() >= JavaVersion.VERSION_16) {
+    tasks.withType(Test) { Test task ->
+        if (task.getName().startsWith("test"))
+            task.configure {
+                jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
+            }}
+}
+
 // Embed the embedded dependencies in the final jar after all configuration is complete
 sparkVariants {
     all { SparkVariantPlugin.SparkVariant variant ->
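
A note on the deletion that recurs across these Spark build files: -XX:MaxPermSize sized the permanent generation, which JEP 122 removed in JDK 8, so on any modern JVM the flag is obsolete (at best the VM warns and ignores it), and dropping it from the Scala compiler fork options is pure cleanup. If a cap on class-metadata memory were still wanted, the post-JDK-8 analogue would look like this (illustrative only; the diff adds no replacement):

    configure(compileScala.scalaCompileOptions.forkOptions) {
        memoryMaximumSize = '1g'
        // Metaspace replaced PermGen in JDK 8; MaxMetaspaceSize is the modern analogue.
        jvmArgs = ['-XX:MaxMetaspaceSize=512m']
    }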
File 6 of 8:

@@ -32,7 +32,6 @@ sparkVariants {
     project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
         configure(compileScala.scalaCompileOptions.forkOptions) {
             memoryMaximumSize = '1g'
-            jvmArgs = ['-XX:MaxPermSize=512m']
         }
         compileScala.scalaCompileOptions.additionalParameters = [
            "-feature",

@@ -196,6 +195,14 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
     task.options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').absolutePath
 }

+if (JavaVersion.current() >= JavaVersion.VERSION_16) {
+    tasks.withType(Test) { Test task ->
+        if (task.getName().startsWith("test"))
+            task.configure {
+                jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
+            }}
+}
+
 // Embed the embedded dependencies in the final jar after all configuration is complete
 sparkVariants {
     all { SparkVariantPlugin.SparkVariant variant ->
File 7 of 8:

@@ -30,7 +30,6 @@ sparkVariants {
     project.getTasks().getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
         configure(compileScala.scalaCompileOptions.forkOptions) {
             memoryMaximumSize = '1g'
-            jvmArgs = ['-XX:MaxPermSize=512m']
         }
         compileScala.scalaCompileOptions.additionalParameters = [
            "-feature",

@@ -179,6 +178,15 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
     task.options.forkOptions.executable = new File(project.ext.runtimeJavaHome, 'bin/java').absolutePath
 }

+if (JavaVersion.current() >= JavaVersion.VERSION_16) {
+    tasks.withType(Test) { Test task ->
+        if (task.getName().startsWith("test"))
+            task.configure {
+                jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
+                jvmArgs "--add-opens=java.base/java.nio=ALL-UNNAMED" // Needed for org.apache.spark.SparkConf, which indirectly uses java.nio.DirectByteBuffer
+            }}
+}
+
 // Embed the embedded dependencies in the final jar after all configuration is complete
 sparkVariants {
     all { SparkVariantPlugin.SparkVariant variant ->
File 8 of 8:

@@ -58,6 +58,12 @@ configurations {
     }
 }

+tasks.named("test").configure {
+    if (JavaVersion.current() >= JavaVersion.VERSION_16) {
+        jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
+    }
+}
+
 dependencies {
     api(project(':elasticsearch-hadoop-mr'))
     api(project.ext.hadoopClient) {
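
To sanity-check that the expected flags actually reach each forked test JVM across all eight modules, a throwaway task like the following can be dropped into a build script (a hypothetical debugging aid, not part of the diff):

    // Prints each Test task's effective --add-opens arguments, e.g. via:
    //   ./gradlew printTestJvmArgs
    tasks.register('printTestJvmArgs') {
        doLast {
            tasks.withType(Test).each { t ->
                println "${t.name}: ${t.allJvmArgs.findAll { it.startsWith('--add-opens') }}"
            }
        }
    }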