File tree (expand/collapse): 7 files changed, +49 −6 lines changed.
Original file line number · Diff line number · Diff line change @@ -58,6 +58,12 @@ dependencies {
58
58
javadocSources(project(" :elasticsearch-hadoop-mr" ))
59
59
}
60
60
61
// Configure the unit-test JVM for Java 16+ strong encapsulation (JEP 396):
// the --add-opens flags below are not recognized by older JVMs, so the task
// is skipped entirely on pre-16 runtimes via onlyIf.
tasks.named("test").configure {
    onlyIf { JavaVersion.current() >= JavaVersion.VERSION_16 }
    jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED"  // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
    jvmArgs "--add-opens=java.base/java.net=ALL-UNNAMED" // Needed for org.apache.hadoop.hive.common.StringInternUtils
}
61
67
jar {
62
68
dependsOn(project. configurations. embedded)
63
69
from(project. configurations. embedded. collect { it. isDirectory() ? it : zipTree(it)}) {
Original file line number Diff line number Diff line change @@ -47,6 +47,11 @@ dependencies {
47
47
javadocSources(project(" :elasticsearch-hadoop-mr" ))
48
48
}
49
49
50
// Open java.base/java.io to the test JVM on Java 16+ (strong encapsulation);
// skip the task on older JVMs, which reject the --add-opens option.
tasks.named("test").configure {
    onlyIf { JavaVersion.current() >= JavaVersion.VERSION_16 }
    jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
}
50
55
jar {
51
56
dependsOn(project. configurations. embedded)
52
57
from(project. configurations. embedded. collect { it. isDirectory() ? it : zipTree(it)}) {
Original file line number Diff line number Diff line change @@ -26,7 +26,6 @@ sparkVariants {
26
26
project. getTasks(). getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
27
27
configure(compileScala. scalaCompileOptions. forkOptions) {
28
28
memoryMaximumSize = ' 1g'
29
- jvmArgs = [' -XX:MaxPermSize=512m' ]
30
29
}
31
30
compileScala. scalaCompileOptions. additionalParameters = [
32
31
" -feature" ,
@@ -143,8 +142,14 @@ configurations.matching{ it.name.contains('CompilerPlugin') == false }.all { Con
143
142
conf. exclude group : " org.mortbay.jetty"
144
143
}
145
144
146
// On modern JVMs the test tasks need reflective access into java.base.
// Applied to every Test task whose name starts with "test" rather than the
// single "test" task, so variant-specific test tasks are covered too.
// NOTE(review): this file gates on VERSION_17 while sibling build files gate
// on VERSION_16 — confirm that difference is intentional.
if (JavaVersion.current() >= JavaVersion.VERSION_17) {
    tasks.withType(Test) { Test task ->
        if (task.getName().startsWith("test")) {
            task.configure {
                jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED"  // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
                jvmArgs "--add-opens=java.base/java.nio=ALL-UNNAMED" // Needed for org.apache.spark.SparkConf, which indirectly uses java.nio.DirectByteBuffer
            }
        }
    }
}
149
154
150
155
// Set minimum compatibility and java home for compiler task
Original file line number Diff line number Diff line change @@ -36,7 +36,6 @@ sparkVariants {
36
36
project. getTasks(). getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
37
37
configure(compileScala. scalaCompileOptions. forkOptions) {
38
38
memoryMaximumSize = ' 1g'
39
- jvmArgs = [' -XX:MaxPermSize=512m' ]
40
39
}
41
40
compileScala. scalaCompileOptions. additionalParameters = [
42
41
" -feature" ,
@@ -174,6 +173,14 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
174
173
task. options. forkOptions. executable = new File (project. ext. runtimeJavaHome, ' bin/java' ). absolutePath
175
174
}
176
175
176
// Java 16+ strong encapsulation: grant the test JVMs reflective access to
// java.io. Matches every Test task named "test*" (variant test tasks included).
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
    tasks.withType(Test) { Test task ->
        if (task.getName().startsWith("test")) {
            task.configure {
                jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
            }
        }
    }
}
177
184
// Embed the embedded dependencies in the final jar after all configuration is complete
178
185
sparkVariants {
179
186
all { SparkVariantPlugin.SparkVariant variant ->
Original file line number Diff line number Diff line change @@ -32,7 +32,6 @@ sparkVariants {
32
32
project. getTasks(). getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
33
33
configure(compileScala. scalaCompileOptions. forkOptions) {
34
34
memoryMaximumSize = ' 1g'
35
- jvmArgs = [' -XX:MaxPermSize=512m' ]
36
35
}
37
36
compileScala. scalaCompileOptions. additionalParameters = [
38
37
" -feature" ,
@@ -194,6 +193,14 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
194
193
task. options. forkOptions. executable = new File (project. ext. runtimeJavaHome, ' bin/java' ). absolutePath
195
194
}
196
195
196
// Java 16+ strong encapsulation: grant the test JVMs reflective access to
// java.io. Matches every Test task named "test*" (variant test tasks included).
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
    tasks.withType(Test) { Test task ->
        if (task.getName().startsWith("test")) {
            task.configure {
                jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
            }
        }
    }
}
197
204
// Embed the embedded dependencies in the final jar after all configuration is complete
198
205
sparkVariants {
199
206
all { SparkVariantPlugin.SparkVariant variant ->
Original file line number Diff line number Diff line change @@ -30,7 +30,6 @@ sparkVariants {
30
30
project. getTasks(). getByName(scalaCompileTaskName) { ScalaCompile compileScala ->
31
31
configure(compileScala. scalaCompileOptions. forkOptions) {
32
32
memoryMaximumSize = ' 1g'
33
- jvmArgs = [' -XX:MaxPermSize=512m' ]
34
33
}
35
34
compileScala. scalaCompileOptions. additionalParameters = [
36
35
" -feature" ,
@@ -177,6 +176,15 @@ tasks.withType(ScalaCompile) { ScalaCompile task ->
177
176
task. options. forkOptions. executable = new File (project. ext. runtimeJavaHome, ' bin/java' ). absolutePath
178
177
}
179
178
179
// Java 16+ strong encapsulation: the test JVMs need reflective access into
// java.io and java.nio. Applied to every Test task named "test*" so
// variant-specific test tasks are covered as well.
if (JavaVersion.current() >= JavaVersion.VERSION_16) {
    tasks.withType(Test) { Test task ->
        if (task.getName().startsWith("test")) {
            task.configure {
                jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED"  // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
                jvmArgs "--add-opens=java.base/java.nio=ALL-UNNAMED" // Needed for org.apache.spark.SparkConf, which indirectly uses java.nio.DirectByteBuffer
            }
        }
    }
}
180
188
// Embed the embedded dependencies in the final jar after all configuration is complete
181
189
sparkVariants {
182
190
all { SparkVariantPlugin.SparkVariant variant ->
Original file line number Diff line number Diff line change @@ -58,6 +58,11 @@ configurations {
58
58
}
59
59
}
60
60
61
// Open java.base/java.io to the test JVM on Java 16+; pre-16 JVMs reject
// the --add-opens option, so the task is skipped there via onlyIf.
tasks.named("test").configure {
    onlyIf { JavaVersion.current() >= JavaVersion.VERSION_16 }
    jvmArgs "--add-opens=java.base/java.io=ALL-UNNAMED" // Needed for IOUtils's BYTE_ARRAY_BUFFER reflection
}
61
66
dependencies {
62
67
api(project(' :elasticsearch-hadoop-mr' ))
63
68
api(project. ext. hadoopClient) {
You can’t perform that action at this time.
0 commit comments