Skip to content

Commit e4af476

Browse files
authored
Deprecate Spark 2.x (#2305)
1 parent ee3d762 commit e4af476

File tree

6 files changed

+28
-5
lines changed

6 files changed

+28
-5
lines changed

docs/src/reference/asciidoc/appendix/breaking.adoc

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,17 @@ For clarity, we always list any breaking changes at the top of the
88
//NOTE: The notable-breaking-changes tagged regions are re-used in the
99
//Installation and Upgrade Guide
1010

11+
=== Deprecations in 8.18
12+
13+
The following functionality has been deprecated in {eh} 8.18 and will be removed
14+
in a future version. While this won’t have an immediate impact on your
15+
applications, we strongly encourage you to take the described steps to update your
16+
code after upgrading to 8.18.
17+
18+
==== Spark 2.x support is deprecated
19+
20+
Spark 2.x is no longer maintained. Spark 3 is still supported.
21+
1122
[[breaking-changes-8.9]]
1223
=== Breaking Changes in 8.9
1324

docs/src/reference/asciidoc/core/intro/requirements.adoc

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -81,6 +81,8 @@ Hive version {hv-v}
8181
[[requirements-spark]]
8282
=== Apache Spark
8383

84+
deprecated::[9.0,Support for Spark 2.x in {eh} is deprecated.]
85+
8486
Spark 2.0 or higher. We recommend using the latest release of Spark (currently {sp-v}). As {eh} provides native integration (which is recommended) with {sp}, it does not matter what binary one is using.
8587
The same applies when using the Hadoop layer to integrate the two as {eh} supports the majority of
8688
Hadoop distributions out there.

spark/core/src/itest/scala/org/elasticsearch/spark/integration/AbstractScalaEsSpark.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -854,10 +854,10 @@ class AbstractScalaEsScalaSpark(prefix: String, readMetadata: jl.Boolean) extend
854854
val target = resource(index, typename, version)
855855

856856
val rawCore = List( Map("colint" -> 1, "colstr" -> "s"),
857-
Map("colint" -> null, "colstr" -> null) )
857+
Map("colint" -> 9, "colstr" -> null) )
858858
sc.parallelize(rawCore, 1).saveToEs(target)
859859
val qjson =
860-
"""{"query":{"range":{"colint":{"from":null,"to":"9","include_lower":true,"include_upper":true}}}}"""
860+
"""{"query":{"range":{"colint":{"lte":"9"}}}}"""
861861

862862
val esRDD = EsSpark.esRDD(sc, target, qjson)
863863
val scRDD = sc.esRDD(target, qjson)

spark/sql-20/src/main/scala/org/elasticsearch/spark/sql/EsSparkSQL.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@ import scala.collection.JavaConverters.mapAsJavaMapConverter
3737
import scala.collection.JavaConverters.propertiesAsScalaMapConverter
3838
import scala.collection.Map
3939

40+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
4041
object EsSparkSQL {
4142

4243
private val init = { ObjectUtils.loadClass("org.elasticsearch.spark.rdd.CompatUtils", classOf[ObjectUtils].getClassLoader) }

spark/sql-20/src/main/scala/org/elasticsearch/spark/sql/api/java/JavaEsSparkSQL.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_QUERY
3131
import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_RESOURCE_READ
3232
import org.elasticsearch.spark.sql.EsSparkSQL
3333

34+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
3435
object JavaEsSparkSQL {
3536

3637
// specify the return types to make sure the bytecode is generated properly (w/o any scala.collections in it)

spark/sql-20/src/main/scala/org/elasticsearch/spark/sql/package.scala

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,10 @@ import scala.reflect.ClassTag
2929

3030
package object sql {
3131

32+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
3233
implicit def sqlContextFunctions(sc: SQLContext)= new SQLContextFunctions(sc)
3334

35+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
3436
class SQLContextFunctions(sc: SQLContext) extends Serializable {
3537
def esDF() = EsSparkSQL.esDF(sc)
3638
def esDF(resource: String) = EsSparkSQL.esDF(sc, resource)
@@ -42,16 +44,20 @@ package object sql {
4244

4345
// the sparkDatasetFunctions already takes care of this
4446
// but older clients might still import it hence why it's still here
47+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
4548
implicit def sparkDataFrameFunctions(df: DataFrame) = new SparkDataFrameFunctions(df)
4649

50+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
4751
class SparkDataFrameFunctions(df: DataFrame) extends Serializable {
4852
def saveToEs(resource: String): Unit = { EsSparkSQL.saveToEs(df, resource) }
4953
def saveToEs(resource: String, cfg: scala.collection.Map[String, String]): Unit = { EsSparkSQL.saveToEs(df, resource, cfg) }
5054
def saveToEs(cfg: scala.collection.Map[String, String]): Unit = { EsSparkSQL.saveToEs(df, cfg) }
5155
}
52-
56+
57+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
5358
implicit def sparkSessionFunctions(ss: SparkSession)= new SparkSessionFunctions(ss)
54-
59+
60+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
5561
class SparkSessionFunctions(ss: SparkSession) extends Serializable {
5662
def esDF() = EsSparkSQL.esDF(ss)
5763
def esDF(resource: String) = EsSparkSQL.esDF(ss, resource)
@@ -61,8 +67,10 @@ package object sql {
6167
def esDF(resource: String, query: String, cfg: scala.collection.Map[String, String]) = EsSparkSQL.esDF(ss, resource, query, cfg)
6268
}
6369

70+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
6471
implicit def sparkDatasetFunctions[T : ClassTag](ds: Dataset[T]) = new SparkDatasetFunctions(ds)
65-
72+
73+
@deprecated("Support for Apache Spark 2 is deprecated. Use Spark 3.")
6674
class SparkDatasetFunctions[T : ClassTag](ds: Dataset[T]) extends Serializable {
6775
def saveToEs(resource: String): Unit = { EsSparkSQL.saveToEs(ds, resource) }
6876
def saveToEs(resource: String, cfg: scala.collection.Map[String, String]): Unit = { EsSparkSQL.saveToEs(ds, resource, cfg) }

0 commit comments

Comments
 (0)