
Commit 11a7958

Wildcard queries now parse the * character correctly. Update tests. (#1458)
1 parent ba36c72 commit 11a7958
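
The deleted blocks below all follow the same pattern: when strict pushdown was enabled, the test asserted that a startsWith/endsWith filter matched nothing and returned early, working around the wildcard query's mishandling of the * character. With the parser fixed, the strict path is expected to behave like the non-strict one, so the guards are dropped and the existing assertions apply. A minimal sketch of the flow after this commit (esDataSource, strictPushDown and the assertion values are taken from the tests below; this is an illustration, not code from the commit):

    // Sketch of the updated test flow, assuming the suite's esDataSource helper
    // and JUnit's assertEquals.
    val df = esDataSource("pd_starts_with")
    val filter = df.filter(df("airport").startsWith("O"))

    // Previously, strict pushdown short-circuited here:
    //   if (strictPushDown) { assertEquals(0, filter.count()); return }
    // Now the pushed-down wildcard query ("O*") is built correctly, so the test
    // falls through to the shared assertions, e.g. (from the !keepHandledFilters branch):
    assertEquals(2, filter.count())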

File tree:

  spark/sql-13/src/itest/scala/org/elasticsearch/spark/integration/AbstractScalaEsSparkSQL.scala
  spark/sql-20/src/itest/scala/org/elasticsearch/spark/integration/AbstractScalaEsSparkSQL.scala

2 files changed: 0 additions, 30 deletions

spark/sql-13/src/itest/scala/org/elasticsearch/spark/integration/AbstractScalaEsSparkSQL.scala

Lines changed: 0 additions & 15 deletions
@@ -997,11 +997,6 @@ class AbstractScalaEsScalaSparkSQL(prefix: String, readMetadata: jl.Boolean, pus
     val df = esDataSource("pd_starts_with")
     var filter = df.filter(df("airport").startsWith("O"))
 
-    if (strictPushDown) {
-      assertEquals(0, filter.count())
-      return
-    }
-
     if (!keepHandledFilters) {
       // term query pick field with multi values
       assertEquals(2, filter.count())
@@ -1018,11 +1013,6 @@ class AbstractScalaEsScalaSparkSQL(prefix: String, readMetadata: jl.Boolean, pus
     val df = esDataSource("pd_ends_with")
     var filter = df.filter(df("airport").endsWith("O"))
 
-    if (strictPushDown) {
-      assertEquals(0, filter.count())
-      return
-    }
-
     if (!keepHandledFilters) {
       // term query pick field with multi values
       assertEquals(2, filter.count())
@@ -1047,11 +1037,6 @@ class AbstractScalaEsScalaSparkSQL(prefix: String, readMetadata: jl.Boolean, pus
     val df = esDataSource("pd_and")
     var filter = df.filter(df("reason").isNotNull.and(df("airport").endsWith("O")))
 
-    if (strictPushDown) {
-      assertEquals(0, filter.count())
-      return
-    }
-
     assertEquals(1, filter.count())
     assertEquals("jan", filter.select("tag").take(1)(0)(0))
   }

spark/sql-20/src/itest/scala/org/elasticsearch/spark/integration/AbstractScalaEsSparkSQL.scala

Lines changed: 0 additions & 15 deletions
@@ -1052,11 +1052,6 @@ class AbstractScalaEsScalaSparkSQL(prefix: String, readMetadata: jl.Boolean, pus
     val df = esDataSource("pd_starts_with")
     var filter = df.filter(df("airport").startsWith("O"))
 
-    if (strictPushDown) {
-      assertEquals(0, filter.count())
-      return
-    }
-
     if (!keepHandledFilters) {
       // term query pick field with multi values
       assertEquals(2, filter.count())
@@ -1073,11 +1068,6 @@ class AbstractScalaEsScalaSparkSQL(prefix: String, readMetadata: jl.Boolean, pus
     val df = esDataSource("pd_ends_with")
     var filter = df.filter(df("airport").endsWith("O"))
 
-    if (strictPushDown) {
-      assertEquals(0, filter.count())
-      return
-    }
-
     if (!keepHandledFilters) {
       // term query pick field with multi values
       assertEquals(2, filter.count())
@@ -1102,11 +1092,6 @@ class AbstractScalaEsScalaSparkSQL(prefix: String, readMetadata: jl.Boolean, pus
     val df = esDataSource("pd_and")
     var filter = df.filter(df("reason").isNotNull.and(df("airport").endsWith("O")))
 
-    if (strictPushDown) {
-      assertEquals(0, filter.count())
-      return
-    }
-
     assertEquals(1, filter.count())
     assertEquals("jan", filter.select("tag").take(1)(0)(0))
   }
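
For readers who want to reproduce the scenario outside the test suite, the following self-contained sketch shows the kind of strict-pushdown read the tests exercise. It is not part of the commit; the option names ("pushdown", "strict"), the es.nodes address, and the "pd_starts_with" index are assumptions based on the connector's documented Spark SQL options and the test data above, so adjust them for your setup.

    import org.apache.spark.sql.SparkSession

    object StrictPushdownWildcard {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("strict-pushdown-wildcard")
          .config("es.nodes", "localhost:9200")   // assumed local cluster
          .getOrCreate()

        val df = spark.read
          .format("org.elasticsearch.spark.sql")
          .option("pushdown", "true")   // translate Spark filters into ES queries
          .option("strict", "true")     // only push down exact translations
          .load("pd_starts_with")       // index name borrowed from the tests above

        // startsWith is pushed down as a wildcard query, roughly
        // {"wildcard": {"airport": "O*"}}; with the * character now parsed
        // correctly, this returns the matching documents instead of none.
        val filtered = df.filter(df("airport").startsWith("O"))
        println(filtered.count())

        spark.stop()
      }
    }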
