Skip to content

Commit e6bdb53

Browse files
resolve comments
1 parent ff6d2e2 commit e6bdb53

File tree

6 files changed

+21
-18
lines changed

6 files changed

+21
-18
lines changed

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import org.apache.logging.log4j.Level
2222
import org.apache.spark.rdd.RDD
2323
import org.apache.spark.sql.{AnalysisException, DataFrame}
2424
import org.apache.spark.sql.catalyst.analysis.{IndexAlreadyExistsException, NoSuchIndexException}
25+
import org.apache.spark.sql.connector.DataSourcePushdownTestUtils
2526
import org.apache.spark.sql.connector.catalog.{Catalogs, Identifier, TableCatalog}
2627
import org.apache.spark.sql.connector.catalog.index.SupportsIndex
2728
import org.apache.spark.sql.connector.expressions.NullOrdering
@@ -33,7 +34,7 @@ import org.apache.spark.tags.DockerTest
3334

3435
@DockerTest
3536
private[v2] trait V2JDBCTest
36-
extends V2JDBCPushdownTestUtils
37+
extends DataSourcePushdownTestUtils
3738
with DockerIntegrationFunSuite
3839
with SharedSparkSession {
3940
import testImplicits._

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/join/OracleJoinPushdownIntegrationSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ import java.util.Locale
2323
import org.apache.spark.SparkConf
2424
import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
2525
import org.apache.spark.sql.jdbc.{DockerJDBCIntegrationSuite, JdbcDialect, OracleDatabaseOnDocker, OracleDialect}
26-
import org.apache.spark.sql.jdbc.v2.JDBCJoinPushdownIntegrationSuite
26+
import org.apache.spark.sql.jdbc.v2.JDBCV2JoinPushdownIntegrationSuiteBase
2727
import org.apache.spark.tags.DockerTest
2828

2929
/**
@@ -56,10 +56,10 @@ import org.apache.spark.tags.DockerTest
5656
@DockerTest
5757
class OracleJoinPushdownIntegrationSuite
5858
extends DockerJDBCIntegrationSuite
59-
with JDBCJoinPushdownIntegrationSuite {
59+
with JDBCV2JoinPushdownIntegrationSuiteBase {
6060
override val catalogName: String = "oracle"
6161

62-
override def namespaceOpt: Option[String] = Some("SYSTEM")
62+
override val namespaceOpt: Option[String] = Some("SYSTEM")
6363

6464
override val db = new OracleDatabaseOnDocker
6565

sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcSQLQueryBuilder.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -208,7 +208,7 @@ class JdbcSQLQueryBuilder(dialect: JdbcDialect, options: JDBCOptions) {
208208

209209
// If join has been pushed down, reuse join query as a subquery. Otherwise, fall back to
210210
// what is provided in options.
211-
def tableOrQuery: String = joinQuery.getOrElse(options.tableOrQuery)
211+
protected final def tableOrQuery: String = joinQuery.getOrElse(options.tableOrQuery)
212212

213213
/**
214214
* Build the final SQL query that follows the dialect's SQL syntax.

sql/core/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCPushdownTestUtils.scala renamed to sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourcePushdownTestUtils.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,15 +15,15 @@
1515
* limitations under the License.
1616
*/
1717

18-
package org.apache.spark.sql.jdbc.v2
18+
package org.apache.spark.sql.connector
1919

2020
import org.apache.spark.sql.{DataFrame, ExplainSuiteHelper}
21-
import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, Filter, GlobalLimit, Join, LocalLimit, Offset, Sample, Sort}
21+
import org.apache.spark.sql.catalyst.plans.logical._
2222
import org.apache.spark.sql.connector.expressions.aggregate.GeneralAggregateFunc
2323
import org.apache.spark.sql.execution.datasources.v2.{DataSourceV2ScanRelation, V1ScanWrapper}
2424
import org.apache.spark.sql.internal.SQLConf
2525

26-
trait V2JDBCPushdownTestUtils extends ExplainSuiteHelper {
26+
trait DataSourcePushdownTestUtils extends ExplainSuiteHelper {
2727
protected def checkSamplePushed(df: DataFrame, pushed: Boolean = true): Unit = {
2828
val sample = df.queryExecution.optimizedPlan.collect {
2929
case s: Sample => s

sql/core/src/test/scala/org/apache/spark/sql/jdbc/v2/JDBCJoinPushdownIntegrationSuite.scala renamed to sql/core/src/test/scala/org/apache/spark/sql/jdbc/v2/JDBCV2JoinPushdownIntegrationSuiteBase.scala

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,18 +21,19 @@ import java.sql.{Connection, DriverManager}
2121
import java.util.Properties
2222

2323
import org.apache.spark.sql.QueryTest
24+
import org.apache.spark.sql.connector.DataSourcePushdownTestUtils
2425
import org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils
2526
import org.apache.spark.sql.internal.SQLConf
2627
import org.apache.spark.sql.jdbc.JdbcDialect
2728
import org.apache.spark.sql.test.SharedSparkSession
2829
import org.apache.spark.sql.types.{DataType, DataTypes}
2930

30-
trait JDBCJoinPushdownIntegrationSuite
31+
trait JDBCV2JoinPushdownIntegrationSuiteBase
3132
extends QueryTest
3233
with SharedSparkSession
33-
with V2JDBCPushdownTestUtils {
34+
with DataSourcePushdownTestUtils {
3435
val catalogName: String
35-
def namespaceOpt: Option[String] = None
36+
val namespaceOpt: Option[String] = None
3637
val url: String
3738

3839
val joinTableName1: String = "join_table_1"
@@ -46,9 +47,9 @@ trait JDBCJoinPushdownIntegrationSuite
4647
def qualifyTableName(tableName: String): String = namespaceOpt
4748
.map(namespace => s"$namespace.$tableName").getOrElse(tableName)
4849

49-
private val fullyQualifiedTableName1: String = qualifyTableName(joinTableName1)
50+
private lazy val fullyQualifiedTableName1: String = qualifyTableName(joinTableName1)
5051

51-
private val fullyQualifiedTableName2: String = qualifyTableName(joinTableName2)
52+
private lazy val fullyQualifiedTableName2: String = qualifyTableName(joinTableName2)
5253

5354
protected def getJDBCTypeString(dt: DataType): String = {
5455
JdbcUtils.getJdbcType(dt, jdbcDialect).databaseTypeDefinition.toUpperCase()

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2JoinPushdownSuite.scala renamed to sql/core/src/test/scala/org/apache/spark/sql/jdbc/v2/JDBCV2JoinPushdownSuite.scala

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -15,28 +15,29 @@
1515
* limitations under the License.
1616
*/
1717

18-
package org.apache.spark.sql.jdbc
18+
package org.apache.spark.sql.jdbc.v2
1919

2020
import java.sql.Connection
2121

2222
import org.apache.spark.SparkConf
2323
import org.apache.spark.sql.{ExplainSuiteHelper, QueryTest}
24+
import org.apache.spark.sql.connector.DataSourcePushdownTestUtils
2425
import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
25-
import org.apache.spark.sql.jdbc.v2.{JDBCJoinPushdownIntegrationSuite, V2JDBCPushdownTestUtils}
26+
import org.apache.spark.sql.jdbc.{H2Dialect, JdbcDialect}
2627
import org.apache.spark.sql.test.SharedSparkSession
2728
import org.apache.spark.util.Utils
2829

2930
class JDBCV2JoinPushdownSuite
3031
extends QueryTest
3132
with SharedSparkSession
3233
with ExplainSuiteHelper
33-
with V2JDBCPushdownTestUtils
34-
with JDBCJoinPushdownIntegrationSuite {
34+
with DataSourcePushdownTestUtils
35+
with JDBCV2JoinPushdownIntegrationSuiteBase {
3536
val tempDir = Utils.createTempDir()
3637
override val url = s"jdbc:h2:${tempDir.getCanonicalPath};user=testUser;password=testPass"
3738

3839
override val catalogName: String = "h2"
39-
override def namespaceOpt: Option[String] = Some("test")
40+
override val namespaceOpt: Option[String] = Some("test")
4041

4142
override val jdbcDialect: JdbcDialect = H2Dialect()
4243

0 commit comments

Comments
 (0)