Commit 967f11e

Enhancement: Fix compatibility issues with Spark 3.2.
1 parent 846f1be · commit 967f11e

3 files changed: +68 -147 lines

spark-connector-oceanbase/spark-connector-oceanbase-3.2/pom.xml

Lines changed: 61 additions & 0 deletions
@@ -54,7 +54,68 @@ under the License.
     </dependencies>
 
     <build>
+        <pluginManagement>
+            <plugins>
+                <plugin>
+                    <groupId>net.alchim31.maven</groupId>
+                    <artifactId>scala-maven-plugin</artifactId>
+                    <version>${scala-maven-plugin.version}</version>
+                    <configuration>
+                        <args>
+                            <arg>-nobootcp</arg>
+                            <arg>-target:jvm-${target.java.version}</arg>
+                        </args>
+                        <checkMultipleScalaVersions>false</checkMultipleScalaVersions>
+                    </configuration>
+                </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-compiler-plugin</artifactId>
+                </plugin>
+            </plugins>
+        </pluginManagement>
         <plugins>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>${scala-maven-plugin.version}</version>
+                <executions>
+                    <!-- Run scala compiler in the process-resources phase, so that dependencies on
+                        scala classes can be resolved later in the (Java) compile phase -->
+                    <execution>
+                        <id>scala-compile-first</id>
+                        <goals>
+                            <goal>add-source</goal>
+                            <goal>compile</goal>
+                        </goals>
+                        <phase>process-resources</phase>
+                    </execution>
+
+                    <!-- Run scala compiler in the process-test-resources phase, so that dependencies on
+                        scala classes can be resolved later in the (Java) test-compile phase -->
+                    <execution>
+                        <id>scala-test-compile</id>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                        <phase>process-test-resources</phase>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>com.diffplug.spotless</groupId>
+                <artifactId>spotless-maven-plugin</artifactId>
+                <version>${spotless.version}</version>
+                <configuration>
+                    <scala>
+                        <scalafmt>
+                            <version>3.4.3</version>
+                            <!-- This file is in the root of the project to make sure IntelliJ picks it up automatically -->
+                            <file>${project.basedir}/../../.scalafmt.conf</file>
+                        </scalafmt>
+                    </scala>
+                </configuration>
+            </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-shade-plugin</artifactId>
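The scala-compile-first execution above exists because this is a mixed Scala/Java module: scala-maven-plugin runs in process-resources so that Scala class files already exist when maven-compiler-plugin invokes javac in the later compile phase. A minimal sketch of the dependency this ordering resolves; the classes here are hypothetical illustrations, not connector code:

// Dialect.scala -- picked up via the add-source goal and compiled during
// process-resources, ahead of any Java compilation in this module.
package com.example

class Dialect(val name: String) {
  def quote(identifier: String): String = s"`$identifier`"
}

// A Java source in the same module can then resolve the class when javac
// runs in the compile phase, e.g.:
//   Dialect d = new Dialect("oceanbase");
//   String quoted = d.quote("user_id");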

spark-connector-oceanbase/spark-connector-oceanbase-3.2/src/main/scala/com/oceanbase/spark/catalog/OceanBaseTable.scala

Lines changed: 0 additions & 127 deletions
This file was deleted.

spark-connector-oceanbase/spark-connector-oceanbase-3.2/src/main/scala/com/oceanbase/spark/reader/v2/OBJdbcScanBuilder.scala

Lines changed: 7 additions & 20 deletions
@@ -24,18 +24,7 @@ import org.apache.spark.sql.ExprUtils.compileFilter
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.connector.expressions.{NamedReference, SortOrder}
 import org.apache.spark.sql.connector.expressions.aggregate.Aggregation
-import org.apache.spark.sql.connector.read.{
-  Batch,
-  InputPartition,
-  PartitionReader,
-  PartitionReaderFactory,
-  Scan,
-  ScanBuilder,
-  SupportsPushDownAggregates,
-  SupportsPushDownFilters,
-  SupportsPushDownRequiredColumns,
-  SupportsRuntimeFiltering
-}
+import org.apache.spark.sql.connector.read.{Batch, InputPartition, PartitionReader, PartitionReaderFactory, Scan, ScanBuilder, SupportsPushDownAggregates, SupportsPushDownFilters, SupportsPushDownRequiredColumns, SupportsRuntimeFiltering}
 import org.apache.spark.sql.sources.Filter
 import org.apache.spark.sql.types.StructType
 

@@ -44,18 +33,16 @@ case class OBJdbcScanBuilder(
     config: OceanBaseConfig,
     dialect: OceanBaseDialect
 ) extends ScanBuilder
-  with SupportsPushDownFilters
-  with SupportsPushDownRequiredColumns
-  with SupportsPushDownAggregates
-  with Logging {
+    with SupportsPushDownFilters
+    with SupportsPushDownRequiredColumns
+    with SupportsPushDownAggregates
+    with Logging {
   private var finalSchema = schema
   private var pushedFilter = Array.empty[Filter]
   private var pushDownLimit = 0
   private var sortOrders: Array[SortOrder] = Array.empty[SortOrder]
 
-  /** TODO: support
-   * org.apache.spark.sql.connector.read.SupportsPushDownV2Filters
-   */
+  /** TODO: support org.apache.spark.sql.connector.read.SupportsPushDownV2Filters */
   override def pushFilters(filters: Array[Filter]): Array[Filter] = {
     val (pushed, unSupported) =
       filters.partition(f => compileFilter(f, dialect).isDefined)

@@ -96,7 +83,7 @@ case class OBJdbcBatchScan(
     pushDownTopNSortOrders: Array[SortOrder],
     dialect: OceanBaseDialect
 ) extends Scan
-  with SupportsRuntimeFiltering {
+    with SupportsRuntimeFiltering {
 
   // TODO: support spark runtime filter feat.
   private var runtimeFilters: Array[Filter] = Array.empty
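For context on the pushFilters override shown above: under Spark's DataSource V2 read API, SupportsPushDownFilters hands the source every candidate filter once; the method returns the subset the source cannot evaluate (Spark re-applies those after the scan), while pushedFilters() reports what was accepted, e.g. for EXPLAIN output. A minimal sketch of that contract, with MyScanBuilder and canCompile as hypothetical stand-ins for the connector's compileFilter(f, dialect) check:

import org.apache.spark.sql.connector.read.{Scan, ScanBuilder, SupportsPushDownFilters}
import org.apache.spark.sql.sources.{Filter, GreaterThan}

class MyScanBuilder extends ScanBuilder with SupportsPushDownFilters {
  private var pushed: Array[Filter] = Array.empty

  // Spark calls this once with all candidate filters; the return value is
  // the subset Spark must still evaluate itself after the scan.
  override def pushFilters(filters: Array[Filter]): Array[Filter] = {
    val (supported, unsupported) = filters.partition(canCompile)
    pushed = supported
    unsupported
  }

  // Reports the accepted filters back to Spark (surfaced in query plans).
  override def pushedFilters(): Array[Filter] = pushed

  override def build(): Scan = ??? // build the Scan from the pushed-down state

  // Stand-in for compileFilter(f, dialect).isDefined in the real builder.
  private def canCompile(f: Filter): Boolean = f.isInstanceOf[GreaterThan]
}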
