diff --git a/pom.xml b/pom.xml
index b2392365..92111141 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,6 @@
1.5.5
4.0.0
- com.google.cloud
pubsublite-spark-sql-streaming
0.3.5-SNAPSHOT
jar
@@ -19,7 +18,8 @@
UTF-8
2.12.15
2.12
- 3.2.1
+ 3.1.2
+ 3.2.2
@@ -37,13 +37,23 @@
org.apache.hadoop
hadoop-client
- 3.2.1
+ ${hadoop.version}
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
org.apache.curator
curator-client
2.13.0
+
+ org.apache.commons
+ commons-compress
+ 1.19
+
org.apache.yetus
audience-annotations
@@ -59,6 +69,26 @@
jetty-server
9.4.43.v20210629
+
+ jakarta.xml.bind
+ jakarta.xml.bind-api
+ 2.3.3
+
+
+ jakarta.activation
+ jakarta.activation-api
+ 1.2.2
+
+
+ commons-io
+ commons-io
+ 2.5
+
+
+ commons-net
+ commons-net
+ 3.6
+
@@ -287,6 +317,8 @@
org.apache.hadoop.yarn.*
javax.ws.rs.*
javax.annotation.*
+ javax.activation.*
+ javax.xml.bind.*
true
diff --git a/renovate.json b/renovate.json
index 39a3611f..cc6f931a 100644
--- a/renovate.json
+++ b/renovate.json
@@ -67,6 +67,16 @@
"^com.fasterxml.jackson.core"
],
"groupName": "jackson dependencies"
+ },
+ {
+ "packagePatterns": [
+ "^spark.version",
+ "^org.apache.spark",
+ "^scala.version",
+ "^org.scala-lang"
+ ],
+ "enabled": false,
+ "groupName": "spark and scala pinned dependencies"
}
],
"semanticCommits": true,
diff --git a/src/main/java/com/google/cloud/pubsublite/spark/PslWrite.java b/src/main/java/com/google/cloud/pubsublite/spark/PslWrite.java
index c3be58fd..a3f131b7 100644
--- a/src/main/java/com/google/cloud/pubsublite/spark/PslWrite.java
+++ b/src/main/java/com/google/cloud/pubsublite/spark/PslWrite.java
@@ -20,14 +20,13 @@
import org.apache.spark.sql.connector.write.BatchWrite;
import org.apache.spark.sql.connector.write.DataWriterFactory;
import org.apache.spark.sql.connector.write.PhysicalWriteInfo;
-import org.apache.spark.sql.connector.write.Write;
import org.apache.spark.sql.connector.write.WriteBuilder;
import org.apache.spark.sql.connector.write.WriterCommitMessage;
import org.apache.spark.sql.connector.write.streaming.StreamingDataWriterFactory;
import org.apache.spark.sql.connector.write.streaming.StreamingWrite;
import org.apache.spark.sql.types.StructType;
-public class PslWrite implements Write, WriteBuilder, BatchWrite, StreamingWrite {
+public class PslWrite implements WriteBuilder, BatchWrite, StreamingWrite {
private static final GoogleLogger log = GoogleLogger.forEnclosingClass();
private final StructType inputSchema;
@@ -86,17 +85,12 @@ public StreamingDataWriterFactory createStreamingWriterFactory(PhysicalWriteInfo
}
@Override
- public BatchWrite toBatch() {
+ public BatchWrite buildForBatch() {
return this;
}
@Override
- public StreamingWrite toStreaming() {
- return this;
- }
-
- @Override
- public Write build() {
+ public StreamingWrite buildForStreaming() {
return this;
}
}
diff --git a/src/test/java/com/google/cloud/pubsublite/spark/PslWriteTest.java b/src/test/java/com/google/cloud/pubsublite/spark/PslWriteTest.java
index a1b61f93..dafd7edc 100644
--- a/src/test/java/com/google/cloud/pubsublite/spark/PslWriteTest.java
+++ b/src/test/java/com/google/cloud/pubsublite/spark/PslWriteTest.java
@@ -48,7 +48,7 @@ public void testAbort() {
@Test
public void testCreateFactory() {
PhysicalWriteInfo info = new PhysicalWriteInfoImpl(42);
- writer.toBatch().createBatchWriterFactory(info);
- writer.toStreaming().createStreamingWriterFactory(info);
+ writer.createBatchWriterFactory(info);
+ writer.createStreamingWriterFactory(info);
}
}