
Commit a0dbcaf

[Kernel-Spark] Integrate DeltaSnapshotManagerFactory in SparkTable
1 parent ef369c6 commit a0dbcaf

1 file changed (+6, -2)


kernel-spark/src/main/java/io/delta/kernel/spark/catalog/SparkTable.java

Lines changed: 6 additions & 2 deletions
@@ -21,7 +21,7 @@
 import io.delta.kernel.Snapshot;
 import io.delta.kernel.spark.read.SparkScanBuilder;
 import io.delta.kernel.spark.snapshot.DeltaSnapshotManager;
-import io.delta.kernel.spark.snapshot.PathBasedSnapshotManager;
+import io.delta.kernel.spark.snapshot.DeltaSnapshotManagerFactory;
 import io.delta.kernel.spark.utils.SchemaUtils;
 import java.util.*;
 import org.apache.hadoop.conf.Configuration;
@@ -141,7 +141,11 @@ private SparkTable(
 
     this.hadoopConf =
         SparkSession.active().sessionState().newHadoopConfWithOptions(toScalaMap(options));
-    this.snapshotManager = new PathBasedSnapshotManager(tablePath, hadoopConf);
+    this.snapshotManager =
+        catalogTable.isPresent()
+            ? DeltaSnapshotManagerFactory.fromCatalogTable(
+                catalogTable.get(), SparkSession.active(), hadoopConf)
+            : DeltaSnapshotManagerFactory.fromPath(tablePath, hadoopConf);
     // Load the initial snapshot through the manager
     this.initialSnapshot = snapshotManager.loadLatestSnapshot();
     this.schema = SchemaUtils.convertKernelSchemaToSparkSchema(initialSnapshot.getSchema());
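
What changed: SparkTable no longer constructs PathBasedSnapshotManager directly; it asks DeltaSnapshotManagerFactory for a DeltaSnapshotManager, using fromCatalogTable when a catalogTable is present and fromPath otherwise. Below is a minimal, self-contained Java sketch of that dispatch pattern for illustration only. Apart from the catalog-vs-path split visible in the diff, every name here (SnapshotManager, PathBasedManager, CatalogBasedManager, SnapshotManagerFactory.create) is a simplified stand-in, not the actual Delta Kernel API, and the real factory's internals are not part of this commit.

// Hypothetical sketch of the factory dispatch used above; class and method
// names are simplified stand-ins, not Delta Kernel's real API.
import java.util.Optional;

interface SnapshotManager {
  // The real manager returns a Delta Kernel Snapshot; a String keeps this sketch self-contained.
  String loadLatestSnapshot();
}

final class PathBasedManager implements SnapshotManager {
  private final String tablePath;

  PathBasedManager(String tablePath) {
    this.tablePath = tablePath;
  }

  @Override
  public String loadLatestSnapshot() {
    return "latest snapshot resolved from path " + tablePath;
  }
}

final class CatalogBasedManager implements SnapshotManager {
  private final String catalogTableName;

  CatalogBasedManager(String catalogTableName) {
    this.catalogTableName = catalogTableName;
  }

  @Override
  public String loadLatestSnapshot() {
    return "latest snapshot resolved via catalog table " + catalogTableName;
  }
}

final class SnapshotManagerFactory {
  // Mirrors the commit's dispatch: a catalog-backed table gets a catalog-aware
  // manager, a plain path falls back to the path-based one.
  static SnapshotManager create(Optional<String> catalogTableName, String tablePath) {
    return catalogTableName
        .<SnapshotManager>map(CatalogBasedManager::new)
        .orElseGet(() -> new PathBasedManager(tablePath));
  }
}

public class FactoryDispatchExample {
  public static void main(String[] args) {
    SnapshotManager byPath =
        SnapshotManagerFactory.create(Optional.empty(), "/data/events");
    SnapshotManager byCatalog =
        SnapshotManagerFactory.create(Optional.of("main.default.events"), "/data/events");

    System.out.println(byPath.loadLatestSnapshot());
    System.out.println(byCatalog.loadLatestSnapshot());
  }
}

The likely point of routing through a factory rather than keeping only the ternary is that SparkTable stays unaware of which concrete DeltaSnapshotManager it receives, so catalog-backed tables can resolve snapshots differently from plain path-based tables without further changes to SparkTable.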
