Skip to content

Commit b5ce960

Browse files
committed
Address review comments
- Remove redundant .filter(_.nonEmpty) guard in PushDownUtils
- Fix misleading comment in InMemoryPartitionPredicateDeleteTable
- Add logDebug for each delete optimization outcome path
1 parent 0b8b3d1 commit b5ce960

3 files changed

Lines changed: 10 additions & 3 deletions

File tree

sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryPartitionPredicateDeleteTable.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ class InMemoryPartitionPredicateDeleteTable(
8484
candidateKeys
8585
}
8686

87-
// Handle data predicates (simulate data source with data column statistics)
87+
// Handle data predicates (simulate a data source handling data filters with data statistics)
8888
if (dataStdPreds.isEmpty) {
8989
dataMap --= keysToProcess
9090
} else {

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/OptimizeMetadataOnlyDeleteFromTable.scala

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,10 +45,15 @@ object OptimizeMetadataOnlyDeleteFromTable extends Rule[LogicalPlan] with Predic
4545
val normalizedPredicates = DataSourceStrategy.normalizeExprs(predicates, relation.output)
4646
val filtersOpt = tryTranslateToV2(normalizedPredicates)
4747
if (filtersOpt.exists(table.canDeleteWhere)) {
48+
logDebug(s"Switching to delete with filters: " +
49+
s"${filtersOpt.get.mkString("[", ", ", "]")}")
4850
DeleteFromTableWithFilters(relation, filtersOpt.get.toImmutableArraySeq)
4951
} else {
5052
tryDeleteWithPartitionPredicates(table, relation, normalizedPredicates)
51-
.getOrElse(rowLevelPlan)
53+
.getOrElse {
54+
logDebug(s"Falling back to row-level delete on ${relation.table.name()}")
55+
rowLevelPlan
56+
}
5257
}
5358

5459
case _: TruncatableTable if cond == TrueLiteral =>
@@ -93,6 +98,8 @@ object OptimizeMetadataOnlyDeleteFromTable extends Rule[LogicalPlan] with Predic
9398
combined = partPredicates.toArray ++ dataV2Filters
9499
if table.canDeleteWhere(combined)
95100
} yield {
101+
logDebug(s"Switching to delete with PartitionPredicate filters: " +
102+
s"${combined.mkString("[", ", ", "]")}")
96103
DeleteFromTableWithFilters(relation, combined.toImmutableArraySeq)
97104
}
98105
}

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/PushDownUtils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -149,7 +149,7 @@ object PushDownUtils extends Logging {
149149
case _ => None
150150
}
151151
if (fields.length == transforms.length) {
152-
Some(fields.toSeq).filter(_.nonEmpty)
152+
Some(fields.toSeq)
153153
} else {
154154
None
155155
}

0 commit comments

Comments (0)