Commit 933235e

LantaoJin authored and GitHub Enterprise committed

[CARMEL-4270] UpdateCommand and DeleteCommand should skip to optimize subquery in their condition (delta-io#16)

1 parent a13db3d · commit 933235e

3 files changed: +4 -6 lines changed

build.sbt (-2)
@@ -176,8 +176,6 @@ packageBin in Compile := spPackage.value
 
 sparkComponents := Seq("sql")
 
-publishArtifact in Test := true
-
 /********************
  * Release settings *
  ********************/

src/main/scala/org/apache/spark/sql/delta/commands/DeleteCommand.scala (+2 -2)
@@ -25,7 +25,7 @@ import org.apache.spark.sql.{Column, Dataset, Row, SparkSession}
 import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
 import org.apache.spark.sql.catalyst.expressions.{EqualNullSafe, Expression, InputFileName, Literal, Not}
 import org.apache.spark.sql.catalyst.plans.QueryPlan
-import org.apache.spark.sql.catalyst.plans.logical.{DeltaDelete, LogicalPlan}
+import org.apache.spark.sql.catalyst.plans.logical.{DeltaDelete, LogicalPlan, SkipOptimizingSubquery}
 import org.apache.spark.sql.execution.SQLExecution
 import org.apache.spark.sql.execution.command.RunnableCommand
 import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
@@ -47,7 +47,7 @@ case class DeleteCommand(
     tahoeFileIndex: TahoeFileIndex,
     target: LogicalPlan,
     condition: Option[Expression])
-  extends RunnableCommand with DeltaCommand {
+  extends RunnableCommand with DeltaCommand with SkipOptimizingSubquery {
 
   override def innerChildren: Seq[QueryPlan[_]] = Seq(target)
 

src/main/scala/org/apache/spark/sql/delta/commands/UpdateCommand.scala (+2 -2)
@@ -24,7 +24,7 @@ import org.apache.spark.SparkContext
 import org.apache.spark.sql.{Column, Dataset, Row, SparkSession}
 import org.apache.spark.sql.catalyst.expressions.{Alias, Expression, If, Literal}
 import org.apache.spark.sql.catalyst.plans.QueryPlan
-import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SupportsSubquery}
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SkipOptimizingSubquery, SupportsSubquery}
 import org.apache.spark.sql.execution.SQLExecution
 import org.apache.spark.sql.execution.command.RunnableCommand
 import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
@@ -46,7 +46,7 @@ case class UpdateCommand(
     target: LogicalPlan,
     updateExpressions: Seq[Expression],
     condition: Option[Expression])
-  extends RunnableCommand with DeltaCommand with SupportsSubquery {
+  extends RunnableCommand with DeltaCommand with SupportsSubquery with SkipOptimizingSubquery {
 
   override def innerChildren: Seq[QueryPlan[_]] = Seq(target)
 
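
The functional change in both files is a one-trait mix-in: DeleteCommand and UpdateCommand now extend SkipOptimizingSubquery, a marker trait from the fork's org.apache.spark.sql.catalyst.plans.logical package, so optimizer rules that rewrite subqueries can recognize these commands and leave the subqueries inside their condition untouched. The sketch below shows one way such a guard could look; it is only an illustration of the marker-trait pattern, and the OptimizeConditionSubqueries rule and rewriteConditionSubqueries helper are hypothetical names, not code from this commit.

// Minimal sketch, assuming SkipOptimizingSubquery is a plain marker trait.
// The rule and helper names below are hypothetical, for illustration only.
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

// Marker trait: commands mixing this in ask subquery-rewriting rules to skip them.
trait SkipOptimizingSubquery

object OptimizeConditionSubqueries extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan match {
    // DeleteCommand and UpdateCommand now carry the marker, so they pass
    // through unchanged and the subqueries in their condition stay as-is.
    case _: SkipOptimizingSubquery => plan
    case other => rewriteConditionSubqueries(other)
  }

  // Stand-in for the real subquery rewrite, elided in this sketch.
  private def rewriteConditionSubqueries(plan: LogicalPlan): LogicalPlan = plan
}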
