Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -79,22 +79,27 @@ object GlutenWriterColumnarRules {
// So FakeRowAdaptor will always consume columnar data,
// thus avoiding the case of c2r->aqe->r2c->writer
case aqe: AdaptiveSparkPlanExec =>
command.withNewChildren(
Array(
BackendsApiManager.getSparkPlanExecApiInstance.genColumnarToCarrierRow(
AdaptiveSparkPlanExec(
aqe.inputPlan,
aqe.context,
aqe.preprocessingRules,
aqe.isSubquery,
supportsColumnar = true
))))
val newChild = BackendsApiManager.getSparkPlanExecApiInstance
.genColumnarToCarrierRow(aqe.inputPlan)
command.withNewChildren(Array(wrapColumnarToRowWithAqe(newChild, aqe)))
case other =>
command.withNewChildren(
Array(BackendsApiManager.getSparkPlanExecApiInstance.genColumnarToCarrierRow(other)))
}
}

/**
 * Rebuilds an [[AdaptiveSparkPlanExec]] node around `newChild`, reusing the original
 * AQE node's context, preprocessing rules and subquery flag, but forcing row-based
 * output (`supportsColumnar = false`).
 *
 * @param newChild the plan (already wrapped for columnar-to-carrier-row conversion)
 *                 that becomes the AQE node's input plan
 * @param aqe      the original AQE node whose configuration is carried over
 * @return a new AQE node producing rows instead of columnar batches
 */
private def wrapColumnarToRowWithAqe(
    newChild: SparkPlan,
    aqe: AdaptiveSparkPlanExec): AdaptiveSparkPlanExec = {
  // Propagate the logical link from the original input plan, if one was set,
  // so the rebuilt plan still maps back to the same logical node.
  for (link <- aqe.inputPlan.logicalLink) {
    newChild.setLogicalLink(link)
  }
  AdaptiveSparkPlanExec(
    newChild,
    aqe.context,
    aqe.preprocessingRules,
    aqe.isSubquery,
    supportsColumnar = false)
}

case class NativeWritePostRule(session: SparkSession) extends Rule[SparkPlan] {

override def apply(p: SparkPlan): SparkPlan = p match {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1255,12 +1255,12 @@ class VeloxAdaptiveQueryExecSuite extends AdaptiveQueryExecSuite with GlutenSQLT
sparkContext.listenerBus.waitUntilEmpty()
assert(plan.isInstanceOf[V2TableWriteExec])
val childPlan = plan.asInstanceOf[V2TableWriteExec].child
assert(childPlan.isInstanceOf[ColumnarToCarrierRowExecBase])
assert(childPlan.isInstanceOf[AdaptiveSparkPlanExec])
assert(
childPlan
.asInstanceOf[ColumnarToCarrierRowExecBase]
.child
.isInstanceOf[AdaptiveSparkPlanExec])
.asInstanceOf[AdaptiveSparkPlanExec]
.inputPlan
.isInstanceOf[ColumnarToCarrierRowExecBase])

spark.listenerManager.unregister(listener)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1259,12 +1259,12 @@ class VeloxAdaptiveQueryExecSuite extends AdaptiveQueryExecSuite with GlutenSQLT
sparkContext.listenerBus.waitUntilEmpty()
assert(plan.isInstanceOf[V2TableWriteExec])
val childPlan = plan.asInstanceOf[V2TableWriteExec].child
assert(childPlan.isInstanceOf[ColumnarToCarrierRowExecBase])
assert(childPlan.isInstanceOf[AdaptiveSparkPlanExec])
assert(
childPlan
.asInstanceOf[ColumnarToCarrierRowExecBase]
.child
.isInstanceOf[AdaptiveSparkPlanExec])
.asInstanceOf[AdaptiveSparkPlanExec]
.inputPlan
.isInstanceOf[ColumnarToCarrierRowExecBase])

spark.listenerManager.unregister(listener)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1208,12 +1208,12 @@ class VeloxAdaptiveQueryExecSuite extends AdaptiveQueryExecSuite with GlutenSQLT
sparkContext.listenerBus.waitUntilEmpty()
assert(plan.isInstanceOf[V2TableWriteExec])
val childPlan = plan.asInstanceOf[V2TableWriteExec].child
assert(childPlan.isInstanceOf[ColumnarToCarrierRowExecBase])
assert(childPlan.isInstanceOf[AdaptiveSparkPlanExec])
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

GlutenWriterColumnarRules should only be used in Spark 3.2 and Spark 3.3. I have filed PR#11787 to fix this. After merging, we can remove those changes in Spark 3.4 to Spark 4.1.

assert(
childPlan
.asInstanceOf[ColumnarToCarrierRowExecBase]
.child
.isInstanceOf[AdaptiveSparkPlanExec])
.asInstanceOf[AdaptiveSparkPlanExec]
.inputPlan
.isInstanceOf[ColumnarToCarrierRowExecBase])

spark.listenerManager.unregister(listener)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1208,12 +1208,12 @@ class VeloxAdaptiveQueryExecSuite extends AdaptiveQueryExecSuite with GlutenSQLT
sparkContext.listenerBus.waitUntilEmpty()
assert(plan.isInstanceOf[V2TableWriteExec])
val childPlan = plan.asInstanceOf[V2TableWriteExec].child
assert(childPlan.isInstanceOf[ColumnarToCarrierRowExecBase])
assert(childPlan.isInstanceOf[AdaptiveSparkPlanExec])
assert(
childPlan
.asInstanceOf[ColumnarToCarrierRowExecBase]
.child
.isInstanceOf[AdaptiveSparkPlanExec])
.asInstanceOf[AdaptiveSparkPlanExec]
.inputPlan
.isInstanceOf[ColumnarToCarrierRowExecBase])

spark.listenerManager.unregister(listener)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1214,12 +1214,12 @@ class VeloxAdaptiveQueryExecSuite extends AdaptiveQueryExecSuite with GlutenSQLT
sparkContext.listenerBus.waitUntilEmpty()
assert(plan.isInstanceOf[V2TableWriteExec])
val childPlan = plan.asInstanceOf[V2TableWriteExec].child
assert(childPlan.isInstanceOf[ColumnarToCarrierRowExecBase])
assert(childPlan.isInstanceOf[AdaptiveSparkPlanExec])
assert(
childPlan
.asInstanceOf[ColumnarToCarrierRowExecBase]
.child
.isInstanceOf[AdaptiveSparkPlanExec])
.asInstanceOf[AdaptiveSparkPlanExec]
.inputPlan
.isInstanceOf[ColumnarToCarrierRowExecBase])

spark.listenerManager.unregister(listener)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1218,12 +1218,12 @@ class VeloxAdaptiveQueryExecSuite extends AdaptiveQueryExecSuite with GlutenSQLT
sparkContext.listenerBus.waitUntilEmpty()
assert(plan.isInstanceOf[V2TableWriteExec])
val childPlan = plan.asInstanceOf[V2TableWriteExec].child
assert(childPlan.isInstanceOf[ColumnarToCarrierRowExecBase])
assert(childPlan.isInstanceOf[AdaptiveSparkPlanExec])
assert(
childPlan
.asInstanceOf[ColumnarToCarrierRowExecBase]
.child
.isInstanceOf[AdaptiveSparkPlanExec])
.asInstanceOf[AdaptiveSparkPlanExec]
.inputPlan
.isInstanceOf[ColumnarToCarrierRowExecBase])

spark.listenerManager.unregister(listener)
}
Expand Down
Loading