diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
index a6405f18ad444..b897026a0df46 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
@@ -445,6 +445,21 @@ object Union {
   def apply(left: LogicalPlan, right: LogicalPlan): Union = {
     Union (left :: right :: Nil)
   }
+
+  // Updates nullability (and union-merges struct types) to make all the children consistent
+  def mergeChildOutputs(childOutputs: Seq[Seq[Attribute]]): Seq[Attribute] = {
+    childOutputs.transpose.map { attrs =>
+      val firstAttr = attrs.head
+      val nullable = attrs.exists(_.nullable)
+      val newDt = attrs.map(_.dataType).reduce(StructType.unionLikeMerge)
+      if (firstAttr.dataType == newDt) {
+        firstAttr.withNullability(nullable)
+      } else {
+        AttributeReference(firstAttr.name, newDt, nullable, firstAttr.metadata)(
+          firstAttr.exprId, firstAttr.qualifier)
+      }
+    }
+  }
 }
 
 /**
@@ -526,20 +541,7 @@ case class Union(
 
   private lazy val lazyOutput: Seq[Attribute] = computeOutput()
 
-  // updating nullability to make all the children consistent
-  private def computeOutput(): Seq[Attribute] = {
-    children.map(_.output).transpose.map { attrs =>
-      val firstAttr = attrs.head
-      val nullable = attrs.exists(_.nullable)
-      val newDt = attrs.map(_.dataType).reduce(StructType.unionLikeMerge)
-      if (firstAttr.dataType == newDt) {
-        firstAttr.withNullability(nullable)
-      } else {
-        AttributeReference(firstAttr.name, newDt, nullable, firstAttr.metadata)(
-          firstAttr.exprId, firstAttr.qualifier)
-      }
-    }
-  }
+  private def computeOutput(): Seq[Attribute] = Union.mergeChildOutputs(children.map(_.output))
 
   /**
    * Maps the constraints containing a given (original) sequence of attributes to those with a
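
Reviewer note: this change is a pure refactor that moves the output-merging logic from `Union.computeOutput()` into the companion object as `Union.mergeChildOutputs`, presumably so the merged output can be computed from per-child outputs without first constructing a `Union` node. For readers unfamiliar with the transpose-then-merge pattern, the sketch below shows what the helper does, using a hypothetical `Attr` stand-in for Catalyst's `Attribute` (not Spark API). It assumes all children have identical data types, so only nullability is widened; the real helper also union-merges differing struct types via `StructType.unionLikeMerge`.

```scala
// Minimal, self-contained sketch of the merge pattern in Union.mergeChildOutputs.
// `Attr` is a hypothetical stand-in for org.apache.spark.sql.catalyst.expressions.Attribute.
object MergeChildOutputsSketch {
  case class Attr(name: String, dataType: String, nullable: Boolean)

  // Transpose the per-child output rows into per-column groups, then merge each
  // column: the result is nullable if the column is nullable in any child.
  // (Data-type merging via StructType.unionLikeMerge is elided here.)
  def mergeChildOutputs(childOutputs: Seq[Seq[Attr]]): Seq[Attr] =
    childOutputs.transpose.map { attrs =>
      attrs.head.copy(nullable = attrs.exists(_.nullable))
    }

  def main(args: Array[String]): Unit = {
    val child1 = Seq(Attr("id", "int", nullable = false), Attr("name", "string", nullable = false))
    val child2 = Seq(Attr("id", "int", nullable = true), Attr("name", "string", nullable = false))
    // Prints: List(Attr(id,int,true), Attr(name,string,false))
    // "id" becomes nullable because it is nullable in child2.
    println(mergeChildOutputs(Seq(child1, child2)))
  }
}
```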