ctring commented on code in PR #49559:
URL: https://github.com/apache/spark/pull/49559#discussion_r1924273748


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2AlterTableCommands.scala:
##########
@@ -201,49 +201,55 @@ case class RenameColumn(
     copy(table = newChild)
 }
 
-/**
- * The logical plan of the ALTER TABLE ... ALTER COLUMN command.
- */
-case class AlterColumn(
-    table: LogicalPlan,
-    column: FieldName,
+case class AlterColumnSpec(
     dataType: Option[DataType],
     nullable: Option[Boolean],
     comment: Option[String],
     position: Option[FieldPosition],
-    setDefaultExpression: Option[String]) extends AlterTableCommand {
+    setDefaultExpression: Option[String])
+
+/**
+ * The logical plan of the ALTER TABLE ... ALTER COLUMN command.
+ */
+case class AlterColumns(
+    table: LogicalPlan,
+    columns: Seq[FieldName],
+    specs: Seq[AlterColumnSpec]) extends AlterTableCommand {
   override def changes: Seq[TableChange] = {
-    require(column.resolved, "FieldName should be resolved before it's 
converted to TableChange.")
-    val colName = column.name.toArray
-    val typeChange = dataType.map { newDataType =>
-      TableChange.updateColumnType(colName, newDataType)
-    }
-    val nullabilityChange = nullable.map { nullable =>
-      TableChange.updateColumnNullability(colName, nullable)
-    }
-    val commentChange = comment.map { newComment =>
-      TableChange.updateColumnComment(colName, newComment)
-    }
-    val positionChange = position.map { newPosition =>
-      require(newPosition.resolved,
-        "FieldPosition should be resolved before it's converted to 
TableChange.")
-      TableChange.updateColumnPosition(colName, newPosition.position)
-    }
-    val defaultValueChange = setDefaultExpression.map { newDefaultExpression =>
-      if (newDefaultExpression.nonEmpty) {
-        // SPARK-45075: We call 'ResolveDefaultColumns.analyze' here to make 
sure that the default
-        // value parses successfully, and return an error otherwise
-        val newDataType = 
dataType.getOrElse(column.asInstanceOf[ResolvedFieldName].field.dataType)
-        ResolveDefaultColumns.analyze(column.name.last, newDataType, 
newDefaultExpression,
-          "ALTER TABLE ALTER COLUMN")
+    assert(columns.size == specs.size)
+    columns.zip(specs).flatMap { case (column, spec) =>
+      require(column.resolved, "FieldName should be resolved before it's 
converted to TableChange.")
+      val colName = column.name.toArray
+      val typeChange = spec.dataType.map { newDataType =>
+        TableChange.updateColumnType(colName, newDataType)
+      }
+      val nullabilityChange = spec.nullable.map { nullable =>
+        TableChange.updateColumnNullability(colName, nullable)
       }
-      TableChange.updateColumnDefaultValue(colName, newDefaultExpression)
+      val commentChange = spec.comment.map { newComment =>
+        TableChange.updateColumnComment(colName, newComment)
+      }
+      val positionChange = spec.position.map { newPosition =>
+        require(newPosition.resolved,
+          "FieldPosition should be resolved before it's converted to 
TableChange.")
+        TableChange.updateColumnPosition(colName, newPosition.position)
+      }
+      val defaultValueChange = spec.setDefaultExpression.map { 
newDefaultExpression =>
+        if (newDefaultExpression.nonEmpty) {
+          // SPARK-45075: We call 'ResolveDefaultColumns.analyze' here to make 
sure that the default
+          // value parses successfully, and return an error otherwise
+          val newDataType = spec.dataType.getOrElse(
+            column.asInstanceOf[ResolvedFieldName].field.dataType)
+          ResolveDefaultColumns.analyze(column.name.last, newDataType, 
newDefaultExpression,
+            "ALTER TABLE ALTER COLUMN")
+        }
+        TableChange.updateColumnDefaultValue(colName, newDefaultExpression)
+      }
+      typeChange.toSeq ++ nullabilityChange ++ commentChange ++ positionChange 
++ defaultValueChange
     }
-    typeChange.toSeq ++ nullabilityChange ++ commentChange ++ positionChange 
++ defaultValueChange
   }
 
-  override protected def withNewChildInternal(newChild: LogicalPlan): 
LogicalPlan =
-    copy(table = newChild)
+  override protected def withNewChildInternal(newChild: LogicalPlan): 
LogicalPlan = copy(newChild)

Review Comment:
   I didn't intend to write it this way and will fix this, but I think it still 
works because the child argument happens to be at the first position.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to