ctring commented on code in PR #49559: URL: https://github.com/apache/spark/pull/49559#discussion_r1926074852
########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2AlterTableCommands.scala: ########## @@ -201,45 +201,52 @@ case class RenameColumn( copy(table = newChild) } -/** - * The logical plan of the ALTER TABLE ... ALTER COLUMN command. - */ -case class AlterColumn( - table: LogicalPlan, - column: FieldName, +case class AlterColumnSpec( dataType: Option[DataType], nullable: Option[Boolean], comment: Option[String], position: Option[FieldPosition], - setDefaultExpression: Option[String]) extends AlterTableCommand { + setDefaultExpression: Option[String]) + +/** + * The logical plan of the ALTER TABLE ... ALTER COLUMN command. + */ +case class AlterColumns( + table: LogicalPlan, + columns: Seq[FieldName], + specs: Seq[AlterColumnSpec]) extends AlterTableCommand { override def changes: Seq[TableChange] = { - require(column.resolved, "FieldName should be resolved before it's converted to TableChange.") - val colName = column.name.toArray - val typeChange = dataType.map { newDataType => - TableChange.updateColumnType(colName, newDataType) - } - val nullabilityChange = nullable.map { nullable => - TableChange.updateColumnNullability(colName, nullable) - } - val commentChange = comment.map { newComment => - TableChange.updateColumnComment(colName, newComment) - } - val positionChange = position.map { newPosition => - require(newPosition.resolved, - "FieldPosition should be resolved before it's converted to TableChange.") - TableChange.updateColumnPosition(colName, newPosition.position) - } - val defaultValueChange = setDefaultExpression.map { newDefaultExpression => - if (newDefaultExpression.nonEmpty) { - // SPARK-45075: We call 'ResolveDefaultColumns.analyze' here to make sure that the default - // value parses successfully, and return an error otherwise - val newDataType = dataType.getOrElse(column.asInstanceOf[ResolvedFieldName].field.dataType) - ResolveDefaultColumns.analyze(column.name.last, newDataType, newDefaultExpression, - "ALTER 
TABLE ALTER COLUMN") + assert(columns.size == specs.size) Review Comment: It would be much nicer that way. I didn't know making `AlterColumnSpec` was an option. I've updated the PR with this change. ########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2AlterTableCommands.scala: ########## @@ -201,45 +201,52 @@ case class RenameColumn( copy(table = newChild) } -/** - * The logical plan of the ALTER TABLE ... ALTER COLUMN command. - */ -case class AlterColumn( - table: LogicalPlan, - column: FieldName, +case class AlterColumnSpec( dataType: Option[DataType], nullable: Option[Boolean], comment: Option[String], position: Option[FieldPosition], - setDefaultExpression: Option[String]) extends AlterTableCommand { + setDefaultExpression: Option[String]) + +/** + * The logical plan of the ALTER TABLE ... ALTER COLUMN command. + */ +case class AlterColumns( + table: LogicalPlan, + columns: Seq[FieldName], + specs: Seq[AlterColumnSpec]) extends AlterTableCommand { override def changes: Seq[TableChange] = { - require(column.resolved, "FieldName should be resolved before it's converted to TableChange.") - val colName = column.name.toArray - val typeChange = dataType.map { newDataType => - TableChange.updateColumnType(colName, newDataType) - } - val nullabilityChange = nullable.map { nullable => - TableChange.updateColumnNullability(colName, nullable) - } - val commentChange = comment.map { newComment => - TableChange.updateColumnComment(colName, newComment) - } - val positionChange = position.map { newPosition => - require(newPosition.resolved, - "FieldPosition should be resolved before it's converted to TableChange.") - TableChange.updateColumnPosition(colName, newPosition.position) - } - val defaultValueChange = setDefaultExpression.map { newDefaultExpression => - if (newDefaultExpression.nonEmpty) { - // SPARK-45075: We call 'ResolveDefaultColumns.analyze' here to make sure that the default - // value parses successfully, and return an error 
otherwise - val newDataType = dataType.getOrElse(column.asInstanceOf[ResolvedFieldName].field.dataType) - ResolveDefaultColumns.analyze(column.name.last, newDataType, newDefaultExpression, - "ALTER TABLE ALTER COLUMN") + assert(columns.size == specs.size) Review Comment: It would be much nicer that way. I didn't know making `AlterColumnSpec` an expression was an option. I've updated the PR with this change. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org