agent_patch
runs/2026-02-28__18-27-22__gpt-5-3-codex/flux-pr-1918/flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/agent-logs/agent.patch
6802 bytes
diff --git a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/ast/ddl.rs b/app/src/ast/ddl.rs
index 7e46a59..2bd7a69 100644
--- a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/ast/ddl.rs
+++ b/app/src/ast/ddl.rs
@@ -147,6 +147,13 @@ pub enum AlterTableOperation {
if_exists: bool,
drop_behavior: Option<DropBehavior>,
},
+ /// `DROP [ COLUMN ] [ IF EXISTS ] <column_name> [, <column_name> ...] [ RESTRICT | CASCADE ]`
+ DropColumns {
+ has_column_keyword: bool,
+ column_names: Vec<Ident>,
+ if_exists: bool,
+ drop_behavior: Option<DropBehavior>,
+ },
/// `ATTACH PART|PARTITION <partition_expr>`
/// Note: this is a ClickHouse-specific operation, please refer to
/// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/alter/pakrtition#attach-partitionpart)
@@ -646,6 +653,23 @@ impl fmt::Display for AlterTableOperation {
Some(DropBehavior::Cascade) => " CASCADE",
}
),
+ AlterTableOperation::DropColumns {
+ has_column_keyword,
+ column_names,
+ if_exists,
+ drop_behavior,
+ } => write!(
+ f,
+ "DROP {}{}{}{}",
+ if *has_column_keyword { "COLUMN " } else { "" },
+ if *if_exists { "IF EXISTS " } else { "" },
+ display_comma_separated(column_names),
+ match drop_behavior {
+ None => "",
+ Some(DropBehavior::Restrict) => " RESTRICT",
+ Some(DropBehavior::Cascade) => " CASCADE",
+ }
+ ),
AlterTableOperation::AttachPartition { partition } => {
write!(f, "ATTACH {partition}")
}
diff --git a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/dialect/mod.rs b/app/src/dialect/mod.rs
index 3345380..d05d516 100644
--- a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/dialect/mod.rs
+++ b/app/src/dialect/mod.rs
@@ -494,6 +494,17 @@ pub trait Dialect: Debug + Any {
false
}
+ /// Returns true if the dialect supports dropping multiple columns in a
+ /// single `ALTER TABLE ... DROP COLUMN` operation.
+ ///
+ /// Example:
+ /// ```sql
+ /// ALTER TABLE t DROP COLUMN c1, c2, c3
+ /// ```
+ fn supports_multiple_drop_columns(&self) -> bool {
+ false
+ }
+
/// Returns true if the dialect supports double dot notation for object names
///
/// Example
diff --git a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/dialect/snowflake.rs b/app/src/dialect/snowflake.rs
index ee770b0..adc6ed0 100644
--- a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/dialect/snowflake.rs
+++ b/app/src/dialect/snowflake.rs
@@ -63,6 +63,10 @@ impl Dialect for SnowflakeDialect {
true
}
+ fn supports_multiple_drop_columns(&self) -> bool {
+ true
+ }
+
// Snowflake supports double-dot notation when the schema name is not specified
// In this case the default PUBLIC schema is used
//
diff --git a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/parser/mod.rs b/app/src/parser/mod.rs
index 32f8a97..65349b4 100644
--- a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/parser/mod.rs
+++ b/app/src/parser/mod.rs
@@ -8676,6 +8676,25 @@ impl<'a> Parser<'a> {
let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
let column_name = self.parse_identifier()?;
+ if self.dialect.supports_multiple_drop_columns() {
+ let mut column_names = vec![column_name];
+ while let Some(next_column_name) = self.maybe_parse(|parser| {
+ parser.expect_token(&Token::Comma)?;
+ parser.parse_identifier()
+ })? {
+ column_names.push(next_column_name);
+ }
+
+ if column_names.len() > 1 {
+ let drop_behavior = self.parse_optional_drop_behavior();
+ return Ok(AlterTableOperation::DropColumns {
+ has_column_keyword,
+ column_names,
+ if_exists,
+ drop_behavior,
+ });
+ }
+ }
let drop_behavior = self.parse_optional_drop_behavior();
AlterTableOperation::DropColumn {
has_column_keyword,
diff --git a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/tests/sqlparser_snowflake.rs b/app/tests/sqlparser_snowflake.rs
index 8a1558b..e5bfb92 100644
--- a/tmp/agent-patch-flux-pr-1918.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/tests/sqlparser_snowflake.rs
+++ b/app/tests/sqlparser_snowflake.rs
@@ -23,7 +23,7 @@ use sqlparser::ast::helpers::key_value_options::{KeyValueOption, KeyValueOptionT
use sqlparser::ast::helpers::stmt_data_loading::{StageLoadSelectItem, StageLoadSelectItemKind};
use sqlparser::ast::*;
use sqlparser::dialect::{Dialect, GenericDialect, SnowflakeDialect};
-use sqlparser::parser::{ParserError, ParserOptions};
+use sqlparser::parser::{Parser, ParserError, ParserOptions};
use sqlparser::tokenizer::*;
use test_utils::*;
@@ -1646,6 +1646,31 @@ fn test_alter_table_clustering() {
snowflake_and_generic().verified_stmt("ALTER TABLE tbl RESUME RECLUSTER");
}
+#[test]
+fn test_alter_table_drop_multiple_columns() {
+ let sql = "ALTER TABLE tab DROP COLUMN c1, c2, c3";
+ match alter_table_op_with_name(snowflake().verified_stmt(sql), "tab") {
+ AlterTableOperation::DropColumns {
+ has_column_keyword,
+ column_names,
+ if_exists,
+ drop_behavior,
+ } => {
+ assert!(has_column_keyword);
+ assert_eq!(
+ column_names,
+ vec![Ident::new("c1"), Ident::new("c2"), Ident::new("c3")]
+ );
+ assert!(!if_exists);
+ assert_eq!(drop_behavior, None);
+ }
+ _ => unreachable!(),
+ }
+
+ snowflake().one_statement_parses_to(sql, sql);
+ assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
+}
+
#[test]
fn test_alter_iceberg_table() {
snowflake_and_generic().verified_stmt("ALTER ICEBERG TABLE tbl DROP CLUSTERING KEY");