agent_patch
runs/2026-02-28__18-27-22__gpt-5-3-codex/flux-pr-1576/flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/agent-logs/agent.patch
8064 bytes
diff --git a/tmp/agent-patch-flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/dialect/mod.rs b/app/src/dialect/mod.rs
index 8cce6a3..8c3c889 100644
--- a/tmp/agent-patch-flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/dialect/mod.rs
+++ b/app/src/dialect/mod.rs
@@ -138,6 +138,26 @@ pub trait Dialect: Debug + Any {
true
}
+ /// Determine if a character starts a nested quoted identifier.
+ ///
+ /// Nested quoted identifiers are delimited by one quote style and may
+ /// optionally contain a second, inner quote style (for example Redshift's
+ /// `["foo"]` form).
+ fn is_nested_delimited_identifier_start(&self, _ch: char) -> bool {
+ false
+ }
+
+ /// Determine whether characters inside nested quotes form a valid identifier.
+ ///
+ /// This is used by dialects where the same token opener can also represent
+ /// array or JSON access syntax.
+ fn is_proper_identifier_inside_nested_quotes(
+ &self,
+ mut _chars: Peekable<Chars<'_>>,
+ ) -> bool {
+ false
+ }
+
/// Determine if a character is a valid start character for an unquoted identifier
fn is_identifier_start(&self, ch: char) -> bool;
diff --git a/tmp/agent-patch-flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/dialect/redshift.rs b/app/src/dialect/redshift.rs
index 48eb00a..4f8142f 100644
--- a/tmp/agent-patch-flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/dialect/redshift.rs
+++ b/app/src/dialect/redshift.rs
@@ -33,20 +33,54 @@ pub struct RedshiftSqlDialect {}
// be a json path
impl Dialect for RedshiftSqlDialect {
fn is_delimited_identifier_start(&self, ch: char) -> bool {
- ch == '"' || ch == '['
+ ch == '"'
}
- /// Determine if quoted characters are proper for identifier
- /// It's needed to distinguish treating square brackets as quotes from
- /// treating them as json path. If there is identifier then we assume
- /// there is no json path.
- fn is_proper_identifier_inside_quotes(&self, mut chars: Peekable<Chars<'_>>) -> bool {
+ fn is_nested_delimited_identifier_start(&self, ch: char) -> bool {
+ ch == '['
+ }
+
+ /// Determine whether nested bracket-delimited characters form a valid identifier.
+ /// It is needed to distinguish treating square brackets as identifier quotes
+ /// from treating them as a JSON path.
+ fn is_proper_identifier_inside_nested_quotes(&self, mut chars: Peekable<Chars<'_>>) -> bool {
chars.next();
- let mut not_white_chars = chars.skip_while(|ch| ch.is_whitespace()).peekable();
- if let Some(&ch) = not_white_chars.peek() {
- return self.is_identifier_start(ch);
+ match chars.peek().copied() {
+ Some('"') => {
+ chars.next();
+ let mut quote_closed = false;
+ while let Some(ch) = chars.next() {
+ if ch == '"' {
+ if chars.peek() == Some(&'"') {
+ chars.next();
+ } else {
+ quote_closed = true;
+ break;
+ }
+ }
+ }
+
+ if !quote_closed {
+ return false;
+ }
+
+ while let Some(ch) = chars.peek() {
+ if !ch.is_whitespace() {
+ break;
+ }
+ chars.next();
+ }
+
+ chars.next() == Some(']')
+ }
+ _ => {
+ let mut non_whitespace_chars = chars.skip_while(|ch| ch.is_whitespace()).peekable();
+ match non_whitespace_chars.peek().copied() {
+ Some(ch) => self.is_identifier_start(ch),
+ None => false,
+ }
+ }
}
- false
}
fn is_identifier_start(&self, ch: char) -> bool {
diff --git a/tmp/agent-patch-flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/tokenizer.rs b/app/src/tokenizer.rs
index aacfc16..0ba044e 100644
--- a/tmp/agent-patch-flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/src/tokenizer.rs
+++ b/app/src/tokenizer.rs
@@ -1095,6 +1095,15 @@ impl<'a> Tokenizer<'a> {
)
}
}
+ // nested delimited identifier (for example: [foo], ["foo"])
+ quote_start
+ if self.dialect.is_nested_delimited_identifier_start(ch)
+ && self
+ .dialect
+ .is_proper_identifier_inside_nested_quotes(chars.peekable.clone()) =>
+ {
+ self.tokenize_nested_delimited_identifier(chars, quote_start)
+ }
// numbers and period
'0'..='9' | '.' => {
let mut s = peeking_take_while(chars, |ch| ch.is_ascii_digit());
@@ -1843,6 +1852,48 @@ impl<'a> Tokenizer<'a> {
(s, last_char)
}
+ fn tokenize_nested_delimited_identifier(
+ &self,
+ chars: &mut State,
+ quote_start: char,
+ ) -> Result<Option<Token>, TokenizerError> {
+ let error_loc = chars.location();
+ chars.next(); // consume the opening quote
+ let quote_end = Word::matching_end_quote(quote_start);
+
+ if chars.peek() == Some(&'"') {
+ chars.next(); // consume the inner opening quote
+ let (s, last_char) = self.parse_quoted_ident(chars, '"');
+ if last_char != Some('"') {
+ return self.tokenizer_error(
+ error_loc,
+ "Expected close delimiter '\"' before EOF.",
+ );
+ }
+
+ while let Some(ch) = chars.peek() {
+ if !ch.is_whitespace() {
+ break;
+ }
+ chars.next();
+ }
+
+ if chars.next() == Some(quote_end) {
+ return Ok(Some(Token::make_word(&s, Some(quote_start))));
+ }
+ } else {
+ let (s, last_char) = self.parse_quoted_ident(chars, quote_end);
+ if last_char == Some(quote_end) {
+ return Ok(Some(Token::make_word(&s, Some(quote_start))));
+ }
+ }
+
+ self.tokenizer_error(
+ error_loc,
+ format!("Expected close delimiter '{quote_end}' before EOF."),
+ )
+ }
+
#[allow(clippy::unnecessary_wraps)]
fn consume_and_return(
&self,
diff --git a/tmp/agent-patch-flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/tests/sqlparser_redshift.rs b/app/tests/sqlparser_redshift.rs
index 9492946..803c3ad 100644
--- a/tmp/agent-patch-flux-pr-1576.1-of-1.2026-02-28__18-27-22__gpt-5-3-codex/app/tests/sqlparser_redshift.rs
+++ b/app/tests/sqlparser_redshift.rs
@@ -262,6 +262,32 @@ fn test_redshift_json_path() {
);
}
+#[test]
+fn test_nested_bracket_quoted_identifiers() {
+ let select = redshift().verified_only_select(r#"SELECT ["0"], ["foo"] FROM t"#);
+ assert_eq!(
+ expr_from_projection(&select.projection[0]),
+ &Expr::Identifier(Ident::with_quote('[', "0"))
+ );
+ assert_eq!(
+ expr_from_projection(&select.projection[1]),
+ &Expr::Identifier(Ident::with_quote('[', "foo"))
+ );
+
+ let select = redshift().verified_only_select("SELECT col[0] FROM t");
+ assert_eq!(
+ expr_from_projection(only(&select.projection)),
+ &Expr::JsonAccess {
+ value: Box::new(Expr::Identifier(Ident::new("col"))),
+ path: JsonPath {
+ path: vec![JsonPathElem::Bracket {
+ key: Expr::Value(Value::Number("0".parse().unwrap(), false))
+ }]
+ }
+ }
+ );
+}
+
#[test]
fn test_parse_json_path_from() {
let dialects = all_dialects_where(|d| d.supports_partiql());