STET

agent_patch

runs/2026-02-28__13-38-10__gpt-5-1-codex-mini/flux-pr-1241/flux-pr-1241.1-of-1.2026-02-28__13-38-10__gpt-5-1-codex-mini/agent-logs/agent.patch

13192 bytes

Back to adjudication
diff --git a/app/pkg/astparser/options.go b/app/pkg/astparser/options.go
new file mode 100644
index 0000000..787d1ee
--- /dev/null
+++ b/app/pkg/astparser/options.go
@@ -0,0 +1,19 @@
+package astparser
+
+// ParseOptions configures parser limits and behavior.
+type ParseOptions struct {
+	// DepthLimit enforces a maximum selection-set nesting depth for operations and fragments.
+	// A value of 0 disables depth limiting.
+	DepthLimit int
+	// FieldLimit enforces a maximum count of actual fields parsed from the document.
+	// A value of 0 disables field limiting.
+	FieldLimit int
+}
+
+// ParseStats exposes metrics collected during a parse run.
+type ParseStats struct {
+	// Depth is the sum of the peak depths observed for each operation and fragment defined in the document.
+	Depth int
+	// FieldCount is the number of fields parsed across operations and fragments, excluding fragment spreads.
+	FieldCount int
+}
diff --git a/app/pkg/astparser/parser.go b/app/pkg/astparser/parser.go
index 0847a97..19ee014 100644
--- a/app/pkg/astparser/parser.go
+++ b/app/pkg/astparser/parser.go
@@ -20,7 +20,18 @@ import (
 // Therefore you shouldn't use this function in a hot path.
 // Instead create a parser as well as AST objects and re-use them.
 func ParseGraphqlDocumentString(input string) (ast.Document, operationreport.Report) {
-	return ParseGraphqlDocumentBytes([]byte(input))
+	doc, report, _ := ParseGraphqlDocumentStringWithOptions(input, ParseOptions{})
+	return doc, report
+}
+
+// ParseGraphqlDocumentStringWithOptions parses a document using the supplied options and returns parse statistics.
+func ParseGraphqlDocumentStringWithOptions(input string, options ParseOptions) (ast.Document, operationreport.Report, ParseStats) {
+	parser := NewParserWithOptions(options)
+	doc := *ast.NewDocument()
+	doc.Input.ResetInputBytes([]byte(input))
+	report := operationreport.Report{}
+	parser.Parse(&doc, &report)
+	return doc, report, parser.Stats()
 }
 
 // ParseGraphqlDocumentBytes takes a raw GraphQL document in byte slice format and parses it into an AST.
@@ -28,12 +39,18 @@ func ParseGraphqlDocumentString(input string) (ast.Document, operationreport.Rep
 // Therefore you shouldn't use this function in a hot path.
 // Instead create a parser as well as AST objects and re-use them.
 func ParseGraphqlDocumentBytes(input []byte) (ast.Document, operationreport.Report) {
-	parser := NewParser()
+	doc, report, _ := ParseGraphqlDocumentBytesWithOptions(input, ParseOptions{})
+	return doc, report
+}
+
+// ParseGraphqlDocumentBytesWithOptions parses a document using the supplied options and returns parse statistics.
+func ParseGraphqlDocumentBytesWithOptions(input []byte, options ParseOptions) (ast.Document, operationreport.Report, ParseStats) {
+	parser := NewParserWithOptions(options)
 	doc := *ast.NewDocument()
 	doc.Input.ResetInputBytes(input)
 	report := operationreport.Report{}
 	parser.Parse(&doc, &report)
-	return doc, report
+	return doc, report, parser.Stats()
 }
 
 // Parser takes a raw input and turns it into an AST
@@ -45,19 +62,30 @@ type Parser struct {
 	tokenizer            *Tokenizer
 	shouldIndex          bool
 	reportInternalErrors bool
+	options              ParseOptions
+	stats                ParseStats
+	depthStack           []selectionContext
+	limitExceeded        bool
 }
 
-// NewParser returns a new parser with all values properly initialized
+// NewParser returns a new parser with default options.
 func NewParser() *Parser {
+	return NewParserWithOptions(ParseOptions{})
+}
+
+// NewParserWithOptions returns a parser configured with the provided options.
+func NewParserWithOptions(options ParseOptions) *Parser {
 	return &Parser{
 		tokenizer:            NewTokenizer(),
 		shouldIndex:          true,
 		reportInternalErrors: false,
+		options:              options,
 	}
 }
 
 // PrepareImport prepares the Parser for importing new Nodes into an AST without directly parsing the content
 func (p *Parser) PrepareImport(document *ast.Document, report *operationreport.Report) {
+	p.resetState()
 	p.document = document
 	p.report = report
 	p.tokenize()
@@ -65,12 +93,24 @@ func (p *Parser) PrepareImport(document *ast.Document, report *operationreport.R
 
 // Parse parses all input in a Document.Input into the Document
 func (p *Parser) Parse(document *ast.Document, report *operationreport.Report) {
+	p.resetState()
 	p.document = document
 	p.report = report
 	p.tokenize()
 	p.parse()
 }
 
+func (p *Parser) resetState() {
+	p.stats = ParseStats{}
+	p.depthStack = p.depthStack[:0]
+	p.limitExceeded = false
+}
+
+// Stats returns the statistics collected during the last parse.
+func (p *Parser) Stats() ParseStats {
+	return p.stats
+}
+
 func (p *Parser) tokenize() {
 	p.tokenizer.Tokenize(&p.document.Input)
 }
@@ -1270,7 +1310,12 @@ func (p *Parser) parseDirectiveLocations(locations *ast.DirectiveLocations) {
 	}
 }
 
-func (p *Parser) parseSelectionSet() (int, bool) {
+func (p *Parser) parseSelectionSet(kind selectionContextKind) (int, bool) {
+
+	rootContext := kind != selectionContextNone
+	if rootContext {
+		p.depthStack = append(p.depthStack, selectionContext{kind: kind})
+	}
 
 	var set ast.SelectionSet
 
@@ -1278,6 +1323,16 @@ func (p *Parser) parseSelectionSet() (int, bool) {
 	lbraceToken := p.mustRead(keyword.LBRACE)
 	set.LBrace = lbraceToken.TextPosition
 
+	p.enterSelectionSet(lbraceToken.TextPosition)
+	defer func() {
+		p.exitSelectionSet()
+		if rootContext {
+			ctx := p.depthStack[len(p.depthStack)-1]
+			p.stats.Depth += ctx.maxDepth
+			p.depthStack = p.depthStack[:len(p.depthStack)-1]
+		}
+	}()
+
 	for {
 		switch p.peek() {
 		case keyword.RBRACE:
@@ -1307,6 +1362,85 @@ func (p *Parser) parseSelectionSet() (int, bool) {
 	}
 }
 
+type selectionContextKind int
+
+const (
+	selectionContextNone selectionContextKind = iota
+	selectionContextOperation
+	selectionContextFragmentDefinition
+)
+
+type selectionContext struct {
+	kind         selectionContextKind
+	currentDepth int
+	maxDepth     int
+}
+
+func (p *Parser) enterSelectionSet(pos position.Position) {
+	if len(p.depthStack) == 0 || p.limitExceeded {
+		return
+	}
+
+	ctx := &p.depthStack[len(p.depthStack)-1]
+	ctx.currentDepth++
+	if ctx.currentDepth > ctx.maxDepth {
+		ctx.maxDepth = ctx.currentDepth
+	}
+
+	if p.options.DepthLimit > 0 && ctx.currentDepth > p.options.DepthLimit {
+		p.handleDepthLimitExceeded(pos, ctx.currentDepth)
+	}
+}
+
+func (p *Parser) exitSelectionSet() {
+	if len(p.depthStack) == 0 {
+		return
+	}
+
+	ctx := &p.depthStack[len(p.depthStack)-1]
+	ctx.currentDepth--
+	if ctx.currentDepth < 0 {
+		ctx.currentDepth = 0
+	}
+}
+
+func (p *Parser) handleDepthLimitExceeded(pos position.Position, depth int) {
+	if p.limitExceeded {
+		return
+	}
+	p.limitExceeded = true
+	if p.options.DepthLimit <= 0 {
+		return
+	}
+
+	p.report.AddExternalError(operationreport.ExternalError{
+		Message: fmt.Sprintf("GraphQL document exceeds depth limit of %d (found %d)", p.options.DepthLimit, depth),
+		Locations: []graphqlerrors.Location{
+			{
+				Line:   pos.LineStart,
+				Column: pos.CharStart,
+			},
+		},
+	})
+}
+
+func (p *Parser) handleFieldLimitExceeded(pos position.Position) {
+	if p.limitExceeded || p.options.FieldLimit <= 0 {
+		return
+	}
+	p.limitExceeded = true
+
+	p.report.AddExternalError(operationreport.ExternalError{
+		Message: fmt.Sprintf("GraphQL document exceeds field limit of %d (found %d)", p.options.FieldLimit, p.stats.FieldCount),
+		Locations: []graphqlerrors.Location{
+			{
+				Line:   pos.LineStart,
+				Column: pos.CharStart,
+			},
+		},
+	})
+}
+
 func (p *Parser) parseSelection() int {
 	next := p.peek()
 	switch next {
@@ -1375,6 +1509,14 @@ func (p *Parser) parseField() int {
 	}
 	field.Position = firstToken.TextPosition
 
+	p.stats.FieldCount++
+	if p.options.FieldLimit > 0 && p.stats.FieldCount > p.options.FieldLimit {
+		p.handleFieldLimitExceeded(firstToken.TextPosition)
+	}
+	if p.report.HasErrors() {
+		return ast.InvalidRef
+	}
+
 	if p.peekEquals(keyword.LPAREN) {
 		field.Arguments = p.parseArgumentList()
 		field.HasArguments = len(field.Arguments.Refs) > 0
@@ -1384,7 +1526,7 @@ func (p *Parser) parseField() int {
 		field.HasDirectives = len(field.Directives.Refs) > 0
 	}
 	if p.peekEquals(keyword.LBRACE) {
-		field.SelectionSet, field.HasSelections = p.parseSelectionSet()
+		field.SelectionSet, field.HasSelections = p.parseSelectionSet(selectionContextNone)
 	}
 
 	p.document.Fields = append(p.document.Fields, field)
@@ -1418,7 +1560,7 @@ func (p *Parser) parseInlineFragment(spread position.Position) int {
 		fragment.HasDirectives = len(fragment.Directives.Refs) > 0
 	}
 	if p.peekEquals(keyword.LBRACE) {
-		fragment.SelectionSet, fragment.HasSelections = p.parseSelectionSet()
+		fragment.SelectionSet, fragment.HasSelections = p.parseSelectionSet(selectionContextNone)
 	}
 	p.document.InlineFragments = append(p.document.InlineFragments, fragment)
 	return len(p.document.InlineFragments) - 1
@@ -1454,7 +1596,7 @@ func (p *Parser) parseOperationDefinition() {
 		}
 	case keyword.LBRACE:
 		operationDefinition.OperationType = ast.OperationTypeQuery
-		operationDefinition.SelectionSet, operationDefinition.HasSelections = p.parseSelectionSet()
+		operationDefinition.SelectionSet, operationDefinition.HasSelections = p.parseSelectionSet(selectionContextOperation)
 		p.document.OperationDefinitions = append(p.document.OperationDefinitions, operationDefinition)
 		ref := len(p.document.OperationDefinitions) - 1
 		rootNode := ast.Node{
@@ -1480,7 +1622,7 @@ func (p *Parser) parseOperationDefinition() {
 		operationDefinition.HasDirectives = len(operationDefinition.Directives.Refs) > 0
 	}
 
-	operationDefinition.SelectionSet, operationDefinition.HasSelections = p.parseSelectionSet()
+	operationDefinition.SelectionSet, operationDefinition.HasSelections = p.parseSelectionSet(selectionContextOperation)
 
 	p.document.OperationDefinitions = append(p.document.OperationDefinitions, operationDefinition)
 	ref := len(p.document.OperationDefinitions) - 1
@@ -1560,7 +1702,7 @@ func (p *Parser) parseFragmentDefinition() {
 		fragmentDefinition.Directives = p.parseDirectiveList()
 		fragmentDefinition.HasDirectives = len(fragmentDefinition.Directives.Refs) > 0
 	}
-	fragmentDefinition.SelectionSet, fragmentDefinition.HasSelections = p.parseSelectionSet()
+	fragmentDefinition.SelectionSet, fragmentDefinition.HasSelections = p.parseSelectionSet(selectionContextFragmentDefinition)
 	p.document.FragmentDefinitions = append(p.document.FragmentDefinitions, fragmentDefinition)
 
 	ref := len(p.document.FragmentDefinitions) - 1
diff --git a/app/pkg/astparser/parser_test.go b/app/pkg/astparser/parser_test.go
index bd17e57..8ca2172 100644
--- a/app/pkg/astparser/parser_test.go
+++ b/app/pkg/astparser/parser_test.go
@@ -51,7 +51,7 @@ func TestParser_Parse(t *testing.T) {
 			report := operationreport.Report{}
 			parser.report = &report
 			parser.tokenize()
-			set, _ := parser.parseSelectionSet()
+			set, _ := parser.parseSelectionSet(selectionContextNone)
 			return set, report
 		}
 	}
@@ -2348,6 +2348,64 @@ func BenchmarkParseGithub(b *testing.B) {
 	}
 }
 
+func TestParserStatsAndLimits(t *testing.T) {
+	t.Run("stats depth and fields", func(t *testing.T) {
+		const document = `query StatsQuery {
+  top {
+    nested {
+      value
+    }
+  }
+  simple
+}
+fragment SampleFragment on Type {
+  inner
+  deep {
+    deepField
+  }
+}`
+
+		_, report, stats := ParseGraphqlDocumentStringWithOptions(document, ParseOptions{})
+		assert.False(t, report.HasErrors())
+		assert.Equal(t, 5, stats.Depth)
+		assert.Equal(t, 7, stats.FieldCount)
+	})
+
+	t.Run("depth limit error", func(t *testing.T) {
+		const document = `query {
+  parent {
+    child
+  }
+}`
+
+		_, report, stats := ParseGraphqlDocumentStringWithOptions(document, ParseOptions{DepthLimit: 1})
+		assert.True(t, report.HasErrors())
+		assert.Len(t, report.ExternalErrors, 1)
+		depthError := report.ExternalErrors[0]
+		assert.Equal(t, "GraphQL document exceeds depth limit of 1 (found 2)", depthError.Message)
+		assert.Len(t, depthError.Locations, 1)
+		assert.Equal(t, uint32(2), depthError.Locations[0].Line)
+		assert.Equal(t, 2, stats.Depth)
+	})
+
+	t.Run("field limit error", func(t *testing.T) {
+		const document = `query {
+  foo
+  bar
+  baz
+}`
+
+		_, report, stats := ParseGraphqlDocumentStringWithOptions(document, ParseOptions{FieldLimit: 2})
+		assert.True(t, report.HasErrors())
+		assert.Len(t, report.ExternalErrors, 1)
+		fieldError := report.ExternalErrors[0]
+		assert.Equal(t, "GraphQL document exceeds field limit of 2 (found 3)", fieldError.Message)
+		assert.Len(t, fieldError.Locations, 1)
+		assert.Equal(t, uint32(4), fieldError.Locations[0].Line)
+		assert.Equal(t, 3, stats.FieldCount)
+	})
+}
+
 func BenchmarkSelectionSet(b *testing.B) {
 
 	doc := ast.NewDocument()