agent_patch
runs/2026-02-21__03-36-16__gpt-5-1-codex-mini/flux-pr-1241/flux-pr-1241.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/agent-logs/agent.patch
12476 bytes
diff --git a/app/pkg/astparser/complexity.go b/app/pkg/astparser/complexity.go
new file mode 100644
index 0000000..61ce568
--- /dev/null
+++ b/app/pkg/astparser/complexity.go
@@ -0,0 +1,36 @@
+package astparser
+
+import "github.com/wundergraph/graphql-go-tools/pkg/lexer/position"
+
+// ParserComplexityOptions configures the optional complexity tracking that can
+// be enabled on the parser.
+type ParserComplexityOptions struct {
+ DepthLimit int
+ FieldLimit int
+ TrackStats bool
+}
+
+func (o ParserComplexityOptions) enabled() bool {
+ return o.TrackStats || o.DepthLimit > 0 || o.FieldLimit > 0
+}
+
+// DocumentComplexityStats exposes aggregated depth and field counts after a parse.
+type DocumentComplexityStats struct {
+ Depth int
+ FieldCount int
+}
+
+type parserComplexityState struct {
+ stats DocumentComplexityStats
+ currentDepth int
+ currentDefinitionMaxDepth int
+ lastSelectionSetPosition position.Position
+ hasSelectionSetPosition bool
+ depthLimitReached bool
+ fieldLimitReached bool
+ inDefinition bool
+}
+
+func (s *parserComplexityState) reset() {
+ *s = parserComplexityState{}
+}
diff --git a/app/pkg/astparser/parser.go b/app/pkg/astparser/parser.go
index 0847a97..eaa9d22 100644
--- a/app/pkg/astparser/parser.go
+++ b/app/pkg/astparser/parser.go
@@ -36,6 +36,32 @@ func ParseGraphqlDocumentBytes(input []byte) (ast.Document, operationreport.Repo
return doc, report
}
+// ParseGraphqlDocumentStringWithComplexity parses the document while optionally enforcing
+// configured limits and collecting complexity statistics.
+func ParseGraphqlDocumentStringWithComplexity(input string, options ParserComplexityOptions) (ast.Document, operationreport.Report, DocumentComplexityStats) {
+ parser := NewParser()
+ options.TrackStats = true
+ parser.ConfigureComplexity(options)
+ doc := *ast.NewDocument()
+ doc.Input.ResetInputString(input)
+ report := operationreport.Report{}
+ parser.Parse(&doc, &report)
+ return doc, report, parser.ComplexityStats()
+}
+
+// ParseGraphqlDocumentBytesWithComplexity parses the document while optionally enforcing
+// configured limits and collecting complexity statistics.
+func ParseGraphqlDocumentBytesWithComplexity(input []byte, options ParserComplexityOptions) (ast.Document, operationreport.Report, DocumentComplexityStats) {
+ parser := NewParser()
+ options.TrackStats = true
+ parser.ConfigureComplexity(options)
+ doc := *ast.NewDocument()
+ doc.Input.ResetInputBytes(input)
+ report := operationreport.Report{}
+ parser.Parse(&doc, &report)
+ return doc, report, parser.ComplexityStats()
+}
+
// Parser takes a raw input and turns it into an AST
// use NewParser() to create a parser
// Don't create new parsers in the hot path, re-use them.
@@ -45,6 +71,8 @@ type Parser struct {
tokenizer *Tokenizer
shouldIndex bool
reportInternalErrors bool
+ complexityOptions ParserComplexityOptions
+ complexityState parserComplexityState
}
// NewParser returns a new parser with all values properly initialized
@@ -56,10 +84,30 @@ func NewParser() *Parser {
}
}
+// ConfigureComplexity enables optional depth and field limits and/or statistics.
+func (p *Parser) ConfigureComplexity(options ParserComplexityOptions) {
+ p.complexityOptions = options
+ p.resetComplexityState()
+}
+
+// ComplexityStats returns the aggregated statistics calculated during the last parse.
+func (p *Parser) ComplexityStats() DocumentComplexityStats {
+ return p.complexityState.stats
+}
+
+func (p *Parser) resetComplexityState() {
+ p.complexityState.reset()
+}
+
+func (p *Parser) complexityEnabled() bool {
+ return p.complexityOptions.enabled()
+}
+
// PrepareImport prepares the Parser for importing new Nodes into an AST without directly parsing the content
func (p *Parser) PrepareImport(document *ast.Document, report *operationreport.Report) {
p.document = document
p.report = report
+ p.resetComplexityState()
p.tokenize()
}
@@ -67,6 +115,7 @@ func (p *Parser) PrepareImport(document *ast.Document, report *operationreport.R
func (p *Parser) Parse(document *ast.Document, report *operationreport.Report) {
p.document = document
p.report = report
+ p.resetComplexityState()
p.tokenize()
p.parse()
}
@@ -1277,6 +1326,8 @@ func (p *Parser) parseSelectionSet() (int, bool) {
set.SelectionRefs = p.document.Refs[p.document.NextRefIndex()][:0]
lbraceToken := p.mustRead(keyword.LBRACE)
set.LBrace = lbraceToken.TextPosition
+ p.beginSelectionSet(lbraceToken.TextPosition)
+ defer p.endSelectionSet()
for {
switch p.peek() {
@@ -1374,6 +1425,7 @@ func (p *Parser) parseField() int {
field.Name = firstToken.Literal
}
field.Position = firstToken.TextPosition
+ p.trackField(firstToken.TextPosition)
if p.peekEquals(keyword.LPAREN) {
field.Arguments = p.parseArgumentList()
@@ -1433,6 +1485,10 @@ func (p *Parser) parseTypeCondition() (typeCondition ast.TypeCondition) {
func (p *Parser) parseOperationDefinition() {
var operationDefinition ast.OperationDefinition
+ p.beginComplexityDefinition()
+ defer func() {
+ p.endComplexityDefinition(operationDefinition.HasSelections)
+ }()
next, literal := p.peekLiteral()
switch next {
@@ -1553,6 +1609,10 @@ func (p *Parser) parseDefaultValue() ast.DefaultValue {
func (p *Parser) parseFragmentDefinition() {
var fragmentDefinition ast.FragmentDefinition
+ p.beginComplexityDefinition()
+ defer func() {
+ p.endComplexityDefinition(fragmentDefinition.HasSelections)
+ }()
fragmentDefinition.FragmentLiteral = p.mustReadIdentKey(identkeyword.FRAGMENT).TextPosition
fragmentDefinition.Name = p.mustRead(keyword.IDENT).Literal
fragmentDefinition.TypeCondition = p.parseTypeCondition()
@@ -1570,6 +1630,99 @@ func (p *Parser) parseFragmentDefinition() {
})
}
+func (p *Parser) beginComplexityDefinition() {
+ if !p.complexityEnabled() {
+ return
+ }
+ p.complexityState.inDefinition = true
+ p.complexityState.currentDepth = 0
+ p.complexityState.currentDefinitionMaxDepth = 0
+ p.complexityState.hasSelectionSetPosition = false
+}
+
+func (p *Parser) endComplexityDefinition(hasSelections bool) {
+ if !p.complexityEnabled() || !p.complexityState.inDefinition {
+ return
+ }
+ if hasSelections {
+ p.complexityState.stats.Depth += p.complexityState.currentDefinitionMaxDepth
+ p.checkDepthLimitAfterDefinition()
+ }
+ p.complexityState.currentDepth = 0
+ p.complexityState.currentDefinitionMaxDepth = 0
+ p.complexityState.inDefinition = false
+ p.complexityState.hasSelectionSetPosition = false
+}
+
+func (p *Parser) beginSelectionSet(pos position.Position) {
+ if !p.complexityEnabled() || !p.complexityState.inDefinition {
+ return
+ }
+ p.complexityState.currentDepth++
+ if p.complexityState.currentDepth > p.complexityState.currentDefinitionMaxDepth {
+ p.complexityState.currentDefinitionMaxDepth = p.complexityState.currentDepth
+ }
+ p.complexityState.lastSelectionSetPosition = pos
+ p.complexityState.hasSelectionSetPosition = true
+ if p.complexityState.depthLimitReached || p.complexityOptions.DepthLimit <= 0 {
+ return
+ }
+ projected := p.complexityState.stats.Depth + p.complexityState.currentDefinitionMaxDepth
+ if projected > p.complexityOptions.DepthLimit {
+ p.complexityState.depthLimitReached = true
+ p.reportDepthLimitExceeded(pos, projected)
+ }
+}
+
+func (p *Parser) endSelectionSet() {
+ if !p.complexityEnabled() || !p.complexityState.inDefinition {
+ return
+ }
+ p.complexityState.currentDepth--
+}
+
+func (p *Parser) trackField(pos position.Position) {
+ if !p.complexityEnabled() {
+ return
+ }
+ p.complexityState.stats.FieldCount++
+ if p.complexityState.fieldLimitReached || p.complexityOptions.FieldLimit <= 0 {
+ return
+ }
+ if p.complexityState.stats.FieldCount > p.complexityOptions.FieldLimit {
+ p.complexityState.fieldLimitReached = true
+ p.reportFieldLimitExceeded(pos, p.complexityState.stats.FieldCount)
+ }
+}
+
+func (p *Parser) checkDepthLimitAfterDefinition() {
+ if p.complexityState.depthLimitReached || p.complexityOptions.DepthLimit <= 0 {
+ return
+ }
+ if p.complexityState.stats.Depth > p.complexityOptions.DepthLimit {
+ p.complexityState.depthLimitReached = true
+ if p.complexityState.hasSelectionSetPosition {
+ p.reportDepthLimitExceeded(p.complexityState.lastSelectionSetPosition, p.complexityState.stats.Depth)
+ } else {
+ p.reportDepthLimitExceeded(position.Position{}, p.complexityState.stats.Depth)
+ }
+ }
+}
+
+func (p *Parser) reportDepthLimitExceeded(pos position.Position, actual int) {
+ if p.report == nil {
+ return
+ }
+ p.report.AddExternalError(operationreport.ErrDocumentDepthLimitExceeded(p.complexityOptions.DepthLimit, actual, pos))
+}
+
+func (p *Parser) reportFieldLimitExceeded(pos position.Position, actual int) {
+ if p.report == nil {
+ return
+ }
+ p.report.AddExternalError(operationreport.ErrDocumentFieldLimitExceeded(p.complexityOptions.FieldLimit, actual, pos))
+}
+
func (p *Parser) parseExtension() {
extend := p.mustReadIdentKey(identkeyword.EXTEND).TextPosition
next, literal := p.peekLiteral()
diff --git a/app/pkg/astparser/parser_test.go b/app/pkg/astparser/parser_test.go
index bd17e57..ba516de 100644
--- a/app/pkg/astparser/parser_test.go
+++ b/app/pkg/astparser/parser_test.go
@@ -2427,6 +2427,51 @@ func BenchmarkParse(b *testing.B) {
}
}
+func TestParserComplexityTracking(t *testing.T) {
+ const complexityDocument = `
+query Foo {
+ a {
+ b {
+ c
+ }
+ }
+}
+
+query Bar {
+ d
+}
+
+fragment Frag on Foo {
+ e {
+ f
+ }
+}
+`
+
+ t.Run("statistics", func(t *testing.T) {
+ _, report, stats := ParseGraphqlDocumentStringWithComplexity(complexityDocument, ParserComplexityOptions{TrackStats: true})
+ assert.False(t, report.HasErrors())
+ assert.Equal(t, 6, stats.Depth)
+ assert.Equal(t, 6, stats.FieldCount)
+ })
+
+ t.Run("field limit", func(t *testing.T) {
+ _, report, stats := ParseGraphqlDocumentStringWithComplexity(complexityDocument, ParserComplexityOptions{FieldLimit: 5})
+ assert.True(t, report.HasErrors())
+ assert.NotEmpty(t, report.ExternalErrors)
+ assert.Contains(t, report.ExternalErrors[0].Message, "field limit")
+ assert.Greater(t, stats.FieldCount, 0)
+ })
+
+ t.Run("depth limit", func(t *testing.T) {
+ _, report, stats := ParseGraphqlDocumentStringWithComplexity(complexityDocument, ParserComplexityOptions{DepthLimit: 5})
+ assert.True(t, report.HasErrors())
+ assert.NotEmpty(t, report.ExternalErrors)
+ assert.Contains(t, report.ExternalErrors[0].Message, "depth limit")
+ assert.Greater(t, stats.Depth, 0)
+ })
+}
+
var selectionSet = []byte(`{
me {
... on Person @foo {
diff --git a/app/pkg/operationreport/externalerror.go b/app/pkg/operationreport/externalerror.go
index 6b0df5e..cae42b8 100644
--- a/app/pkg/operationreport/externalerror.go
+++ b/app/pkg/operationreport/externalerror.go
@@ -210,6 +210,26 @@ func ErrDuplicatedFieldInputObject(fieldName ast.ByteSlice, first, duplicated po
return err
}
+func ErrDocumentDepthLimitExceeded(limit, actual int, position position.Position) ExternalError {
+ err := ExternalError{
+ Message: fmt.Sprintf("document depth limit exceeded - limit: %d, actual: %d", limit, actual),
+ }
+ if position.LineStart != 0 || position.CharStart != 0 {
+ err.Locations = LocationsFromPosition(position)
+ }
+ return err
+}
+
+func ErrDocumentFieldLimitExceeded(limit, actual int, position position.Position) ExternalError {
+ err := ExternalError{
+ Message: fmt.Sprintf("document field limit exceeded - limit: %d, actual: %d", limit, actual),
+ }
+ if position.LineStart != 0 || position.CharStart != 0 {
+ err.Locations = LocationsFromPosition(position)
+ }
+ return err
+}
+
func ErrArgumentNotDefinedOnField(argName, typeName, fieldName ast.ByteSlice, position position.Position) (err ExternalError) {
err.Message = fmt.Sprintf(UnknownArgumentOnFieldErrMsg, argName, typeName, fieldName)
err.Locations = LocationsFromPosition(position)