agent_patch
runs/2026-02-28__13-38-10__gpt-5-1-codex-mini/flux-pr-828/flux-pr-828.1-of-1.2026-02-28__13-38-10__gpt-5-1-codex-mini/agent-logs/agent.patch
13806 bytes
diff --git a/app/pkg/astminifier/minifier.go b/app/pkg/astminifier/minifier.go
new file mode 100644
index 0000000..2490159
--- /dev/null
+++ b/app/pkg/astminifier/minifier.go
@@ -0,0 +1,298 @@
+package astminifier
+
+import (
+ "bytes"
+ "fmt"
+
+ "github.com/wundergraph/graphql-go-tools/pkg/ast"
+)
+
+const fragmentPrefix = "__wf_astminifier_fragment_"
+
+type selectionSetOccurrence struct {
+ selectionSetRef int
+ typeName string
+}
+
+type minifier struct {
+ operation *ast.Document
+ definition *ast.Document
+ occurrences map[string][]selectionSetOccurrence
+ signatureCache map[int]string
+ fragmentCount int
+ visitCounts map[int]int
+}
+
+// Minify walks through the operation document and replaces duplicated selection sets
+// with fragment spreads if the schema definition allows producing fragments for the
+// underlying type. Minify returns true if any replacements were applied.
+func Minify(operation, definition *ast.Document) bool {
+ if operation == nil || definition == nil {
+ return false
+ }
+
+ m := &minifier{
+ operation: operation,
+ definition: definition,
+ occurrences: make(map[string][]selectionSetOccurrence),
+ signatureCache: make(map[int]string),
+ visitCounts: make(map[int]int),
+ }
+
+ m.collect()
+ return m.applyFragments()
+}
+
+func (m *minifier) collect() {
+ for _, node := range m.operation.RootNodes {
+ if node.Kind != ast.NodeKindOperationDefinition {
+ continue
+ }
+ op := m.operation.OperationDefinitions[node.Ref]
+ typeName := m.rootTypeName(op.OperationType)
+ if typeName == "" || op.SelectionSet == ast.InvalidRef {
+ continue
+ }
+ m.traverseSelectionSet(op.SelectionSet, typeName)
+ }
+
+ for i := range m.operation.FragmentDefinitions {
+ fragment := m.operation.FragmentDefinitions[i]
+ typeName := m.operation.ResolveTypeNameString(fragment.TypeCondition.Type)
+ if typeName == "" || fragment.SelectionSet == ast.InvalidRef {
+ continue
+ }
+ m.traverseSelectionSet(fragment.SelectionSet, typeName)
+ }
+}
+
+func (m *minifier) rootTypeName(operationType ast.OperationType) string {
+ switch operationType {
+ case ast.OperationTypeQuery:
+ return string(m.definition.Index.QueryTypeName)
+ case ast.OperationTypeMutation:
+ return string(m.definition.Index.MutationTypeName)
+ case ast.OperationTypeSubscription:
+ return string(m.definition.Index.SubscriptionTypeName)
+ default:
+ return ""
+ }
+}
+
+func (m *minifier) traverseSelectionSet(selectionSetRef int, typeName string) {
+ if selectionSetRef == ast.InvalidRef || typeName == "" {
+ return
+ }
+
+ m.visitCounts[selectionSetRef]++
+
+ selectionSet := m.operation.SelectionSets[selectionSetRef]
+ if len(selectionSet.SelectionRefs) == 0 {
+ return
+ }
+
+ key := m.keyForSelectionSet(selectionSetRef, typeName)
+ m.occurrences[key] = append(m.occurrences[key], selectionSetOccurrence{selectionSetRef: selectionSetRef, typeName: typeName})
+
+ for _, selectionRef := range selectionSet.SelectionRefs {
+ selection := m.operation.Selections[selectionRef]
+ switch selection.Kind {
+ case ast.SelectionKindField:
+ fieldRef := selection.Ref
+ field := m.operation.Fields[fieldRef]
+ if !field.HasSelections {
+ continue
+ }
+ fieldName := m.operation.FieldNameBytes(fieldRef)
+ childType, ok := m.fieldReturnType(typeName, fieldName)
+ if !ok || childType == "" {
+ continue
+ }
+ m.traverseSelectionSet(field.SelectionSet, childType)
+
+ case ast.SelectionKindInlineFragment:
+ inline := m.operation.InlineFragments[selection.Ref]
+ inlineType := m.operation.ResolveTypeNameString(inline.TypeCondition.Type)
+ if inlineType == "" {
+ inlineType = typeName
+ }
+ if inline.HasSelections {
+ m.traverseSelectionSet(inline.SelectionSet, inlineType)
+ }
+ }
+ }
+}
+
+func (m *minifier) fieldReturnType(parentType string, fieldName ast.ByteSlice) (string, bool) {
+ node, ok := m.definition.Index.FirstNodeByNameStr(parentType)
+ if !ok {
+ return "", false
+ }
+ fieldDefRef, exists := m.definition.NodeFieldDefinitionByName(node, fieldName)
+ if !exists {
+ return "", false
+ }
+ return m.definition.ResolveTypeNameString(m.definition.FieldDefinitionType(fieldDefRef)), true
+}
+
+func (m *minifier) keyForSelectionSet(selectionSetRef int, typeName string) string {
+ signature := m.selectionSignature(selectionSetRef)
+ return typeName + "\x00" + signature
+}
+
+func (m *minifier) selectionSignature(selectionSetRef int) string {
+ if sig, ok := m.signatureCache[selectionSetRef]; ok {
+ return sig
+ }
+ var buf bytes.Buffer
+ buf.WriteByte('[')
+ selections := m.operation.SelectionSets[selectionSetRef].SelectionRefs
+ for i, selectionRef := range selections {
+ if i > 0 {
+ buf.WriteByte(',')
+ }
+ m.appendSelectionSignature(&buf, selectionRef)
+ }
+ buf.WriteByte(']')
+ sig := buf.String()
+ m.signatureCache[selectionSetRef] = sig
+ return sig
+}
+
+func (m *minifier) appendSelectionSignature(buf *bytes.Buffer, selectionRef int) {
+ selection := m.operation.Selections[selectionRef]
+ switch selection.Kind {
+ case ast.SelectionKindField:
+ fieldRef := selection.Ref
+ buf.WriteByte('F')
+ if m.operation.FieldAliasIsDefined(fieldRef) {
+ buf.WriteByte('[')
+ buf.Write(m.operation.FieldAliasBytes(fieldRef))
+ buf.WriteByte(']')
+ }
+ buf.Write(m.operation.FieldNameBytes(fieldRef))
+ field := m.operation.Fields[fieldRef]
+ if len(field.Arguments.Refs) > 0 {
+ buf.WriteByte('(')
+ m.operation.PrintArguments(field.Arguments.Refs, buf)
+ buf.WriteByte(')')
+ }
+ if field.HasDirectives {
+ m.appendDirectives(buf, field.Directives.Refs)
+ }
+ if field.HasSelections {
+ buf.WriteString(m.selectionSignature(field.SelectionSet))
+ }
+
+ case ast.SelectionKindInlineFragment:
+ inline := m.operation.InlineFragments[selection.Ref]
+ buf.WriteByte('I')
+ typeCond := m.operation.ResolveTypeNameString(inline.TypeCondition.Type)
+ if typeCond != "" {
+ buf.WriteByte('(')
+ buf.WriteString(typeCond)
+ buf.WriteByte(')')
+ }
+ if inline.HasDirectives {
+ m.appendDirectives(buf, inline.Directives.Refs)
+ }
+ if inline.HasSelections {
+ buf.WriteString(m.selectionSignature(inline.SelectionSet))
+ }
+
+ case ast.SelectionKindFragmentSpread:
+ spread := m.operation.FragmentSpreads[selection.Ref]
+ buf.WriteByte('S')
+ buf.Write(m.operation.FragmentSpreadNameBytes(selection.Ref))
+ if spread.HasDirectives {
+ m.appendDirectives(buf, spread.Directives.Refs)
+ }
+ }
+}
+
+func (m *minifier) appendDirectives(buf *bytes.Buffer, refs []int) {
+ for _, directiveRef := range refs {
+ buf.WriteByte('@')
+ buf.Write(m.operation.DirectiveNameBytes(directiveRef))
+ if m.operation.Directives[directiveRef].HasArguments {
+ buf.WriteByte('(')
+ m.operation.PrintArguments(m.operation.Directives[directiveRef].Arguments.Refs, buf)
+ buf.WriteByte(')')
+ }
+ }
+}
+
+func (m *minifier) applyFragments() bool {
+ applied := false
+ for key, occs := range m.occurrences {
+ if len(occs) < 2 {
+ continue
+ }
+ // All occurrences share the same type name by construction.
+ typeName := occs[0].typeName
+ if typeName == "" {
+ continue
+ }
+ selectionSetRef := occs[0].selectionSetRef
+ if selectionSetRef == ast.InvalidRef {
+ continue
+ }
+ // NOTE(review): len(occs) >= 2 is already guaranteed by the guard above,
+ // so the former "len(occs) < 2 && visits < 2" check was dead code. A
+ // selection set reached via multiple traversal paths records one
+ // occurrence per visit, which the len(occs) guard already covers.
+ if m.buildFragment(typeName, occs) {
+ applied = true
+ }
+ delete(m.occurrences, key)
+ }
+ return applied
+}
+
+func (m *minifier) buildFragment(typeName string, occs []selectionSetOccurrence) bool {
+ selectionSetRef := occs[0].selectionSetRef
+ selectionCopy := m.operation.CopySelectionSet(selectionSetRef)
+
+ fragmentName := m.uniqueFragmentName()
+ fragmentNameRef := m.operation.Input.AppendInputString(fragmentName)
+ typeRef := m.operation.AddNamedType([]byte(typeName))
+
+ fragment := ast.FragmentDefinition{
+ Name: fragmentNameRef,
+ TypeCondition: ast.TypeCondition{
+ Type: typeRef,
+ },
+ SelectionSet: selectionCopy,
+ HasSelections: true,
+ }
+
+ m.operation.FragmentDefinitions = append(m.operation.FragmentDefinitions, fragment)
+ fragmentIndex := len(m.operation.FragmentDefinitions) - 1
+ node := ast.Node{Kind: ast.NodeKindFragmentDefinition, Ref: fragmentIndex}
+ m.operation.RootNodes = append(m.operation.RootNodes, node)
+ m.operation.Index.AddNodeBytes(m.operation.Input.ByteSlice(fragmentNameRef), node)
+
+ spread := ast.FragmentSpread{FragmentName: fragmentNameRef}
+ spreadRef := m.operation.AddFragmentSpread(spread)
+ selection := ast.Selection{
+ Kind: ast.SelectionKindFragmentSpread,
+ Ref: spreadRef,
+ }
+
+ for _, occ := range occs {
+ m.operation.SelectionSets[occ.selectionSetRef].SelectionRefs = m.operation.SelectionSets[occ.selectionSetRef].SelectionRefs[:0]
+ m.operation.AddSelection(occ.selectionSetRef, selection)
+ }
+
+ return true
+}
+
+func (m *minifier) uniqueFragmentName() string {
+ for {
+ name := fmt.Sprintf("%s%d", fragmentPrefix, m.fragmentCount)
+ m.fragmentCount++
+ if _, exists := m.operation.FragmentDefinitionRef([]byte(name)); !exists {
+ return name
+ }
+ }
+}
diff --git a/app/pkg/astminifier/minifier_test.go b/app/pkg/astminifier/minifier_test.go
new file mode 100644
index 0000000..e0de974
--- /dev/null
+++ b/app/pkg/astminifier/minifier_test.go
@@ -0,0 +1,120 @@
+package astminifier
+
+import (
+ "bytes"
+ "strings"
+ "testing"
+
+ "github.com/wundergraph/graphql-go-tools/pkg/ast"
+ "github.com/wundergraph/graphql-go-tools/pkg/astnormalization"
+ "github.com/wundergraph/graphql-go-tools/pkg/astparser"
+ "github.com/wundergraph/graphql-go-tools/pkg/astprinter"
+ "github.com/wundergraph/graphql-go-tools/pkg/asttransform"
+ "github.com/wundergraph/graphql-go-tools/pkg/operationreport"
+)
+
+const (
+ schemaSDL = `
+schema {
+ query: Query
+}
+
+type Query {
+ pets: [Animal]
+}
+
+interface Animal {
+ id: ID!
+ name: String!
+}
+
+type Dog implements Animal {
+ id: ID!
+ name: String!
+ bark: String!
+}
+
+type Cat implements Animal {
+ id: ID!
+ name: String!
+ meow: String!
+}`
+)
+
+func parseDocument(t *testing.T, source string) *ast.Document {
+ t.Helper()
+ doc := ast.NewDocument()
+ report := &operationreport.Report{}
+ parser := astparser.NewParser()
+ doc.Input.ResetInputString(source)
+ parser.Parse(doc, report)
+ if report.HasErrors() {
+ t.Fatalf("parse errors: %v", report.Error())
+ }
+ return doc
+}
+
+func TestMinifyCreatesFragmentFromDuplicateInlineFragments(t *testing.T) {
+ definition := parseDocument(t, schemaSDL)
+ operation := parseDocument(t, `
+query {
+ pets {
+ ... on Dog {
+ id
+ name
+ }
+ ... on Cat {
+ id
+ name
+ }
+ }
+}
+`)
+
+ if err := asttransform.MergeDefinitionWithBaseSchema(definition); err != nil {
+ t.Fatalf("merge base schema failed: %v", err)
+ }
+
+ report := &operationreport.Report{}
+ normalizer := astnormalization.NewWithOpts(
+ astnormalization.WithExtractVariables(),
+ astnormalization.WithRemoveFragmentDefinitions(),
+ astnormalization.WithRemoveUnusedVariables(),
+ )
+ normalizer.NormalizeOperation(operation, definition, report)
+ if report.HasErrors() {
+ t.Fatalf("normalization failed: %v", report.Error())
+ }
+
+ if !Minify(operation, definition) {
+ t.Fatal("expected minifier to apply duplicates")
+ }
+
+ var buf bytes.Buffer
+ if err := astprinter.Print(operation, definition, &buf); err != nil {
+ t.Fatalf("failed to print operation: %v", err)
+ }
+ result := buf.String()
+ if !strings.Contains(result, "fragment __wf_astminifier_fragment_0 on Animal") {
+ t.Fatalf("expected fragment definition, got %s", result)
+ }
+ if !strings.Contains(result, "...__wf_astminifier_fragment_0") {
+ t.Fatalf("expected fragment spread, got %s", result)
+ }
+}
+
+func TestMinifySkipsWhenNoDuplicates(t *testing.T) {
+ definition := parseDocument(t, schemaSDL)
+ operation := parseDocument(t, `
+query {
+ pets {
+ id
+ name
+ }
+}
+`)
+
+ if Minify(operation, definition) {
+ t.Fatal("expected minifier to skip when there are no duplicates")
+ }
+}
diff --git a/app/pkg/engine/datasource/graphql_datasource/graphql_datasource.go b/app/pkg/engine/datasource/graphql_datasource/graphql_datasource.go
index 8900bbf..fe69a09 100644
--- a/app/pkg/engine/datasource/graphql_datasource/graphql_datasource.go
+++ b/app/pkg/engine/datasource/graphql_datasource/graphql_datasource.go
@@ -13,6 +13,7 @@ import (
"golang.org/x/exp/slices"
"github.com/wundergraph/graphql-go-tools/pkg/ast"
+ "github.com/wundergraph/graphql-go-tools/pkg/astminifier"
"github.com/wundergraph/graphql-go-tools/pkg/astnormalization"
"github.com/wundergraph/graphql-go-tools/pkg/astparser"
"github.com/wundergraph/graphql-go-tools/pkg/astprinter"
@@ -221,6 +222,7 @@ type Configuration struct {
Subscription SubscriptionConfiguration
Federation FederationConfiguration
UpstreamSchema string
+ MinifyOperation bool
CustomScalarTypeFields []SingleTypeField
}
@@ -1134,7 +1136,22 @@ func (p *Planner) printOperation() []byte {
return nil
}
- return buf.Bytes()
+ rawOperation := append([]byte(nil), buf.Bytes()...)
+
+ if p.config.MinifyOperation && astminifier.Minify(operation, definition) {
+ buf.Reset()
+ err = astprinter.Print(operation, p.visitor.Definition, buf)
+ if err != nil {
+ p.stopWithError(normalizationFailedErrMsg)
+ return nil
+ }
+ minifiedOperation := append([]byte(nil), buf.Bytes()...)
+ if len(minifiedOperation) < len(rawOperation) {
+ return minifiedOperation
+ }
+ }
+
+ return rawOperation
}
func (p *Planner) stopWithError(msg string, args ...interface{}) {