agent_patch
runs/2026-02-21__03-36-16__gpt-5-1-codex-mini/flux-pr-1338/flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/agent-logs/agent.patch
70132 bytes
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/astprinter/astprinter.go b/app/v2/pkg/astprinter/astprinter.go
index 07aca38..30eb1e7 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/astprinter/astprinter.go
+++ b/app/v2/pkg/astprinter/astprinter.go
@@ -269,8 +269,17 @@ func (p *printVisitor) EnterArgument(ref int) {
}
func (p *printVisitor) LeaveArgument(ref int) {
- if len(p.document.ArgumentsAfter(p.Ancestors[len(p.Ancestors)-1], ref)) == 0 {
- p.write(literal.RPAREN)
+ if len(p.document.ArgumentsAfter(p.Ancestors[len(p.Ancestors)-1], ref)) > 0 {
+ // if not the last argument
+ return
+ }
+
+ // last argument
+ p.write(literal.RPAREN)
+
+ a := p.Ancestors[len(p.Ancestors)-1]
+ if p.debug && a.Kind == ast.NodeKindField && !p.document.FieldHasSelections(a.Ref) && !p.document.FieldHasDirectives(a.Ref) {
+ p.printFieldInfo(a.Ref, true)
}
}
@@ -319,14 +328,23 @@ func (p *printVisitor) LeaveOperationDefinition(ref int) {
}
func (p *printVisitor) EnterSelectionSet(ref int) {
+ p.write(literal.LBRACE)
+
if p.debug {
- p.writeIndented(literal.LINETERMINATOR)
- p.writeIndented([]byte("# SelectionSet ref:"))
+ p.write([]byte(" # setRef:"))
p.write([]byte(strconv.Itoa(ref)))
- p.write(literal.LINETERMINATOR)
- p.writeIndented(literal.LBRACE)
- } else {
- p.write(literal.LBRACE)
+
+ if l := len(p.SimpleWalker.Ancestors); l > 0 {
+ a := p.SimpleWalker.Ancestors[l-1]
+ switch a.Kind {
+ case ast.NodeKindField:
+ p.printFieldInfo(a.Ref, false)
+ case ast.NodeKindInlineFragment:
+ p.write([]byte(" fragmentRef:"))
+ p.write([]byte(strconv.Itoa(a.Ref)))
+ default:
+ }
+ }
}
if p.indent != nil {
@@ -342,15 +360,6 @@ func (p *printVisitor) LeaveSelectionSet(ref int) {
}
func (p *printVisitor) EnterField(ref int) {
- if p.debug {
- p.writeIndented([]byte("# Field ref:"))
- p.write([]byte(strconv.Itoa(ref)))
- if p.fieldCallback != nil {
- p.fieldCallback(ref, p.out)
- }
- p.write(literal.LINETERMINATOR)
- }
-
if p.document.Fields[ref].Alias.IsDefined {
p.writeIndented(p.document.Input.ByteSlice(p.document.Fields[ref].Alias.Name))
p.write(literal.COLON)
@@ -362,6 +371,23 @@ func (p *printVisitor) EnterField(ref int) {
if !p.document.FieldHasArguments(ref) && (p.document.FieldHasSelections(ref) || p.document.FieldHasDirectives(ref)) {
p.write(literal.SPACE)
}
+
+ if p.debug && !p.document.FieldHasArguments(ref) && !p.document.FieldHasSelections(ref) && !p.document.FieldHasDirectives(ref) {
+ p.printFieldInfo(ref, true)
+ }
+}
+
+func (p *printVisitor) printFieldInfo(ref int, comment bool) {
+ if p.debug {
+ if comment {
+ p.write([]byte(" #"))
+ }
+ p.write([]byte(" fieldRef:"))
+ p.write([]byte(strconv.Itoa(ref)))
+ if p.fieldCallback != nil {
+ p.fieldCallback(ref, p.out)
+ }
+ }
}
func (p *printVisitor) LeaveField(ref int) {
@@ -399,12 +425,6 @@ func (p *printVisitor) LeaveFragmentSpread(ref int) {
}
func (p *printVisitor) EnterInlineFragment(ref int) {
- if p.debug {
- p.write(literal.LINETERMINATOR)
- p.writeIndented([]byte("# InlineFragment ref:"))
- p.write([]byte(strconv.Itoa(ref)))
- p.write(literal.LINETERMINATOR)
- }
p.writeIndented(literal.SPREAD)
if p.document.InlineFragmentHasTypeCondition(ref) && !p.document.InlineFragmentIsOfTheSameType(ref) {
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource.go b/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource.go
index 3acdd07..11c61d9 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource.go
+++ b/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource.go
@@ -1372,7 +1372,7 @@ func (p *Planner[T]) debugPrintQueryPlan(operation *ast.Document) {
if p.hasFederationRoot && p.dataSourcePlannerConfig.HasRequiredFields() { // IsRepresentationsQuery
args = append(args, "Representations:\n")
for _, cfg := range p.dataSourcePlannerConfig.RequiredFields {
- key, report := plan.RequiredFieldsFragment(cfg.TypeName, cfg.SelectionSet, cfg.FieldName == "")
+ key, report := plan.QueryPlanRequiredFieldsFragment(cfg.TypeName, cfg.FieldName, cfg.SelectionSet)
if report.HasErrors() {
continue
}
@@ -1407,7 +1407,7 @@ func (p *Planner[T]) generateQueryPlansForFetchConfiguration(operation *ast.Docu
if p.hasFederationRoot && p.dataSourcePlannerConfig.HasRequiredFields() { // IsRepresentationsQuery
representations = make([]resolve.Representation, len(p.dataSourcePlannerConfig.RequiredFields))
for i, cfg := range p.dataSourcePlannerConfig.RequiredFields {
- fragmentAst, report := plan.QueryPlanRequiredFieldsFragment(cfg.FieldName, cfg.TypeName, cfg.SelectionSet)
+ fragmentAst, report := plan.QueryPlanRequiredFieldsFragment(cfg.TypeName, cfg.FieldName, cfg.SelectionSet)
if report.HasErrors() {
p.stopWithError(errors.WithStack(fmt.Errorf("generateQueryPlansForFetchConfiguration: failed to build fragment for required fields: %w", report)))
return
@@ -1446,7 +1446,7 @@ func (p *Planner[T]) printOperation() (operationBytes []byte, variablesBytes []b
// create empty operation and definition documents
definition, err := p.config.UpstreamSchema()
if err != nil {
- p.stopWithError(errors.WithStack(fmt.Errorf("printOperation: failed to read upstream schema: %w", err)))
+ p.stopWithError(errors.WithStack(fmt.Errorf("printOperation planner id: %d: failed to read upstream schema: %w", p.id, err)))
return nil, nil
}
@@ -1457,7 +1457,7 @@ func (p *Planner[T]) printOperation() (operationBytes []byte, variablesBytes []b
// normalize upstream operation
kit.normalizer.NormalizeOperation(p.upstreamOperation, definition, kit.report)
if kit.report.HasErrors() {
- p.stopWithError(errors.WithStack(fmt.Errorf("printOperation: normalization failed: %w", kit.report)))
+ p.stopWithError(errors.WithStack(fmt.Errorf("printOperation planner id: %d: normalization failed: %w", p.id, kit.report)))
return nil, nil
}
@@ -1468,7 +1468,7 @@ func (p *Planner[T]) printOperation() (operationBytes []byte, variablesBytes []b
kit.validator.Validate(p.upstreamOperation, definition, kit.report)
if kit.report.HasErrors() {
- p.stopWithError(errors.WithStack(fmt.Errorf("printOperation: validation failed: %w", kit.report)))
+ p.stopWithError(errors.WithStack(fmt.Errorf("printOperation planner id: %d: validation failed: %w", p.id, kit.report)))
return nil, nil
}
@@ -1478,7 +1478,7 @@ func (p *Planner[T]) printOperation() (operationBytes []byte, variablesBytes []b
// print upstream operation
err = kit.printer.Print(p.upstreamOperation, kit.buf)
if err != nil {
- p.stopWithError(errors.WithStack(fmt.Errorf("printOperation: failed to print: %w", err)))
+ p.stopWithError(errors.WithStack(fmt.Errorf("printOperation planner id: %d: failed to print: %w", p.id, err)))
return nil, nil
}
@@ -1493,7 +1493,7 @@ func (p *Planner[T]) printOperation() (operationBytes []byte, variablesBytes []b
Pretty: false,
}, kit.buf)
if err != nil {
- p.stopWithError(errors.WithStack(fmt.Errorf("printOperation: failed to minify: %w", err)))
+ p.stopWithError(errors.WithStack(fmt.Errorf("printOperation planner id: %d: failed to minify: %w", p.id, err)))
return nil, nil
}
if madeReplacements && kit.buf.Len() < len(rawOperationBytes) {
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource_federation_entity_interfaces_test.go b/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource_federation_entity_interfaces_test.go
index f70c5be..ad3df31 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource_federation_entity_interfaces_test.go
+++ b/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource_federation_entity_interfaces_test.go
@@ -4587,7 +4587,7 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
DependsOnFetchIDs: []int{0},
},
FetchConfiguration: resolve.FetchConfiguration{
- Input: `{"method":"POST","url":"http://localhost:4001/graphql","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on Admin {__typename} ... on Moderator {__typename title} ... on User {__typename title}}}","variables":{"representations":[$$0$$]}}}`,
+ Input: `{"method":"POST","url":"http://localhost:4004/graphql","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on Account {age}}}","variables":{"representations":[$$0$$]}}}`,
Variables: []resolve.Variable{
&resolve.ResolvableObjectVariable{
Renderer: resolve.NewGraphQLVariableResolveRenderer(&resolve.Object{
@@ -4595,8 +4595,9 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
Fields: []*resolve.Field{
{
Name: []byte("__typename"),
- Value: &resolve.String{
- Path: []string{"__typename"},
+ Value: &resolve.StaticString{
+ Path: []string{"__typename"},
+ Value: "Account",
},
OnTypeNames: [][]byte{[]byte("Admin"), []byte("Account")},
},
@@ -4609,8 +4610,9 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
},
{
Name: []byte("__typename"),
- Value: &resolve.String{
- Path: []string{"__typename"},
+ Value: &resolve.StaticString{
+ Path: []string{"__typename"},
+ Value: "Account",
},
OnTypeNames: [][]byte{[]byte("Moderator"), []byte("Account")},
},
@@ -4623,8 +4625,9 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
},
{
Name: []byte("__typename"),
- Value: &resolve.String{
- Path: []string{"__typename"},
+ Value: &resolve.StaticString{
+ Path: []string{"__typename"},
+ Value: "Account",
},
OnTypeNames: [][]byte{[]byte("User"), []byte("Account")},
},
@@ -4652,7 +4655,7 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
DependsOnFetchIDs: []int{0},
},
FetchConfiguration: resolve.FetchConfiguration{
- Input: `{"method":"POST","url":"http://localhost:4004/graphql","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on Account {age}}}","variables":{"representations":[$$0$$]}}}`,
+ Input: `{"method":"POST","url":"http://localhost:4001/graphql","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on Admin {__typename} ... on Moderator {__typename title} ... on User {__typename title}}}","variables":{"representations":[$$0$$]}}}`,
Variables: []resolve.Variable{
&resolve.ResolvableObjectVariable{
Renderer: resolve.NewGraphQLVariableResolveRenderer(&resolve.Object{
@@ -4660,9 +4663,8 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
Fields: []*resolve.Field{
{
Name: []byte("__typename"),
- Value: &resolve.StaticString{
- Path: []string{"__typename"},
- Value: "Account",
+ Value: &resolve.String{
+ Path: []string{"__typename"},
},
OnTypeNames: [][]byte{[]byte("Admin"), []byte("Account")},
},
@@ -4675,9 +4677,8 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
},
{
Name: []byte("__typename"),
- Value: &resolve.StaticString{
- Path: []string{"__typename"},
- Value: "Account",
+ Value: &resolve.String{
+ Path: []string{"__typename"},
},
OnTypeNames: [][]byte{[]byte("Moderator"), []byte("Account")},
},
@@ -4690,9 +4691,8 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
},
{
Name: []byte("__typename"),
- Value: &resolve.StaticString{
- Path: []string{"__typename"},
- Value: "Account",
+ Value: &resolve.String{
+ Path: []string{"__typename"},
},
OnTypeNames: [][]byte{[]byte("User"), []byte("Account")},
},
@@ -4718,7 +4718,7 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
resolve.SingleWithPath(&resolve.SingleFetch{
FetchDependencies: resolve.FetchDependencies{
FetchID: 3,
- DependsOnFetchIDs: []int{0, 1},
+ DependsOnFetchIDs: []int{0, 2},
},
FetchConfiguration: resolve.FetchConfiguration{
Input: `{"method":"POST","url":"http://localhost:4003/graphql","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on Admin {__typename title}}}","variables":{"representations":[$$0$$]}}}`,
@@ -4755,7 +4755,7 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
resolve.SingleWithPath(&resolve.SingleFetch{
FetchDependencies: resolve.FetchDependencies{
FetchID: 4,
- DependsOnFetchIDs: []int{0, 1, 3},
+ DependsOnFetchIDs: []int{0, 2, 3},
},
FetchConfiguration: resolve.FetchConfiguration{
Input: `{"method":"POST","url":"http://localhost:4002/graphql","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on Account {uniqueTitle}}}","variables":{"representations":[$$0$$]}}}`,
@@ -4770,21 +4770,21 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
Path: []string{"__typename"},
Value: "Account",
},
- OnTypeNames: [][]byte{[]byte("Admin"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("Moderator"), []byte("Account")},
},
{
Name: []byte("title"),
Value: &resolve.String{
Path: []string{"title"},
},
- OnTypeNames: [][]byte{[]byte("Admin"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("Moderator"), []byte("Account")},
},
{
Name: []byte("id"),
Value: &resolve.Scalar{
Path: []string{"id"},
},
- OnTypeNames: [][]byte{[]byte("Admin"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("Moderator"), []byte("Account")},
},
{
Name: []byte("__typename"),
@@ -4792,21 +4792,21 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
Path: []string{"__typename"},
Value: "Account",
},
- OnTypeNames: [][]byte{[]byte("Moderator"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("User"), []byte("Account")},
},
{
Name: []byte("title"),
Value: &resolve.String{
Path: []string{"title"},
},
- OnTypeNames: [][]byte{[]byte("Moderator"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("User"), []byte("Account")},
},
{
Name: []byte("id"),
Value: &resolve.Scalar{
Path: []string{"id"},
},
- OnTypeNames: [][]byte{[]byte("Moderator"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("User"), []byte("Account")},
},
{
Name: []byte("__typename"),
@@ -4814,21 +4814,21 @@ func TestGraphQLDataSourceFederationEntityInterfaces(t *testing.T) {
Path: []string{"__typename"},
Value: "Account",
},
- OnTypeNames: [][]byte{[]byte("User"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("Admin"), []byte("Account")},
},
{
Name: []byte("title"),
Value: &resolve.String{
Path: []string{"title"},
},
- OnTypeNames: [][]byte{[]byte("User"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("Admin"), []byte("Account")},
},
{
Name: []byte("id"),
Value: &resolve.Scalar{
Path: []string{"id"},
},
- OnTypeNames: [][]byte{[]byte("User"), []byte("Account")},
+ OnTypeNames: [][]byte{[]byte("Admin"), []byte("Account")},
},
},
}),
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource_federation_test.go b/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource_federation_test.go
index 33932a9..4a84acf 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource_federation_test.go
+++ b/app/v2/pkg/engine/datasource/graphql_datasource/graphql_datasource_federation_test.go
@@ -12702,6 +12702,640 @@ func TestGraphQLDataSourceFederation(t *testing.T) {
)
})
})
+
+ t.Run("ensure dependencies are correct when we append required fields into rewritten fields", func(t *testing.T) {
+ definition := `
+ type User implements Node {
+ id: ID!
+ title: String!
+ fieldA: String!
+ fieldB: String!
+ additionalInfo: AdditionalInfo
+ }
+
+ type Admin implements Node {
+ id: ID!
+ title: String!
+ fieldA: String!
+ fieldB: String!
+ additionalInfo: AdditionalInfo
+ }
+
+ interface Node {
+ id: ID!
+ title: String!
+ fieldA: String!
+ fieldB: String!
+ additionalInfo: AdditionalInfo
+ }
+
+ type AdditionalInfo {
+ id: ID!
+ fieldA: String!
+ fieldB: String!
+ }
+
+ type NodesWrapper {
+ id: ID!
+ nodes: [Node!]
+ summary1: String!
+ summary2: String!
+ summary3: String!
+ }
+
+ type Query {
+ nodes: NodesWrapper
+ }
+ `
+
+ firstSubgraphSDL := `
+ type User implements Node @key(fields: "id") {
+ id: ID!
+ title: String! @external
+ fieldA: String!
+ additionalInfo: AdditionalInfo
+ }
+
+ type Admin implements Node @key(fields: "id") {
+ id: ID!
+ title: String!
+ fieldA: String! @external
+ additionalInfo: AdditionalInfo @external
+ }
+
+ interface Node @key(fields: "id") {
+ id: ID!
+ title: String!
+ fieldA: String!
+ }
+
+ type AdditionalInfo @key(fields: "id") {
+ id: ID!
+ fieldA: String! @external
+ fieldB: String! @external
+ }
+
+ type NodesWrapper @key(fields: "id") {
+ id: ID!
+ nodes: [Node!]
+ summary1: String! @requires(fields: "nodes { fieldA additionalInfo { fieldB } }")
+ summary3: String! @requires(fields: "nodes { fieldB }")
+ }
+
+ type Query {
+ nodes: NodesWrapper
+ }
+ `
+
+ firstDatasourceConfiguration := mustDataSourceConfiguration(
+ t,
+ "first-service",
+ &plan.DataSourceMetadata{
+ RootNodes: []plan.TypeField{
+ {
+ TypeName: "Query",
+ FieldNames: []string{"nodes"},
+ },
+ {
+ TypeName: "User",
+ FieldNames: []string{"id", "fieldA"},
+ ExternalFieldNames: []string{"title", "fieldA", "fieldB"},
+ },
+ {
+ TypeName: "Admin",
+ FieldNames: []string{"id", "title", "fieldA"},
+ ExternalFieldNames: []string{"fieldB"},
+ },
+ {
+ TypeName: "NodesWrapper",
+ FieldNames: []string{"id", "nodes", "summary1", "summary3"},
+ },
+ {
+ TypeName: "Node",
+ FieldNames: []string{"id", "title", "fieldA"},
+ },
+ },
+ ChildNodes: []plan.TypeField{},
+ FederationMetaData: plan.FederationMetaData{
+ EntityInterfaces: []plan.EntityInterfaceConfiguration{
+ {
+ InterfaceTypeName: "Node",
+ ConcreteTypeNames: []string{"Admin", "User"},
+ },
+ },
+ Keys: plan.FederationFieldConfigurations{
+ {
+ TypeName: "User",
+ SelectionSet: "id",
+ },
+ {
+ TypeName: "Admin",
+ SelectionSet: "id",
+ },
+ {
+ TypeName: "NodesWrapper",
+ SelectionSet: "id",
+ },
+ },
+ Requires: plan.FederationFieldConfigurations{
+ {
+ TypeName: "NodesWrapper",
+ FieldName: "summary1",
+ SelectionSet: "nodes { fieldA fieldB additionalInfo { fieldB } }",
+ },
+ {
+ TypeName: "NodesWrapper",
+ FieldName: "summary3",
+ SelectionSet: "nodes { fieldB }",
+ },
+ },
+ },
+ },
+ mustCustomConfiguration(t,
+ ConfigurationInput{
+ Fetch: &FetchConfiguration{
+ URL: "http://first.service",
+ },
+ SchemaConfiguration: mustSchema(t,
+ &FederationConfiguration{
+ Enabled: true,
+ ServiceSDL: firstSubgraphSDL,
+ },
+ firstSubgraphSDL,
+ ),
+ },
+ ),
+ )
+
+ secondSubgraphSDL := `
+ interface Node {
+ id: ID!
+ fieldB: String!
+ additionalInfo: AdditionalInfo
+ }
+
+ type User @key(fields: "id") {
+ id: ID!
+ name: String!
+ title: String!
+ fieldA: String!
+ fieldB: String!
+ additionalInfo: AdditionalInfo
+ }
+
+ type Admin @key(fields: "id") {
+ id: ID!
+ adminName: String!
+ fieldA: String!
+ fieldB: String!
+ additionalInfo: AdditionalInfo
+ }
+
+ type AdditionalInfo @key(fields: "id") {
+ id: ID!
+ }
+
+ type NodesWrapper @key(fields: "id") {
+ id: ID!
+ summary2: String! @requires(fields: "summary3")
+ summary3: String! @external
+ }
+ `
+
+ secondDatasourceConfiguration := mustDataSourceConfiguration(
+ t,
+ "second-service",
+ &plan.DataSourceMetadata{
+ RootNodes: []plan.TypeField{
+ {
+ TypeName: "User",
+ FieldNames: []string{"id", "name", "title", "fieldA", "fieldB", "additionalInfo"},
+ },
+ {
+ TypeName: "Admin",
+ FieldNames: []string{"id", "adminName", "fieldA", "fieldB", "additionalInfo"},
+ },
+ {
+ TypeName: "NodesWrapper",
+ FieldNames: []string{"id", "summary2"},
+ ExternalFieldNames: []string{"summary3"},
+ },
+ {
+ TypeName: "AdditionalInfo",
+ FieldNames: []string{"id"},
+ },
+ },
+ ChildNodes: []plan.TypeField{
+ {
+ TypeName: "Node",
+ FieldNames: []string{"id", "fieldB", "additionalInfo"},
+ },
+ },
+ FederationMetaData: plan.FederationMetaData{
+ Keys: plan.FederationFieldConfigurations{
+ {
+ TypeName: "User",
+ SelectionSet: "id",
+ },
+ {
+ TypeName: "Admin",
+ SelectionSet: "id",
+ },
+ {
+ TypeName: "AdditionalInfo",
+ SelectionSet: "id",
+ },
+ },
+ },
+ },
+ mustCustomConfiguration(t,
+ ConfigurationInput{
+ Fetch: &FetchConfiguration{
+ URL: "http://second.service",
+ },
+ SchemaConfiguration: mustSchema(t,
+ &FederationConfiguration{
+ Enabled: true,
+ ServiceSDL: secondSubgraphSDL,
+ },
+ secondSubgraphSDL,
+ ),
+ },
+ ),
+ )
+
+ thirdSubgraphSDL := `
+ type AdditionalInfo @key(fields: "id") {
+ id: ID!
+ fieldA: String!
+ fieldB: String!
+ }
+ `
+
+ thirdDatasourceConfiguration := mustDataSourceConfiguration(
+ t,
+ "third-service",
+ &plan.DataSourceMetadata{
+ RootNodes: []plan.TypeField{
+ {
+ TypeName: "AdditionalInfo",
+ FieldNames: []string{"id", "fieldA", "fieldB"},
+ },
+ },
+ FederationMetaData: plan.FederationMetaData{
+ Keys: plan.FederationFieldConfigurations{
+ {
+ TypeName: "AdditionalInfo",
+ SelectionSet: "id",
+ },
+ },
+ },
+ },
+ mustCustomConfiguration(t,
+ ConfigurationInput{
+ Fetch: &FetchConfiguration{
+ URL: "http://third.service",
+ },
+ SchemaConfiguration: mustSchema(t,
+ &FederationConfiguration{
+ Enabled: true,
+ ServiceSDL: thirdSubgraphSDL,
+ },
+ thirdSubgraphSDL,
+ ),
+ },
+ ),
+ )
+
+ planConfiguration := plan.Configuration{
+ DataSources: []plan.DataSource{
+ firstDatasourceConfiguration,
+ secondDatasourceConfiguration,
+ thirdDatasourceConfiguration,
+ },
+ DisableResolveFieldPositions: true,
+ Debug: plan.DebugConfiguration{},
+ }
+
+ t.Run("query", func(t *testing.T) {
+ // in this scenario, we decide to rewrite the `nodes` field selection
+ // because `title` is external on the User type,
+ // due to the requirements of the `summary1` field we add required fields to the `nodes` selection set
+ // as the fields are mentioned in the interface `Node`, and the interface is an entity interface,
+ // we consider all fields as local - so we decide not to rewrite the selection set again
+ // this leads to a partially rewritten query, which has duplicate paths
+ // fields will be selected correctly, but we won't be able to set up fetch dependencies correctly
+ // because the parent type of the selected fields is Node, which is not an entity in subgraph 2, so there is no way to fetch it
+ // unless we rewrite the operation to concrete types
+
+ RunWithPermutations(
+ t,
+ definition,
+ `
+ query Accounts {
+ nodes {
+ nodes {
+ title
+ fieldB
+ additionalInfo {
+ fieldA
+ }
+ }
+ summary1
+ }
+ }
+ `,
+ "Accounts",
+ &plan.SynchronousResponsePlan{
+ Response: &resolve.GraphQLResponse{
+ Fetches: resolve.Sequence(
+ resolve.Single(&resolve.SingleFetch{
+ FetchConfiguration: resolve.FetchConfiguration{
+ Input: `{"method":"POST","url":"http://first.service","body":{"query":"{nodes {nodes {__typename ... on Admin {title fieldA __typename id} ... on User {__typename id}} __typename id}}"}}`,
+ PostProcessing: DefaultPostProcessingConfiguration,
+ DataSource: &Source{},
+ },
+ DataSourceIdentifier: []byte("graphql_datasource.Source"),
+ }),
+ resolve.SingleWithPath(&resolve.SingleFetch{
+ FetchDependencies: resolve.FetchDependencies{
+ FetchID: 1,
+ DependsOnFetchIDs: []int{0},
+ },
+ FetchConfiguration: resolve.FetchConfiguration{
+ Input: `{"method":"POST","url":"http://second.service","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on Admin {__typename fieldB additionalInfo {__typename id}} ... on User {__typename title fieldB additionalInfo {__typename id} fieldA}}}","variables":{"representations":[$$0$$]}}}`,
+ DataSource: &Source{},
+ SetTemplateOutputToNullOnVariableNull: true,
+ RequiresEntityBatchFetch: true,
+ PostProcessing: EntitiesPostProcessingConfiguration,
+ Variables: []resolve.Variable{
+ &resolve.ResolvableObjectVariable{
+ Renderer: resolve.NewGraphQLVariableResolveRenderer(&resolve.Object{
+ Nullable: true,
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("__typename"),
+ Value: &resolve.String{
+ Path: []string{"__typename"},
+ },
+ OnTypeNames: [][]byte{[]byte("Admin")},
+ },
+ {
+ Name: []byte("id"),
+ Value: &resolve.Scalar{
+ Path: []string{"id"},
+ },
+ OnTypeNames: [][]byte{[]byte("Admin")},
+ },
+ {
+ Name: []byte("__typename"),
+ Value: &resolve.String{
+ Path: []string{"__typename"},
+ },
+ OnTypeNames: [][]byte{[]byte("User")},
+ },
+ {
+ Name: []byte("id"),
+ Value: &resolve.Scalar{
+ Path: []string{"id"},
+ },
+ OnTypeNames: [][]byte{[]byte("User")},
+ },
+ },
+ }),
+ },
+ },
+ },
+ DataSourceIdentifier: []byte("graphql_datasource.Source"),
+ }, "nodes.nodes", resolve.ObjectPath("nodes"), resolve.ArrayPath("nodes")),
+ resolve.SingleWithPath(&resolve.SingleFetch{
+ FetchDependencies: resolve.FetchDependencies{
+ FetchID: 2,
+ DependsOnFetchIDs: []int{1},
+ }, FetchConfiguration: resolve.FetchConfiguration{
+ Input: `{"method":"POST","url":"http://third.service","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on AdditionalInfo {__typename fieldA fieldB}}}","variables":{"representations":[$$0$$]}}}`,
+ DataSource: &Source{},
+ SetTemplateOutputToNullOnVariableNull: true,
+ RequiresEntityBatchFetch: true,
+ PostProcessing: EntitiesPostProcessingConfiguration,
+ Variables: []resolve.Variable{
+ &resolve.ResolvableObjectVariable{
+ Renderer: resolve.NewGraphQLVariableResolveRenderer(&resolve.Object{
+ Nullable: true,
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("__typename"),
+ Value: &resolve.String{
+ Path: []string{"__typename"},
+ },
+ OnTypeNames: [][]byte{[]byte("AdditionalInfo")},
+ },
+ {
+ Name: []byte("id"),
+ Value: &resolve.Scalar{
+ Path: []string{"id"},
+ },
+ OnTypeNames: [][]byte{[]byte("AdditionalInfo")},
+ },
+ },
+ }),
+ },
+ },
+ },
+ DataSourceIdentifier: []byte("graphql_datasource.Source"),
+ }, "nodes.nodes.@.additionalInfo", resolve.ObjectPath("nodes"), resolve.ArrayPath("nodes"), resolve.PathElementWithTypeNames(resolve.ObjectPath("additionalInfo"), []string{"Admin", "User"})),
+ resolve.SingleWithPath(&resolve.SingleFetch{
+ FetchDependencies: resolve.FetchDependencies{
+ FetchID: 4,
+ DependsOnFetchIDs: []int{0, 1, 2, 2},
+ }, FetchConfiguration: resolve.FetchConfiguration{
+ Input: `{"method":"POST","url":"http://first.service","body":{"query":"query($representations: [_Any!]!){_entities(representations: $representations){... on NodesWrapper {__typename summary1}}}","variables":{"representations":[$$0$$]}}}`,
+ DataSource: &Source{},
+ SetTemplateOutputToNullOnVariableNull: true,
+ RequiresEntityFetch: true,
+ PostProcessing: SingleEntityPostProcessingConfiguration,
+ Variables: []resolve.Variable{
+ &resolve.ResolvableObjectVariable{
+ Renderer: resolve.NewGraphQLVariableResolveRenderer(&resolve.Object{
+ Nullable: true,
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("__typename"),
+ Value: &resolve.String{
+ Path: []string{"__typename"},
+ },
+ OnTypeNames: [][]byte{[]byte("NodesWrapper")},
+ },
+ {
+ Name: []byte("nodes"),
+ Value: &resolve.Array{
+ Path: []string{"nodes"},
+ Nullable: true,
+ Item: &resolve.Object{
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("fieldA"),
+ Value: &resolve.String{
+ Path: []string{"fieldA"},
+ },
+ },
+ {
+ Name: []byte("fieldB"),
+ Value: &resolve.String{
+ Path: []string{"fieldB"},
+ },
+ },
+ {
+ Name: []byte("additionalInfo"),
+ Value: &resolve.Object{
+ Path: []string{"additionalInfo"},
+ Nullable: true,
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("fieldB"),
+ Value: &resolve.String{
+ Path: []string{"fieldB"},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ OnTypeNames: [][]byte{[]byte("NodesWrapper")},
+ },
+ {
+ Name: []byte("id"),
+ Value: &resolve.Scalar{
+ Path: []string{"id"},
+ },
+ OnTypeNames: [][]byte{[]byte("NodesWrapper")},
+ },
+ },
+ }),
+ },
+ },
+ },
+ DataSourceIdentifier: []byte("graphql_datasource.Source"),
+ }, "nodes", resolve.ObjectPath("nodes")),
+ ),
+ Data: &resolve.Object{
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("nodes"),
+ Value: &resolve.Object{
+ Path: []string{"nodes"},
+ Nullable: true,
+ PossibleTypes: map[string]struct{}{
+ "NodesWrapper": {},
+ },
+ TypeName: "NodesWrapper",
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("nodes"),
+ Value: &resolve.Array{
+ Path: []string{"nodes"},
+ Nullable: true,
+ Item: &resolve.Object{
+ Nullable: false,
+ PossibleTypes: map[string]struct{}{
+ "Admin": {},
+ "Node": {},
+ "User": {},
+ },
+ TypeName: "Node",
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("title"),
+ Value: &resolve.String{
+ Path: []string{"title"},
+ },
+ OnTypeNames: [][]byte{[]byte("Admin")},
+ },
+ {
+ Name: []byte("fieldB"),
+ Value: &resolve.String{
+ Path: []string{"fieldB"},
+ },
+ OnTypeNames: [][]byte{[]byte("Admin")},
+ },
+ {
+ Name: []byte("additionalInfo"),
+ Value: &resolve.Object{
+ Path: []string{"additionalInfo"},
+ Nullable: true,
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("fieldA"),
+ Value: &resolve.String{
+ Path: []string{"fieldA"},
+ },
+ },
+ },
+ PossibleTypes: map[string]struct{}{
+ "AdditionalInfo": {},
+ },
+ TypeName: "AdditionalInfo",
+ },
+ OnTypeNames: [][]byte{[]byte("Admin")},
+ },
+ {
+ Name: []byte("title"),
+ Value: &resolve.String{
+ Path: []string{"title"},
+ },
+ OnTypeNames: [][]byte{[]byte("User")},
+ },
+ {
+ Name: []byte("fieldB"),
+ Value: &resolve.String{
+ Path: []string{"fieldB"},
+ },
+ OnTypeNames: [][]byte{[]byte("User")},
+ },
+ {
+ Name: []byte("additionalInfo"),
+ Value: &resolve.Object{
+ Path: []string{"additionalInfo"},
+ Nullable: true,
+ Fields: []*resolve.Field{
+ {
+ Name: []byte("fieldA"),
+ Value: &resolve.String{
+ Path: []string{"fieldA"},
+ },
+ },
+ },
+ PossibleTypes: map[string]struct{}{
+ "AdditionalInfo": {},
+ },
+ TypeName: "AdditionalInfo",
+ },
+ OnTypeNames: [][]byte{[]byte("User")},
+ },
+ },
+ },
+ },
+ },
+ {
+ Name: []byte("summary1"),
+ Value: &resolve.String{
+ Path: []string{"summary1"},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ planConfiguration,
+ WithDefaultPostProcessor(),
+ )
+ })
+ })
})
t.Run("different entity keys jumps", func(t *testing.T) {
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/abstract_selection_rewriter.go b/app/v2/pkg/engine/plan/abstract_selection_rewriter.go
index 4d19e9b..65c0f87 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/abstract_selection_rewriter.go
+++ b/app/v2/pkg/engine/plan/abstract_selection_rewriter.go
@@ -77,17 +77,35 @@ type RewriteResult struct {
var resultNotRewritten = RewriteResult{}
-func newFieldSelectionRewriter(operation *ast.Document, definition *ast.Document, dsConfiguration DataSource) (*fieldSelectionRewriter, error) {
+type rewriterOption func(*rewriterOptions)
+
+type rewriterOptions struct {
+ forceRewrite bool
+}
+
+func withForceRewrite() rewriterOption {
+ return func(o *rewriterOptions) {
+ o.forceRewrite = true
+ }
+}
+
+func newFieldSelectionRewriter(operation *ast.Document, definition *ast.Document, dsConfiguration DataSource, options ...rewriterOption) (*fieldSelectionRewriter, error) {
upstreamDefinition, ok := dsConfiguration.UpstreamSchema()
if !ok {
return nil, ErrNoUpstreamSchema
}
+
+ opts := &rewriterOptions{}
+ for _, option := range options {
+ option(opts)
+ }
+
return &fieldSelectionRewriter{
operation: operation,
definition: definition,
upstreamDefinition: upstreamDefinition,
dsConfiguration: dsConfiguration,
- alwaysRewrite: dsConfiguration.PlanningBehavior().AlwaysFlattenFragments,
+ alwaysRewrite: dsConfiguration.PlanningBehavior().AlwaysFlattenFragments || opts.forceRewrite,
}, nil
}
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/datasource_filter_node_suggestions.go b/app/v2/pkg/engine/plan/datasource_filter_node_suggestions.go
index 9722949..77b4441 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/datasource_filter_node_suggestions.go
+++ b/app/v2/pkg/engine/plan/datasource_filter_node_suggestions.go
@@ -28,6 +28,7 @@ type NodeSuggestion struct {
IsRequiredKeyField bool `json:"isRequiredKeyField"`
IsLeaf bool `json:"isLeaf"`
isTypeName bool
+ IsOrphan bool // if node is orphan it should not be taken into account for planning
parentPathWithoutFragment string
onFragment bool
@@ -156,9 +157,28 @@ func (f *NodeSuggestions) RemoveTreeNodeChilds(fieldRef int) {
return
}
+ // mark all nested suggestions as orphans
+ f.abandonNodeChildren(node, false)
+
+ // remove rewritten children nodes from the current node
node.ReplaceChildren()
}
+// abandonNodeChildren recursively marks all nested suggestions as orphans
+func (f *NodeSuggestions) abandonNodeChildren(node tree.Node[[]int], clearData bool) {
+ for _, child := range node.GetChildren() {
+ f.abandonNodeChildren(child, true)
+ }
+
+ if clearData {
+ for _, idx := range node.GetData() {
+ // we can't reslice f.items because tree data stores indexes of f.items
+ f.items[idx].IsOrphan = true
+ f.items[idx].unselect()
+ }
+ }
+}
+
func (f *NodeSuggestions) addSuggestion(node *NodeSuggestion) (suggestionIdx int) {
f.items = append(f.items, node)
return len(f.items) - 1
@@ -171,6 +191,10 @@ func (f *NodeSuggestions) SuggestionsForPath(typeName, fieldName, path string) (
}
for i := range items {
+ if items[i].IsOrphan {
+ continue
+ }
+
if items[i].Selected && typeName == items[i].TypeName && fieldName == items[i].FieldName {
suggestions = append(suggestions, items[i])
}
@@ -186,6 +210,10 @@ func (f *NodeSuggestions) HasSuggestionForPath(typeName, fieldName, path string)
}
for i := range items {
+ if items[i].IsOrphan {
+ continue
+ }
+
if typeName == items[i].TypeName && fieldName == items[i].FieldName && items[i].Selected {
return items[i].DataSourceHash, true
}
@@ -336,6 +364,10 @@ func (f *NodeSuggestions) populateHasSuggestions() map[DSHash]struct{} {
f.pathSuggestions = make(map[string][]*NodeSuggestion, len(f.pathSuggestions))
for i := range f.items {
+ if f.items[i].IsOrphan {
+ continue
+ }
+
if !f.items[i].Selected {
continue
}
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/datasource_filter_visitor.go b/app/v2/pkg/engine/plan/datasource_filter_visitor.go
index 50a2ed2..940d86c 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/datasource_filter_visitor.go
+++ b/app/v2/pkg/engine/plan/datasource_filter_visitor.go
@@ -108,7 +108,9 @@ func (f *DataSourceFilter) applyLandedTo(landedTo map[int]DSHash) {
node, ok := f.nodes.responseTree.Find(treeNodeID)
if !ok {
- panic("node not found")
+ // no such node in the tree
+ // we may have removed it as orphaned
+ continue
}
nodeData := node.GetData()
@@ -217,6 +219,10 @@ const (
func (f *DataSourceFilter) selectUniqueNodes() {
for i := range f.nodes.items {
+ if f.nodes.items[i].IsOrphan {
+ continue
+ }
+
if f.nodes.items[i].Selected {
continue
}
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/federation_metadata.go b/app/v2/pkg/engine/plan/federation_metadata.go
index ce1290e..a344938 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/federation_metadata.go
+++ b/app/v2/pkg/engine/plan/federation_metadata.go
@@ -94,6 +94,8 @@ type FieldCoordinate struct {
FieldName string `json:"field_name"`
}
+// parseSelectionSet parses the selection set and stores the parsed AST in parsedSelectionSet.
+// should have pointer receiver to preserve the value
func (f *FederationFieldConfiguration) parseSelectionSet() error {
if f.parsedSelectionSet != nil {
return nil
@@ -108,7 +110,9 @@ func (f *FederationFieldConfiguration) parseSelectionSet() error {
return nil
}
-func (f *FederationFieldConfiguration) String() string {
+// String - implements fmt.Stringer
+// NOTE: do not change to a pointer receiver; it won't work for non-pointer values
+func (f FederationFieldConfiguration) String() string {
b, _ := json.Marshal(f)
return string(b)
}
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/node_selection_builder.go b/app/v2/pkg/engine/plan/node_selection_builder.go
index 3ad400a..40964c4 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/node_selection_builder.go
+++ b/app/v2/pkg/engine/plan/node_selection_builder.go
@@ -3,6 +3,7 @@ package plan
import (
"fmt"
"io"
+ "slices"
"github.com/wundergraph/graphql-go-tools/v2/pkg/ast"
"github.com/wundergraph/graphql-go-tools/v2/pkg/astprinter"
@@ -57,7 +58,7 @@ func NewNodeSelectionBuilder(config *Configuration) *NodeSelectionBuilder {
newFieldRefs: make(map[int]struct{}),
}
- nodeSelectionsWalker.RegisterEnterDocumentVisitor(nodeSelectionVisitor)
+ nodeSelectionsWalker.RegisterDocumentVisitor(nodeSelectionVisitor)
nodeSelectionsWalker.RegisterFieldVisitor(nodeSelectionVisitor)
nodeSelectionsWalker.RegisterEnterOperationVisitor(nodeSelectionVisitor)
nodeSelectionsWalker.RegisterSelectionSetVisitor(nodeSelectionVisitor)
@@ -124,9 +125,9 @@ func (p *NodeSelectionBuilder) SelectNodes(operation, definition *ast.Document,
}
i := 1
+ hasUnresolvedFields := false
// secondary runs to add path for the new required fields
- for p.nodeSelectionsVisitor.shouldRevisit() {
-
+ for p.nodeSelectionsVisitor.hasNewFields || hasUnresolvedFields {
// when we have rewritten a field old node suggestion are not make sense anymore
// so we are removing child nodes of the rewritten fields
for _, fieldRef := range p.nodeSelectionsVisitor.rewrittenFieldRefs {
@@ -166,15 +167,19 @@ func (p *NodeSelectionBuilder) SelectNodes(operation, definition *ast.Document,
i++
if resolvableReport := p.isResolvable(operation, definition, p.nodeSelectionsVisitor.nodeSuggestions); resolvableReport.HasErrors() {
- p.nodeSelectionsVisitor.hasUnresolvedFields = true
+ hasUnresolvedFields = true
if i > 100 {
report.AddInternalError(fmt.Errorf("could not resolve a field: %v", resolvableReport))
return
}
+
+ continue
+ } else {
+ hasUnresolvedFields = false
}
- // TODO: what logic should be here?
+ // if we have revisited the operation more than 100 times, we have a bug
if i > 100 {
report.AddInternalError(fmt.Errorf("something went wrong"))
return
@@ -184,8 +189,8 @@ func (p *NodeSelectionBuilder) SelectNodes(operation, definition *ast.Document,
if i == 1 {
// if we have not revisited the operation, we need to check if it is resolvable
if resolvableReport := p.isResolvable(operation, definition, p.nodeSelectionsVisitor.nodeSuggestions); resolvableReport.HasErrors() {
- p.nodeSelectionsVisitor.hasUnresolvedFields = true
report.AddInternalError(fmt.Errorf("could not resolve a field: %v", resolvableReport))
+ return
}
}
@@ -213,22 +218,28 @@ func (p *NodeSelectionBuilder) printOperation(operation *ast.Document) {
if p.config.Debug.PrintOperationEnableASTRefs {
pp, _ = astprinter.PrintStringIndentDebug(operation, " ", func(fieldRef int, out io.Writer) {
- if p.nodeSelectionsVisitor.nodeSuggestions == nil {
- return
- }
+ if p.config.Debug.PrintNodeSuggestions {
+ if p.nodeSelectionsVisitor.nodeSuggestions == nil {
+ return
+ }
- treeNodeId := TreeNodeID(fieldRef)
- node, ok := p.nodeSelectionsVisitor.nodeSuggestions.responseTree.Find(treeNodeId)
- if !ok {
- return
- }
+ treeNodeId := TreeNodeID(fieldRef)
+ node, ok := p.nodeSelectionsVisitor.nodeSuggestions.responseTree.Find(treeNodeId)
+ if !ok {
+ return
+ }
- items := node.GetData()
- for _, id := range items {
- if p.nodeSelectionsVisitor.nodeSuggestions.items[id].Selected {
- fmt.Fprintf(out, " %s", p.nodeSelectionsVisitor.nodeSuggestions.items[id].StringShort())
+ items := node.GetData()
+ for _, id := range items {
+ if p.nodeSelectionsVisitor.nodeSuggestions.items[id].Selected {
+ _, _ = fmt.Fprintf(out, " %s", p.nodeSelectionsVisitor.nodeSuggestions.items[id].StringShort())
+ }
}
}
+
+ if slices.Contains(p.nodeSelectionsVisitor.skipFieldsRefs, fieldRef) {
+ _, _ = fmt.Fprintf(out, " (skip)")
+ }
})
} else {
pp, _ = astprinter.PrintStringIndent(operation, " ")
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/node_selection_visitor.go b/app/v2/pkg/engine/plan/node_selection_visitor.go
index 0345af2..ee59305 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/node_selection_visitor.go
+++ b/app/v2/pkg/engine/plan/node_selection_visitor.go
@@ -41,17 +41,17 @@ type nodeSelectionVisitor struct {
fieldLandedTo map[int]DSHash // fieldLandedTo is a map[fieldRef]DSHash - holds a datasource hash where field was landed to
fieldDependencyKind map[fieldDependencyKey]fieldDependencyKind
- secondaryRun bool // secondaryRun is a flag to indicate that we're running the nodeSelectionVisitor not the first time
- hasNewFields bool // hasNewFields is used to determine if we need to run the planner again. It will be true in case required fields were added
- hasUnresolvedFields bool // hasUnresolvedFields is used to determine if we need to run the planner again. We should set it to true in case we have unresolved fields
+ secondaryRun bool // secondaryRun is a flag to indicate that we're running the nodeSelectionVisitor not the first time
+ hasNewFields bool // hasNewFields is used to determine if we need to run the planner again. It will be true in case required fields were added
- rewrittenFieldRefs []int
+ rewrittenFieldRefs []int // rewrittenFieldRefs holds field refs which had their selection sets rewritten during the current walk
+ persistedRewrittenFieldRefs map[int]struct{} // persistedRewrittenFieldRefs holds field refs which had their selection sets rewritten during any of the walks
// addTypenameInNestedSelections controls forced addition of __typename to nested selection sets
// used by "requires" keys, not only when fragments are present.
addTypenameInNestedSelections bool
- newFieldRefs map[int]struct{}
+ newFieldRefs map[int]struct{} // newFieldRefs is a set of field refs which were added by the visitor or was modified by a rewrite
}
func (c *nodeSelectionVisitor) addSkipFieldRefs(fieldRefs ...int) {
@@ -70,10 +70,6 @@ type fieldDependencyKey struct {
field, dependsOn int
}
-func (c *nodeSelectionVisitor) shouldRevisit() bool {
- return c.hasNewFields || c.hasUnresolvedFields
-}
-
type fieldIndexKey struct {
fieldRef int
dsHash DSHash
@@ -135,7 +131,6 @@ func (c *nodeSelectionVisitor) debugPrint(args ...any) {
func (c *nodeSelectionVisitor) EnterDocument(operation, definition *ast.Document) {
c.hasNewFields = false
- c.hasUnresolvedFields = false
c.rewrittenFieldRefs = c.rewrittenFieldRefs[:0]
if c.selectionSetRefs == nil {
@@ -154,6 +149,7 @@ func (c *nodeSelectionVisitor) EnterDocument(operation, definition *ast.Document
c.skipFieldsRefs = make([]int, 0, 8)
}
+ c.persistedRewrittenFieldRefs = make(map[int]struct{})
c.visitedFieldsAbstractChecks = make(map[int]struct{})
c.visitedFieldsRequiresChecks = make(map[fieldIndexKey]struct{})
c.visitedFieldsKeyChecks = make(map[fieldIndexKey]struct{})
@@ -182,16 +178,39 @@ func (c *nodeSelectionVisitor) EnterOperationDefinition(ref int) {
func (c *nodeSelectionVisitor) EnterSelectionSet(ref int) {
c.debugPrint("EnterSelectionSet ref:", ref)
c.selectionSetRefs = append(c.selectionSetRefs, ref)
+
+ c.handleRequiresInSelectionSet(ref)
+}
+
+// handleRequiresInSelectionSet adds required fields for fields having @requires directive
+// before walker actually walks into field selections
+// this is needed for the case when requires configuration has deeply nested fields
+// which will modify the current field's siblings
+func (c *nodeSelectionVisitor) handleRequiresInSelectionSet(selectionSetRef int) {
+ fieldSelectionsRefs := c.operation.SelectionSetFieldSelections(selectionSetRef)
+ for _, fieldSelectionRef := range fieldSelectionsRefs {
+ fieldRef := c.operation.Selections[fieldSelectionRef].Ref
+ // process the field but handle only requires configurations
+ c.handleEnterField(fieldRef, true)
+ }
+
+ // add required fields into operation right away
+ // so when the walker walks into fields, required fields will already be present
+ c.processPendingFieldRequirements(selectionSetRef)
}
func (c *nodeSelectionVisitor) LeaveSelectionSet(ref int) {
c.debugPrint("LeaveSelectionSet ref:", ref)
- c.processPendingFieldRequirements(ref)
c.processPendingKeyRequirements(ref)
c.selectionSetRefs = c.selectionSetRefs[:len(c.selectionSetRefs)-1]
}
func (c *nodeSelectionVisitor) EnterField(fieldRef int) {
+ // process field to handle keys and do rewrites of abstract selections
+ c.handleEnterField(fieldRef, false)
+}
+
+func (c *nodeSelectionVisitor) handleEnterField(fieldRef int, handleRequires bool) {
root := c.walker.Ancestors[0]
if root.Kind != ast.NodeKindOperationDefinition {
return
@@ -218,8 +237,12 @@ func (c *nodeSelectionVisitor) EnterField(fieldRef int) {
}
ds := c.dataSources[dsIdx]
- // check if the field has @requires directive
- c.handleFieldRequiredByRequires(fieldRef, parentPath, typeName, fieldName, currentPath, ds)
+ if handleRequires {
+ // check if the field has @requires directive
+ c.handleFieldRequiredByRequires(fieldRef, parentPath, typeName, fieldName, currentPath, ds)
+ // skip to the next suggestion as we only handle requires here
+ continue
+ }
if suggestion.requiresKey != nil {
// add @key requirements for the field
@@ -592,14 +615,18 @@ func (c *nodeSelectionVisitor) addKeyRequirementsToOperation(selectionSetRef int
// setup deps between key chain items
if currentFieldRefs != nil && previousJump != nil {
for _, requestedByFieldRef := range addFieldsResult.requiredFieldRefs {
- if slices.Contains(currentFieldRefs, requestedByFieldRef) {
- // we should not add field ref to fieldDependsOn map if it is part of a key
- continue
+ fieldKey := fieldIndexKey{requestedByFieldRef, jump.From}
+
+ for _, requiredFieldRef := range currentFieldRefs {
+ if requiredFieldRef == requestedByFieldRef {
+ // we should not add field ref to fieldDependsOn map if it is part of a key,
+ // e.g., if it depends on itself
+ continue
+ }
+ c.fieldDependsOn[fieldKey] = append(c.fieldDependsOn[fieldKey], requiredFieldRef)
+ c.fieldRefDependsOn[requestedByFieldRef] = append(c.fieldRefDependsOn[requestedByFieldRef], requiredFieldRef)
}
- fieldKey := fieldIndexKey{requestedByFieldRef, jump.From}
- c.fieldDependsOn[fieldKey] = append(c.fieldDependsOn[fieldKey], currentFieldRefs...)
- c.fieldRefDependsOn[requestedByFieldRef] = append(c.fieldRefDependsOn[requestedByFieldRef], currentFieldRefs...)
c.fieldRequirementsConfigs[fieldKey] = append(c.fieldRequirementsConfigs[fieldKey], FederationFieldConfiguration{
TypeName: previousJump.TypeName,
SelectionSet: previousJump.SelectionSet,
@@ -615,19 +642,23 @@ func (c *nodeSelectionVisitor) addKeyRequirementsToOperation(selectionSetRef int
if lastJump {
// add mapping for the field dependencies
for _, requestedByFieldRef := range pendingKey.requestedByFieldRefs {
- if slices.Contains(currentFieldRefs, requestedByFieldRef) {
- // we should not add field ref to fieldDependsOn map if it is part of a key
- continue
+ fieldKey := fieldIndexKey{requestedByFieldRef, pendingKey.targetDSHash}
+
+ for _, requiredFieldRef := range currentFieldRefs {
+ if requiredFieldRef == requestedByFieldRef {
+ // we should not add field ref to fieldDependsOn map if it is part of a key,
+ // e.g., if it depends on itself
+ continue
+ }
+ c.fieldDependsOn[fieldKey] = append(c.fieldDependsOn[fieldKey], requiredFieldRef)
+ c.fieldRefDependsOn[requestedByFieldRef] = append(c.fieldRefDependsOn[requestedByFieldRef], requiredFieldRef)
}
- fieldKey := fieldIndexKey{requestedByFieldRef, pendingKey.targetDSHash}
- c.fieldDependsOn[fieldKey] = append(c.fieldDependsOn[fieldKey], addFieldsResult.requiredFieldRefs...)
- c.fieldRefDependsOn[requestedByFieldRef] = append(c.fieldRefDependsOn[requestedByFieldRef], addFieldsResult.requiredFieldRefs...)
c.fieldRequirementsConfigs[fieldKey] = append(c.fieldRequirementsConfigs[fieldKey], FederationFieldConfiguration{
TypeName: jump.TypeName,
SelectionSet: jump.SelectionSet,
})
- for _, requiredFieldRef := range addFieldsResult.requiredFieldRefs {
+ for _, requiredFieldRef := range currentFieldRefs {
c.fieldDependencyKind[fieldDependencyKey{field: requestedByFieldRef, dependsOn: requiredFieldRef}] = fieldDependencyKindKey
}
}
@@ -654,7 +685,16 @@ func (c *nodeSelectionVisitor) rewriteSelectionSetHavingAbstractFragments(fieldR
return
}
- rewriter, err := newFieldSelectionRewriter(c.operation, c.definition, ds)
+ var options []rewriterOption
+ if _, wasRewritten := c.persistedRewrittenFieldRefs[fieldRef]; wasRewritten {
+ // When field was already rewritten in previous walker runs,
+ // but we are visiting it again - it means that we have appended more required fields to it.
+ // So we have to force rewriting it again, because without force we could end up with duplicated fields outside of fragments.
+ // When newly added fields are local - rewriter will consider that rewrite is not necessary.
+ options = append(options, withForceRewrite())
+ }
+
+ rewriter, err := newFieldSelectionRewriter(c.operation, c.definition, ds, options...)
if err != nil {
c.walker.StopWithInternalErr(err)
return
@@ -673,14 +713,18 @@ func (c *nodeSelectionVisitor) rewriteSelectionSetHavingAbstractFragments(fieldR
c.addSkipFieldRefs(rewriter.skipFieldRefs...)
c.hasNewFields = true
c.rewrittenFieldRefs = append(c.rewrittenFieldRefs, fieldRef)
+ c.persistedRewrittenFieldRefs[fieldRef] = struct{}{}
c.updateFieldDependsOn(result.changedFieldRefs)
+ c.updateSkipFieldRefs(result.changedFieldRefs)
// skip walking into a rewritten field instead of stoping the whole visitor
// should allow to do fewer walks over the operation
c.walker.SkipNode()
}
+// resetVisitedAbstractChecksForModifiedFields - when we modify the operation by adding required fields
+// we need to reset visitedFieldsAbstractChecks for modified fields to allow additional rewrites if necessary
func (c *nodeSelectionVisitor) resetVisitedAbstractChecksForModifiedFields(modifiedFields []int) {
for _, fieldRef := range modifiedFields {
delete(c.visitedFieldsAbstractChecks, fieldRef)
@@ -718,3 +762,11 @@ func (c *nodeSelectionVisitor) updateFieldDependsOn(changedFieldRefs map[int][]i
c.addNewFieldRefs(newRefs...)
}
}
+
+func (c *nodeSelectionVisitor) updateSkipFieldRefs(changedFieldRefs map[int][]int) {
+ for _, fieldRef := range c.skipFieldsRefs {
+ if newRefs := changedFieldRefs[fieldRef]; newRefs != nil {
+ c.skipFieldsRefs = append(c.skipFieldsRefs, newRefs...)
+ }
+ }
+}
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/path_builder.go b/app/v2/pkg/engine/plan/path_builder.go
index 7483e1b..acd5ea4 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/path_builder.go
+++ b/app/v2/pkg/engine/plan/path_builder.go
@@ -158,7 +158,11 @@ func (p *PathBuilder) printPlanningPaths() {
if requiredFields != nil && len(*requiredFields) > 0 {
fmt.Println("Required fields:")
for _, field := range *requiredFields {
- fmt.Println(field)
+ if field.FieldName != "" {
+ fmt.Printf("required by %s: %s\n", field.FieldName, field)
+ continue
+ }
+ fmt.Println("key:", field)
}
}
fmt.Println("Paths:")
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/path_builder_visitor.go b/app/v2/pkg/engine/plan/path_builder_visitor.go
index 2152cf8..67ce9db 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/path_builder_visitor.go
+++ b/app/v2/pkg/engine/plan/path_builder_visitor.go
@@ -54,6 +54,13 @@ type pathBuilderVisitor struct {
currentFetchPath []resolve.FetchItemPathElement
currentResponsePath []string
+
+ skipDS []DSSkip // skipDS is a list of datasources to skip when walking field children
+}
+
+type DSSkip struct {
+ popOnFieldRef int
+ DSHash DSHash
}
type FailedToCreatePlanningPathsError struct {
@@ -459,6 +466,12 @@ func (c *pathBuilderVisitor) EnterField(fieldRef int) {
suggestions := c.nodeSuggestions.SuggestionsForPath(typeName, fieldName, currentPath)
shareable := len(suggestions) > 1
for _, suggestion := range suggestions {
+ if idx := slices.IndexFunc(c.skipDS, func(skip DSSkip) bool {
+ return skip.DSHash == suggestion.DataSourceHash
+ }); idx != -1 {
+ continue
+ }
+
dsIdx := slices.IndexFunc(c.dataSources, func(d DataSource) bool {
return d.Hash() == suggestion.DataSourceHash
})
@@ -471,28 +484,70 @@ func (c *pathBuilderVisitor) EnterField(fieldRef int) {
if !c.couldPlanField(fieldRef, ds.Hash()) {
c.handleMissingPath(false, typeName, fieldName, currentPath, shareable)
- // if we could not plan the field, we should skip walking into it
- // as the dependencies conditions are tight to this field,
- // and we could mistakenly plan the nested fields on this datasource without current field
- // It could happen when there are the same field as current on another datasource, and it is allowed to plan it
- c.walker.SkipNode()
- return
+ /*
+ if we could not plan the field, we should skip planning children on the same datasource
+ as the dependency conditions are tight to this field.
+ We could mistakenly plan the nested fields on this datasource without the current field.
+ It could happen when there is the same field as current on another datasource, and it is allowed to plan it.
+ We want to be able to plan fields on other datasource, because current field on this datasource
+ could depend on the child fields of the same field on another datasource
+
+ example:
+
+ type Entity @key(fields: "nested { a b }") {
+ nested: NestedType
+ }
+
+ type NestedType {
+ a: String
+ b: String
+ c: String
+ }
+
+ We want to get Entity.nested.c but it depends on Entity.nested.a and Entity.nested.b as keys
+ Within query it will mean that field Entity.nested depends on fields Entity.nested.a and Entity.nested.b
+ But as we track all fields including parent, it will mean that Entity.nested depends on Entity.nested within
+ the same query and field ref will be the same
+
+ So we need to allow processing nested fields on other datasources, but not on the current one
+ */
+
+ c.skipDS = append(c.skipDS, DSSkip{
+ popOnFieldRef: fieldRef,
+ DSHash: ds.Hash(),
+ })
+
+ continue
}
c.handlePlanningField(fieldRef, typeName, fieldName, currentPath, parentPath, precedingParentPath, suggestion, ds, shareable)
}
- // we should update response path and array fields only when we are able to plan - so field is not skipped
- // 1. Because current response path for the field should not include field itself
- // 2. To have correct state - when we add something in the EnterField callback,
- // but we call walker.SkipNode - LeaveField callback won't be called,
- // and we will have an incorrect state
-
c.addArrayField(fieldRef, currentPath)
// pushResponsePath uses array fields so it should be called after addArrayField
c.pushResponsePath(fieldRef, fieldAliasOrName)
}
+func (c *pathBuilderVisitor) LeaveField(ref int) {
+ if c.isNotOperationDefinitionRoot() {
+ return
+ }
+
+ if c.plannerConfiguration.Debug.ConfigurationVisitor {
+ fieldAliasOrName := c.operation.FieldAliasOrNameString(ref)
+ typeName := c.walker.EnclosingTypeDefinition.NameString(c.definition)
+ c.debugPrint("LeaveField ref:", ref, "fieldName:", fieldAliasOrName, "typeName:", typeName)
+ }
+
+ // pop response path uses array fields so it should be called before removeArrayField
+ c.popResponsePath(ref)
+ c.removeArrayField(ref)
+
+ c.skipDS = slices.DeleteFunc(c.skipDS, func(s DSSkip) bool {
+ return s.popOnFieldRef == ref
+ })
+}
+
func (c *pathBuilderVisitor) handlePlanningField(fieldRef int, typeName, fieldName, currentPath, parentPath, precedingParentPath string, suggestion *NodeSuggestion, ds DataSource, shareable bool) {
plannedOnPlannerIds := c.fieldsPlannedOn[fieldRef]
@@ -1209,24 +1264,6 @@ func (c *pathBuilderVisitor) handleMissingPath(planned bool, typeName string, fi
// all suggestions were planned, so we should not record a missing path
return
}
-
- if !shareable {
- c.walker.SkipNode()
- }
-}
-
-func (c *pathBuilderVisitor) LeaveField(ref int) {
- if c.isNotOperationDefinitionRoot() {
- return
- }
-
- fieldAliasOrName := c.operation.FieldAliasOrNameString(ref)
- typeName := c.walker.EnclosingTypeDefinition.NameString(c.definition)
- c.debugPrint("LeaveField ref:", ref, "fieldName:", fieldAliasOrName, "typeName:", typeName)
-
- // pop response path uses array fields so it should be called before removeArrayField
- c.popResponsePath(ref)
- c.removeArrayField(ref)
}
// addPlannerPathForTypename adds a path for the __typename field.
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/required_fields_visitor.go b/app/v2/pkg/engine/plan/required_fields_visitor.go
index 4f3d479..2123605 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/required_fields_visitor.go
+++ b/app/v2/pkg/engine/plan/required_fields_visitor.go
@@ -12,31 +12,44 @@ import (
)
const (
- requiredFieldsFragmentTemplate = `fragment Key on %s {%s}`
- requiredFieldsFragmentTemplateWithTypeName = `fragment Key on %s { __typename %s}`
+ keyFieldsFragmentTemplate = `fragment Key on %s {%s}`
+ requiresFragmentTemplate = `fragment Requires_for_%s on %s { %s }`
)
-func RequiredFieldsFragment(typeName, requiredFields string, includeTypename bool) (*ast.Document, *operationreport.Report) {
- template := requiredFieldsFragmentTemplate
+func RequiredFieldsFragmentString(typeName, fieldName, requiredFields string, includeTypename bool) (fragment string) {
if includeTypename {
- template = requiredFieldsFragmentTemplateWithTypeName
+ requiredFields = "__typename " + requiredFields
}
- key, report := astparser.ParseGraphqlDocumentString(fmt.Sprintf(template, typeName, requiredFields))
- return &key, &report
-}
-
-func QueryPlanRequiredFieldsFragment(fieldName, typeName, requiredFields string) (*ast.Document, *operationreport.Report) {
- var fragment string
if fieldName == "" {
- fragment = fmt.Sprintf("fragment Key on %s { __typename %s }", typeName, requiredFields)
+ fragment = fmt.Sprintf(keyFieldsFragmentTemplate, typeName, requiredFields)
} else {
- fragment = fmt.Sprintf("fragment Requires_for_%s on %s { %s }", fieldName, typeName, requiredFields)
+ fragment = fmt.Sprintf(requiresFragmentTemplate, fieldName, typeName, requiredFields)
}
+
+ return fragment
+}
+
+func RequiredFieldsFragment(typeName, requiredFields string, includeTypename bool) (*ast.Document, *operationreport.Report) {
+ return ParseRequiredFieldsFragment(typeName, "", requiredFields, includeTypename)
+}
+
+func ParseRequiredFieldsFragment(typeName, fieldName, requiredFields string, includeTypename bool) (*ast.Document, *operationreport.Report) {
+ fragment := RequiredFieldsFragmentString(typeName, fieldName, requiredFields, includeTypename)
+
key, report := astparser.ParseGraphqlDocumentString(fragment)
+ if report.HasErrors() {
+ return nil, &report
+ }
+
return &key, &report
}
+func QueryPlanRequiredFieldsFragment(typeName, fieldName, requiredFields string) (*ast.Document, *operationreport.Report) {
+ // fieldName == "" is not fully accurate: for interface objects we do not request a __typename
+ return ParseRequiredFieldsFragment(typeName, fieldName, requiredFields, fieldName == "")
+}
+
type addRequiredFieldsConfiguration struct {
operation, definition *ast.Document
operationSelectionSetRef int
diff --git a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/required_fields_visitor_test.go b/app/v2/pkg/engine/plan/required_fields_visitor_test.go
index 9ea2ee5..0d305f4 100644
--- a/tmp/agent-patch-flux-pr-1338.1-of-1.2026-03-05__17-48-26__gpt-5-1-codex-mini/app/v2/pkg/engine/plan/required_fields_visitor_test.go
+++ b/app/v2/pkg/engine/plan/required_fields_visitor_test.go
@@ -607,7 +607,7 @@ func TestQueryPlanRequiredFieldsFragment(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- fragment, report := QueryPlanRequiredFieldsFragment(tt.fieldName, tt.typeName, tt.requiredFields)
+ fragment, report := QueryPlanRequiredFieldsFragment(tt.typeName, tt.fieldName, tt.requiredFields)
require.False(t, report.HasErrors(), "unexpected error")
require.NotNil(t, fragment)