From 903deadc6a9c7167641ad319fbc437e4c5ae0aed Mon Sep 17 00:00:00 2001 From: Yaacov Rydzinski Date: Tue, 2 Jul 2024 17:47:08 +0300 Subject: [PATCH] upgrade executor to non-duplicating incremental delivery format includes granular options to allow for prior branching format --- .changeset/fifty-bobcats-jog.md | 14 + .../delegate/src/defaultMergedResolver.ts | 4 +- packages/delegate/src/leftOver.ts | 17 +- .../executor/src/execution/AccumulatorMap.ts | 17 + .../src/execution/BoxedPromiseOrValue.ts | 25 + .../src/execution/IncrementalGraph.ts | 294 ++ .../src/execution/IncrementalPublisher.ts | 434 +++ .../execution/__tests__/abort-signal.test.ts | 103 +- .../execution/__tests__/backpressure.test.ts | 6 +- .../src/execution/__tests__/defer-test.ts | 2496 +++++++++++++++-- .../src/execution/__tests__/lists-test.ts | 39 +- .../src/execution/__tests__/mutations-test.ts | 12 +- .../src/execution/__tests__/nonnull-test.ts | 32 +- .../src/execution/__tests__/stream-test.ts | 1175 ++++++-- .../src/execution/__tests__/subscribe.test.ts | 202 +- .../src/execution/buildExecutionPlan.ts | 95 + .../executor/src/execution/collectFields.ts | 320 +++ packages/executor/src/execution/execute.ts | 2121 ++++++++------ packages/executor/src/execution/getBySet.ts | 13 + packages/executor/src/execution/isSameSet.ts | 11 + packages/executor/src/execution/types.ts | 296 ++ .../test/__snapshots__/defer.test.ts.snap | 152 +- packages/federation/test/defer.test.ts | 35 +- packages/utils/package.json | 2 + packages/utils/src/Interfaces.ts | 3 + packages/utils/src/createDeferred.ts | 16 + packages/utils/src/index.ts | 1 + packages/utils/src/mergeIncrementalResult.ts | 64 +- .../tests/mergeIncrementalResult.spec.ts | 138 + yarn.lock | 5 + 30 files changed, 6614 insertions(+), 1528 deletions(-) create mode 100644 .changeset/fifty-bobcats-jog.md create mode 100644 packages/executor/src/execution/AccumulatorMap.ts create mode 100644 packages/executor/src/execution/BoxedPromiseOrValue.ts create mode 100644 packages/executor/src/execution/IncrementalGraph.ts create mode 100644 packages/executor/src/execution/IncrementalPublisher.ts create mode 100644 packages/executor/src/execution/buildExecutionPlan.ts create mode 100644 packages/executor/src/execution/collectFields.ts create mode 100644 packages/executor/src/execution/getBySet.ts create mode 100644 packages/executor/src/execution/isSameSet.ts create mode 100644 packages/executor/src/execution/types.ts create mode 100644 packages/utils/src/createDeferred.ts diff --git a/.changeset/fifty-bobcats-jog.md b/.changeset/fifty-bobcats-jog.md new file mode 100644 index 00000000000..ba41f5dbb74 --- /dev/null +++ b/.changeset/fifty-bobcats-jog.md @@ -0,0 +1,14 @@ +--- +'@graphql-tools/executor': major +'@graphql-tools/utils': minor +--- + +Upgrade to non-duplicating Incremental Delivery format + +## Description + +GraphQL Incremental Delivery is moving to a [new response format without duplication](https://github.com/graphql/defer-stream-wg/discussions/69). + +This PR updates the executor within graphql-tools to follow the new format, a BREAKING CHANGE. + +Incremental Delivery has now been disabled for subscriptions, also a BREAKING CHANGE. The GraphQL Working Group has decided to disable incremental delivery support for subscriptions (1) to gather more information about use cases and (2) to explore how to interleave the incremental response streams generated from different source events into one overall subscription response stream. 
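To make the format change concrete, the following TypeScript sketch mirrors the updated test expectations later in this patch for a single deferred fragment on `hero` (field values are illustrative only, not a definitive wire contract): deferred work is announced up front in `pending` with an `id`, and later payloads reference that `id` instead of repeating `path`/`label` or re-sending fields that were already delivered.

// A minimal sketch of the new payload shape, assuming one deferred fragment on `hero`.
const initialResult = {
  data: { hero: { id: '1' } },
  pending: [{ id: '0', path: ['hero'] }],
  hasNext: true,
};

// Subsequent payload: `incremental` entries point back at the pending `id`
// (optionally narrowed with `subPath`), and `completed` closes that `id`.
const subsequentResult = {
  incremental: [{ id: '0', data: { name: 'Luke' } }],
  completed: [{ id: '0' }],
  hasNext: false,
};

The granular options exercised by the updated tests (`deduplicateDefers`, `sendIncrementalErrorsAsNull`, and `sendPathAndLabelOnIncremental`) allow opting back into the prior branching-style output, while `enableEarlyExecution` controls whether deferred work may start before it has been released as pending.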
diff --git a/packages/delegate/src/defaultMergedResolver.ts b/packages/delegate/src/defaultMergedResolver.ts index 4946f6d8ccc..95b2c017ff7 100644 --- a/packages/delegate/src/defaultMergedResolver.ts +++ b/packages/delegate/src/defaultMergedResolver.ts @@ -5,8 +5,8 @@ import { responsePathAsArray, SelectionSetNode, } from 'graphql'; -import { getResponseKeyFromInfo, isPromise } from '@graphql-tools/utils'; -import { createDeferred, DelegationPlanLeftOver, getPlanLeftOverFromParent } from './leftOver.js'; +import { createDeferred, getResponseKeyFromInfo, isPromise } from '@graphql-tools/utils'; +import { DelegationPlanLeftOver, getPlanLeftOverFromParent } from './leftOver.js'; import { getSubschema, getUnpathedErrors, diff --git a/packages/delegate/src/leftOver.ts b/packages/delegate/src/leftOver.ts index a2c6f39aa64..0aa85815d58 100644 --- a/packages/delegate/src/leftOver.ts +++ b/packages/delegate/src/leftOver.ts @@ -1,23 +1,8 @@ import { FieldNode } from 'graphql'; +import { Deferred } from '@graphql-tools/utils'; import { Subschema } from './Subschema.js'; import { DelegationPlanBuilder, ExternalObject } from './types.js'; -export type Deferred = PromiseWithResolvers; - -// TODO: Remove this after Node 22 -export function createDeferred(): Deferred { - if (Promise.withResolvers) { - return Promise.withResolvers(); - } - let resolve: (value: T | PromiseLike) => void; - let reject: (error: unknown) => void; - const promise = new Promise((_resolve, _reject) => { - resolve = _resolve; - reject = _reject; - }); - return { promise, resolve: resolve!, reject: reject! }; -} - export interface DelegationPlanLeftOver { unproxiableFieldNodes: Array; nonProxiableSubschemas: Array; diff --git a/packages/executor/src/execution/AccumulatorMap.ts b/packages/executor/src/execution/AccumulatorMap.ts new file mode 100644 index 00000000000..156fe71c207 --- /dev/null +++ b/packages/executor/src/execution/AccumulatorMap.ts @@ -0,0 +1,17 @@ +/** + * ES6 Map with additional `add` method to accumulate items. + */ +export class AccumulatorMap extends Map> { + get [Symbol.toStringTag]() { + return 'AccumulatorMap'; + } + + add(key: K, item: T): void { + const group = this.get(key); + if (group === undefined) { + this.set(key, [item]); + } else { + group.push(item); + } + } +} diff --git a/packages/executor/src/execution/BoxedPromiseOrValue.ts b/packages/executor/src/execution/BoxedPromiseOrValue.ts new file mode 100644 index 00000000000..630d1e6fcf8 --- /dev/null +++ b/packages/executor/src/execution/BoxedPromiseOrValue.ts @@ -0,0 +1,25 @@ +import { isPromise } from '@graphql-tools/utils'; +import type { MaybePromise } from '@graphql-tools/utils'; + +/** + * A BoxedPromiseOrValue is a container for a value or promise where the value + * will be updated when the promise resolves. + * + * A BoxedPromiseOrValue may only be used with promises whose possible + * rejection has already been handled, otherwise this will lead to unhandled + * promise rejections. 
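+ *
+ * An illustrative usage sketch (`loadValue` is a hypothetical async helper,
+ * not part of this module):
+ *
+ *     const boxed = new BoxedPromiseOrValue(loadValue());
+ *     // boxed.value starts out as the pending promise;
+ *     // once that promise resolves, boxed.value is replaced by the resolved
+ *     // value, so later readers can use it synchronously.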
+ * + * @internal + * */ +export class BoxedPromiseOrValue { + value: MaybePromise; + + constructor(value: MaybePromise) { + this.value = value; + if (isPromise(value)) { + value.then(resolved => { + this.value = resolved; + }); + } + } +} diff --git a/packages/executor/src/execution/IncrementalGraph.ts b/packages/executor/src/execution/IncrementalGraph.ts new file mode 100644 index 00000000000..83a6358cab0 --- /dev/null +++ b/packages/executor/src/execution/IncrementalGraph.ts @@ -0,0 +1,294 @@ +import type { GraphQLError } from 'graphql'; +import { createDeferred, isPromise } from '@graphql-tools/utils'; +import { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js'; +import { invariant } from './invariant.js'; +import type { + CompletedExecutionGroup, + DeferredFragmentRecord, + DeliveryGroup, + IncrementalDataRecord, + IncrementalDataRecordResult, + PendingExecutionGroup, + StreamItemRecord, + StreamRecord, + SuccessfulExecutionGroup, +} from './types.js'; +import { isDeferredFragmentRecord, isPendingExecutionGroup } from './types.js'; + +/** + * @internal + */ +export class IncrementalGraph { + private _rootNodes: Set; + + private _completedQueue: Array; + private _nextQueue: Array<(iterable: Iterable | undefined) => void>; + + constructor() { + this._rootNodes = new Set(); + this._completedQueue = []; + this._nextQueue = []; + } + + getNewPending( + incrementalDataRecords: ReadonlyArray, + ): ReadonlyArray { + const initialResultChildren = new Set(); + this._addIncrementalDataRecords(incrementalDataRecords, undefined, initialResultChildren); + return this._promoteNonEmptyToRoot(initialResultChildren); + } + + addCompletedSuccessfulExecutionGroup(successfulExecutionGroup: SuccessfulExecutionGroup): void { + const { pendingExecutionGroup, incrementalDataRecords } = successfulExecutionGroup; + + const deferredFragmentRecords = pendingExecutionGroup.deferredFragmentRecords; + + for (const deferredFragmentRecord of deferredFragmentRecords) { + const { pendingExecutionGroups, successfulExecutionGroups } = deferredFragmentRecord; + pendingExecutionGroups.delete(successfulExecutionGroup.pendingExecutionGroup); + successfulExecutionGroups.add(successfulExecutionGroup); + } + + if (incrementalDataRecords !== undefined) { + this._addIncrementalDataRecords(incrementalDataRecords, deferredFragmentRecords); + } + } + + *currentCompletedBatch(): Generator { + let completed; + while ((completed = this._completedQueue.shift()) !== undefined) { + yield completed; + } + if (this._rootNodes.size === 0) { + for (const resolve of this._nextQueue) { + resolve(undefined); + } + } + } + + nextCompletedBatch(): Promise | undefined> { + const { promise, resolve } = createDeferred< + Iterable | undefined + >(); + this._nextQueue.push(resolve); + return promise; + } + + abort(): void { + for (const resolve of this._nextQueue) { + resolve(undefined); + } + } + + hasNext(): boolean { + return this._rootNodes.size > 0; + } + + completeDeferredFragment(deferredFragmentRecord: DeferredFragmentRecord): + | { + newPending: ReadonlyArray; + successfulExecutionGroups: ReadonlyArray; + } + | undefined { + if ( + !this._rootNodes.has(deferredFragmentRecord) || + deferredFragmentRecord.pendingExecutionGroups.size > 0 + ) { + return; + } + const successfulExecutionGroups = Array.from(deferredFragmentRecord.successfulExecutionGroups); + this._rootNodes.delete(deferredFragmentRecord); + for (const successfulExecutionGroup of successfulExecutionGroups) { + for (const otherDeferredFragmentRecord of 
successfulExecutionGroup.pendingExecutionGroup + .deferredFragmentRecords) { + otherDeferredFragmentRecord.successfulExecutionGroups.delete(successfulExecutionGroup); + } + } + const newPending = this._promoteNonEmptyToRoot(deferredFragmentRecord.children); + return { newPending, successfulExecutionGroups }; + } + + removeDeferredFragment(deferredFragmentRecord: DeferredFragmentRecord): boolean { + if (!this._rootNodes.has(deferredFragmentRecord)) { + return false; + } + this._rootNodes.delete(deferredFragmentRecord); + return true; + } + + removeStream(streamRecord: StreamRecord): void { + this._rootNodes.delete(streamRecord); + } + + private _addIncrementalDataRecords( + incrementalDataRecords: ReadonlyArray, + parents: ReadonlyArray | undefined, + initialResultChildren?: Set | undefined, + ): void { + for (const incrementalDataRecord of incrementalDataRecords) { + if (isPendingExecutionGroup(incrementalDataRecord)) { + for (const deferredFragmentRecord of incrementalDataRecord.deferredFragmentRecords) { + this._addDeferredFragment(deferredFragmentRecord, initialResultChildren); + deferredFragmentRecord.pendingExecutionGroups.add(incrementalDataRecord); + } + if (this._hasPendingFragment(incrementalDataRecord)) { + this._onExecutionGroup(incrementalDataRecord); + } + } else if (parents === undefined) { + invariant(initialResultChildren !== undefined); + initialResultChildren.add(incrementalDataRecord); + } else { + for (const parent of parents) { + this._addDeferredFragment(parent, initialResultChildren); + parent.children.add(incrementalDataRecord); + } + } + } + } + + private _promoteNonEmptyToRoot( + maybeEmptyNewPending: Set, + ): ReadonlyArray { + const newPending: Array = []; + for (const deliveryGroup of maybeEmptyNewPending) { + if (isDeferredFragmentRecord(deliveryGroup)) { + if (deliveryGroup.pendingExecutionGroups.size > 0) { + deliveryGroup.setAsPending(); + for (const pendingExecutionGroup of deliveryGroup.pendingExecutionGroups) { + if (!this._hasPendingFragment(pendingExecutionGroup)) { + this._onExecutionGroup(pendingExecutionGroup); + } + } + this._rootNodes.add(deliveryGroup); + newPending.push(deliveryGroup); + continue; + } + for (const child of deliveryGroup.children) { + maybeEmptyNewPending.add(child); + } + } else { + this._rootNodes.add(deliveryGroup); + newPending.push(deliveryGroup); + + this._onStreamItems(deliveryGroup); + } + } + return newPending; + } + + private _hasPendingFragment(pendingExecutionGroup: PendingExecutionGroup): boolean { + return pendingExecutionGroup.deferredFragmentRecords.some(deferredFragmentRecord => + this._rootNodes.has(deferredFragmentRecord), + ); + } + + private _addDeferredFragment( + deferredFragmentRecord: DeferredFragmentRecord, + deliveryGroups: Set | undefined, + ): void { + if (this._rootNodes.has(deferredFragmentRecord)) { + return; + } + const parent = deferredFragmentRecord.parent; + if (parent === undefined) { + invariant(deliveryGroups !== undefined); + deliveryGroups.add(deferredFragmentRecord); + return; + } + parent.children.add(deferredFragmentRecord); + this._addDeferredFragment(parent, deliveryGroups); + } + + private _onExecutionGroup(pendingExecutionGroup: PendingExecutionGroup): void { + const result = (pendingExecutionGroup.result as BoxedPromiseOrValue) + .value; + if (isPromise(result)) { + result.then(resolved => this._enqueue(resolved)); + } else { + this._enqueue(result); + } + } + + private async _onStreamItems(streamRecord: StreamRecord): Promise { + let items: Array = []; + let errors: Array = []; + 
let incrementalDataRecords: Array = []; + const streamItemQueue = streamRecord.streamItemQueue; + let streamItemRecord: StreamItemRecord | undefined; + while ((streamItemRecord = streamItemQueue.shift()) !== undefined) { + let result = + streamItemRecord instanceof BoxedPromiseOrValue + ? streamItemRecord.value + : streamItemRecord().value; + if (isPromise(result)) { + if (items.length > 0) { + this._enqueue({ + streamRecord, + result: + // TODO add additional test case or rework for coverage + errors.length > 0 /* c8 ignore start */ + ? { items, errors } /* c8 ignore stop */ + : { items }, + incrementalDataRecords, + }); + items = []; + errors = []; + incrementalDataRecords = []; + } + result = await result; + // wait an additional tick to coalesce resolving additional promises + // within the queue + await Promise.resolve(); + } + if (result.item === undefined) { + if (items.length > 0) { + this._enqueue({ + streamRecord, + result: errors.length > 0 ? { items, errors } : { items }, + incrementalDataRecords, + }); + } + this._enqueue( + result.errors === undefined + ? { streamRecord } + : { + streamRecord, + errors: result.errors, + }, + ); + return; + } + items.push(result.item); + if (result.errors !== undefined) { + errors.push(...result.errors); + } + if (result.incrementalDataRecords !== undefined) { + incrementalDataRecords.push(...result.incrementalDataRecords); + } + } + } + + private *_yieldCurrentCompletedIncrementalData( + first: IncrementalDataRecordResult, + ): Generator { + yield first; + let completed; + while ((completed = this._completedQueue.shift()) !== undefined) { + yield completed; + } + if (this._rootNodes.size === 0) { + for (const resolve of this._nextQueue) { + resolve(undefined); + } + } + } + + private _enqueue(completed: IncrementalDataRecordResult): void { + const next = this._nextQueue.shift(); + if (next !== undefined) { + next(this._yieldCurrentCompletedIncrementalData(completed)); + return; + } + this._completedQueue.push(completed); + } +} diff --git a/packages/executor/src/execution/IncrementalPublisher.ts b/packages/executor/src/execution/IncrementalPublisher.ts new file mode 100644 index 00000000000..5f3fe72cd87 --- /dev/null +++ b/packages/executor/src/execution/IncrementalPublisher.ts @@ -0,0 +1,434 @@ +import type { GraphQLError } from 'graphql'; +import { addPath, pathToArray } from '@graphql-tools/utils'; +import { IncrementalGraph } from './IncrementalGraph.js'; +import { invariant } from './invariant.js'; +import type { + CancellableStreamRecord, + CompletedExecutionGroup, + CompletedResult, + DeferredFragmentRecord, + DeliveryGroup, + IncrementalDataRecord, + IncrementalDataRecordResult, + IncrementalDeferResult, + IncrementalExecutionResults, + IncrementalResult, + IncrementalStreamResult, + InitialIncrementalExecutionResult, + PendingResult, + StreamItemsResult, + SubsequentIncrementalExecutionResult, +} from './types.js'; +import { + isCancellableStreamRecord, + isCompletedExecutionGroup, + isFailedExecutionGroup, +} from './types.js'; + +export function buildIncrementalResponse( + context: IncrementalPublisherContext, + result: TData, + errors: ReadonlyArray | undefined, + incrementalDataRecords: ReadonlyArray, +): IncrementalExecutionResults { + const incrementalPublisher = new IncrementalPublisher(context); + return incrementalPublisher.buildResponse(result, errors, incrementalDataRecords); +} + +interface IncrementalPublisherContext { + sendIncrementalErrorsAsNull: boolean; + sendPathAndLabelOnIncremental: boolean; + signal: AbortSignal 
| undefined; + cancellableStreams: Set | undefined; +} + +interface SubsequentIncrementalExecutionResultContext { + pending: Array; + incremental: Array>; + completed: Array; +} + +/** + * The IncrementalPublisherState Enum tracks the state of the IncrementalPublisher, which is initialized to + * "Started". When there are no more incremental results to publish, the state is set to "Completed". On the + * next call to next, clean-up is potentially performed and the state is set to "Finished". + * + * If the IncrementalPublisher is ended early, it may be advanced directly from "Started" to "Finished". + */ +enum IncrementalPublisherState { + Started = 1, + Completed = 2, + Finished = 3, +} + +/** + * This class is used to publish incremental results to the client, enabling semi-concurrent + * execution while preserving result order. + * + * @internal + */ +class IncrementalPublisher { + private _context: IncrementalPublisherContext; + private _nextId: number; + private _incrementalGraph: IncrementalGraph; + + constructor(context: IncrementalPublisherContext) { + this._context = context; + this._nextId = 0; + this._incrementalGraph = new IncrementalGraph(); + } + + buildResponse( + data: TData, + errors: ReadonlyArray | undefined, + incrementalDataRecords: ReadonlyArray, + ): IncrementalExecutionResults { + const newPending = this._incrementalGraph.getNewPending(incrementalDataRecords); + + const pending = this._pendingSourcesToResults(newPending); + + const initialResult: InitialIncrementalExecutionResult = + errors === undefined + ? { data, pending, hasNext: true } + : { errors, data, pending, hasNext: true }; + + return { + initialResult, + subsequentResults: this._subscribe(), + }; + } + + private _pendingSourcesToResults(newPending: ReadonlyArray): Array { + const pendingResults: Array = []; + for (const pendingSource of newPending) { + const id = String(this._getNextId()); + pendingSource.id = id; + const pendingResult: PendingResult = { + id, + path: pathToArray(pendingSource.path), + }; + if (pendingSource.label !== undefined) { + pendingResult.label = pendingSource.label; + } + pendingResults.push(pendingResult); + } + return pendingResults; + } + + private _getNextId(): string { + return String(this._nextId++); + } + + private _subscribe(): AsyncGenerator< + SubsequentIncrementalExecutionResult, + void, + void + > { + let incrementalPublisherState: IncrementalPublisherState = IncrementalPublisherState.Started; + + const _finish = async (): Promise => { + incrementalPublisherState = IncrementalPublisherState.Finished; + this._incrementalGraph.abort(); + await this._returnAsyncIterators(); + }; + + this._context.signal?.addEventListener('abort', () => { + this._incrementalGraph.abort(); + }); + + const _next = async (): Promise< + IteratorResult, void> + > => { + switch (incrementalPublisherState) { + case IncrementalPublisherState.Finished: { + return { value: undefined, done: true }; + } + case IncrementalPublisherState.Completed: { + await _finish(); + return { value: undefined, done: true }; + } + case IncrementalPublisherState.Started: { + // continue + } + } + + const context: SubsequentIncrementalExecutionResultContext = { + pending: [], + incremental: [], + completed: [], + }; + + let batch: Iterable | undefined = + this._incrementalGraph.currentCompletedBatch(); + do { + for (const completedResult of batch) { + this._handleCompletedIncrementalData(completedResult, context); + } + + const { incremental, completed } = context; + if (incremental.length > 0 || completed.length > 
0) { + const hasNext = this._incrementalGraph.hasNext(); + + if (!hasNext) { + incrementalPublisherState = IncrementalPublisherState.Completed; + } + + const subsequentIncrementalExecutionResult: SubsequentIncrementalExecutionResult = + { + hasNext, + }; + + const pending = context.pending; + if (pending.length > 0) { + subsequentIncrementalExecutionResult.pending = pending; + } + if (incremental.length > 0) { + subsequentIncrementalExecutionResult.incremental = incremental; + } + if (completed.length > 0) { + subsequentIncrementalExecutionResult.completed = completed; + } + + return { value: subsequentIncrementalExecutionResult, done: false }; + } + + batch = await this._incrementalGraph.nextCompletedBatch(); + } while (batch !== undefined); + + if (this._context.signal?.aborted) { + throw this._context.signal.reason; + } + + return { value: undefined, done: true }; + }; + + const _return = async (): Promise< + IteratorResult, void> + > => { + await _finish(); + return { value: undefined, done: true }; + }; + + const _throw = async ( + error?: unknown, + ): Promise, void>> => { + await _finish(); + return Promise.reject(error); + }; + + return { + [Symbol.asyncIterator]() { + return this; + }, + next: _next, + return: _return, + throw: _throw, + }; + } + + private _handleCompletedIncrementalData( + completedIncrementalData: IncrementalDataRecordResult, + context: SubsequentIncrementalExecutionResultContext, + ): void { + if (isCompletedExecutionGroup(completedIncrementalData)) { + this._handleCompletedExecutionGroup(completedIncrementalData, context); + } else { + this._handleCompletedStreamItems(completedIncrementalData, context); + } + } + + private _handleCompletedExecutionGroup( + completedExecutionGroup: CompletedExecutionGroup, + context: SubsequentIncrementalExecutionResultContext, + ): void { + if (isFailedExecutionGroup(completedExecutionGroup)) { + for (const deferredFragmentRecord of completedExecutionGroup.pendingExecutionGroup + .deferredFragmentRecords) { + const id = deferredFragmentRecord.id; + if (!this._incrementalGraph.removeDeferredFragment(deferredFragmentRecord)) { + // This can occur if multiple deferred grouped field sets error for a fragment. 
+ continue; + } + invariant(id !== undefined); + if (this._context.sendIncrementalErrorsAsNull) { + const incrementalEntry: IncrementalDeferResult = { + id, + data: null, + errors: completedExecutionGroup.errors, + }; + if (this._context.sendPathAndLabelOnIncremental) { + const { path, label } = deferredFragmentRecord; + incrementalEntry.path = pathToArray(path); + if (label !== undefined) { + incrementalEntry.label = label; + } + } + context.incremental.push(incrementalEntry); + context.completed.push({ id }); + } else { + context.completed.push({ + id, + errors: completedExecutionGroup.errors, + }); + } + } + return; + } + + this._incrementalGraph.addCompletedSuccessfulExecutionGroup(completedExecutionGroup); + + for (const deferredFragmentRecord of completedExecutionGroup.pendingExecutionGroup + .deferredFragmentRecords) { + const completion = this._incrementalGraph.completeDeferredFragment(deferredFragmentRecord); + if (completion === undefined) { + continue; + } + const id = deferredFragmentRecord.id; + invariant(id !== undefined); + const incremental = context.incremental; + const { newPending, successfulExecutionGroups } = completion; + context.pending.push(...this._pendingSourcesToResults(newPending)); + for (const successfulExecutionGroup of successfulExecutionGroups) { + const { bestId, subPath } = this._getBestIdAndSubPath( + id, + deferredFragmentRecord, + successfulExecutionGroup, + ); + const incrementalEntry: IncrementalDeferResult = { + ...successfulExecutionGroup.result, + id: bestId, + }; + if (this._context.sendPathAndLabelOnIncremental) { + const { path, label } = deferredFragmentRecord; + incrementalEntry.path = pathToArray(path); + if (label !== undefined) { + incrementalEntry.label = label; + } + } + if (subPath !== undefined) { + incrementalEntry.subPath = subPath; + } + incremental.push(incrementalEntry); + } + context.completed.push({ id }); + } + } + + private _handleCompletedStreamItems( + streamItemsResult: StreamItemsResult, + context: SubsequentIncrementalExecutionResultContext, + ): void { + const streamRecord = streamItemsResult.streamRecord; + const id = streamRecord.id; + invariant(id !== undefined); + if (streamItemsResult.errors !== undefined) { + if (this._context.sendIncrementalErrorsAsNull) { + const incrementalEntry: IncrementalStreamResult = { + items: null, + id, + errors: streamItemsResult.errors, + }; + if (this._context.sendPathAndLabelOnIncremental) { + const { path, label, index } = streamRecord; + incrementalEntry.path = pathToArray(addPath(path, index, undefined)); + if (label !== undefined) { + incrementalEntry.label = label; + } + } + context.incremental.push(incrementalEntry); + context.completed.push({ id }); + } else { + context.completed.push({ + id, + errors: streamItemsResult.errors, + }); + } + this._incrementalGraph.removeStream(streamRecord); + if (isCancellableStreamRecord(streamRecord)) { + invariant(this._context.cancellableStreams !== undefined); + this._context.cancellableStreams.delete(streamRecord); + streamRecord.earlyReturn().catch(() => { + /* c8 ignore next 1 */ + // ignore error + }); + } + } else if (streamItemsResult.result === undefined) { + context.completed.push({ id }); + this._incrementalGraph.removeStream(streamRecord); + if (isCancellableStreamRecord(streamRecord)) { + invariant(this._context.cancellableStreams !== undefined); + this._context.cancellableStreams.delete(streamRecord); + } + } else { + const bareResult = streamItemsResult.result; + const incrementalEntry: IncrementalStreamResult = { + id, + 
...bareResult, + }; + if (this._context.sendPathAndLabelOnIncremental) { + const { path, label, index } = streamRecord; + incrementalEntry.path = pathToArray(addPath(path, index, undefined)); + streamRecord.index += bareResult.items.length; + if (label !== undefined) { + incrementalEntry.label = label; + } + } + context.incremental.push(incrementalEntry); + + const incrementalDataRecords = streamItemsResult.incrementalDataRecords; + if (incrementalDataRecords !== undefined) { + const newPending = this._incrementalGraph.getNewPending(incrementalDataRecords); + context.pending.push(...this._pendingSourcesToResults(newPending)); + } + } + } + + private _getBestIdAndSubPath( + initialId: string, + initialDeferredFragmentRecord: DeferredFragmentRecord, + completedExecutionGroup: CompletedExecutionGroup, + ): { bestId: string; subPath: ReadonlyArray | undefined } { + let maxLength = pathToArray(initialDeferredFragmentRecord.path).length; + let bestId = initialId; + + for (const deferredFragmentRecord of completedExecutionGroup.pendingExecutionGroup + .deferredFragmentRecords) { + if (deferredFragmentRecord === initialDeferredFragmentRecord) { + continue; + } + const id = deferredFragmentRecord.id; + // TODO: add test case for when an fragment has not been released, but might be processed for the shortest path. + /* c8 ignore next 3 */ + if (id === undefined) { + continue; + } + const fragmentPath = pathToArray(deferredFragmentRecord.path); + const length = fragmentPath.length; + if (length > maxLength) { + maxLength = length; + bestId = id; + } + } + const subPath = completedExecutionGroup.path.slice(maxLength); + return { + bestId, + subPath: subPath.length > 0 ? subPath : undefined, + }; + } + + private async _returnAsyncIterators(): Promise { + await this._incrementalGraph.abort(); + + const cancellableStreams = this._context.cancellableStreams; + if (cancellableStreams === undefined) { + return; + } + const promises: Array> = []; + for (const streamRecord of cancellableStreams) { + if (streamRecord.earlyReturn !== undefined) { + promises.push(streamRecord.earlyReturn()); + } + } + await Promise.all(promises); + } +} diff --git a/packages/executor/src/execution/__tests__/abort-signal.test.ts b/packages/executor/src/execution/__tests__/abort-signal.test.ts index 920d7c95165..a0c7fdee6c0 100644 --- a/packages/executor/src/execution/__tests__/abort-signal.test.ts +++ b/packages/executor/src/execution/__tests__/abort-signal.test.ts @@ -1,7 +1,6 @@ import { parse } from 'graphql'; -import { createDeferred } from '@graphql-tools/delegate'; import { makeExecutableSchema } from '@graphql-tools/schema'; -import { isAsyncIterable } from '@graphql-tools/utils'; +import { createDeferred, isAsyncIterable } from '@graphql-tools/utils'; import { Repeater } from '@repeaterjs/repeater'; import { assertAsyncIterable } from '../../../../loaders/url/tests/test-utils'; import { normalizedExecutor } from '../normalizedExecutor'; @@ -143,7 +142,7 @@ describe('Abort Signal', () => { Mutation: { first() { didInvokeFirstFn = true; - return true; + return Promise.resolve(true); }, second() { didInvokeSecondFn = true; @@ -168,7 +167,7 @@ describe('Abort Signal', () => { `), signal: controller.signal, }); - expect(result$).rejects.toMatchInlineSnapshot(`DOMException {}`); + await expect(result$).rejects.toMatchInlineSnapshot(`DOMException {}`); expect(didInvokeFirstFn).toBe(true); expect(didInvokeSecondFn).toBe(true); expect(didInvokeThirdFn).toBe(false); @@ -275,6 +274,7 @@ describe('Abort Signal', () => { data: { 
counter: [], }, + pending: [{ id: '0', path: ['counter'] }], hasNext: true, }, }); @@ -356,6 +356,10 @@ describe('Abort Signal', () => { counter1: [], counter2: [], }, + pending: [ + { id: '0', path: ['counter1'] }, + { id: '1', path: ['counter2'] }, + ], hasNext: true, }, }); @@ -433,6 +437,14 @@ describe('Abort Signal', () => { "root": {}, }, "hasNext": true, + "pending": [ + { + "id": "0", + "path": [ + "root", + ], + }, + ], } `); const next$ = iterator.next(); @@ -442,6 +454,89 @@ describe('Abort Signal', () => { await expect(next$).rejects.toThrow('This operation was aborted'); expect(bResolverGotInvoked).toBe(false); }); + it('stops pending stream execution for never-returning incremental delivery (@defer)', async () => { + const aResolverGotInvokedD = createDeferred(); + const requestGotCancelledD = createDeferred(); + let bResolverGotInvoked = false; + + const schema = makeExecutableSchema({ + typeDefs: /* GraphQL */ ` + type Query { + root: A! + } + type A { + a: B! + } + type B { + b: String + } + `, + resolvers: { + Query: { + async root() { + return {}; + }, + }, + A: { + async a() { + aResolverGotInvokedD.resolve(); + await requestGotCancelledD.promise; + return {}; + }, + }, + B: { + b() { + bResolverGotInvoked = true; + return new Promise(() => {}); + }, + }, + }, + }); + const controller = new AbortController(); + const result = await normalizedExecutor({ + schema, + document: parse(/* GraphQL */ ` + query { + root { + ... @defer { + a { + b + } + } + } + } + `), + signal: controller.signal, + }); + + if (!isAsyncIterable(result)) { + throw new Error('Result is not an async iterable'); + } + + const iterator = result[Symbol.asyncIterator](); + const next = await iterator.next(); + expect(next.value).toMatchInlineSnapshot(` +{ + "data": { + "root": {}, + }, + "hasNext": true, + "pending": [ + { + "id": "0", + "path": [ + "root", + ], + }, + ], +} +`); + const next$ = iterator.next(); + await aResolverGotInvokedD.promise; + controller.abort(); + await expect(next$).rejects.toThrow('This operation was aborted'); + expect(bResolverGotInvoked).toBe(false); + }); it('stops promise execution', async () => { const controller = new AbortController(); const d = createDeferred(); diff --git a/packages/executor/src/execution/__tests__/backpressure.test.ts b/packages/executor/src/execution/__tests__/backpressure.test.ts index 9d8b8c78ec7..9d2124979d1 100644 --- a/packages/executor/src/execution/__tests__/backpressure.test.ts +++ b/packages/executor/src/execution/__tests__/backpressure.test.ts @@ -69,6 +69,7 @@ describe('Defer Stream cancellation', () => { data: { countdownStream: [], }, + pending: [{ id: '0', path: ['countdownStream'] }], hasNext: true, }); break; @@ -93,6 +94,7 @@ describe('Defer Stream cancellation', () => { data: { countdownStream: [], }, + pending: [{ id: '0', path: ['countdownStream'] }], hasNext: true, }); break; @@ -120,6 +122,7 @@ describe('Defer Stream cancellation', () => { data: { countdownStream: [], }, + pending: [{ id: '0', path: ['countdownStream'] }], hasNext: true, }); break; @@ -128,7 +131,7 @@ describe('Defer Stream cancellation', () => { incremental: [ { items: [3], - path: ['countdownStream', 0], + id: '0', }, ], hasNext: true, @@ -158,6 +161,7 @@ describe('Defer Stream cancellation', () => { data: { countdownStream: [3], }, + pending: [{ id: '0', path: ['countdownStream'] }], hasNext: true, }); break; diff --git a/packages/executor/src/execution/__tests__/defer-test.ts b/packages/executor/src/execution/__tests__/defer-test.ts index 
bf577a7320a..d58b332db18 100644 --- a/packages/executor/src/execution/__tests__/defer-test.ts +++ b/packages/executor/src/execution/__tests__/defer-test.ts @@ -8,22 +8,20 @@ import { GraphQLString, parse, } from 'graphql'; +import { createDeferred } from '@graphql-tools/utils'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; import { resolveOnNextTick } from '../../__testUtils__/resolveOnNextTick.js'; +import { execute } from '../execute.js'; import type { InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, -} from '../execute.js'; -import { execute } from '../execute.js'; +} from '../types.js'; const friendType = new GraphQLObjectType({ fields: { id: { type: GraphQLID }, name: { type: GraphQLString }, - promiseNonNullErrorField: { - type: new GraphQLNonNull(GraphQLString), - resolve: () => Promise.resolve(null), - }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, }, name: 'Friend', }); @@ -34,64 +32,118 @@ const friends = [ { name: 'C-3PO', id: 4 }, ]; +const deeperObject = new GraphQLObjectType({ + fields: { + foo: { type: GraphQLString }, + bar: { type: GraphQLString }, + baz: { type: GraphQLString }, + bak: { type: GraphQLString }, + }, + name: 'DeeperObject', +}); + +const nestedObject = new GraphQLObjectType({ + fields: { + deeperObject: { type: deeperObject }, + name: { type: GraphQLString }, + }, + name: 'NestedObject', +}); + +const anotherNestedObject = new GraphQLObjectType({ + fields: { + deeperObject: { type: deeperObject }, + }, + name: 'AnotherNestedObject', +}); + +const hero = { + name: 'Luke', + id: 1, + friends, + nestedObject, + anotherNestedObject, +}; + +const c = new GraphQLObjectType({ + fields: { + d: { type: GraphQLString }, + nonNullErrorField: { type: new GraphQLNonNull(GraphQLString) }, + }, + name: 'c', +}); + +const e = new GraphQLObjectType({ + fields: { + f: { type: GraphQLString }, + }, + name: 'e', +}); + +const b = new GraphQLObjectType({ + fields: { + c: { type: c }, + e: { type: e }, + }, + name: 'b', +}); + +const a = new GraphQLObjectType({ + fields: { + b: { type: b }, + someField: { type: GraphQLString }, + }, + name: 'a', +}); + +const g = new GraphQLObjectType({ + fields: { + h: { type: GraphQLString }, + }, + name: 'g', +}); + const heroType = new GraphQLObjectType({ fields: { id: { type: GraphQLID }, name: { type: GraphQLString }, - slowField: { - type: GraphQLString, - resolve: async () => { - await resolveOnNextTick(); - return 'slow'; - }, - }, - errorField: { - type: GraphQLString, - resolve: () => { - throw new Error('bad'); - }, - }, - nonNullErrorField: { - type: new GraphQLNonNull(GraphQLString), - resolve: () => null, - }, - promiseNonNullErrorField: { - type: new GraphQLNonNull(GraphQLString), - resolve: () => Promise.resolve(null), - }, + nonNullName: { type: new GraphQLNonNull(GraphQLString) }, friends: { type: new GraphQLList(friendType), - resolve: () => friends, - }, - asyncFriends: { - type: new GraphQLList(friendType), - async *resolve() { - yield await Promise.resolve(friends[0]); - }, }, + nestedObject: { type: nestedObject }, + anotherNestedObject: { type: anotherNestedObject }, }, name: 'Hero', }); -const hero = { name: 'Luke', id: 1 }; - const query = new GraphQLObjectType({ fields: { hero: { type: heroType, - resolve: () => hero, }, + a: { type: a }, + g: { type: g }, }, name: 'Query', }); const schema = new GraphQLSchema({ query }); -async function complete(document: DocumentNode) { +async function complete( + document: DocumentNode, + rootValue: unknown = { hero }, + 
enableEarlyExecution = false, + useLatestFormat = true, +) { const result = await execute({ schema, document, - rootValue: {}, + rootValue, + enableEarlyExecution, + deduplicateDefers: useLatestFormat, + sendIncrementalErrorsAsNull: !useLatestFormat, + sendPathAndLabelOnIncremental: !useLatestFormat, }); if ('initialResult' in result) { @@ -107,7 +159,7 @@ async function complete(document: DocumentNode) { describe('Execute: defer directive', () => { it('Can defer fragments containing scalar types', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -115,39 +167,37 @@ describe('Execute: defer directive', () => { } } fragment NameFragment on Hero { - id name } `); - const result = await complete(document); - expect(result).toEqual([ + expectJSON(result).toDeepEqual([ { data: { hero: { id: '1', }, }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { incremental: [ { data: { - id: '1', name: 'Luke', }, - path: ['hero'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can disable defer using if argument', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -158,7 +208,6 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); expectJSON(result).toDeepEqual({ @@ -170,9 +219,8 @@ describe('Execute: defer directive', () => { }, }); }); - it('Does not disable defer with null if argument', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery($shouldDefer: Boolean) { hero { id @@ -183,27 +231,139 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { incremental: [ { data: { name: 'Luke' }, - path: ['hero'], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Does not execute deferred fragments early when not specified', async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const order: Array = []; + const result = await complete(document, { + hero: { + ...hero, + id: async () => { + await resolveOnNextTick(); + await resolveOnNextTick(); + order.push('slow-id'); + return hero.id; + }, + name: () => { + order.push('fast-name'); + return hero.name; + }, + }, + }); + + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + id: '1', + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + name: 'Luke', + }, + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); + expect(order).toEqual(['slow-id', 'fast-name']); }); + it('Does execute deferred fragments early when specified', async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const order: Array = []; + const result = await complete( + document, + { + hero: { + ...hero, + id: async () => { + await resolveOnNextTick(); + await resolveOnNextTick(); + order.push('slow-id'); + return hero.id; + }, + name: () => { + order.push('fast-name'); + return hero.name; + }, + }, + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + id: '1', + }, + }, + pending: [{ id: '0', path: 
['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + name: 'Luke', + }, + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual(['fast-name', 'slow-id']); + }); it('Can defer fragments on the top level Query field', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { ...QueryFragment @defer(label: "DeferQuery") } @@ -213,12 +373,12 @@ describe('Execute: defer directive', () => { } } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { data: {}, + pending: [{ id: '0', path: [], label: 'DeferQuery' }], hasNext: true, }, { @@ -229,32 +389,38 @@ describe('Execute: defer directive', () => { id: '1', }, }, - path: [], - label: 'DeferQuery', + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can defer fragments with errors on the top level Query field', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { ...QueryFragment @defer(label: "DeferQuery") } fragment QueryFragment on Query { hero { - errorField + name } } `); - - const result = await complete(document); + const result = await complete(document, { + hero: { + ...hero, + name: () => { + throw new Error('bad'); + }, + }, + }); expectJSON(result).toDeepEqual([ { data: {}, + pending: [{ id: '0', path: [], label: 'DeferQuery' }], hasNext: true, }, { @@ -262,35 +428,33 @@ describe('Execute: defer directive', () => { { data: { hero: { - errorField: null, + name: null, }, }, errors: [ { message: 'bad', locations: [{ line: 7, column: 11 }], - path: ['hero', 'errorField'], + path: ['hero', 'name'], }, ], - path: [], - label: 'DeferQuery', + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can defer a fragment within an already deferred fragment', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { - id ...TopFragment @defer(label: "DeferTop") } } fragment TopFragment on Hero { - name + id ...NestedFragment @defer(label: "DeferNested") } fragment NestedFragment on Hero { @@ -299,45 +463,41 @@ describe('Execute: defer directive', () => { } } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { data: { - hero: { - id: '1', - }, + hero: {}, }, + pending: [{ id: '0', path: ['hero'], label: 'DeferTop' }], hasNext: true, }, { + pending: [{ id: '1', path: ['hero'], label: 'DeferNested' }], incremental: [ { data: { - friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + id: '1', }, - path: ['hero'], - label: 'DeferNested', + id: '0', }, { data: { - name: 'Luke', + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], }, - path: ['hero'], - label: 'DeferTop', + id: '1', }, ], + completed: [{ id: '0' }, { id: '1' }], hasNext: false, }, ]); }); - it('Can defer a fragment that is also not deferred, deferred fragment is first', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { - id ...TopFragment @defer(label: "DeferTop") ...TopFragment } @@ -346,38 +506,19 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); - expectJSON(result).toDeepEqual([ - { - data: { - hero: { - id: '1', - name: 'Luke', - }, + expectJSON(result).toDeepEqual({ + data: { + hero: { + name: 'Luke', }, - hasNext: true, - }, - { - incremental: [ - { - data: { - name: 'Luke', - }, - path: ['hero'], - label: 'DeferTop', - }, - ], - 
hasNext: false, }, - ]); + }); }); - it('Can defer a fragment that is also not deferred, non-deferred fragment is first', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { - id ...TopFragment ...TopFragment @defer(label: "DeferTop") } @@ -386,35 +527,18 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); - expectJSON(result).toDeepEqual([ - { - data: { - hero: { - id: '1', - name: 'Luke', - }, + expectJSON(result).toDeepEqual({ + data: { + hero: { + name: 'Luke', }, - hasNext: true, - }, - { - incremental: [ - { - data: { - name: 'Luke', - }, - path: ['hero'], - label: 'DeferTop', - }, - ], - hasNext: false, }, - ]); + }); }); it('Can defer an inline fragment', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -424,102 +548,1973 @@ describe('Execute: defer directive', () => { } } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'], label: 'InlineDeferred' }], hasNext: true, }, { - incremental: [{ data: { name: 'Luke' }, path: ['hero'], label: 'InlineDeferred' }], + incremental: [{ data: { name: 'Luke' }, id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles errors thrown in deferred fragments', async () => { - const document = parse(/* GraphQL */ ` + it('Does not emit empty defer fragments', async () => { + const document = parse(` query HeroNameQuery { hero { - id - ...NameFragment @defer + ... @defer { + name @skip(if: true) + } } } - fragment NameFragment on Hero { - errorField + fragment TopFragment on Hero { + name } `); + const result = await complete(document); + expectJSON(result).toDeepEqual({ + data: { + hero: {}, + }, + }); + }); + it('Emits children of empty defer fragments', async () => { + const document = parse(` + query HeroNameQuery { + hero { + ... @defer { + ... @defer { + name + } + } + } + } + `); const result = await complete(document); expectJSON(result).toDeepEqual([ { - data: { hero: { id: '1' } }, + data: { + hero: {}, + }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { - incremental: [ - { - data: { errorField: null }, - path: ['hero'], - errors: [ - { - message: 'bad', - locations: [{ line: 9, column: 9 }], - path: ['hero', 'errorField'], - }, - ], - }, - ], + incremental: [{ data: { name: 'Luke' }, id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles non-nullable errors thrown in deferred fragments', async () => { - const document = parse(/* GraphQL */ ` + it('Can separately emit defer fragments with different labels with varying fields', async () => { + const document = parse(` query HeroNameQuery { hero { - id - ...NameFragment @defer + ... @defer(label: "DeferID") { + id + } + ... 
@defer(label: "DeferName") { + name + } } } - fragment NameFragment on Hero { - nonNullErrorField - } `); - const result = await complete(document); expectJSON(result).toDeepEqual([ { - data: { hero: { id: '1' } }, + data: { + hero: {}, + }, + pending: [ + { id: '0', path: ['hero'], label: 'DeferID' }, + { id: '1', path: ['hero'], label: 'DeferName' }, + ], hasNext: true, }, { incremental: [ { - data: null, + data: { + id: '1', + }, + id: '0', + }, + { + data: { + name: 'Luke', + }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Separately emits defer fragments with different labels with varying subfields', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [], label: 'DeferID' }, + { id: '1', path: [], label: 'DeferName' }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: '0', + }, + { + data: { id: '1' }, + id: '0', + subPath: ['hero'], + }, + { + data: { name: 'Luke' }, + id: '1', + subPath: ['hero'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Separately emits defer fragments with different labels with varying subfields that return promises', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await complete(document, { + hero: { + id: () => Promise.resolve('1'), + name: () => Promise.resolve('Luke'), + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [], label: 'DeferID' }, + { id: '1', path: [], label: 'DeferName' }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: '0', + }, + { + data: { id: '1' }, + id: '0', + subPath: ['hero'], + }, + { + data: { name: 'Luke' }, + id: '1', + subPath: ['hero'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Separately emits defer fragments with varying subfields of same priorities but different level of defers', async () => { + const document = parse(` + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [ + { id: '0', path: ['hero'], label: 'DeferID' }, + { id: '1', path: [], label: 'DeferName' }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { + id: '1', + }, + id: '0', + }, + { + data: { + name: 'Luke', + }, + id: '1', + subPath: ['hero'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Separately emits nested defer fragments with varying subfields of same priorities but different level of defers', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer(label: "DeferName") { + hero { + name + ... 
@defer(label: "DeferID") { + id + } + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [{ id: '0', path: [], label: 'DeferName' }], + hasNext: true, + }, + { + pending: [{ id: '1', path: ['hero'], label: 'DeferID' }], + incremental: [ + { + data: { + hero: { + name: 'Luke', + }, + }, + id: '0', + }, + { + data: { + id: '1', + }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Initiates deferred grouped field sets only if they have been released as pending', async () => { + const document = parse(` + query { + ... @defer { + a { + ... @defer { + b { + c { d } + } + } + } + } + ... @defer { + a { + someField + ... @defer { + b { + e { f } + } + } + } + } + } + `); + + const { promise: slowFieldPromise, resolve: resolveSlowField } = createDeferred(); + let cResolverCalled = false; + let eResolverCalled = false; + const executeResult = execute({ + schema, + document, + rootValue: { + a: { + someField: slowFieldPromise, + b: { + c: () => { + cResolverCalled = true; + return { d: 'd' }; + }, + e: () => { + eResolverCalled = true; + return { f: 'f' }; + }, + }, + }, + }, + enableEarlyExecution: false, + }); + + expect('initialResult' in executeResult).toBeTruthy(); + + // @ts-expect-error once we assert that initialResult is in executeResult then it should work fine + const result1 = executeResult.initialResult; + expectJSON(result1).toDeepEqual({ + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }); + + // @ts-expect-error once we assert that initialResult is in executeResult then it should work fine + const iterator = executeResult.subsequentResults[Symbol.asyncIterator](); + + expect(cResolverCalled).toBe(false); + expect(eResolverCalled).toBe(false); + + const result2 = await iterator.next(); + expectJSON(result2).toDeepEqual({ + value: { + pending: [{ id: '2', path: ['a'] }], + incremental: [ + { + data: { a: {} }, + id: '0', + }, + { + data: { b: {} }, + id: '2', + }, + { + data: { c: { d: 'd' } }, + id: '2', + subPath: ['b'], + }, + ], + completed: [{ id: '0' }, { id: '2' }], + hasNext: true, + }, + done: false, + }); + + expect(cResolverCalled).toBe(true); + expect(eResolverCalled).toBe(false); + + resolveSlowField('someField'); + + const result3 = await iterator.next(); + expectJSON(result3).toDeepEqual({ + value: { + pending: [{ id: '3', path: ['a'] }], + incremental: [ + { + data: { someField: 'someField' }, + id: '1', + subPath: ['a'], + }, + { + data: { e: { f: 'f' } }, + id: '3', + subPath: ['b'], + }, + ], + completed: [{ id: '1' }, { id: '3' }], + hasNext: false, + }, + done: false, + }); + + expect(eResolverCalled).toBe(true); + + const result4 = await iterator.next(); + expectJSON(result4).toDeepEqual({ + value: undefined, + done: true, + }); + }); + + it('Initiates all deferred grouped field sets immediately once they have been released as pending', async () => { + const document = parse(` + query { + ... @defer { + a { + ... @defer { + b { + c { d } + } + } + } + } + ... @defer { + a { + ... 
@defer { + b { + c { d } + e { f } + } + } + } + } + } + `); + + const { promise: cPromise, resolve: resolveC } = createDeferred(); + let cResolverCalled = false; + let eResolverCalled = false; + const executeResult = execute({ + schema, + document, + rootValue: { + a: { + b: { + c: async () => { + cResolverCalled = true; + await cPromise; + return { d: 'd' }; + }, + e: () => { + eResolverCalled = true; + return { f: 'f' }; + }, + }, + }, + }, + enableEarlyExecution: false, + }); + + // @ts-expect-error once we assert that initialResult is in executeResult then it should work fine + const result1 = executeResult.initialResult; + expectJSON(result1).toDeepEqual({ + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }); + + // @ts-expect-error once we assert that initialResult is in executeResult then it should work fine + const iterator = executeResult.subsequentResults[Symbol.asyncIterator](); + + expect(cResolverCalled).toBe(false); + expect(eResolverCalled).toBe(false); + + const result2 = await iterator.next(); + expectJSON(result2).toDeepEqual({ + value: { + pending: [ + { id: '2', path: ['a'] }, + { id: '3', path: ['a'] }, + ], + incremental: [ + { + data: { a: {} }, + id: '0', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: true, + }, + done: false, + }); + + resolveC(); + + expect(cResolverCalled).toBe(true); + expect(eResolverCalled).toBe(true); + + const result3 = await iterator.next(); + expectJSON(result3).toDeepEqual({ + value: { + incremental: [ + { + data: { b: { c: { d: 'd' } } }, + id: '2', + }, + { + data: { e: { f: 'f' } }, + id: '3', + subPath: ['b'], + }, + ], + completed: [{ id: '2' }, { id: '3' }], + hasNext: false, + }, + done: false, + }); + + const result4 = await iterator.next(); + expectJSON(result4).toDeepEqual({ + value: undefined, + done: true, + }); + }); + + it('Can deduplicate multiple defers on the same object', async () => { + const document = parse(` + query { + hero { + friends { + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + } + } + } + } + } + } + } + + fragment FriendFrag on Friend { + id + name + } + `); + const result = await complete(document); + + expectJSON(result).toDeepEqual([ + { + data: { hero: { friends: [{}, {}, {}] } }, + pending: [ + { id: '0', path: ['hero', 'friends', 0] }, + { id: '1', path: ['hero', 'friends', 1] }, + { id: '2', path: ['hero', 'friends', 2] }, + ], + hasNext: true, + }, + { + incremental: [ + { data: { id: '2', name: 'Han' }, id: '0' }, + { data: { id: '3', name: 'Leia' }, id: '1' }, + { data: { id: '4', name: 'C-3PO' }, id: '2' }, + ], + completed: [{ id: '0' }, { id: '1' }, { id: '2' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates fields present in the initial payload', async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + foo + } + } + anotherNestedObject { + deeperObject { + foo + } + } + ... 
@defer { + nestedObject { + deeperObject { + bar + } + } + anotherNestedObject { + deeperObject { + foo + } + } + } + } + } + `); + const result = await complete(document, { + hero: { + nestedObject: { deeperObject: { foo: 'foo', bar: 'bar' } }, + anotherNestedObject: { deeperObject: { foo: 'foo' } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + anotherNestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { bar: 'bar' }, + id: '0', + subPath: ['nestedObject', 'deeperObject'], + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + + it('Can duplicate fields present in the initial payload if specified, using branching executor format', async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + foo + } + } + anotherNestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + bar + } + } + anotherNestedObject { + deeperObject { + foo + } + } + } + } + } + `); + const result = await complete( + document, + { + hero: { + nestedObject: { deeperObject: { foo: 'foo', bar: 'bar' } }, + anotherNestedObject: { deeperObject: { foo: 'foo' } }, + }, + }, + undefined, + false, + ); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + anotherNestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + nestedObject: { + deeperObject: { + bar: 'bar', + }, + }, + anotherNestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + }, + id: '0', + path: ['hero'], + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates fields present in a parent defer payload', async () => { + const document = parse(` + query { + hero { + ... @defer { + nestedObject { + deeperObject { + foo + ... @defer { + foo + bar + } + } + } + } + } + } + `); + const result = await complete(document, { + hero: { nestedObject: { deeperObject: { foo: 'foo', bar: 'bar' } } }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + pending: [{ id: '1', path: ['hero', 'nestedObject', 'deeperObject'] }], + incremental: [ + { + data: { + nestedObject: { + deeperObject: { foo: 'foo' }, + }, + }, + id: '0', + }, + { + data: { + bar: 'bar', + }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Can duplicate fields present in a parent defer payload if specified, using branching executor format', async () => { + const document = parse(` + query { + hero { + ... @defer { + nestedObject { + deeperObject { + foo + ... 
@defer { + foo + bar + } + } + } + } + } + } + `); + const result = await complete( + document, + { + hero: { nestedObject: { deeperObject: { foo: 'foo', bar: 'bar' } } }, + }, + undefined, + false, + ); + expectJSON(result).toDeepEqual([ + { + data: { + hero: {}, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + pending: [{ id: '1', path: ['hero', 'nestedObject', 'deeperObject'] }], + incremental: [ + { + data: { + nestedObject: { + deeperObject: { foo: 'foo' }, + }, + }, + id: '0', path: ['hero'], + }, + { + data: { + foo: 'foo', + bar: 'bar', + }, + id: '1', + path: ['hero', 'nestedObject', 'deeperObject'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates fields with deferred fragments at multiple levels', async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + foo + bar + } + ... @defer { + deeperObject { + foo + bar + baz + ... @defer { + foo + bar + baz + bak + } + } + } + } + } + } + } + `); + const result = await complete(document, { + hero: { + nestedObject: { + deeperObject: { foo: 'foo', bar: 'bar', baz: 'baz', bak: 'bak' }, + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: { + foo: 'foo', + }, + }, + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + pending: [ + { id: '1', path: ['hero', 'nestedObject'] }, + { id: '2', path: ['hero', 'nestedObject', 'deeperObject'] }, + ], + incremental: [ + { + data: { bar: 'bar' }, + id: '0', + subPath: ['nestedObject', 'deeperObject'], + }, + { + data: { baz: 'baz' }, + id: '1', + subPath: ['deeperObject'], + }, + { + data: { bak: 'bak' }, + id: '2', + }, + ], + completed: [{ id: '0' }, { id: '1' }, { id: '2' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates multiple fields from deferred fragments from different branches occurring at the same level', async () => { + const document = parse(` + query { + hero { + nestedObject { + deeperObject { + ... @defer { + foo + } + } + } + ... @defer { + nestedObject { + deeperObject { + ... @defer { + foo + bar + } + } + } + } + } + } + `); + const result = await complete(document, { + hero: { nestedObject: { deeperObject: { foo: 'foo', bar: 'bar' } } }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + nestedObject: { + deeperObject: {}, + }, + }, + }, + pending: [ + { id: '0', path: ['hero', 'nestedObject', 'deeperObject'] }, + { id: '1', path: ['hero', 'nestedObject', 'deeperObject'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { + foo: 'foo', + }, + id: '0', + }, + { + data: { + bar: 'bar', + }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicate fields with deferred fragments in different branches at multiple non-overlapping levels', async () => { + const document = parse(` + query { + a { + b { + c { + d + } + ... @defer { + e { + f + } + } + } + } + ... 
@defer { + a { + b { + e { + f + } + } + } + g { + h + } + } + } + `); + const result = await complete(document, { + a: { + b: { + c: { d: 'd' }, + e: { f: 'f' }, + }, + }, + g: { h: 'h' }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: { + b: { + c: { + d: 'd', + }, + }, + }, + }, + pending: [ + { id: '0', path: ['a', 'b'] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { e: { f: 'f' } }, + id: '0', + }, + { + data: { g: { h: 'h' } }, + id: '1', + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Correctly bundles varying subfields into incremental data records unique by defer combination, ignoring fields in a fragment masked by a parent defer', async () => { + const document = parse(` + query HeroNameQuery { + ... @defer { + hero { + id + } + } + ... @defer { + hero { + name + shouldBeWithNameDespiteAdditionalDefer: name + ... @defer { + shouldBeWithNameDespiteAdditionalDefer: name + } + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { hero: {} }, + id: '0', + }, + { + data: { id: '1' }, + id: '0', + subPath: ['hero'], + }, + { + data: { + name: 'Luke', + shouldBeWithNameDespiteAdditionalDefer: 'Luke', + }, + id: '1', + subPath: ['hero'], + }, + ], + completed: [{ id: '0' }, { id: '1' }], + hasNext: false, + }, + ]); + }); + + it('Nulls cross defer boundaries, null first', async () => { + const document = parse(` + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... @defer { + b { + c { + d + } + } + } + } + } + `); + const result = await complete(document, { + a: { b: { c: { d: 'd' } }, someField: 'someField' }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: '0', path: [] }, + { id: '1', path: ['a'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: '1', + }, + { + data: { d: 'd' }, + id: '1', + subPath: ['b', 'c'], + }, + ], + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 8, column: 17 }], + path: ['a', 'b', 'c', 'nonNullErrorField'], + }, + ], + }, + { id: '1' }, + ], + hasNext: false, + }, + ]); + }); + + it('Nulls do not cross defer boundaries, when using branching executor format', async () => { + const document = parse(` + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... 
@defer { + b { + c { + d + } + } + } + } + } + `); + const result = await complete( + document, + { + a: { b: { c: { d: 'd' } }, someField: 'someField' }, + }, + undefined, + false, + ); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: '0', path: ['a'] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: { d: 'd' } } }, + id: '0', + path: ['a'], + }, + { + data: { a: { someField: 'someField', b: { c: null } } }, + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 8, column: 17 }], + path: ['a', 'b', 'c', 'nonNullErrorField'], + }, + ], + id: '1', + path: [], + }, + ], + completed: [ + { + id: '0', + }, + { id: '1' }, + ], + hasNext: false, + }, + ]); + }); + + it('Nulls cross defer boundaries, value first', async () => { + const document = parse(` + query { + ... @defer { + a { + b { + c { + d + } + } + } + } + a { + ... @defer { + someField + b { + c { + nonNullErrorField + } + } + } + } + } + `); + const result = await complete(document, { + a: { + b: { c: { d: 'd' }, nonNullErrorFIeld: null }, + someField: 'someField', + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: '0', path: [] }, + { id: '1', path: ['a'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: '1', + }, + { + data: { d: 'd' }, + id: '0', + subPath: ['a', 'b', 'c'], + }, + ], + completed: [ + { id: '0' }, + { + id: '1', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 17, column: 17 }], + path: ['a', 'b', 'c', 'nonNullErrorField'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it('Handles multiple erroring deferred grouped field sets', async () => { + const document = parse(` + query { + ... @defer { + a { + b { + c { + someError: nonNullErrorField + } + } + } + } + ... @defer { + a { + b { + c { + anotherError: nonNullErrorField + } + } + } + } + } + `); + const result = await complete(document, { + a: { + b: { c: { nonNullErrorField: null } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 7, column: 17 }], + path: ['a', 'b', 'c', 'someError'], + }, + ], + }, + { + id: '1', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 16, column: 17 }], + path: ['a', 'b', 'c', 'anotherError'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it('Handles multiple erroring deferred grouped field sets for the same fragment', async () => { + const document = parse(` + query { + ... @defer { + a { + b { + someC: c { + d: d + } + anotherC: c { + d: d + } + } + } + } + ... 
@defer { + a { + b { + someC: c { + someError: nonNullErrorField + } + anotherC: c { + anotherError: nonNullErrorField + } + } + } + } + } + `); + const result = await complete(document, { + a: { + b: { c: { d: 'd', nonNullErrorField: null } }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [ + { id: '0', path: [] }, + { id: '1', path: [] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { a: { b: { someC: {}, anotherC: {} } } }, + id: '0', + }, + { + data: { d: 'd' }, + id: '0', + subPath: ['a', 'b', 'someC'], + }, + { + data: { d: 'd' }, + id: '0', + subPath: ['a', 'b', 'anotherC'], + }, + ], + completed: [ + { + id: '1', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 19, column: 17 }], + path: ['a', 'b', 'someC', 'someError'], + }, + ], + }, + { id: '0' }, + ], + hasNext: false, + }, + ]); + }); + + it('filters a payload with a null that cannot be merged', async () => { + const document = parse(` + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... @defer { + b { + c { + d + } + } + } + } + } + `); + const result = await complete( + document, + { + a: { + b: { + c: { + d: 'd', + nonNullErrorField: async () => { + await resolveOnNextTick(); + return null; + }, + }, + }, + someField: 'someField', + }, + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: { + a: {}, + }, + pending: [ + { id: '0', path: [] }, + { id: '1', path: ['a'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + data: { b: { c: {} } }, + id: '1', + }, + { + data: { d: 'd' }, + id: '1', + subPath: ['b', 'c'], + }, + ], + completed: [{ id: '1' }], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field c.nonNullErrorField.', + locations: [{ line: 8, column: 17 }], + path: ['a', 'b', 'c', 'nonNullErrorField'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + + it('Cancels deferred fields when initial result exhibits null bubbling', async () => { + const document = parse(` + query { + hero { + nonNullName + } + ... @defer { + hero { + name + } + } + } + `); + const result = await complete( + document, + { + hero: { + ...hero, + nonNullName: () => null, + }, + }, + true, + ); + expectJSON(result).toDeepEqual({ + data: { + hero: null, + }, + errors: [ + { + message: 'Cannot return null for non-nullable field Hero.nonNullName.', + locations: [{ line: 4, column: 11 }], + path: ['hero', 'nonNullName'], + }, + ], + }); + }); + + it('Cancels deferred fields when deferred result exhibits null bubbling', async () => { + const document = parse(` + query { + ... 
@defer { + hero { + nonNullName + name + } + } + } + `); + const result = await complete( + document, + { + hero: { + ...hero, + nonNullName: () => null, + }, + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: {}, + pending: [{ id: '0', path: [] }], + hasNext: true, + }, + { + incremental: [ + { + data: { + hero: null, + }, errors: [ { - message: 'Cannot return null for non-nullable field Hero.nonNullErrorField.', - locations: [{ line: 9, column: 9 }], - path: ['hero', 'nonNullErrorField'], + message: 'Cannot return null for non-nullable field Hero.nonNullName.', + locations: [{ line: 5, column: 13 }], + path: ['hero', 'nonNullName'], }, ], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + + it('Deduplicates list fields', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual({ + data: { + hero: { + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + }, + }, + }); + }); + + it('Deduplicates async iterable list fields', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + ...hero, + friends: async function* resolve() { + yield await Promise.resolve(friends[0]); + }, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [{ name: 'Han' }] } }, + }); + }); + + it('Deduplicates empty async iterable list fields', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + ...hero, + // eslint-disable-next-line require-yield + friends: async function* resolve() { + await resolveOnNextTick(); + }, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [] } }, + }); + }); + + it('Does not deduplicate list fields with non-overlapping fields', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + id + } + } + } + } + `); + const result = await complete(document); + expectJSON(result).toDeepEqual([ + { + data: { + hero: { + friends: [{ name: 'Han' }, { name: 'Leia' }, { name: 'C-3PO' }], + }, + }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { id: '2' }, + id: '0', + subPath: ['friends', 0], + }, + { + data: { id: '3' }, + id: '0', + subPath: ['friends', 1], + }, + { + data: { id: '4' }, + id: '0', + subPath: ['friends', 2], }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); + it('Deduplicates list fields that return empty lists', async () => { + const document = parse(` + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + ...hero, + friends: () => [], + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { friends: [] } }, + }); + }); + + it('Deduplicates null object fields', async () => { + const document = parse(` + query { + hero { + nestedObject { + name + } + ... 
@defer { + nestedObject { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + ...hero, + nestedObject: () => null, + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { nestedObject: null } }, + }); + }); + + it('Deduplicates promise object fields', async () => { + const document = parse(` + query { + hero { + nestedObject { + name + } + ... @defer { + nestedObject { + name + } + } + } + } + `); + const result = await complete(document, { + hero: { + nestedObject: () => Promise.resolve({ name: 'foo' }), + }, + }); + expectJSON(result).toDeepEqual({ + data: { hero: { nestedObject: { name: 'foo' } } }, + }); + }); + + it('Handles errors thrown in deferred fragments', async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + `); + const result = await complete(document, { + hero: { + ...hero, + name: () => { + throw new Error('bad'); + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + incremental: [ + { + data: { name: null }, + id: '0', + errors: [ + { + message: 'bad', + locations: [{ line: 9, column: 9 }], + path: ['hero', 'name'], + }, + ], + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles non-nullable errors thrown in deferred fragments', async () => { + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + nonNullName + } + `); + const result = await complete(document, { + hero: { + ...hero, + nonNullName: () => null, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { hero: { id: '1' } }, + pending: [{ id: '0', path: ['hero'] }], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Cannot return null for non-nullable field Hero.nonNullName.', + locations: [{ line: 9, column: 9 }], + path: ['hero', 'nonNullName'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); it('Handles non-nullable errors thrown outside deferred fragments', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { - nonNullErrorField + nonNullName ...NameFragment @defer } } @@ -527,19 +2522,23 @@ describe('Execute: defer directive', () => { id } `); - - const result = await complete(document); + const result = await complete(document, { + hero: { + ...hero, + nonNullName: () => null, + }, + }); expectJSON(result).toDeepEqual({ errors: [ { - message: 'Cannot return null for non-nullable field Hero.nonNullErrorField.', + message: 'Cannot return null for non-nullable field Hero.nonNullName.', locations: [ { line: 4, column: 11, }, ], - path: ['hero', 'nonNullErrorField'], + path: ['hero', 'nonNullName'], }, ], data: { @@ -547,9 +2546,8 @@ describe('Execute: defer directive', () => { }, }); }); - it('Handles async non-nullable errors thrown in deferred fragments', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -557,26 +2555,30 @@ describe('Execute: defer directive', () => { } } fragment NameFragment on Hero { - promiseNonNullErrorField + nonNullName } `); - - const result = await complete(document); + const result = await complete(document, { + hero: { + ...hero, + nonNullName: () => Promise.resolve(null), + }, + }); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' } }, + pending: [{ id: 
'0', path: ['hero'] }], hasNext: true, }, { - incremental: [ + completed: [ { - data: null, - path: ['hero'], + id: '0', errors: [ { - message: 'Cannot return null for non-nullable field Hero.promiseNonNullErrorField.', + message: 'Cannot return null for non-nullable field Hero.nonNullName.', locations: [{ line: 9, column: 9 }], - path: ['hero', 'promiseNonNullErrorField'], + path: ['hero', 'nonNullName'], }, ], }, @@ -585,9 +2587,8 @@ describe('Execute: defer directive', () => { }, ]); }); - it('Returns payloads in correct order', async () => { - const document = parse(/* GraphQL */ ` + const document = parse(` query HeroNameQuery { hero { id @@ -595,7 +2596,7 @@ describe('Execute: defer directive', () => { } } fragment NameFragment on Hero { - slowField + name friends { ...NestedFragment @defer } @@ -604,111 +2605,130 @@ describe('Execute: defer directive', () => { name } `); - const result = await complete(document); + const result = await complete(document, { + hero: { + ...hero, + name: async () => { + await resolveOnNextTick(); + return 'slow'; + }, + }, + }); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' }, }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { + pending: [ + { id: '1', path: ['hero', 'friends', 0] }, + { id: '2', path: ['hero', 'friends', 1] }, + { id: '3', path: ['hero', 'friends', 2] }, + ], incremental: [ { - data: { slowField: 'slow', friends: [{}, {}, {}] }, - path: ['hero'], + data: { name: 'slow', friends: [{}, {}, {}] }, + id: '0', }, + { data: { name: 'Han' }, id: '1' }, + { data: { name: 'Leia' }, id: '2' }, + { data: { name: 'C-3PO' }, id: '3' }, ], - hasNext: true, - }, - { - incremental: [ - { data: { name: 'Han' }, path: ['hero', 'friends', 0] }, - { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, - { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, - ], + completed: [{ id: '0' }, { id: '1' }, { id: '2' }, { id: '3' }], hasNext: false, }, ]); }); - it('Returns payloads from synchronous data in correct order', async () => { - const document = parse(/* GraphQL */ ` - query HeroNameQuery { - hero { - id - ...NameFragment @defer - } - } - fragment NameFragment on Hero { - name - friends { - ...NestedFragment @defer - } + const document = parse(` + query HeroNameQuery { + hero { + id + ...NameFragment @defer } - fragment NestedFragment on Friend { - name + } + fragment NameFragment on Hero { + name + friends { + ...NestedFragment @defer } - `); - + } + fragment NestedFragment on Friend { + name + } + `); const result = await complete(document); expectJSON(result).toDeepEqual([ { data: { hero: { id: '1' }, }, + pending: [{ id: '0', path: ['hero'] }], hasNext: true, }, { + pending: [ + { id: '1', path: ['hero', 'friends', 0] }, + { id: '2', path: ['hero', 'friends', 1] }, + { id: '3', path: ['hero', 'friends', 2] }, + ], incremental: [ { data: { name: 'Luke', friends: [{}, {}, {}], }, - path: ['hero'], + id: '0', }, + { data: { name: 'Han' }, id: '1' }, + { data: { name: 'Leia' }, id: '2' }, + { data: { name: 'C-3PO' }, id: '3' }, ], - hasNext: true, - }, - { - incremental: [ - { data: { name: 'Han' }, path: ['hero', 'friends', 0] }, - { data: { name: 'Leia' }, path: ['hero', 'friends', 1] }, - { data: { name: 'C-3PO' }, path: ['hero', 'friends', 2] }, - ], + completed: [{ id: '0' }, { id: '1' }, { id: '2' }, { id: '3' }], hasNext: false, }, ]); }); it('Filters deferred payloads when a list item returned by an async iterable is nulled', async () => { - const document = parse(/* GraphQL */ ` - query { - hero { - 
asyncFriends { - promiseNonNullErrorField - ...NameFragment @defer - } + const document = parse(` + query { + hero { + friends { + nonNullName + ...NameFragment @defer } } - fragment NameFragment on Friend { - name - } - `); - - const result = await complete(document); + } + fragment NameFragment on Friend { + name + } + `); + const result = await complete(document, { + hero: { + ...hero, + async *friends() { + yield await Promise.resolve({ + ...friends[0], + nonNullName: () => Promise.resolve(null), + }); + }, + }, + }); expectJSON(result).toDeepEqual({ data: { hero: { - asyncFriends: [null], + friends: [null], }, }, errors: [ { - message: 'Cannot return null for non-nullable field Friend.promiseNonNullErrorField.', - locations: [{ line: 5, column: 13 }], - path: ['hero', 'asyncFriends', 0, 'promiseNonNullErrorField'], + message: 'Cannot return null for non-nullable field Friend.nonNullName.', + locations: [{ line: 5, column: 11 }], + path: ['hero', 'friends', 0, 'nonNullName'], }, ], }); diff --git a/packages/executor/src/execution/__tests__/lists-test.ts b/packages/executor/src/execution/__tests__/lists-test.ts index eca608b712c..f7c7383a57a 100644 --- a/packages/executor/src/execution/__tests__/lists-test.ts +++ b/packages/executor/src/execution/__tests__/lists-test.ts @@ -2,6 +2,7 @@ import { buildSchema, GraphQLFieldResolver, GraphQLList, + GraphQLNonNull, GraphQLObjectType, GraphQLSchema, GraphQLString, @@ -27,7 +28,7 @@ describe('Execute: Accepts any iterable as list value', () => { }); }); - it('Accepts an Generator function as a List value', () => { + it('Accepts a Generator function as a List value', () => { function* listField() { yield 'one'; yield 2; @@ -91,7 +92,7 @@ describe('Execute: Accepts async iterables as list value', () => { name: 'ObjectWrapper', fields: { index: { - type: GraphQLString, + type: new GraphQLNonNull(GraphQLString), resolve, }, }, @@ -127,12 +128,12 @@ describe('Execute: Accepts async iterables as list value', () => { } expectJSON(await complete({ listField })).toDeepEqual({ - data: { listField: ['two', '4', null] }, + data: { listField: null }, errors: [ { message: 'bad', locations: [{ line: 1, column: 3 }], - path: ['listField', 2], + path: ['listField'], }, ], }); @@ -190,7 +191,7 @@ describe('Execute: Accepts async iterables as list value', () => { return Promise.resolve(index); }), ).toDeepEqual({ - data: { listField: [{ index: '0' }, { index: '1' }, { index: null }] }, + data: { listField: [{ index: '0' }, { index: '1' }, null] }, errors: [ { message: 'bad', @@ -229,6 +230,34 @@ describe('Execute: Accepts async iterables as list value', () => { errors, }); }); + + it('Returns async iterable when list nulls', async () => { + const values = [1, null, 2]; + let i = 0; + let returned = false; + const listField = { + [Symbol.asyncIterator]: () => ({ + next: () => Promise.resolve({ value: values[i++], done: false }), + return: () => { + returned = true; + return Promise.resolve({ value: undefined, done: true }); + }, + }), + }; + const errors = [ + { + message: 'Cannot return null for non-nullable field Query.listField.', + locations: [{ line: 1, column: 3 }], + path: ['listField', 1], + }, + ]; + + expectJSON(await complete({ listField }, '[Int!]')).toDeepEqual({ + data: { listField: null }, + errors, + }); + expect(returned).toBe(true); + }); }); describe('Execute: Handles list nullability', () => { diff --git a/packages/executor/src/execution/__tests__/mutations-test.ts b/packages/executor/src/execution/__tests__/mutations-test.ts index 
c4b2d36f2dc..93bfef2d323 100644 --- a/packages/executor/src/execution/__tests__/mutations-test.ts +++ b/packages/executor/src/execution/__tests__/mutations-test.ts @@ -227,18 +227,19 @@ describe('Execute: Handles mutation execution ordering', () => { first: {}, second: { theNumber: 2 }, }, + pending: [{ id: '0', path: ['first'], label: 'defer-label' }], hasNext: true, }, { incremental: [ { - label: 'defer-label', - path: ['first'], + id: '0', data: { promiseToGetTheNumber: 2, }, }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); @@ -262,7 +263,7 @@ describe('Execute: Handles mutation execution ordering', () => { const rootValue = new Root(6); const mutationResult = await execute({ schema, document, rootValue }); - expectJSON(mutationResult).toDeepEqual({ + expect(mutationResult).toEqual({ data: { first: { theNumber: 1 }, second: { theNumber: 2 }, @@ -306,13 +307,13 @@ describe('Execute: Handles mutation execution ordering', () => { data: { second: { theNumber: 2 }, }, + pending: [{ id: '0', path: [], label: 'defer-label' }], hasNext: true, }, { incremental: [ { - label: 'defer-label', - path: [], + id: '0', data: { first: { theNumber: 1, @@ -320,6 +321,7 @@ describe('Execute: Handles mutation execution ordering', () => { }, }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); diff --git a/packages/executor/src/execution/__tests__/nonnull-test.ts b/packages/executor/src/execution/__tests__/nonnull-test.ts index fb4eead69bd..78bc207e026 100644 --- a/packages/executor/src/execution/__tests__/nonnull-test.ts +++ b/packages/executor/src/execution/__tests__/nonnull-test.ts @@ -247,6 +247,16 @@ describe('Execute: handles non-nullable types', () => { path: ['syncNest', 'syncNest', 'sync'], locations: [{ line: 6, column: 22 }], }, + { + message: promiseError.message, + path: ['syncNest', 'promise'], + locations: [{ line: 5, column: 11 }], + }, + { + message: promiseError.message, + path: ['syncNest', 'syncNest', 'promise'], + locations: [{ line: 6, column: 27 }], + }, { message: syncError.message, path: ['syncNest', 'promiseNest', 'sync'], @@ -262,21 +272,6 @@ describe('Execute: handles non-nullable types', () => { path: ['promiseNest', 'syncNest', 'sync'], locations: [{ line: 12, column: 22 }], }, - { - message: promiseError.message, - path: ['syncNest', 'promise'], - locations: [{ line: 5, column: 11 }], - }, - { - message: promiseError.message, - path: ['syncNest', 'syncNest', 'promise'], - locations: [{ line: 6, column: 27 }], - }, - { - message: syncError.message, - path: ['promiseNest', 'promiseNest', 'sync'], - locations: [{ line: 13, column: 25 }], - }, { message: promiseError.message, path: ['syncNest', 'promiseNest', 'promise'], @@ -292,6 +287,11 @@ describe('Execute: handles non-nullable types', () => { path: ['promiseNest', 'syncNest', 'promise'], locations: [{ line: 12, column: 27 }], }, + { + message: syncError.message, + path: ['promiseNest', 'promiseNest', 'sync'], + locations: [{ line: 13, column: 25 }], + }, { message: promiseError.message, path: ['promiseNest', 'promiseNest', 'promise'], @@ -521,7 +521,7 @@ describe('Execute: handles non-nullable types', () => { type: new GraphQLNonNull(GraphQLString), }, }, - resolve: (_, args: any) => 'Passed: ' + String(args.cannotBeNull), + resolve: (_, { cannotBeNull }) => 'Passed: ' + String(cannotBeNull), }, }, }), diff --git a/packages/executor/src/execution/__tests__/stream-test.ts b/packages/executor/src/execution/__tests__/stream-test.ts index 183cbf8b05f..024d6d1e044 100644 --- 
a/packages/executor/src/execution/__tests__/stream-test.ts +++ b/packages/executor/src/execution/__tests__/stream-test.ts @@ -8,13 +8,15 @@ import { GraphQLString, parse, } from 'graphql'; -import { MaybePromise } from '@graphql-tools/utils'; +import { createDeferred, MaybePromise } from '@graphql-tools/utils'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; +import { expectPromise } from '../../__testUtils__/expectPromise.js'; +import { resolveOnNextTick } from '../../__testUtils__/resolveOnNextTick.js'; +import { execute } from '../execute.js'; import type { InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, -} from '../execute.js'; -import { execute } from '../execute.js'; +} from '../types.js'; const friendType = new GraphQLObjectType({ fields: { @@ -76,11 +78,19 @@ const query = new GraphQLObjectType({ const schema = new GraphQLSchema({ query }); -async function complete(document: DocumentNode, rootValue: unknown = {}) { +async function complete( + document: DocumentNode, + rootValue: unknown = {}, + enableEarlyExecution = false, + useLatestFormat = true, +) { const result = await execute({ schema, document, rootValue, + enableEarlyExecution, + sendIncrementalErrorsAsNull: !useLatestFormat, + sendPathAndLabelOnIncremental: !useLatestFormat, }); if ('initialResult' in result) { @@ -117,65 +127,72 @@ async function completeAsync(document: DocumentNode, numCalls: number, rootValue return Promise.all(promises); } -function createResolvablePromise(): [Promise, (value?: T) => void] { - let resolveFn; - const promise = new Promise(resolve => { - resolveFn = resolve; - }); - return [promise, resolveFn as unknown as (value?: T) => void]; -} - describe('Execute: stream directive', () => { it('Can stream a list field', async () => { const document = parse('{ scalarList @stream(initialCount: 1) }'); const result = await complete(document, { scalarList: () => ['apple', 'banana', 'coconut'], }); - expect(result).toEqual([ + expectJSON(result).toDeepEqual([ { data: { scalarList: ['apple'], }, + pending: [{ id: '0', path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['banana'], path: ['scalarList', 1] }], + incremental: [{ items: ['banana', 'coconut'], id: '0' }], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Can stream a list field using branching executor format', async () => { + const document = parse('{ scalarList @stream(initialCount: 1) }'); + const result = await complete( + document, + { + scalarList: () => ['apple', 'banana', 'coconut'], + }, + undefined, + false, + ); + expectJSON(result).toDeepEqual([ + { + data: { + scalarList: ['apple'], + }, + pending: [{ id: '0', path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['banana', 'coconut'], id: '0', path: ['scalarList', 1] }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can use default value of initialCount', async () => { const document = parse('{ scalarList @stream }'); const result = await complete(document, { scalarList: () => ['apple', 'banana', 'coconut'], }); - expect(result).toEqual([ + expectJSON(result).toDeepEqual([ { data: { scalarList: [], }, + pending: [{ id: '0', path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['apple'], path: ['scalarList', 0] }], - hasNext: true, - }, - { - incremental: [{ items: ['banana'], path: ['scalarList', 1] }], - hasNext: true, - }, - { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] 
}], + incremental: [{ items: ['apple', 'banana', 'coconut'], id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Negative values of initialCount throw field errors', async () => { const document = parse('{ scalarList @stream(initialCount: -2) }'); const result = await complete(document, { @@ -199,7 +216,6 @@ describe('Execute: stream directive', () => { }, }); }); - it('Returns label from stream directive', async () => { const document = parse('{ scalarList @stream(initialCount: 1, label: "scalar-stream") }'); const result = await complete(document, { @@ -210,31 +226,21 @@ describe('Execute: stream directive', () => { data: { scalarList: ['apple'], }, + pending: [{ id: '0', path: ['scalarList'], label: 'scalar-stream' }], hasNext: true, }, { incremental: [ { - items: ['banana'], - path: ['scalarList', 1], - label: 'scalar-stream', - }, - ], - hasNext: true, - }, - { - incremental: [ - { - items: ['coconut'], - path: ['scalarList', 2], - label: 'scalar-stream', + items: ['banana', 'coconut'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can disable @stream using if argument', async () => { const document = parse('{ scalarList @stream(initialCount: 0, if: false) }'); const result = await complete(document, { @@ -244,7 +250,6 @@ describe('Execute: stream directive', () => { data: { scalarList: ['apple', 'banana', 'coconut'] }, }); }); - it('Does not disable stream with null if argument', async () => { const document = parse( 'query ($shouldStream: Boolean) { scalarList @stream(initialCount: 2, if: $shouldStream) }', @@ -255,15 +260,16 @@ describe('Execute: stream directive', () => { expectJSON(result).toDeepEqual([ { data: { scalarList: ['apple', 'banana'] }, + pending: [{ id: '0', path: ['scalarList'] }], hasNext: true, }, { - incremental: [{ items: ['coconut'], path: ['scalarList', 2] }], + incremental: [{ items: ['coconut'], id: '0' }], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream multi-dimensional lists', async () => { const document = parse('{ scalarListList @stream(initialCount: 1) }'); const result = await complete(document, { @@ -278,29 +284,24 @@ describe('Execute: stream directive', () => { data: { scalarListList: [['apple', 'apple', 'apple']], }, + pending: [{ id: '0', path: ['scalarListList'] }], hasNext: true, }, { incremental: [ { - items: [['banana', 'banana', 'banana']], - path: ['scalarListList', 1], - }, - ], - hasNext: true, - }, - { - incremental: [ - { - items: [['coconut', 'coconut', 'coconut']], - path: ['scalarListList', 2], + items: [ + ['banana', 'banana', 'banana'], + ['coconut', 'coconut', 'coconut'], + ], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream a field that returns a list of promises', async () => { const document = parse(/* GraphQL */ ` query { @@ -327,6 +328,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { @@ -338,14 +340,14 @@ describe('Execute: stream directive', () => { id: '3', }, ], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream in correct order with lists of promises', async () => { const document = parse(/* GraphQL */ ` query { @@ -363,13 +365,14 @@ describe('Execute: stream directive', () => { data: { friendList: [], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ name: 'Luke', id: '1' }], - path: ['friendList', 0], + id: '0', }, ], 
hasNext: true, @@ -378,7 +381,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han', id: '2' }], - path: ['friendList', 1], + id: '0', }, ], hasNext: true, @@ -387,14 +390,172 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Does not execute early if not specified', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 0) { + id + } + } + `); + const order: Array = []; + const result = await complete(document, { + friendList: () => + friends.map((f, i) => ({ + id: async () => { + const slowness = 3 - i; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(i); + return f.id; + }, + })), + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '2' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '3' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual([0, 1, 2]); + }); + it('Executes early if specified', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 0) { + id + } + } + `); + const order: Array = []; + const result = await complete( + document, + { + friendList: () => + friends.map((f, i) => ({ + id: async () => { + const slowness = 3 - i; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(i); + return f.id; + }, + })), + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1' }, { id: '2' }, { id: '3' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual([2, 1, 0]); + }); + it('Can stream a field that returns a list with nested promises', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 2) { + name + id + } + } + `); + const result = await complete(document, { + friendList: () => + friends.map(f => ({ + name: Promise.resolve(f.name), + id: Promise.resolve(f.id), + })), + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [ + { + name: 'Luke', + id: '1', + }, + { + name: 'Han', + id: '2', + }, + ], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [ + { + name: 'Leia', + id: '3', + }, + ], + id: '0', + }, + ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles rejections in a field that returns a list of promises before initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -425,20 +586,21 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }, null], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles rejections in a field that returns a list of promises after initialCount is reached', async () => { const document = parse(/* 
GraphQL */ ` query { @@ -462,13 +624,14 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [null], - path: ['friendList', 1], + id: '0', errors: [ { message: 'bad', @@ -477,16 +640,21 @@ describe('Execute: stream directive', () => { }, ], }, + ], + hasNext: true, + }, + { + incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream a field that returns an async iterable', async () => { const document = parse(/* GraphQL */ ` query { @@ -508,13 +676,14 @@ describe('Execute: stream directive', () => { data: { friendList: [], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ name: 'Luke', id: '1' }], - path: ['friendList', 0], + id: '0', }, ], hasNext: true, @@ -523,7 +692,7 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Han', id: '2' }], - path: ['friendList', 1], + id: '0', }, ], hasNext: true, @@ -532,17 +701,17 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], hasNext: true, }, { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Can stream a field that returns an async iterable, using a non-zero initialCount', async () => { const document = parse(/* GraphQL */ ` query { @@ -567,20 +736,24 @@ describe('Execute: stream directive', () => { { name: 'Han', id: '2' }, ], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + hasNext: true, + }, + { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Negative values of initialCount throw field errors on a field that returns an async iterable', async () => { const document = parse(/* GraphQL */ ` query { @@ -606,7 +779,125 @@ describe('Execute: stream directive', () => { }, }); }); - + it('Does not execute early if not specified, when streaming from an async iterable', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 0) { + id + } + } + `); + const order: Array = []; + const slowFriend = async (n: number) => ({ + id: async () => { + const slowness = (3 - n) * 10; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(n); + return friends[n].id; + }, + }); + const result = await complete(document, { + async *friendList() { + yield await Promise.resolve(slowFriend(0)); + yield await Promise.resolve(slowFriend(1)); + yield await Promise.resolve(slowFriend(2)); + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '2' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '3' }], + id: '0', + }, + ], + hasNext: true, + }, + { + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual([0, 1, 2]); + }); + it('Executes early if specified when streaming from an async iterable', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 0) { + id + } + } + `); + const order: Array = []; + const 
slowFriend = (n: number) => ({ + id: async () => { + const slowness = (3 - n) * 10; + for (let j = 0; j < slowness; j++) { + await resolveOnNextTick(); + } + order.push(n); + return friends[n].id; + }, + }); + const result = await complete( + document, + { + async *friendList() { + yield await Promise.resolve(slowFriend(0)); + yield await Promise.resolve(slowFriend(1)); + yield await Promise.resolve(slowFriend(2)); + }, + }, + true, + ); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1' }, { id: '2' }, { id: '3' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + expect(order).toEqual([2, 1, 0]); + }); it('Can handle concurrent calls to .next() without waiting', async () => { const document = parse(/* GraphQL */ ` query { @@ -616,7 +907,7 @@ describe('Execute: stream directive', () => { } } `); - const result = await completeAsync(document, 3, { + const result = await completeAsync(document, 2, { async *friendList() { yield await Promise.resolve(friends[0]); yield await Promise.resolve(friends[1]); @@ -633,6 +924,7 @@ describe('Execute: stream directive', () => { { name: 'Han', id: '2' }, ], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, }, @@ -642,17 +934,16 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ name: 'Leia', id: '3' }], - path: ['friendList', 2], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, }, { done: true, value: undefined }, - { done: true, value: undefined }, ]); }); - it('Handles error thrown in async iterable before initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -673,15 +964,14 @@ describe('Execute: stream directive', () => { { message: 'bad', locations: [{ line: 3, column: 9 }], - path: ['friendList', 1], + path: ['friendList'], }, ], data: { - friendList: [{ name: 'Luke', id: '1' }, null], + friendList: null, }, }); }); - it('Handles error thrown in async iterable after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -702,18 +992,18 @@ describe('Execute: stream directive', () => { data: { friendList: [{ name: 'Luke', id: '1' }], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: [null], - path: ['friendList', 1], + id: '0', errors: [ { message: 'bad', locations: [{ line: 3, column: 9 }], - path: ['friendList', 1], + path: ['friendList'], }, ], }, @@ -722,7 +1012,6 @@ describe('Execute: stream directive', () => { }, ]); }); - it('Handles null returned in non-null list items after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -732,7 +1021,7 @@ describe('Execute: stream directive', () => { } `); const result = await complete(document, { - nonNullFriendList: () => [friends[0], null], + nonNullFriendList: () => [friends[0], null, friends[1]], }); expectJSON(result).toDeepEqual([ @@ -740,13 +1029,13 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ name: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + id: '0', errors: [ { message: 'Cannot return null for non-nullable field Query.nonNullFriendList.', @@ -760,7 +1049,51 @@ describe('Execute: stream directive', () => { }, ]); }); + it('Handles null 
returned in non-null list items after initialCount is reached, using branching executor format', async () => { + const document = parse(/* GraphQL */ ` + query { + nonNullFriendList @stream(initialCount: 1) { + name + } + } + `); + const result = await complete( + document, + { + nonNullFriendList: () => [friends[0], null, friends[1]], + }, + undefined, + false, + ); + expectJSON(result).toDeepEqual([ + { + data: { + nonNullFriendList: [{ name: 'Luke' }], + }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], + hasNext: true, + }, + { + incremental: [ + { + errors: [ + { + message: 'Cannot return null for non-nullable field Query.nonNullFriendList.', + locations: [{ line: 3, column: 9 }], + path: ['nonNullFriendList', 1], + }, + ], + items: null, + id: '0', + path: ['nonNullFriendList', 1], + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); it('Handles null returned in non-null async iterable list items after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -787,13 +1120,13 @@ describe('Execute: stream directive', () => { data: { nonNullFriendList: [{ name: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + id: '0', errors: [ { message: 'Cannot return null for non-nullable field Query.nonNullFriendList.', @@ -807,7 +1140,6 @@ describe('Execute: stream directive', () => { }, ]); }); - it('Handles errors thrown by completeValue after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -822,13 +1154,14 @@ describe('Execute: stream directive', () => { data: { scalarList: ['Luke'], }, + pending: [{ id: '0', path: ['scalarList'] }], hasNext: true, }, { incremental: [ { items: [null], - path: ['scalarList', 1], + id: '0', errors: [ { message: 'String cannot represent value: {}', @@ -838,11 +1171,11 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Handles async errors thrown by completeValue after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -852,26 +1185,213 @@ describe('Execute: stream directive', () => { } `); const result = await complete(document, { - friendList: () => [ - Promise.resolve({ nonNullName: friends[0].name }), - Promise.resolve({ + friendList: () => [ + Promise.resolve({ nonNullName: friends[0].name }), + Promise.resolve({ + nonNullName: () => Promise.reject(new Error('Oops')), + }), + Promise.resolve({ nonNullName: friends[1].name }), + ], + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [{ nonNullName: 'Luke' }], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [null], + id: '0', + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['friendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ nonNullName: 'Han' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles nested async errors thrown by completeValue after initialCount is reached', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + `); + const result = await complete(document, { + friendList: () => [ + { nonNullName: Promise.resolve(friends[0].name) }, + { nonNullName: Promise.reject(new Error('Oops')) 
}, + { nonNullName: Promise.resolve(friends[1].name) }, + ], + }); + expectJSON(result).toDeepEqual([ + { + data: { + friendList: [{ nonNullName: 'Luke' }], + }, + pending: [{ id: '0', path: ['friendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [null], + id: '0', + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['friendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ nonNullName: 'Han' }], + id: '0', + }, + ], + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles async errors thrown by completeValue after initialCount is reached for a non-nullable list', async () => { + const document = parse(/* GraphQL */ ` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `); + const result = await complete(document, { + nonNullFriendList: () => [ + Promise.resolve({ nonNullName: friends[0].name }), + Promise.resolve({ + nonNullName: () => Promise.reject(new Error('Oops')), + }), + Promise.resolve({ nonNullName: friends[1].name }), + ], + }); + expectJSON(result).toDeepEqual([ + { + data: { + nonNullFriendList: [{ nonNullName: 'Luke' }], + }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['nonNullFriendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it('Handles nested async errors thrown by completeValue after initialCount is reached for a non-nullable list', async () => { + const document = parse(/* GraphQL */ ` + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + `); + const result = await complete(document, { + nonNullFriendList: () => [ + { nonNullName: Promise.resolve(friends[0].name) }, + { nonNullName: Promise.reject(new Error('Oops')) }, + { nonNullName: Promise.resolve(friends[1].name) }, + ], + }); + expectJSON(result).toDeepEqual([ + { + data: { + nonNullFriendList: [{ nonNullName: 'Luke' }], + }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], + hasNext: true, + }, + { + completed: [ + { + id: '0', + errors: [ + { + message: 'Oops', + locations: [{ line: 4, column: 11 }], + path: ['nonNullFriendList', 1, 'nonNullName'], + }, + ], + }, + ], + hasNext: false, + }, + ]); + }); + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable', async () => { + const document = parse(/* GraphQL */ ` + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + `); + const result = await complete(document, { + async *friendList() { + yield await Promise.resolve({ nonNullName: friends[0].name }); + yield await Promise.resolve({ nonNullName: () => Promise.reject(new Error('Oops')), - }), - Promise.resolve({ nonNullName: friends[1].name }), - ], + }); + yield await Promise.resolve({ nonNullName: friends[1].name }); + }, }); expectJSON(result).toDeepEqual([ { data: { friendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [null], - path: ['friendList', 1], + id: '0', errors: [ { message: 'Oops', @@ -887,15 +1407,18 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], + id: '0', }, ], + hasNext: true, + }, + { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - - it('Handles async errors thrown by completeValue after 
initialCount is reached for a non-nullable list', async () => { + it('Handles async errors thrown by completeValue after initialCount is reached from async generator for a non-nullable list', async () => { const document = parse(/* GraphQL */ ` query { nonNullFriendList @stream(initialCount: 1) { @@ -904,26 +1427,25 @@ describe('Execute: stream directive', () => { } `); const result = await complete(document, { - nonNullFriendList: () => [ - Promise.resolve({ nonNullName: friends[0].name }), - Promise.resolve({ + async *nonNullFriendList() { + yield await Promise.resolve({ nonNullName: friends[0].name }); + yield await Promise.resolve({ nonNullName: () => Promise.reject(new Error('Oops')), - }), - Promise.resolve({ nonNullName: friends[1].name }), - ], + }); /* c8 ignore start */ + } /* c8 ignore stop */, }); expectJSON(result).toDeepEqual([ { data: { nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + id: '0', errors: [ { message: 'Oops', @@ -937,63 +1459,70 @@ describe('Execute: stream directive', () => { }, ]); }); - - it('Handles async errors thrown by completeValue after initialCount is reached from async iterable', async () => { + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable does not provide a return method) ', async () => { const document = parse(/* GraphQL */ ` query { - friendList @stream(initialCount: 1) { + nonNullFriendList @stream(initialCount: 1) { nonNullName } } `); + let count = 0; const result = await complete(document, { - async *friendList() { - yield await Promise.resolve({ nonNullName: friends[0].name }); - yield await Promise.resolve({ - nonNullName: () => Promise.reject(new Error('Oops')), - }); - yield await Promise.resolve({ nonNullName: friends[1].name }); + nonNullFriendList: { + [Symbol.asyncIterator]: () => ({ + next: async () => { + switch (count++) { + case 0: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[0].name }, + }); + case 1: + return Promise.resolve({ + done: false, + value: { + nonNullName: () => Promise.reject(new Error('Oops')), + }, + }); + // Not reached + /* c8 ignore next 5 */ + case 2: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[1].name }, + }); + } + }, + }), }, }); expectJSON(result).toDeepEqual([ { data: { - friendList: [{ nonNullName: 'Luke' }], + nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: [null], - path: ['friendList', 1], + id: '0', errors: [ { message: 'Oops', locations: [{ line: 4, column: 11 }], - path: ['friendList', 1, 'nonNullName'], + path: ['nonNullFriendList', 1, 'nonNullName'], }, ], }, ], - hasNext: true, - }, - { - incremental: [ - { - items: [{ nonNullName: 'Han' }], - path: ['friendList', 2], - }, - ], - hasNext: true, - }, - { hasNext: false, }, ]); }); - - it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list', async () => { + it('Handles async errors thrown by completeValue after initialCount is reached from async iterable for a non-nullable list when the async iterable provides concurrent next/return methods and has a slow return ', async () => { const document = parse(/* GraphQL */ ` query { nonNullFriendList 
@stream(initialCount: 1) { @@ -1001,29 +1530,58 @@ describe('Execute: stream directive', () => { } } `); + let count = 0; + let returned = false; const result = await complete(document, { - async *nonNullFriendList() { - yield await Promise.resolve({ nonNullName: friends[0].name }); - yield await Promise.resolve({ - nonNullName: () => Promise.reject(new Error('Oops')), - }); - yield await Promise.resolve({ - nonNullName: friends[1].name, - }); /* c8 ignore start */ - } /* c8 ignore stop */, + nonNullFriendList: { + [Symbol.asyncIterator]: () => ({ + next: async () => { + /* c8 ignore next 3 */ + if (returned) { + return Promise.resolve({ done: true }); + } + switch (count++) { + case 0: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[0].name }, + }); + case 1: + return Promise.resolve({ + done: false, + value: { + nonNullName: () => Promise.reject(new Error('Oops')), + }, + }); + // Not reached + /* c8 ignore next 5 */ + case 2: + return Promise.resolve({ + done: false, + value: { nonNullName: friends[1].name }, + }); + } + }, + return: async () => { + await resolveOnNextTick(); + returned = true; + return { done: true }; + }, + }), + }, }); expectJSON(result).toDeepEqual([ { data: { nonNullFriendList: [{ nonNullName: 'Luke' }], }, + pending: [{ id: '0', path: ['nonNullFriendList'] }], hasNext: true, }, { - incremental: [ + completed: [ { - items: null, - path: ['nonNullFriendList', 1], + id: '0', errors: [ { message: 'Oops', @@ -1036,8 +1594,8 @@ describe('Execute: stream directive', () => { hasNext: false, }, ]); + expect(returned).toBeTruthy(); }); - it('Filters payloads that are nulled', async () => { const document = parse(/* GraphQL */ ` query { @@ -1070,7 +1628,6 @@ describe('Execute: stream directive', () => { }, }); }); - it('Filters payloads that are nulled by a later synchronous error', async () => { const document = parse(/* GraphQL */ ` query { @@ -1103,7 +1660,6 @@ describe('Execute: stream directive', () => { }, }); }); - it('Does not filter payloads when null error is in a different path', async () => { const document = parse(/* GraphQL */ ` query { @@ -1133,13 +1689,26 @@ describe('Execute: stream directive', () => { otherNestedObject: {}, nestedObject: { nestedFriendList: [] }, }, + pending: [ + { id: '0', path: ['otherNestedObject'] }, + { id: '1', path: ['nestedObject', 'nestedFriendList'] }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ name: 'Luke' }], + id: '1', + }, + ], hasNext: true, }, { incremental: [ { data: { scalarField: null }, - path: ['otherNestedObject'], + id: '0', errors: [ { message: 'Oops', @@ -1148,16 +1717,16 @@ describe('Execute: stream directive', () => { }, ], }, - { - items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], - }, ], + completed: [{ id: '0' }], + hasNext: true, + }, + { + completed: [{ id: '1' }], hasNext: false, }, ]); }); - it('Filters stream payloads that are nulled in a deferred payload', async () => { const document = parse(/* GraphQL */ ` query { @@ -1188,6 +1757,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ id: '0', path: ['nestedObject'] }], hasNext: true, }, { @@ -1196,7 +1766,7 @@ describe('Execute: stream directive', () => { data: { deeperNestedObject: null, }, - path: ['nestedObject'], + id: '0', errors: [ { message: @@ -1207,11 +1777,11 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Filters defer payloads that are nulled in a stream 
response', async () => { const document = parse(/* GraphQL */ ` query { @@ -1236,13 +1806,14 @@ describe('Execute: stream directive', () => { data: { friendList: [], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [null], - path: ['friendList', 0], + id: '0', errors: [ { message: 'Cannot return null for non-nullable field Friend.nonNullName.', @@ -1255,22 +1826,23 @@ describe('Execute: stream directive', () => { hasNext: true, }, { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Returns iterator and ignores errors when stream payloads are filtered', async () => { + it('Returns iterator and passes through errors when stream payloads are filtered', async () => { let returned = false; let requested = false; const iterable = { [Symbol.asyncIterator]: () => ({ next: () => { + /* c8 ignore start */ if (requested) { - /* c8 ignore next 3 */ - // Not reached, iterator should end immediately. - expect('Not reached').toBeFalsy(); - } + // stream is filtered, next is not called, and so this is not reached. + return Promise.reject(new Error('Oops')); + } /* c8 ignore stop */ requested = true; const friend = friends[0]; return Promise.resolve({ @@ -1283,6 +1855,7 @@ describe('Execute: stream directive', () => { }, return: () => { returned = true; + // This error should be passed through. return Promise.reject(new Error('Oops')); }, }), @@ -1314,6 +1887,7 @@ describe('Execute: stream directive', () => { }, }, }, + enableEarlyExecution: true, }); expect('initialResult' in executeResult).toBeTruthy(); @@ -1325,6 +1899,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ id: '0', path: ['nestedObject'] }], hasNext: true, }); @@ -1337,7 +1912,7 @@ describe('Execute: stream directive', () => { data: { deeperNestedObject: null, }, - path: ['nestedObject'], + id: '0', errors: [ { message: @@ -1348,16 +1923,16 @@ describe('Execute: stream directive', () => { ], }, ], + completed: [{ id: '0' }], hasNext: false, }, }); - const result3 = await iterator.next(); - expectJSON(result3).toDeepEqual({ done: true, value: undefined }); + const result3Promise = iterator.next(); + await expectPromise(result3Promise).toRejectWith('Oops'); expect(returned).toBeTruthy(); }); - it('Handles promises returned by completeValue after initialCount is reached', async () => { const document = parse(/* GraphQL */ ` query { @@ -1382,13 +1957,14 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1', name: 'Luke' }], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }, { incremental: [ { items: [{ id: '2', name: 'Han' }], - path: ['friendList', 1], + id: '0', }, ], hasNext: true, @@ -1397,19 +1973,79 @@ describe('Execute: stream directive', () => { incremental: [ { items: [{ id: '3', name: 'Leia' }], - path: ['friendList', 2], + id: '0', }, ], hasNext: true, }, { + completed: [{ id: '0' }], + hasNext: false, + }, + ]); + }); + it('Handles overlapping deferred and non-deferred streams', async () => { + const document = parse(/* GraphQL */ ` + query { + nestedObject { + nestedFriendList @stream(initialCount: 0) { + id + } + } + nestedObject { + ... 
@defer { + nestedFriendList @stream(initialCount: 0) { + id + name + } + } + } + } + `); + const result = await complete(document, { + nestedObject: { + async *nestedFriendList() { + yield await Promise.resolve(friends[0]); + yield await Promise.resolve(friends[1]); + }, + }, + }); + expectJSON(result).toDeepEqual([ + { + data: { + nestedObject: { + nestedFriendList: [], + }, + }, + pending: [{ id: '0', path: ['nestedObject', 'nestedFriendList'] }], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '1', name: 'Luke' }], + id: '0', + }, + ], + hasNext: true, + }, + { + incremental: [ + { + items: [{ id: '2', name: 'Han' }], + id: '0', + }, + ], + hasNext: true, + }, + { + completed: [{ id: '0' }], hasNext: false, }, ]); }); - it('Returns payloads in correct order when parent deferred fragment resolves slower than stream', async () => { - const [slowFieldPromise, resolveSlowField] = createResolvablePromise(); + const { promise: slowFieldPromise, resolve: resolveSlowField } = createDeferred(); const document = parse(/* GraphQL */ ` query { nestedObject { @@ -1435,6 +2071,7 @@ describe('Execute: stream directive', () => { }, }, }, + enableEarlyExecution: false, }); expect('initialResult' in executeResult).toBeTruthy(); @@ -1446,6 +2083,7 @@ describe('Execute: stream directive', () => { data: { nestedObject: {}, }, + pending: [{ id: '0', path: ['nestedObject'] }], hasNext: true, }); @@ -1454,45 +2092,53 @@ describe('Execute: stream directive', () => { const result2 = await result2Promise; expectJSON(result2).toDeepEqual({ value: { + pending: [{ id: '1', path: ['nestedObject', 'nestedFriendList'] }], incremental: [ { data: { scalarField: 'slow', nestedFriendList: [] }, - path: ['nestedObject'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: true, }, done: false, }); + const result3 = await iterator.next(); expectJSON(result3).toDeepEqual({ value: { incremental: [ { items: [{ name: 'Luke' }], - path: ['nestedObject', 'nestedFriendList', 0], + id: '1', }, ], hasNext: true, }, done: false, }); + const result4 = await iterator.next(); expectJSON(result4).toDeepEqual({ value: { incremental: [ { items: [{ name: 'Han' }], - path: ['nestedObject', 'nestedFriendList', 1], + id: '1', }, ], hasNext: true, }, done: false, }); + const result5 = await iterator.next(); expectJSON(result5).toDeepEqual({ - value: { hasNext: false }, + value: { + completed: [{ id: '1' }], + hasNext: false, + }, done: false, }); const result6 = await iterator.next(); @@ -1501,10 +2147,10 @@ describe('Execute: stream directive', () => { done: true, }); }); - it('Can @defer fields that are resolved after async iterable is complete', async () => { - const [slowFieldPromise, resolveSlowField] = createResolvablePromise(); - const [iterableCompletionPromise, resolveIterableCompletion] = createResolvablePromise(); + const { promise: slowFieldPromise, resolve: resolveSlowField } = createDeferred(); + const { promise: iterableCompletionPromise, resolve: resolveIterableCompletion } = + createDeferred(); const document = parse(/* GraphQL */ ` query { @@ -1531,6 +2177,7 @@ describe('Execute: stream directive', () => { await iterableCompletionPromise; }, }, + enableEarlyExecution: false, }); expect('initialResult' in executeResult).toBeTruthy(); @@ -1542,26 +2189,25 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1' }], }, + pending: [ + { id: '0', path: ['friendList', 0], label: 'DeferName' }, + { id: '1', path: ['friendList'], label: 'stream-label' }, + ], hasNext: true, }); const 
result2Promise = iterator.next(); - resolveIterableCompletion(); + resolveIterableCompletion(null); const result2 = await result2Promise; expectJSON(result2).toDeepEqual({ value: { incremental: [ { data: { name: 'Luke' }, - path: ['friendList', 0], - label: 'DeferName', - }, - { - items: [{ id: '2' }], - path: ['friendList', 1], - label: 'stream-label', + id: '0', }, ], + completed: [{ id: '0' }], hasNext: true, }, done: false, @@ -1572,27 +2218,49 @@ describe('Execute: stream directive', () => { const result3 = await result3Promise; expectJSON(result3).toDeepEqual({ value: { + pending: [{ id: '2', path: ['friendList', 1], label: 'DeferName' }], incremental: [ { - data: { name: 'Han' }, - path: ['friendList', 1], - label: 'DeferName', + items: [{ id: '2' }], + id: '1', }, ], - hasNext: false, + hasNext: true, }, done: false, }); const result4 = await iterator.next(); expectJSON(result4).toDeepEqual({ + value: { + completed: [{ id: '1' }], + hasNext: true, + }, + done: false, + }); + const result5 = await iterator.next(); + expectJSON(result5).toDeepEqual({ + value: { + incremental: [ + { + data: { name: 'Han' }, + id: '2', + }, + ], + completed: [{ id: '2' }], + hasNext: false, + }, + done: false, + }); + const result6 = await iterator.next(); + expectJSON(result6).toDeepEqual({ value: undefined, done: true, }); }); - it('Can @defer fields that are resolved before async iterable is complete', async () => { - const [slowFieldPromise, resolveSlowField] = createResolvablePromise(); - const [iterableCompletionPromise, resolveIterableCompletion] = createResolvablePromise(); + const { promise: slowFieldPromise, resolve: resolveSlowField } = createDeferred(); + const { promise: iterableCompletionPromise, resolve: resolveIterableCompletion } = + createDeferred(); const document = parse(/* GraphQL */ ` query { @@ -1619,6 +2287,7 @@ describe('Execute: stream directive', () => { await iterableCompletionPromise; }, }, + enableEarlyExecution: false, }); expect('initialResult' in executeResult).toBeTruthy(); // @ts-expect-error once we assert that initialResult is in executeResult then it should work fine @@ -1630,6 +2299,10 @@ describe('Execute: stream directive', () => { data: { friendList: [{ id: '1' }], }, + pending: [ + { id: '0', path: ['friendList', 0], label: 'DeferName' }, + { id: '1', path: ['friendList'], label: 'stream-label' }, + ], hasNext: true, }); @@ -1641,15 +2314,10 @@ describe('Execute: stream directive', () => { incremental: [ { data: { name: 'Luke' }, - path: ['friendList', 0], - label: 'DeferName', - }, - { - items: [{ id: '2' }], - path: ['friendList', 1], - label: 'stream-label', + id: '0', }, ], + completed: [{ id: '0' }], hasNext: true, }, done: false, @@ -1658,57 +2326,70 @@ describe('Execute: stream directive', () => { const result3 = await iterator.next(); expectJSON(result3).toDeepEqual({ value: { + pending: [{ id: '2', path: ['friendList', 1], label: 'DeferName' }], incremental: [ { - data: { name: 'Han' }, - path: ['friendList', 1], - label: 'DeferName', + items: [{ id: '2' }], + id: '1', }, ], hasNext: true, }, done: false, }); - const result4Promise = iterator.next(); - resolveIterableCompletion(); - const result4 = await result4Promise; + + const result4 = await iterator.next(); expectJSON(result4).toDeepEqual({ - value: { hasNext: false }, + value: { + incremental: [ + { + data: { name: 'Han' }, + id: '2', + }, + ], + completed: [{ id: '2' }], + hasNext: true, + }, done: false, }); - const result5 = await iterator.next(); + const result5Promise = iterator.next(); + 
resolveIterableCompletion(null); + const result5 = await result5Promise; expectJSON(result5).toDeepEqual({ + value: { + completed: [{ id: '1' }], + hasNext: false, + }, + done: false, + }); + + const result6 = await iterator.next(); + expectJSON(result6).toDeepEqual({ value: undefined, done: true, }); }); - it('Returns underlying async iterables when returned generator is returned', async () => { let returned = false; - let index = 0; const iterable = { [Symbol.asyncIterator]: () => ({ - next: () => { - const friend = friends[index++]; - if (!friend) { - return Promise.resolve({ done: true, value: undefined }); - } - return Promise.resolve({ done: false, value: friend }); - }, + next: () => + new Promise(() => { + /* never resolves */ + }), return: () => { returned = true; + // This error should be passed through. + return Promise.reject(new Error('Oops')); }, }), }; const document = parse(/* GraphQL */ ` query { - friendList @stream(initialCount: 1) { + friendList @stream { id - ... @defer { - name - } } } `); @@ -1727,32 +2408,30 @@ describe('Execute: stream directive', () => { const result1 = executeResult.initialResult; expectJSON(result1).toDeepEqual({ data: { - friendList: [ - { - id: '1', - }, - ], + friendList: [], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }); + + const result2Promise = iterator.next(); const returnPromise = iterator.return(); - const result2 = await iterator.next(); + const result2 = await result2Promise; expectJSON(result2).toDeepEqual({ done: true, value: undefined, }); - await returnPromise; + await expectPromise(returnPromise).toRejectWith('Oops'); expect(returned).toBeTruthy(); }); - it('Can return async iterable when underlying iterable does not have a return method', async () => { let index = 0; const iterable = { [Symbol.asyncIterator]: () => ({ next: () => { const friend = friends[index++]; - if (!friend) { + if (friend == null) { return Promise.resolve({ done: true, value: undefined }); } return Promise.resolve({ done: false, value: friend }); @@ -1790,6 +2469,7 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [{ id: '0', path: ['friendList'] }], hasNext: true, }); @@ -1802,7 +2482,6 @@ describe('Execute: stream directive', () => { }); await returnPromise; }); - it('Returns underlying async iterables when returned generator is thrown', async () => { let index = 0; let returned = false; @@ -1810,7 +2489,7 @@ describe('Execute: stream directive', () => { [Symbol.asyncIterator]: () => ({ next: () => { const friend = friends[index++]; - if (!friend) { + if (friend == null) { return Promise.resolve({ done: true, value: undefined }); } return Promise.resolve({ done: false, value: friend }); @@ -1851,6 +2530,10 @@ describe('Execute: stream directive', () => { }, ], }, + pending: [ + { id: '0', path: ['friendList', 0] }, + { id: '1', path: ['friendList'] }, + ], hasNext: true, }); @@ -1861,13 +2544,7 @@ describe('Execute: stream directive', () => { done: true, value: undefined, }); - try { - await throwPromise; /* c8 ignore start */ - // Not reachable, always throws - /* c8 ignore stop */ - } catch (e) { - // ignore error - } + await expectPromise(throwPromise).toRejectWith('bad'); expect(returned).toBeTruthy(); }); }); diff --git a/packages/executor/src/execution/__tests__/subscribe.test.ts b/packages/executor/src/execution/__tests__/subscribe.test.ts index bf17d40dae7..08f8db05ea9 100644 --- a/packages/executor/src/execution/__tests__/subscribe.test.ts +++ b/packages/executor/src/execution/__tests__/subscribe.test.ts 
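Taken together, the stream-test expectations above, and the subscribe tests that follow, all assert the new non-duplicating payload shape: the payload that announces a deferred fragment or stream carries a `pending` entry with an `id` and `path` (and optional `label`), later `incremental` entries refer back to that `id` instead of repeating the path and label, and a `completed` entry reports when that `id` has finished, carrying `errors` when the fragment or stream was nulled. The TypeScript sketch below is for orientation only; the field names follow the test expectations, while the authoritative definitions live in packages/executor/src/execution/types.ts added by this patch.

import type { GraphQLError } from 'graphql';

// Simplified, illustrative shapes of the non-duplicating incremental payloads
// asserted by the tests above. The canonical types are defined in
// packages/executor/src/execution/types.ts; this sketch intentionally omits details.
interface PendingResult {
  id: string;
  path: ReadonlyArray<string | number>;
  label?: string;
}

interface IncrementalDeferResult {
  id: string;
  data: Record<string, unknown>;
  errors?: ReadonlyArray<GraphQLError>;
}

interface IncrementalStreamResult {
  id: string;
  items: ReadonlyArray<unknown>;
  errors?: ReadonlyArray<GraphQLError>;
}

interface CompletedResult {
  id: string;
  errors?: ReadonlyArray<GraphQLError>;
}

interface SubsequentIncrementalPayload {
  pending?: ReadonlyArray<PendingResult>;
  incremental?: ReadonlyArray<IncrementalDeferResult | IncrementalStreamResult>;
  completed?: ReadonlyArray<CompletedResult>;
  hasNext: boolean;
}

As the assertions above show, the initial payload additionally carries `data` alongside its own `pending` list and `hasNext: true`.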
@@ -10,6 +10,7 @@ import { import { makeExecutableSchema } from '@graphql-tools/schema'; import { ExecutionResult, isAsyncIterable, isPromise, MaybePromise } from '@graphql-tools/utils'; import { expectJSON } from '../../__testUtils__/expectJSON.js'; +import { expectPromise } from '../../__testUtils__/expectPromise.js'; import { resolveOnNextTick } from '../../__testUtils__/resolveOnNextTick.js'; import { assertAsyncIterable } from '../../../../loaders/url/tests/test-utils.js'; import { ExecutionArgs, subscribe } from '../execute.js'; @@ -85,9 +86,15 @@ const emailSchema = new GraphQLSchema({ function createSubscription( pubsub: SimplePubSub, variableValues?: { readonly [variable: string]: unknown }, + errorWithIncrementalSubscription = true, ) { const document = parse(` - subscription ($priority: Int = 0, $shouldDefer: Boolean = false, $asyncResolver: Boolean = false) { + subscription ( + $priority: Int = 0 + $shouldDefer: Boolean = false + $shouldStream: Boolean = false + $asyncResolver: Boolean = false + ) { importantEmail(priority: $priority) { email { from @@ -98,6 +105,7 @@ function createSubscription( } ... @defer(if: $shouldDefer) { inbox { + emails @include(if: $shouldStream) @stream(if: $shouldStream) unread total } @@ -135,32 +143,10 @@ function createSubscription( document, rootValue: data, variableValues, + errorWithIncrementalSubscription, }); } -// TODO: consider adding this method to testUtils (with tests) -function expectPromise(maybePromise: unknown) { - expect(isPromise(maybePromise)).toBeTruthy(); - - return { - toResolve() { - return maybePromise; - }, - async toRejectWith(message: string) { - let caughtError: Error; - - try { - /* c8 ignore next 2 */ - await maybePromise; - } catch (error) { - caughtError = error as Error; - expect(caughtError).toBeInstanceOf(Error); - expect(caughtError).toHaveProperty('message', message); - } - }, - }; -} - const DummyQueryType = new GraphQLObjectType({ name: 'Query', fields: { @@ -721,7 +707,7 @@ describe('Subscription Publish Phase', () => { }); }); - it('produces additional payloads for subscriptions with @defer', async () => { + it('subscribe function returns errors with @defer', async () => { const pubsub = new SimplePubSub(); const subscription = await createSubscription(pubsub, { shouldDefer: true, @@ -741,6 +727,76 @@ describe('Subscription Publish Phase', () => { }), ).toBeTruthy(); + const errorPayload = { + done: false, + value: { + errors: [ + { + message: + '`@defer` directive not supported on subscription operations. Disable `@defer` by setting the `if` argument to `false`.', + locations: [{ line: 8, column: 7 }], + path: ['importantEmail'], + }, + ], + data: { importantEmail: null }, + }, + }; + + // The previously waited on payload now has a value. + expectJSON(await payload).toDeepEqual(errorPayload); + + // Another new email arrives, after all incrementally delivered payloads are received. + expect( + pubsub.emit({ + from: 'hyo@graphql.org', + subject: 'Tools', + message: 'I <3 making things', + unread: true, + }), + ).toBeTruthy(); + + // The next waited on payload will have a value. + // @ts-expect-error we have asserted it is an async iterable + expectJSON(await subscription.next()).toDeepEqual(errorPayload); + + // @ts-expect-error + expectJSON(await subscription.return()).toDeepEqual({ + done: true, + value: undefined, + }); + + // Awaiting a subscription after closing it results in completed results. 
+ // @ts-expect-error + expectJSON(await subscription.next()).toDeepEqual({ + done: true, + value: undefined, + }); + }); + + it('produces additional payloads for subscriptions with @defer if allowed', async () => { + const pubsub = new SimplePubSub(); + const subscription = await createSubscription( + pubsub, + { + shouldDefer: true, + }, + false, + ); + expect(isAsyncIterable(subscription)).toBeTruthy(); + // Wait for the next subscription payload. + // @ts-expect-error we have asserted it is an async iterable + const payload = subscription.next(); + + // A new email arrives! + expect( + pubsub.emit({ + from: 'yuzhi@graphql.org', + subject: 'Alright', + message: 'Tests are good', + unread: true, + }), + ).toBeTruthy(); + // The previously waited on payload now has a value. expectJSON(await payload).toDeepEqual({ done: false, @@ -753,6 +809,7 @@ describe('Subscription Publish Phase', () => { }, }, }, + pending: [{ id: '0', path: ['importantEmail'] }], hasNext: true, }, }); @@ -770,9 +827,10 @@ describe('Subscription Publish Phase', () => { total: 2, }, }, - path: ['importantEmail'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, }); @@ -800,6 +858,7 @@ describe('Subscription Publish Phase', () => { }, }, }, + pending: [{ id: '0', path: ['importantEmail'] }], hasNext: true, }, }); @@ -827,9 +886,10 @@ describe('Subscription Publish Phase', () => { total: 3, }, }, - path: ['importantEmail'], + id: '0', }, ], + completed: [{ id: '0' }], hasNext: false, }, }); @@ -847,6 +907,7 @@ describe('Subscription Publish Phase', () => { }, }, }, + pending: [{ id: '0', path: ['importantEmail'] }], hasNext: true, }, }); @@ -866,6 +927,93 @@ describe('Subscription Publish Phase', () => { }); }); + it('subscribe function returns errors with @stream', async () => { + const pubsub = new SimplePubSub(); + const subscription = await createSubscription(pubsub, { + shouldStream: true, + }); + expect(isAsyncIterable(subscription)).toBeTruthy(); + // Wait for the next subscription payload. + // @ts-expect-error + const payload = subscription.next(); + + // A new email arrives! + expect( + pubsub.emit({ + from: 'yuzhi@graphql.org', + subject: 'Alright', + message: 'Tests are good', + unread: true, + }), + ).toBeTruthy(); + + // The previously waited on payload now has a value. + expectJSON(await payload).toDeepEqual({ + done: false, + value: { + errors: [ + { + message: + '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.', + locations: [{ line: 18, column: 13 }], + path: ['importantEmail', 'inbox', 'emails'], + }, + ], + data: { + importantEmail: { + email: { from: 'yuzhi@graphql.org', subject: 'Alright' }, + inbox: { emails: null, unread: 1, total: 2 }, + }, + }, + }, + }); + + // Another new email arrives, after all incrementally delivered payloads are received. + expect( + pubsub.emit({ + from: 'hyo@graphql.org', + subject: 'Tools', + message: 'I <3 making things', + unread: true, + }), + ).toBeTruthy(); + + // The next waited on payload will have a value. + // @ts-expect-error we have asserted it is an async iterable + expectJSON(await subscription.next()).toDeepEqual({ + done: false, + value: { + errors: [ + { + message: + '`@stream` directive not supported on subscription operations. 
Disable `@stream` by setting the `if` argument to `false`.', + locations: [{ line: 18, column: 13 }], + path: ['importantEmail', 'inbox', 'emails'], + }, + ], + data: { + importantEmail: { + email: { from: 'hyo@graphql.org', subject: 'Tools' }, + inbox: { emails: null, unread: 2, total: 3 }, + }, + }, + }, + }); + + // @ts-expect-error we have asserted it is an async iterable + expectJSON(await subscription.return()).toDeepEqual({ + done: true, + value: undefined, + }); + + // Awaiting a subscription after closing it results in completed results. + // @ts-expect-error we have asserted it is an async iterable + expectJSON(await subscription.next()).toDeepEqual({ + done: true, + value: undefined, + }); + }); + it('produces a payload when there are multiple events', async () => { const pubsub = new SimplePubSub(); const subscription = createSubscription(pubsub); diff --git a/packages/executor/src/execution/buildExecutionPlan.ts b/packages/executor/src/execution/buildExecutionPlan.ts new file mode 100644 index 00000000000..d054cfb858f --- /dev/null +++ b/packages/executor/src/execution/buildExecutionPlan.ts @@ -0,0 +1,95 @@ +import { AccumulatorMap } from './AccumulatorMap.js'; +import type { DeferUsage, FieldDetails, FieldGroup, GroupedFieldSet } from './collectFields.js'; +import { getBySet } from './getBySet.js'; +import { isSameSet } from './isSameSet.js'; + +export type DeferUsageSet = ReadonlySet; + +export interface ExecutionPlan { + groupedFieldSet: GroupedFieldSet; + newGroupedFieldSets: Map; +} + +export function buildExecutionPlan( + originalGroupedFieldSet: GroupedFieldSet, + parentDeferUsages: DeferUsageSet = new Set(), +): ExecutionPlan { + const groupedFieldSet = new Map(); + const newGroupedFieldSets = new Map>(); + for (const [responseKey, fieldGroup] of originalGroupedFieldSet) { + const filteredDeferUsageSet = getFilteredDeferUsageSet(fieldGroup); + + if (isSameSet(filteredDeferUsageSet, parentDeferUsages)) { + groupedFieldSet.set(responseKey, fieldGroup); + continue; + } + + let newGroupedFieldSet = getBySet(newGroupedFieldSets, filteredDeferUsageSet); + if (newGroupedFieldSet === undefined) { + newGroupedFieldSet = new Map(); + newGroupedFieldSets.set(filteredDeferUsageSet, newGroupedFieldSet); + } + newGroupedFieldSet.set(responseKey, fieldGroup); + } + + return { + groupedFieldSet, + newGroupedFieldSets, + }; +} + +function getFilteredDeferUsageSet(fieldGroup: FieldGroup): ReadonlySet { + const filteredDeferUsageSet = new Set(); + for (const fieldDetails of fieldGroup) { + const deferUsage = fieldDetails.deferUsage; + if (deferUsage === undefined) { + filteredDeferUsageSet.clear(); + return filteredDeferUsageSet; + } + filteredDeferUsageSet.add(deferUsage); + } + + for (const deferUsage of filteredDeferUsageSet) { + let parentDeferUsage: DeferUsage | undefined = deferUsage.parentDeferUsage; + while (parentDeferUsage !== undefined) { + if (filteredDeferUsageSet.has(parentDeferUsage)) { + filteredDeferUsageSet.delete(deferUsage); + break; + } + parentDeferUsage = parentDeferUsage.parentDeferUsage; + } + } + return filteredDeferUsageSet; +} + +export function buildBranchingExecutionPlan( + originalGroupedFieldSet: GroupedFieldSet, + parentDeferUsages: DeferUsageSet = new Set(), +): ExecutionPlan { + const groupedFieldSet = new AccumulatorMap(); + + const newGroupedFieldSets = new Map>(); + + for (const [responseKey, fieldGroup] of originalGroupedFieldSet) { + for (const fieldDetails of fieldGroup) { + const deferUsage = fieldDetails.deferUsage; + const deferUsageSet = + 
deferUsage === undefined ? new Set() : new Set([deferUsage]); + if (isSameSet(parentDeferUsages, deferUsageSet)) { + groupedFieldSet.add(responseKey, fieldDetails); + } else { + let newGroupedFieldSet = getBySet(newGroupedFieldSets, deferUsageSet); + if (newGroupedFieldSet === undefined) { + newGroupedFieldSet = new AccumulatorMap(); + newGroupedFieldSets.set(deferUsageSet, newGroupedFieldSet); + } + newGroupedFieldSet.add(responseKey, fieldDetails); + } + } + } + + return { + groupedFieldSet, + newGroupedFieldSets, + }; +} diff --git a/packages/executor/src/execution/collectFields.ts b/packages/executor/src/execution/collectFields.ts new file mode 100644 index 00000000000..9d109fa8422 --- /dev/null +++ b/packages/executor/src/execution/collectFields.ts @@ -0,0 +1,320 @@ +import type { + FieldNode, + FragmentDefinitionNode, + FragmentSpreadNode, + GraphQLObjectType, + GraphQLSchema, + InlineFragmentNode, + SelectionSetNode, +} from 'graphql'; +import { + getDirectiveValues, + GraphQLIncludeDirective, + GraphQLSkipDirective, + isAbstractType, + Kind, + typeFromAST, +} from 'graphql'; +import { GraphQLDeferDirective } from '@graphql-tools/utils'; +import { AccumulatorMap } from './AccumulatorMap.js'; +import { invariant } from './invariant.js'; + +export interface DeferUsage { + label: string | undefined; + parentDeferUsage: DeferUsage | undefined; +} + +export interface FieldDetails { + node: FieldNode; + deferUsage: DeferUsage | undefined; +} + +export type FieldGroup = ReadonlyArray; + +export type GroupedFieldSet = ReadonlyMap; + +interface CollectFieldsContext { + schema: GraphQLSchema; + fragments: Record; + variableValues: TVariables; + errorWithIncrementalSubscription: boolean; + runtimeType: GraphQLObjectType; + visitedFragmentNames: Set; +} + +/** + * Given a selectionSet, collects all of the fields and returns them. + * + * CollectFields requires the "runtime type" of an object. For a field that + * returns an Interface or Union type, the "runtime type" will be the actual + * object type returned by that field. + * + * @internal + */ +export function collectFields( + schema: GraphQLSchema, + fragments: Record, + variableValues: TVariables, + runtimeType: GraphQLObjectType, + selectionSet: SelectionSetNode, + errorWithIncrementalSubscription: boolean, +): { + groupedFieldSet: GroupedFieldSet; + newDeferUsages: ReadonlyArray; +} { + const groupedFieldSet = new AccumulatorMap(); + const newDeferUsages: Array = []; + const context: CollectFieldsContext = { + schema, + fragments, + variableValues, + runtimeType, + errorWithIncrementalSubscription, + visitedFragmentNames: new Set(), + }; + + collectFieldsImpl(context, selectionSet, groupedFieldSet, newDeferUsages); + return { groupedFieldSet, newDeferUsages }; +} + +/** + * Given an array of field nodes, collects all of the subfields of the passed + * in fields, and returns them at the end. + * + * CollectSubFields requires the "return type" of an object. For a field that + * returns an Interface or Union type, the "return type" will be the actual + * object type returned by that field. 
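For readers following buildExecutionPlan.ts above: it relies on two small helpers, `getBySet` and `isSameSet`, whose sources are added elsewhere in this patch (getBySet.ts and isSameSet.ts) and are not shown in this excerpt. The sketch below shows their assumed behavior, looking up `Map` entries keyed by `Set`s using value equality rather than identity; it is illustrative, not the shipped implementation.

// Assumed behavior of the helpers used by buildExecutionPlan.ts; the actual
// implementations ship in packages/executor/src/execution/isSameSet.ts and getBySet.ts.
export function isSameSet<T>(setA: ReadonlySet<T>, setB: ReadonlySet<T>): boolean {
  if (setA.size !== setB.size) {
    return false;
  }
  for (const item of setA) {
    if (!setB.has(item)) {
      return false;
    }
  }
  return true;
}

export function getBySet<T, U>(
  map: ReadonlyMap<ReadonlySet<T>, U>,
  setToMatch: ReadonlySet<T>,
): U | undefined {
  for (const [set, value] of map) {
    if (isSameSet(set, setToMatch)) {
      return value;
    }
  }
  return undefined;
}

This value-equality lookup is what lets newGroupedFieldSets be keyed by DeferUsageSet: field groups that are deferred by the same set of defer usages land in the same execution group.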
+ * + * @internal + */ +export function collectSubfields( + schema: GraphQLSchema, + fragments: Record, + variableValues: { [variable: string]: unknown }, + errorWithIncrementalSubscription: boolean, + returnType: GraphQLObjectType, + fieldGroup: FieldGroup, +): { + groupedFieldSet: GroupedFieldSet; + newDeferUsages: ReadonlyArray; +} { + const context: CollectFieldsContext = { + schema, + fragments, + variableValues, + runtimeType: returnType, + errorWithIncrementalSubscription, + visitedFragmentNames: new Set(), + }; + const subGroupedFieldSet = new AccumulatorMap(); + const newDeferUsages: Array = []; + + for (const fieldDetail of fieldGroup) { + const node = fieldDetail.node; + if (node.selectionSet) { + collectFieldsImpl( + context, + node.selectionSet, + subGroupedFieldSet, + newDeferUsages, + fieldDetail.deferUsage, + ); + } + } + + return { + groupedFieldSet: subGroupedFieldSet, + newDeferUsages, + }; +} + +function collectFieldsImpl( + context: CollectFieldsContext, + selectionSet: SelectionSetNode, + groupedFieldSet: AccumulatorMap, + newDeferUsages: Array, + deferUsage?: DeferUsage, +): void { + const { + schema, + fragments, + variableValues, + runtimeType, + errorWithIncrementalSubscription, + visitedFragmentNames, + } = context; + + for (const selection of selectionSet.selections) { + switch (selection.kind) { + case Kind.FIELD: { + if (!shouldIncludeNode(variableValues, selection)) { + continue; + } + groupedFieldSet.add(getFieldEntryKey(selection), { + node: selection, + deferUsage, + }); + break; + } + case Kind.INLINE_FRAGMENT: { + if ( + !shouldIncludeNode(variableValues, selection) || + !doesFragmentConditionMatch(schema, selection, runtimeType) + ) { + continue; + } + + const newDeferUsage = getDeferUsage( + errorWithIncrementalSubscription, + variableValues, + selection, + deferUsage, + ); + + if (!newDeferUsage) { + collectFieldsImpl( + context, + selection.selectionSet, + groupedFieldSet, + newDeferUsages, + deferUsage, + ); + } else { + newDeferUsages.push(newDeferUsage); + collectFieldsImpl( + context, + selection.selectionSet, + groupedFieldSet, + newDeferUsages, + newDeferUsage, + ); + } + + break; + } + case Kind.FRAGMENT_SPREAD: { + const fragName = selection.name.value; + + const newDeferUsage = getDeferUsage( + errorWithIncrementalSubscription, + variableValues, + selection, + deferUsage, + ); + + if ( + !newDeferUsage && + (visitedFragmentNames.has(fragName) || !shouldIncludeNode(variableValues, selection)) + ) { + continue; + } + + const fragment = fragments[fragName]; + if (fragment == null || !doesFragmentConditionMatch(schema, fragment, runtimeType)) { + continue; + } + if (!newDeferUsage) { + visitedFragmentNames.add(fragName); + collectFieldsImpl( + context, + fragment.selectionSet, + groupedFieldSet, + newDeferUsages, + deferUsage, + ); + } else { + newDeferUsages.push(newDeferUsage); + collectFieldsImpl( + context, + fragment.selectionSet, + groupedFieldSet, + newDeferUsages, + newDeferUsage, + ); + } + break; + } + } + } +} + +/** + * Returns an object containing the `@defer` arguments if a field should be + * deferred based on the experimental flag, defer directive present and + * not disabled by the "if" argument. 
+ */ +function getDeferUsage( + errorWithIncrementalSubscription: boolean, + variableValues: { [variable: string]: unknown }, + node: FragmentSpreadNode | InlineFragmentNode, + parentDeferUsage: DeferUsage | undefined, +): DeferUsage | undefined { + const defer = getDirectiveValues(GraphQLDeferDirective, node, variableValues); + + if (!defer) { + return; + } + + if (defer['if'] === false) { + return; + } + + invariant( + !errorWithIncrementalSubscription, + '`@defer` directive not supported on subscription operations. Disable `@defer` by setting the `if` argument to `false`.', + ); + + return { + label: typeof defer['label'] === 'string' ? defer['label'] : undefined, + parentDeferUsage, + }; +} + +/** + * Determines if a field should be included based on the `@include` and `@skip` + * directives, where `@skip` has higher precedence than `@include`. + */ +function shouldIncludeNode( + variableValues: { [variable: string]: unknown }, + node: FragmentSpreadNode | FieldNode | InlineFragmentNode, +): boolean { + const skip = getDirectiveValues(GraphQLSkipDirective, node, variableValues); + if (skip?.['if'] === true) { + return false; + } + + const include = getDirectiveValues(GraphQLIncludeDirective, node, variableValues); + if (include?.['if'] === false) { + return false; + } + return true; +} + +/** + * Determines if a fragment is applicable to the given type. + */ +function doesFragmentConditionMatch( + schema: GraphQLSchema, + fragment: FragmentDefinitionNode | InlineFragmentNode, + type: GraphQLObjectType, +): boolean { + const typeConditionNode = fragment.typeCondition; + if (!typeConditionNode) { + return true; + } + const conditionalType = typeFromAST(schema, typeConditionNode); + if (conditionalType === type) { + return true; + } + if (isAbstractType(conditionalType)) { + return schema.isSubType(conditionalType, type); + } + return false; +} + +/** + * Implements the logic to compute the key of a given field's entry + */ +function getFieldEntryKey(node: FieldNode): string { + return node.alias ? 
node.alias.value : node.name.value; +} diff --git a/packages/executor/src/execution/execute.ts b/packages/executor/src/execution/execute.ts index 36f6ed02f6f..7b1d2caf6c5 100644 --- a/packages/executor/src/execution/execute.ts +++ b/packages/executor/src/execution/execute.ts @@ -8,7 +8,6 @@ import { GraphQLError, GraphQLField, GraphQLFieldResolver, - GraphQLFormattedError, GraphQLLeafType, GraphQLList, GraphQLObjectType, @@ -24,16 +23,14 @@ import { Kind, locatedError, OperationDefinitionNode, + OperationTypeNode, SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, versionInfo, } from 'graphql'; -import { ValueOrPromise } from 'value-or-promise'; import { - collectSubFields as _collectSubfields, addPath, - collectFields, createGraphQLError, getArgumentValues, getDefinedRootType, @@ -53,31 +50,56 @@ import { promiseReduce, } from '@graphql-tools/utils'; import { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import { AccumulatorMap } from './AccumulatorMap.js'; +import { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js'; +import { + buildBranchingExecutionPlan, + buildExecutionPlan, + DeferUsageSet, + ExecutionPlan, +} from './buildExecutionPlan.js'; import { coerceError } from './coerceError.js'; +import { + collectSubfields as _collectSubfields, + collectFields, + DeferUsage, + FieldGroup, + GroupedFieldSet, +} from './collectFields.js'; import { flattenAsyncIterable } from './flattenAsyncIterable.js'; +import { buildIncrementalResponse } from './IncrementalPublisher.js'; import { invariant } from './invariant.js'; import { promiseForObject } from './promiseForObject.js'; +import { + CancellableStreamRecord, + CompletedExecutionGroup, + DeferredFragmentRecord, + IncrementalDataRecord, + IncrementalExecutionResults, + InitialIncrementalExecutionResult, + PendingExecutionGroup, + SingularExecutionResult, + StreamItemRecord, + StreamItemResult, + StreamRecord, + SubsequentIncrementalExecutionResult, +} from './types.js'; import { getVariableValues } from './values.js'; -export interface SingularExecutionResult { - errors?: ReadonlyArray; - data?: TData | null; - extensions?: TExtensions; -} - /** * A memoized collection of relevant subfields with regard to the return * type. Memoizing ensures the subfields are not repeatedly calculated, which * saves overhead when resolving lists of values. 
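The execute.ts changes that follow also extend ExecutionArgs with the granular options exercised by the tests above (enableEarlyExecution, deduplicateDefers, sendIncrementalErrorsAsNull, sendPathAndLabelOnIncremental, errorWithIncrementalSubscription). For orientation, a hypothetical call opting into the non-duplicating execution plan might look like the sketch below; the schema, field names, and import path are assumptions for illustration, not part of this patch.

import { parse } from 'graphql';
import type { GraphQLSchema } from 'graphql';
import { execute } from './execute.js'; // assumed import path for this sketch

declare const schema: GraphQLSchema; // assumed: a schema with a deferrable `bio` field on `hero`

async function runSketch() {
  const result = await execute({
    schema,
    document: parse('{ hero { name ... @defer { bio } } }'), // hypothetical operation
    deduplicateDefers: true, // build the non-duplicating execution plan
    enableEarlyExecution: false, // same toggle the tests above pass
  });

  if ('initialResult' in result) {
    // Incremental delivery: one initial payload plus an async iterable of subsequent payloads.
    console.log(result.initialResult);
    for await (const payload of result.subsequentResults) {
      console.log(payload);
    }
  } else {
    // Otherwise, execution produced a single, complete result.
    console.log(result);
  }
}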
*/ const collectSubfields = memoize3( - (exeContext: ExecutionContext, returnType: GraphQLObjectType, fieldNodes: Array) => + (exeContext: ExecutionContext, returnType: GraphQLObjectType, fieldGroup: FieldGroup) => _collectSubfields( exeContext.schema, exeContext.fragments, exeContext.variableValues, + exeContext.errorWithIncrementalSubscription, returnType, - fieldNodes, + fieldGroup, ), ); @@ -120,118 +142,23 @@ export interface ExecutionContext { fieldResolver: GraphQLFieldResolver; typeResolver: GraphQLTypeResolver; subscribeFieldResolver: GraphQLFieldResolver; - errors: Array; - subsequentPayloads: Set; - signal?: AbortSignal; -} - -export interface FormattedExecutionResult< - TData = Record, - TExtensions = Record, -> { - errors?: ReadonlyArray; - data?: TData | null; - extensions?: TExtensions; -} - -export interface IncrementalExecutionResults< - TData = Record, - TExtensions = Record, -> { - initialResult: InitialIncrementalExecutionResult; - subsequentResults: AsyncGenerator< - SubsequentIncrementalExecutionResult, - void, - void - >; -} - -export interface InitialIncrementalExecutionResult< - TData = Record, - TExtensions = Record, -> extends SingularExecutionResult { - hasNext: boolean; - incremental?: ReadonlyArray>; - extensions?: TExtensions; + enableEarlyExecution: boolean; + deduplicateDefers: boolean; + sendIncrementalErrorsAsNull: boolean; + sendPathAndLabelOnIncremental: boolean; + errorWithIncrementalSubscription: boolean; + signal: AbortSignal | undefined; + errors: AccumulatorMap | undefined; + cancellableStreams: Set | undefined; + incrementalDataRecords: Array | undefined; } -export interface FormattedInitialIncrementalExecutionResult< - TData = Record, - TExtensions = Record, -> extends FormattedExecutionResult { - hasNext: boolean; - incremental?: ReadonlyArray>; - extensions?: TExtensions; -} - -export interface SubsequentIncrementalExecutionResult< - TData = Record, - TExtensions = Record, -> { - hasNext: boolean; - incremental?: ReadonlyArray>; - extensions?: TExtensions; -} - -export interface FormattedSubsequentIncrementalExecutionResult< - TData = Record, - TExtensions = Record, -> { - hasNext: boolean; - incremental?: ReadonlyArray>; - extensions?: TExtensions; -} - -export interface IncrementalDeferResult< - TData = Record, - TExtensions = Record, -> extends SingularExecutionResult { - path?: ReadonlyArray; - label?: string; -} - -export interface FormattedIncrementalDeferResult< - TData = Record, - TExtensions = Record, -> extends FormattedExecutionResult { - path?: ReadonlyArray; - label?: string; -} - -export interface IncrementalStreamResult< - TData = Array, - TExtensions = Record, -> { - errors?: ReadonlyArray; - items?: TData | null; - path?: ReadonlyArray; - label?: string; - extensions?: TExtensions; -} - -export interface FormattedIncrementalStreamResult< - TData = Array, - TExtensions = Record, -> { - errors?: ReadonlyArray; - items?: TData | null; - path?: ReadonlyArray; - label?: string; - extensions?: TExtensions; +interface IncrementalContext { + errors: AccumulatorMap | undefined; + deferUsageSet?: DeferUsageSet | undefined; + incrementalDataRecords: Array | undefined; } -export type IncrementalResult< - TData = Record, - TExtensions = Record, -> = IncrementalDeferResult | IncrementalStreamResult; - -export type FormattedIncrementalResult< - TData = Record, - TExtensions = Record, -> = - | FormattedIncrementalDeferResult - | FormattedIncrementalStreamResult; - export interface ExecutionArgs { schema: GraphQLSchema; document: 
TypedDocumentNode; @@ -242,9 +169,20 @@ export interface ExecutionArgs { fieldResolver?: Maybe>; typeResolver?: Maybe>; subscribeFieldResolver?: Maybe>; + enableEarlyExecution?: Maybe; + deduplicateDefers?: Maybe; + sendIncrementalErrorsAsNull?: Maybe; + sendPathAndLabelOnIncremental?: Maybe; + errorWithIncrementalSubscription?: Maybe; signal?: AbortSignal; } +interface StreamUsage { + label: string | undefined; + initialCount: number; + fieldGroup: FieldGroup; +} + /** * Implements the "Executing requests" section of the GraphQL specification, * including `@defer` and `@stream` as proposed in @@ -282,59 +220,7 @@ export function execute( }; } - return executeImpl(exeContext); -} - -function executeImpl( - exeContext: ExecutionContext, -): MaybePromise | IncrementalExecutionResults> { - if (exeContext.signal?.aborted) { - throw exeContext.signal.reason; - } - - // Return a Promise that will eventually resolve to the data described by - // The "Response" section of the GraphQL specification. - // - // If errors are encountered while executing a GraphQL field, only that - // field and its descendants will be omitted, and sibling fields will still - // be executed. An execution which encounters errors will still result in a - // resolved Promise. - // - // Errors from sub-fields of a NonNull type may propagate to the top level, - // at which point we still log the error and null the parent field, which - // in this case is the entire response. - const result = new ValueOrPromise(() => executeOperation(exeContext)) - .then( - data => { - const initialResult = buildResponse(data, exeContext.errors); - if (exeContext.subsequentPayloads.size > 0) { - return { - initialResult: { - ...initialResult, - hasNext: true, - }, - subsequentResults: yieldSubsequentPayloads(exeContext), - }; - } - - return initialResult; - }, - (error: any) => { - if (exeContext.signal?.aborted) { - throw exeContext.signal.reason; - } - - if (error.errors) { - exeContext.errors.push(...error.errors); - } else { - exeContext.errors.push(error); - } - return buildResponse(null, exeContext.errors); - }, - ) - .resolve()!; - - return result; + return executeOperation(exeContext); } /** @@ -357,11 +243,84 @@ export function executeSync(args: ExecutionArgs): SingularExecutionResult { * Given a completed execution context and data, build the `{ errors, data }` * response defined by the "Response" section of the GraphQL specification. */ -function buildResponse( - data: TData | null, - errors: ReadonlyArray, +function buildDataResponse( + exeContext: ExecutionContext, + data: TData, +): SingularExecutionResult | IncrementalExecutionResults { + const { errors, incrementalDataRecords } = exeContext; + if (incrementalDataRecords === undefined) { + return buildSingleResult(data, errors); + } + + if (errors === undefined) { + return buildIncrementalResponse(exeContext, data, undefined, incrementalDataRecords); + } + + const filteredIncrementalDataRecords = filterIncrementalDataRecords( + undefined, + errors, + incrementalDataRecords, + ); + + if (filteredIncrementalDataRecords.length === 0) { + return buildSingleResult(data, errors); + } + + return buildIncrementalResponse( + exeContext, + data, + flattenErrors(errors), + filteredIncrementalDataRecords, + ); +} + +function buildSingleResult( + data: TData, + errors: ReadonlyMap> | undefined, ): SingularExecutionResult { - return errors.length === 0 ? { data } : { errors, data }; + return errors !== undefined ? 
{ errors: Array.from(errors.values()).flat(), data } : { data }; +} + +function filterIncrementalDataRecords( + initialPath: Path | undefined, + errors: ReadonlyMap>, + incrementalDataRecords: ReadonlyArray, +): ReadonlyArray { + const filteredIncrementalDataRecords: Array = []; + for (const incrementalDataRecord of incrementalDataRecords) { + let currentPath = incrementalDataRecord.path; + + if (errors.has(currentPath)) { + continue; + } + + const paths: Array = [currentPath]; + let filtered = false; + while (currentPath !== initialPath) { + // Because currentPath leads to initialPath or is undefined, and the + // loop will exit if initialPath is undefined, currentPath must be + // defined. + // TODO: Consider, however, adding an invariant. + + currentPath = currentPath!.prev; + if (errors.has(currentPath)) { + filtered = true; + break; + } + paths.push(currentPath); + } + + if (!filtered) { + filteredIncrementalDataRecords.push(incrementalDataRecord); + } + } + + return filteredIncrementalDataRecords; +} + +function flattenErrors(errors: ReadonlyMap>) { + const errorsByPath = [...errors.values()]; + return errorsByPath.flat(); } /** @@ -421,6 +380,11 @@ export function buildExecutionContext( exeContext: ExecutionContext, -): MaybePromise { - const { operation, schema, fragments, variableValues, rootValue } = exeContext; - const rootType = getDefinedRootType(schema, operation.operation, [operation]); - if (rootType == null) { - createGraphQLError(`Schema is not configured to execute ${operation.operation} operation.`, { - nodes: operation, - }); +): MaybePromise | IncrementalExecutionResults> { + if (exeContext.signal?.aborted) { + throw exeContext.signal.reason; } - const { fields: rootFields, patches } = collectFields( - schema, - fragments, - variableValues, - rootType, - operation.selectionSet, - ); - const path = undefined; - let result: MaybePromise; + try { + const { + operation, + schema, + fragments, + variableValues, + rootValue, + deduplicateDefers, + errorWithIncrementalSubscription, + } = exeContext; + const rootType = getDefinedRootType(schema, operation.operation, [operation]); + if (rootType == null) { + createGraphQLError(`Schema is not configured to execute ${operation.operation} operation.`, { + nodes: operation, + }); + } + + const collectedFields = collectFields( + schema, + fragments, + variableValues, + rootType, + operation.selectionSet, + errorWithIncrementalSubscription, + ); + let groupedFieldSet = collectedFields.groupedFieldSet; + const newDeferUsages = collectedFields.newDeferUsages; + let data: MaybePromise; + if (newDeferUsages.length === 0) { + data = executeRootGroupedFieldSet( + exeContext, + operation.operation, + rootType, + rootValue, + groupedFieldSet, + undefined, + ); + } else { + const executionPlan = deduplicateDefers + ? 
buildExecutionPlan(groupedFieldSet) + : buildBranchingExecutionPlan(groupedFieldSet); + groupedFieldSet = executionPlan.groupedFieldSet; + const newGroupedFieldSets = executionPlan.newGroupedFieldSets; + const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map()); + + data = executeRootGroupedFieldSet( + exeContext, + operation.operation, + rootType, + rootValue, + groupedFieldSet, + newDeferMap, + ); + + if (newGroupedFieldSets.size > 0) { + const newPendingExecutionGroups = collectExecutionGroups( + exeContext, + rootType, + rootValue, + undefined, + undefined, + newGroupedFieldSets, + newDeferMap, + ); + + addIncrementalDataRecords(exeContext, newPendingExecutionGroups); + } + } + if (isPromise(data)) { + return data.then( + resolved => buildDataResponse(exeContext, resolved), + error => { + if (exeContext.signal?.aborted) { + throw exeContext.signal.reason; + } + return { + data: null, + errors: withError(exeContext.errors, error), + }; + }, + ); + } + return buildDataResponse(exeContext, data); + } catch (error: any) { + if (exeContext.signal?.aborted) { + throw exeContext.signal.reason; + } + return { data: null, errors: withError(exeContext.errors, error) }; + } +} - if (operation.operation === 'mutation') { - result = executeFieldsSerially(exeContext, rootType, rootValue, path, rootFields); +function executeRootGroupedFieldSet( + exeContext: ExecutionContext, + operation: OperationTypeNode, + rootType: GraphQLObjectType, + rootValue: unknown, + groupedFieldSet: GroupedFieldSet, + deferMap: ReadonlyMap | undefined, +): MaybePromise { + let result: MaybePromise; + if (operation === 'mutation') { + result = executeFieldsSerially( + exeContext, + rootType, + rootValue, + undefined, + groupedFieldSet, + undefined, + deferMap, + ); } else { - result = executeFields(exeContext, rootType, rootValue, path, rootFields) as TData; + result = executeFields( + exeContext, + rootType, + rootValue, + undefined, + groupedFieldSet, + undefined, + deferMap, + ) as MaybePromise; } + return result; +} - for (const patch of patches) { - const { label, fields: patchFields } = patch; - executeDeferredFragment(exeContext, rootType, rootValue, patchFields, label, path); +function addIncrementalDataRecords( + context: ExecutionContext | IncrementalContext, + newIncrementalDataRecords: ReadonlyArray, +): void { + const incrementalDataRecords = context.incrementalDataRecords; + if (incrementalDataRecords === undefined) { + context.incrementalDataRecords = [...newIncrementalDataRecords]; + return; } + incrementalDataRecords.push(...newIncrementalDataRecords); +} - return result; +function withError( + errors: ReadonlyMap> | undefined, + error: GraphQLError | AggregateError, +): ReadonlyArray { + const newErrors = 'errors' in error ? error.errors : [error]; + return errors === undefined ? newErrors : [...flattenErrors(errors), ...newErrors]; } /** * Implements the "Executing selection sets" section of the spec * for fields that must be executed serially. 
*/ -function executeFieldsSerially( +function executeFieldsSerially( exeContext: ExecutionContext, parentType: GraphQLObjectType, sourceValue: unknown, path: Path | undefined, - fields: Map>, + groupedFieldSet: GroupedFieldSet, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise { return promiseReduce( - fields, - (results, [responseName, fieldNodes]) => { + groupedFieldSet, + (results, [responseName, fieldGroup]) => { const fieldPath = addPath(path, responseName, parentType.name); if (exeContext.signal?.aborted) { throw exeContext.signal.reason; } - return new ValueOrPromise(() => - executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath), - ).then(result => { - if (result === undefined) { - return results; - } - - results[responseName] = result; - + const result = executeField( + exeContext, + parentType, + sourceValue, + fieldGroup, + fieldPath, + incrementalContext, + deferMap, + ); + if (result === undefined) { return results; - }); + } + if (isPromise(result)) { + return result.then(resolved => { + results[responseName] = resolved; + return results; + }); + } + results[responseName] = result; + return results; }, Object.create(null), - ).resolve(); + ); } /** @@ -585,14 +682,15 @@ function executeFields( parentType: GraphQLObjectType, sourceValue: unknown, path: Path | undefined, - fields: Map>, - asyncPayloadRecord?: AsyncPayloadRecord, + groupedFieldSet: GroupedFieldSet, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { const results = Object.create(null); let containsPromise = false; try { - for (const [responseName, fieldNodes] of fields) { + for (const [responseName, fieldGroup] of groupedFieldSet) { if (exeContext.signal?.aborted) { throw exeContext.signal.reason; } @@ -602,9 +700,10 @@ function executeFields( exeContext, parentType, sourceValue, - fieldNodes, + fieldGroup, fieldPath, - asyncPayloadRecord, + incrementalContext, + deferMap, ); if (result !== undefined) { @@ -619,12 +718,12 @@ function executeFields( // Ensure that any promises returned by other fields are handled, as they may also reject. return promiseForObject(results, exeContext.signal).finally(() => { throw error; - }); + }) as never; } throw error; } - // If there are no promises, we can just return the object + // If there are no promises, we can just return the object and any incrementalDataRecords if (!containsPromise) { return results; } @@ -635,6 +734,10 @@ function executeFields( return promiseForObject(results, exeContext.signal); } +function toNodes(fieldGroup: FieldGroup): Array { + return fieldGroup.map(fieldDetails => fieldDetails.node); +} + /** * Implements the "Executing fields" section of the spec * In particular, this function figures out the value that the field returns by @@ -645,12 +748,12 @@ function executeField( exeContext: ExecutionContext, parentType: GraphQLObjectType, source: unknown, - fieldNodes: Array, + fieldGroup: FieldGroup, path: Path, - asyncPayloadRecord?: AsyncPayloadRecord, -): MaybePromise { - const errors = asyncPayloadRecord?.errors ?? 
exeContext.errors; - const fieldDef = getFieldDef(exeContext.schema, parentType, fieldNodes[0]); + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, +): MaybePromise | undefined { + const fieldDef = getFieldDef(exeContext.schema, parentType, fieldGroup[0].node); if (!fieldDef) { return; } @@ -658,14 +761,14 @@ function executeField( const returnType = fieldDef.type; const resolveFn = fieldDef.resolve ?? exeContext.fieldResolver; - const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, path); + const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), parentType, path); // Get the resolve function, regardless of if its result is normal or abrupt (error). try { // Build a JS object of arguments from the field.arguments AST, using the // variables scope to fulfill any variable references. // TODO: find a way to memoize, in case this field is within a List type. - const args = getArgumentValues(fieldDef, fieldNodes[0], exeContext.variableValues); + const args = getArgumentValues(fieldDef, fieldGroup[0].node, exeContext.variableValues); // The resolve function's optional third argument is a context value that // is provided to every resolve function within an execution. It is commonly @@ -674,61 +777,42 @@ function executeField( const result = resolveFn(source, args, contextValue, info); - let completed; if (isPromise(result)) { - completed = result.then(resolved => - completeValue(exeContext, returnType, fieldNodes, info, path, resolved, asyncPayloadRecord), - ); - } else { - completed = completeValue( + return completePromisedValue( exeContext, returnType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } + const completed = completeValue( + exeContext, + returnType, + fieldGroup, + info, + path, + result, + incrementalContext, + deferMap, + ); + if (isPromise(completed)) { // Note: we don't rely on a `catch` method, but we do expect "thenable" // to take a second callback for the error case. 
return completed.then(undefined, rawError => { - if (rawError instanceof AggregateError) { - return new AggregateError( - rawError.errors.map(rawErrorItem => { - rawErrorItem = coerceError(rawErrorItem); - const error = locatedError(rawErrorItem, fieldNodes, pathToArray(path)); - const handledError = handleFieldError(error, returnType, errors); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return handledError; - }), - ); - } - rawError = coerceError(rawError); - const error = locatedError(rawError, fieldNodes, pathToArray(path)); - const handledError = handleFieldError(error, returnType, errors); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return handledError; + handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext); + return null; }); } return completed; } catch (rawError) { - if (rawError instanceof AggregateError) { - return new AggregateError( - rawError.errors.map(rawErrorItem => { - const coercedError = coerceError(rawErrorItem); - const error = locatedError(coercedError, fieldNodes, pathToArray(path)); - return handleFieldError(error, returnType, errors); - }), - ); - } - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(path)); - const handledError = handleFieldError(error, returnType, errors); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return handledError; + handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext); + return null; } } @@ -762,10 +846,22 @@ export function buildResolveInfo( export const CRITICAL_ERROR = 'CRITICAL_ERROR' as const; function handleFieldError( - error: GraphQLError, + rawError: unknown, + exeContext: ExecutionContext, returnType: GraphQLOutputType, - errors: Array, -): null { + fieldGroup: FieldGroup, + path: Path, + incrementalContext: IncrementalContext | undefined, +): void { + if (rawError instanceof AggregateError) { + for (const rawErrorItem of rawError.errors) { + handleFieldError(rawErrorItem, exeContext, returnType, fieldGroup, path, incrementalContext); + } + return; + } + + const error = locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path)); + // If the field type is non-nullable, then it is resolved without any // protection from errors, however it still properly locates the error. if (isNonNullType(returnType)) { @@ -778,8 +874,13 @@ function handleFieldError( // Otherwise, error protection is applied, logging the error and resolving // a null value for this field if one is encountered. - errors.push(error); - return null; + const context = incrementalContext ?? exeContext; + let errors = context.errors; + if (errors === undefined) { + errors = new AccumulatorMap(); + context.errors = errors; + } + errors.add(path, error); } /** @@ -806,11 +907,12 @@ function handleFieldError( function completeValue( exeContext: ExecutionContext, returnType: GraphQLOutputType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise { // If result is an Error, throw a located error. 
if (result instanceof Error) { @@ -823,13 +925,14 @@ function completeValue( const completed = completeValue( exeContext, returnType.ofType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); - if (completed === null) { + if (completed == null) { throw new Error( `Cannot return null for non-nullable field ${info.parentType.name}.${info.fieldName}.`, ); @@ -847,11 +950,12 @@ function completeValue( return completeListValue( exeContext, returnType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } @@ -867,11 +971,12 @@ function completeValue( return completeAbstractValue( exeContext, returnType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } @@ -880,43 +985,78 @@ function completeValue( return completeObjectValue( exeContext, returnType, - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } /* c8 ignore next 6 */ // Not reachable, all possible output types have been considered. - console.assert(false, 'Cannot complete value of unexpected output type: ' + inspect(returnType)); + invariant(false, 'Cannot complete value of unexpected output type: ' + inspect(returnType)); +} + +async function completePromisedValue( + exeContext: ExecutionContext, + returnType: GraphQLOutputType, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + path: Path, + result: PromiseLike, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, +): Promise { + try { + const resolved = await result; + let completed = completeValue( + exeContext, + returnType, + fieldGroup, + info, + path, + resolved, + incrementalContext, + deferMap, + ); + + if (isPromise(completed)) { + completed = await completed; + } + return completed; + } catch (rawError) { + handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext); + return null; + } } /** - * Returns an object containing the `@stream` arguments if a field should be + * Returns an object containing info for streaming if a field should be * streamed based on the experimental flag, stream directive present and * not disabled by the "if" argument. */ -function getStreamValues( +function getStreamUsage( exeContext: ExecutionContext, - fieldNodes: Array, + fieldGroup: FieldGroup, path: Path, -): - | undefined - | { - initialCount: number | undefined; - label: string | undefined; - } { +): StreamUsage | undefined { // do not stream inner lists of multi-dimensional lists if (typeof path.key === 'number') { return; } + // TODO: add test for this case (a streamed list nested under a list). + /* c8 ignore next 7 */ + if ((fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage !== undefined) { + return (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage; + } + // validation only allows equivalent streams on multiple fields, so it is // safe to only check the first fieldNode for the stream directive const stream = getDirectiveValues( GraphQLStreamDirective, - fieldNodes[0], + fieldGroup[0].node, exeContext.variableValues, ) as { initialCount: number; @@ -936,10 +1076,25 @@ function getStreamValues( invariant(stream['initialCount'] >= 0, 'initialCount must be a positive integer'); - return { + invariant( + !exeContext.errorWithIncrementalSubscription, + '`@stream` directive not supported on subscription operations. 
Disable `@stream` by setting the `if` argument to `false`.', + ); + + const streamedFieldGroup: FieldGroup = fieldGroup.map(fieldDetails => ({ + node: fieldDetails.node, + deferUsage: undefined, + })); + + const streamUsage = { initialCount: stream['initialCount'], label: typeof stream['label'] === 'string' ? stream['label'] : undefined, + fieldGroup: streamedFieldGroup, }; + + (fieldGroup as unknown as { _streamUsage: StreamUsage })._streamUsage = streamUsage; + + return streamUsage; } /** @@ -949,98 +1104,157 @@ function getStreamValues( async function completeAsyncIteratorValue( exeContext: ExecutionContext, itemType: GraphQLOutputType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, - iterator: AsyncIterator, - asyncPayloadRecord?: AsyncPayloadRecord, + asyncIterator: AsyncIterator, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): Promise> { exeContext.signal?.addEventListener('abort', () => { - iterator.return?.(); + asyncIterator.return?.(); }); - const errors = asyncPayloadRecord?.errors ?? exeContext.errors; - const stream = getStreamValues(exeContext, fieldNodes, path); let containsPromise = false; const completedResults: Array = []; let index = 0; + const streamUsage = getStreamUsage(exeContext, fieldGroup, path); + const earlyReturn = + asyncIterator.return === undefined ? undefined : asyncIterator.return.bind(asyncIterator); + try { + while (true) { + if (streamUsage && index >= streamUsage.initialCount) { + const streamItemQueue = buildAsyncStreamItemQueue( + index, + path, + asyncIterator, + exeContext, + streamUsage.fieldGroup, + info, + itemType, + ); - while (true) { - if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) { - executeStreamIterator( - index, - iterator, - exeContext, - fieldNodes, - info, - itemType, - path, - stream.label, - asyncPayloadRecord, - ); - break; - } + let streamRecord: StreamRecord | CancellableStreamRecord; + if (earlyReturn === undefined) { + streamRecord = { + label: streamUsage.label, + path, + index, + streamItemQueue, + }; + } else { + streamRecord = { + label: streamUsage.label, + path, + index, + streamItemQueue, + earlyReturn, + }; + if (exeContext.cancellableStreams === undefined) { + exeContext.cancellableStreams = new Set(); + } + exeContext.cancellableStreams.add(streamRecord); + } - const itemPath = addPath(path, index, undefined); - let iteration; - try { - iteration = await iterator.next(); - if (iteration.done) { + const context = incrementalContext ?? exeContext; + addIncrementalDataRecords(context, [streamRecord]); break; } - } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath)); - completedResults.push(handleFieldError(error, itemType, errors)); - break; - } - if ( - completeListItemValue( - iteration.value, - completedResults, - errors, - exeContext, - itemType, - fieldNodes, - info, - itemPath, - asyncPayloadRecord, - ) - ) { - containsPromise = true; - } - index += 1; - } - return containsPromise ? 
Promise.all(completedResults) : completedResults; -} + const itemPath = addPath(path, index, undefined); + let iteration; + try { + iteration = await asyncIterator.next(); + } catch (rawError) { + throw locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path)); + } -/** + // TODO: add test case for stream returning done before initialCount + /* c8 ignore next 3 */ + if (iteration.done) { + break; + } + + const item = iteration.value; + // TODO: add tests for stream backed by asyncIterator that returns a promise + /* c8 ignore start */ + if (isPromise(item)) { + completedResults.push( + completePromisedListItemValue( + item, + exeContext, + itemType, + fieldGroup, + info, + itemPath, + incrementalContext, + deferMap, + ), + ); + containsPromise = true; + } else if ( + /* c8 ignore stop */ + completeListItemValue( + item, + completedResults, + exeContext, + itemType, + fieldGroup, + info, + itemPath, + incrementalContext, + deferMap, + ) + // TODO: add tests for stream backed by asyncIterator that completes to a promise + /* c8 ignore start */ + ) { + containsPromise = true; + } + /* c8 ignore stop */ + index++; + } + } catch (error) { + if (earlyReturn !== undefined) { + earlyReturn().catch(() => { + /* c8 ignore next 1 */ + // ignore error + }); + } + throw error; + } + + return containsPromise + ? /* c8 ignore start */ Promise.all(completedResults) + : /* c8 ignore stop */ completedResults; +} + +/** * Complete a list value by completing each item in the list with the * inner type */ function completeListValue( exeContext: ExecutionContext, returnType: GraphQLList, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { const itemType = returnType.ofType; - const errors = asyncPayloadRecord?.errors ?? exeContext.errors; if (isAsyncIterable(result)) { - const iterator = result[Symbol.asyncIterator](); + const asyncIterator = result[Symbol.asyncIterator](); return completeAsyncIteratorValue( exeContext, itemType, - fieldNodes, + fieldGroup, info, path, - iterator, - asyncPayloadRecord, + asyncIterator, + incrementalContext, + deferMap, ); } @@ -1050,52 +1264,97 @@ function completeListValue( ); } - const stream = getStreamValues(exeContext, fieldNodes, path); + return completeIterableValue( + exeContext, + itemType, + fieldGroup, + info, + path, + result, + incrementalContext, + deferMap, + ); +} +function completeIterableValue( + exeContext: ExecutionContext, + itemType: GraphQLOutputType, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + path: Path, + items: Iterable, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, +): MaybePromise> { // This is specified as a simple map, however we're optimizing the path // where the list contains no Promises by avoiding creating another Promise. 
let containsPromise = false; - let previousAsyncPayloadRecord = asyncPayloadRecord; const completedResults: Array = []; let index = 0; - for (const item of result) { + const streamUsage = getStreamUsage(exeContext, fieldGroup, path); + const iterator = items[Symbol.iterator](); + let iteration = iterator.next(); + while (!iteration.done) { + const item = iteration.value; + + if (streamUsage && index >= streamUsage.initialCount) { + const streamRecord: StreamRecord = { + label: streamUsage.label, + path, + index, + streamItemQueue: buildSyncStreamItemQueue( + item, + index, + path, + iterator, + exeContext, + streamUsage.fieldGroup, + info, + itemType, + ), + }; + + const context = incrementalContext ?? exeContext; + addIncrementalDataRecords(context, [streamRecord]); + break; + } + // No need to modify the info object containing the path, // since from here on it is not ever accessed by resolver functions. const itemPath = addPath(path, index, undefined); - if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) { - previousAsyncPayloadRecord = executeStreamField( - path, - itemPath, - item, - exeContext, - fieldNodes, - info, - itemType, - stream.label, - previousAsyncPayloadRecord, + if (isPromise(item)) { + completedResults.push( + completePromisedListItemValue( + item, + exeContext, + itemType, + fieldGroup, + info, + itemPath, + incrementalContext, + deferMap, + ), ); - index++; - continue; - } - - if ( + containsPromise = true; + } else if ( completeListItemValue( item, completedResults, - errors, exeContext, itemType, - fieldNodes, + fieldGroup, info, itemPath, - asyncPayloadRecord, + incrementalContext, + deferMap, ) ) { containsPromise = true; } - index++; + + iteration = iterator.next(); } return containsPromise ? Promise.all(completedResults) : completedResults; @@ -1109,68 +1368,85 @@ function completeListValue( function completeListItemValue( item: unknown, completedResults: Array, - errors: Array, exeContext: ExecutionContext, itemType: GraphQLOutputType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemPath: Path, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): boolean { try { - let completedItem; - if (isPromise(item)) { - completedItem = item.then(resolved => - completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - resolved, - asyncPayloadRecord, - ), - ); - } else { - completedItem = completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - item, - asyncPayloadRecord, - ); - } + const completedItem = completeValue( + exeContext, + itemType, + fieldGroup, + info, + itemPath, + item, + incrementalContext, + deferMap, + ); if (isPromise(completedItem)) { // Note: we don't rely on a `catch` method, but we do expect "thenable" // to take a second callback for the error case. 
completedResults.push( completedItem.then(undefined, rawError => { - rawError = coerceError(rawError); - const error = locatedError(rawError, fieldNodes, pathToArray(itemPath)); - const handledError = handleFieldError(error, itemType, errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - return handledError; + handleFieldError( + rawError, + exeContext, + itemType, + fieldGroup, + itemPath, + incrementalContext, + ); + return null; }), ); - return true; } completedResults.push(completedItem); } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath)); - const handledError = handleFieldError(error, itemType, errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - completedResults.push(handledError); + handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext); + completedResults.push(null); } - return false; } +async function completePromisedListItemValue( + item: unknown, + exeContext: ExecutionContext, + itemType: GraphQLOutputType, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + itemPath: Path, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, +): Promise { + try { + const resolved = await item; + let completed = completeValue( + exeContext, + itemType, + fieldGroup, + info, + itemPath, + resolved, + incrementalContext, + deferMap, + ); + if (isPromise(completed)) { + completed = await completed; + } + return completed; + } catch (rawError) { + handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext); + return null; + } +} + /** * Complete a Scalar or Enum by serializing to a valid value, returning * null if serialization is not possible. @@ -1207,11 +1483,12 @@ function completeLeafValue(returnType: GraphQLLeafType, result: unknown): unknow function completeAbstractValue( exeContext: ExecutionContext, returnType: GraphQLAbstractType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { const resolveTypeFn = returnType.resolveType ?? exeContext.typeResolver; const contextValue = exeContext.contextValue; @@ -1225,27 +1502,29 @@ function completeAbstractValue( resolvedRuntimeType, exeContext, returnType, - fieldNodes, + fieldGroup, info, result, ), - fieldNodes, + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ), ); } return completeObjectValue( exeContext, - ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldNodes, info, result), - fieldNodes, + ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldGroup, info, result), + fieldGroup, info, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } @@ -1253,14 +1532,14 @@ function ensureValidRuntimeType( runtimeTypeName: unknown, exeContext: ExecutionContext, returnType: GraphQLAbstractType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, result: unknown, ): GraphQLObjectType { if (runtimeTypeName == null) { throw createGraphQLError( `Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". 
Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, - { nodes: fieldNodes }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1286,21 +1565,21 @@ function ensureValidRuntimeType( if (runtimeType == null) { throw createGraphQLError( `Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, - { nodes: fieldNodes }, + { nodes: toNodes(fieldGroup) }, ); } if (!isObjectType(runtimeType)) { throw createGraphQLError( `Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, - { nodes: fieldNodes }, + { nodes: toNodes(fieldGroup) }, ); } if (!exeContext.schema.isSubType(returnType, runtimeType)) { throw createGraphQLError( `Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, - { nodes: fieldNodes }, + { nodes: toNodes(fieldGroup) }, ); } @@ -1313,11 +1592,12 @@ function ensureValidRuntimeType( function completeObjectValue( exeContext: ExecutionContext, returnType: GraphQLObjectType, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { // If there is an isTypeOf predicate function, call it with the // current result. If isTypeOf returns false, then raise an error rather @@ -1328,87 +1608,157 @@ function completeObjectValue( if (isPromise(isTypeOf)) { return isTypeOf.then(resolvedIsTypeOf => { if (!resolvedIsTypeOf) { - throw invalidReturnTypeError(returnType, result, fieldNodes); + throw invalidReturnTypeError(returnType, result, fieldGroup); } return collectAndExecuteSubfields( exeContext, returnType, - fieldNodes, + fieldGroup, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); }); } if (!isTypeOf) { - throw invalidReturnTypeError(returnType, result, fieldNodes); + throw invalidReturnTypeError(returnType, result, fieldGroup); } } return collectAndExecuteSubfields( exeContext, returnType, - fieldNodes, + fieldGroup, path, result, - asyncPayloadRecord, + incrementalContext, + deferMap, ); } function invalidReturnTypeError( returnType: GraphQLObjectType, result: unknown, - fieldNodes: Array, + fieldGroup: FieldGroup, ): GraphQLError { return createGraphQLError( `Expected value of type "${returnType.name}" but got: ${inspect(result)}.`, - { - nodes: fieldNodes, - }, + { nodes: toNodes(fieldGroup) }, ); } +function addNewDeferredFragments( + newDeferUsages: ReadonlyArray, + newDeferMap: Map, + path?: Path | undefined, +): ReadonlyMap { + // For each new deferUsage object: + for (const newDeferUsage of newDeferUsages) { + const parentDeferUsage = newDeferUsage.parentDeferUsage; + + const parent = + parentDeferUsage === undefined + ? undefined + : deferredFragmentRecordFromDeferUsage(parentDeferUsage, newDeferMap); + + // Instantiate the new record. + const deferredFragmentRecord = new DeferredFragmentRecord(path, newDeferUsage.label, parent); + + // Update the map. 
+ newDeferMap.set(newDeferUsage, deferredFragmentRecord); + } + + return newDeferMap; +} + +function deferredFragmentRecordFromDeferUsage( + deferUsage: DeferUsage, + deferMap: ReadonlyMap, +): DeferredFragmentRecord { + return deferMap.get(deferUsage)!; +} + function collectAndExecuteSubfields( exeContext: ExecutionContext, returnType: GraphQLObjectType, - fieldNodes: Array, + fieldGroup: FieldGroup, path: Path, result: unknown, - asyncPayloadRecord?: AsyncPayloadRecord, + incrementalContext: IncrementalContext | undefined, + deferMap: ReadonlyMap | undefined, ): MaybePromise> { // Collect sub-fields to execute to complete this value. - const { fields: subFieldNodes, patches: subPatches } = collectSubfields( - exeContext, - returnType, - fieldNodes, + const collectedSubfields = collectSubfields(exeContext, returnType, fieldGroup); + let groupedFieldSet = collectedSubfields.groupedFieldSet; + const newDeferUsages = collectedSubfields.newDeferUsages; + if (deferMap === undefined && newDeferUsages.length === 0) { + return executeFields( + exeContext, + returnType, + result, + path, + groupedFieldSet, + incrementalContext, + undefined, + ); + } + const subExecutionPlan = buildSubExecutionPlan( + groupedFieldSet, + incrementalContext?.deferUsageSet, + exeContext.deduplicateDefers, ); + groupedFieldSet = subExecutionPlan.groupedFieldSet; + const newGroupedFieldSets = subExecutionPlan.newGroupedFieldSets; + const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map(deferMap), path); + const subFields = executeFields( exeContext, returnType, result, path, - subFieldNodes, - asyncPayloadRecord, + groupedFieldSet, + incrementalContext, + newDeferMap, ); - for (const subPatch of subPatches) { - const { label, fields: subPatchFieldNodes } = subPatch; - executeDeferredFragment( + if (newGroupedFieldSets.size > 0) { + const newPendingExecutionGroups = collectExecutionGroups( exeContext, returnType, result, - subPatchFieldNodes, - label, path, - asyncPayloadRecord, + incrementalContext?.deferUsageSet, + newGroupedFieldSets, + newDeferMap, ); - } + const context = incrementalContext ?? exeContext; + addIncrementalDataRecords(context, newPendingExecutionGroups); + } return subFields; } +function buildSubExecutionPlan( + originalGroupedFieldSet: GroupedFieldSet, + deferUsageSet: DeferUsageSet | undefined, + deduplicateDefers: boolean, +): ExecutionPlan { + let executionPlan = (originalGroupedFieldSet as unknown as { _executionPlan: ExecutionPlan }) + ._executionPlan; + if (executionPlan !== undefined) { + return executionPlan; + } + executionPlan = deduplicateDefers + ? buildExecutionPlan(originalGroupedFieldSet, deferUsageSet) + : buildBranchingExecutionPlan(originalGroupedFieldSet, deferUsageSet); + (originalGroupedFieldSet as unknown as { _executionPlan: ExecutionPlan })._executionPlan = + executionPlan; + return executionPlan; +} + /** * If a resolveType function is not given, then a default resolve behavior is * used which attempts two strategies: @@ -1514,6 +1864,25 @@ export const defaultFieldResolver: GraphQLFieldResolver = func * * Accepts an object with named arguments. 
*/ +export function subscribe( + args: ExecutionArgs & { + errorWithIncrementalSubscription: true | undefined | null; + }, +): MaybePromise< + AsyncGenerator, void, void> | SingularExecutionResult +>; +export function subscribe( + args: ExecutionArgs, +): MaybePromise< + | AsyncGenerator< + | SingularExecutionResult + | InitialIncrementalExecutionResult + | SubsequentIncrementalExecutionResult, + void, + void + > + | SingularExecutionResult +>; export function subscribe( args: ExecutionArgs, ): MaybePromise< @@ -1643,7 +2012,9 @@ function mapSourceToResponse( mapAsyncIterator( resultOrStream[Symbol.asyncIterator](), async (payload: unknown) => - ensureAsyncIterable(await executeImpl(buildPerEventExecutionContext(exeContext, payload))), + ensureAsyncIterable( + await executeOperation(buildPerEventExecutionContext(exeContext, payload)), + ), (error: Error) => { if (error instanceof AggregateError) { throw new AggregateError( @@ -1680,7 +2051,14 @@ function createSourceEventStreamImpl( } function executeSubscription(exeContext: ExecutionContext): MaybePromise> { - const { schema, fragments, operation, variableValues, rootValue } = exeContext; + const { + schema, + fragments, + operation, + variableValues, + rootValue, + errorWithIncrementalSubscription, + } = exeContext; const rootType = schema.getSubscriptionType(); if (rootType == null) { @@ -1689,25 +2067,27 @@ function executeSubscription(exeContext: ExecutionContext): MaybePromise { - throw locatedError(error, fieldNodes, pathToArray(path)); + throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); }); } return assertEventStream(result, exeContext.signal); } catch (error) { - throw locatedError(error, fieldNodes, pathToArray(path)); + throw locatedError(error, toNodes(fieldGroup), pathToArray(path)); } } @@ -1761,495 +2141,446 @@ function assertEventStream(result: unknown, signal?: AbortSignal): AsyncIterable }; } -function executeDeferredFragment( +function collectExecutionGroups( exeContext: ExecutionContext, parentType: GraphQLObjectType, sourceValue: unknown, - fields: Map>, - label?: string, - path?: Path, - parentContext?: AsyncPayloadRecord, -): void { - const asyncPayloadRecord = new DeferredFragmentRecord({ - label, - path, - parentContext, - exeContext, - }); - let promiseOrData; - try { - promiseOrData = executeFields( - exeContext, - parentType, - sourceValue, + path: Path | undefined, + parentDeferUsages: DeferUsageSet | undefined, + newGroupedFieldSets: Map, + deferMap: ReadonlyMap, +): ReadonlyArray { + const newPendingExecutionGroups: Array = []; + + for (const [deferUsageSet, groupedFieldSet] of newGroupedFieldSets) { + const deferredFragmentRecords = getDeferredFragmentRecords(deferUsageSet, deferMap); + + const pendingExecutionGroup: PendingExecutionGroup = { path, - fields, - asyncPayloadRecord, - ); + deferredFragmentRecords, + result: undefined as unknown as BoxedPromiseOrValue, + }; - if (isPromise(promiseOrData)) { - promiseOrData = promiseOrData.then(null, e => { - asyncPayloadRecord.errors.push(e); - return null; - }); + const executor = () => + executeExecutionGroup( + pendingExecutionGroup, + exeContext, + parentType, + sourceValue, + path, + groupedFieldSet, + { + errors: undefined, + deferUsageSet, + incrementalDataRecords: undefined, + }, + deferMap, + ); + + if (exeContext.enableEarlyExecution) { + pendingExecutionGroup.result = new BoxedPromiseOrValue( + shouldDefer(parentDeferUsages, deferUsageSet) + ? 
Promise.resolve().then(executor) + : executor(), + ); + } else { + pendingExecutionGroup.result = () => new BoxedPromiseOrValue(executor()); + const resolveThunk = () => { + const maybeThunk = pendingExecutionGroup.result; + if (!(maybeThunk instanceof BoxedPromiseOrValue)) { + pendingExecutionGroup.result = maybeThunk(); + } + }; + for (const deferredFragmentRecord of deferredFragmentRecords) { + deferredFragmentRecord.onPending(resolveThunk); + } } - } catch (e) { - asyncPayloadRecord.errors.push(e as GraphQLError); - promiseOrData = null; + + newPendingExecutionGroups.push(pendingExecutionGroup); } - asyncPayloadRecord.addData(promiseOrData); + + return newPendingExecutionGroups; } -function executeStreamField( - path: Path, - itemPath: Path, - item: MaybePromise, +function shouldDefer( + parentDeferUsages: undefined | DeferUsageSet, + deferUsages: DeferUsageSet, +): boolean { + // If we have a new child defer usage, defer. + // Otherwise, this defer usage was already deferred when it was initially + // encountered, and is now in the midst of executing early, so the new + // deferred grouped fields set can be executed immediately. + return ( + parentDeferUsages === undefined || + !Array.from(deferUsages).every(deferUsage => parentDeferUsages.has(deferUsage)) + ); +} + +function executeExecutionGroup( + pendingExecutionGroup: PendingExecutionGroup, exeContext: ExecutionContext, - fieldNodes: Array, - info: GraphQLResolveInfo, - itemType: GraphQLOutputType, - label?: string, - parentContext?: AsyncPayloadRecord, -): AsyncPayloadRecord { - const asyncPayloadRecord = new StreamRecord({ - label, - path: itemPath, - parentContext, - exeContext, - }); - let completedItem: MaybePromise; + parentType: GraphQLObjectType, + sourceValue: unknown, + path: Path | undefined, + groupedFieldSet: GroupedFieldSet, + incrementalContext: IncrementalContext, + deferMap: ReadonlyMap, +): MaybePromise { + let result; try { - try { - if (isPromise(item)) { - completedItem = item.then(resolved => - completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - resolved, - asyncPayloadRecord, - ), - ); - } else { - completedItem = completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - item, - asyncPayloadRecord, - ); - } + result = executeFields( + exeContext, + parentType, + sourceValue, + path, + groupedFieldSet, + incrementalContext, + deferMap, + ); + } catch (error: any) { + return { + pendingExecutionGroup, + path: pathToArray(path), + errors: withError(incrementalContext.errors, error), + }; + } - if (isPromise(completedItem)) { - // Note: we don't rely on a `catch` method, but we do expect "thenable" - // to take a second callback for the error case. 
- completedItem = completedItem.then(undefined, rawError => { - rawError = coerceError(rawError); - const error = locatedError(rawError, fieldNodes, pathToArray(itemPath)); - const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - return handledError; - }); - } - } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath)); - completedItem = handleFieldError(error, itemType, asyncPayloadRecord.errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - } - } catch (error) { - asyncPayloadRecord.errors.push(error as GraphQLError); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - asyncPayloadRecord.addItems(null); - return asyncPayloadRecord; - } - - let completedItems: MaybePromise | null>; - if (isPromise(completedItem)) { - completedItems = completedItem.then( - value => [value], - error => { - asyncPayloadRecord.errors.push(error); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return null; - }, + if (isPromise(result)) { + return result.then( + resolved => + buildCompletedExecutionGroup(incrementalContext, pendingExecutionGroup, path, resolved), + error => ({ + pendingExecutionGroup, + path: pathToArray(path), + errors: withError(incrementalContext.errors, error), + }), ); - } else { - completedItems = [completedItem]; } - asyncPayloadRecord.addItems(completedItems); - return asyncPayloadRecord; + return buildCompletedExecutionGroup(incrementalContext, pendingExecutionGroup, path, result); } -async function executeStreamIteratorItem( - iterator: AsyncIterator, - exeContext: ExecutionContext, - fieldNodes: Array, - info: GraphQLResolveInfo, - itemType: GraphQLOutputType, - asyncPayloadRecord: StreamRecord, - itemPath: Path, -): Promise> { - let item; - try { - const { value, done } = await iterator.next(); - if (done) { - asyncPayloadRecord.setIsCompletedIterator(); - return { done, value: undefined }; - } - item = value; - } catch (rawError) { - const coercedError = coerceError(rawError); - const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath)); - const value = handleFieldError(error, itemType, asyncPayloadRecord.errors); - // don't continue if iterator throws - return { done: true, value }; +function buildCompletedExecutionGroup( + incrementalContext: IncrementalContext, + pendingExecutionGroup: PendingExecutionGroup, + path: Path | undefined, + data: Record, +): CompletedExecutionGroup { + const { errors, incrementalDataRecords } = incrementalContext; + if (incrementalDataRecords === undefined) { + return { + pendingExecutionGroup, + path: pathToArray(path), + result: errors === undefined ? 
{ data } : { data, errors: [...flattenErrors(errors)] }, + incrementalDataRecords, + }; } - let completedItem; - try { - completedItem = completeValue( - exeContext, - itemType, - fieldNodes, - info, - itemPath, - item, - asyncPayloadRecord, - ); - if (isPromise(completedItem)) { - completedItem = completedItem.then(undefined, rawError => { - const error = locatedError(rawError, fieldNodes, pathToArray(itemPath)); - const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - return handledError; - }); - } - return { done: false, value: completedItem }; - } catch (rawError) { - const error = locatedError(rawError, fieldNodes, pathToArray(itemPath)); - const value = handleFieldError(error, itemType, asyncPayloadRecord.errors); - filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord); - return { done: false, value }; + if (errors === undefined) { + return { + pendingExecutionGroup, + path: pathToArray(path), + result: { data }, + incrementalDataRecords, + }; } + + return { + pendingExecutionGroup, + path: pathToArray(path), + result: { data, errors: [...flattenErrors(errors)] }, + incrementalDataRecords: filterIncrementalDataRecords(path, errors, incrementalDataRecords), + }; } -async function executeStreamIterator( +function getDeferredFragmentRecords( + deferUsages: DeferUsageSet, + deferMap: ReadonlyMap, +): ReadonlyArray { + return Array.from(deferUsages).map(deferUsage => + deferredFragmentRecordFromDeferUsage(deferUsage, deferMap), + ); +} + +function buildSyncStreamItemQueue( + initialItem: MaybePromise, initialIndex: number, - iterator: AsyncIterator, + streamPath: Path, + iterator: Iterator, exeContext: ExecutionContext, - fieldNodes: Array, + fieldGroup: FieldGroup, info: GraphQLResolveInfo, itemType: GraphQLOutputType, - path: Path, - label?: string, - parentContext?: AsyncPayloadRecord, -): Promise { - let index = initialIndex; - let previousAsyncPayloadRecord = parentContext ?? 
undefined; - while (true) { - const itemPath = addPath(path, index, undefined); - const asyncPayloadRecord = new StreamRecord({ - label, - path: itemPath, - parentContext: previousAsyncPayloadRecord, - iterator, - exeContext, - }); - - let iteration; - try { - iteration = await executeStreamIteratorItem( - iterator, +): Array { + const streamItemQueue: Array = []; + + const enableEarlyExecution = exeContext.enableEarlyExecution; + + const firstExecutor = () => { + const initialPath = addPath(streamPath, initialIndex, undefined); + const firstStreamItem = new BoxedPromiseOrValue( + completeStreamItem( + streamPath, + initialPath, + initialItem, exeContext, - fieldNodes, + { errors: undefined, incrementalDataRecords: undefined }, + fieldGroup, info, itemType, - asyncPayloadRecord, - itemPath, - ); - } catch (error) { - asyncPayloadRecord.errors.push(error as GraphQLError); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - asyncPayloadRecord.addItems(null); - // entire stream has errored and bubbled upwards - if (iterator?.return) { - iterator.return().catch(() => { - // ignore errors - }); - } - return; - } + ), + ); - const { done, value: completedItem } = iteration; + let iteration = iterator.next(); + let currentIndex = initialIndex + 1; + let currentStreamItem: + | BoxedPromiseOrValue + | (() => BoxedPromiseOrValue) = firstStreamItem; + while (!iteration.done) { + // TODO: add test case for early sync termination + /* c8 ignore next 6 */ + if (currentStreamItem instanceof BoxedPromiseOrValue) { + const result = currentStreamItem.value; + if (!isPromise(result) && result.errors !== undefined) { + break; + } + } - let completedItems: MaybePromise | null>; - if (isPromise(completedItem)) { - completedItems = completedItem.then( - value => [value], - error => { - asyncPayloadRecord.errors.push(error); - filterSubsequentPayloads(exeContext, path, asyncPayloadRecord); - return null; - }, - ); - } else { - completedItems = [completedItem]; - } + const itemPath = addPath(streamPath, currentIndex, undefined); - asyncPayloadRecord.addItems(completedItems); + const value = iteration.value; - if (done) { - break; - } - previousAsyncPayloadRecord = asyncPayloadRecord; - index++; - } -} + const currentExecutor = () => + completeStreamItem( + streamPath, + itemPath, + value, + exeContext, + { errors: undefined, incrementalDataRecords: undefined }, + fieldGroup, + info, + itemType, + ); -function filterSubsequentPayloads( - exeContext: ExecutionContext, - nullPath: Path, - currentAsyncRecord: AsyncPayloadRecord | undefined, -): void { - const nullPathArray = pathToArray(nullPath); - exeContext.subsequentPayloads.forEach(asyncRecord => { - if (asyncRecord === currentAsyncRecord) { - // don't remove payload from where error originates - return; - } - for (let i = 0; i < nullPathArray.length; i++) { - if (asyncRecord.path[i] !== nullPathArray[i]) { - // asyncRecord points to a path unaffected by this payload - return; - } - } - // asyncRecord path points to nulled error field - if (isStreamPayload(asyncRecord) && asyncRecord.iterator?.return) { - asyncRecord.iterator.return().catch(() => { - // ignore error - }); - } - exeContext.subsequentPayloads.delete(asyncRecord); - }); -} + currentStreamItem = enableEarlyExecution + ? 
new BoxedPromiseOrValue(currentExecutor()) + : () => new BoxedPromiseOrValue(currentExecutor()); -function getCompletedIncrementalResults(exeContext: ExecutionContext): Array { - const incrementalResults: Array = []; - for (const asyncPayloadRecord of exeContext.subsequentPayloads) { - const incrementalResult: IncrementalResult = {}; - if (!asyncPayloadRecord.isCompleted) { - continue; - } - exeContext.subsequentPayloads.delete(asyncPayloadRecord); - if (isStreamPayload(asyncPayloadRecord)) { - const items = asyncPayloadRecord.items; - if (asyncPayloadRecord.isCompletedIterator) { - // async iterable resolver just finished but there may be pending payloads - continue; - } - (incrementalResult as IncrementalStreamResult).items = items; - } else { - const data = asyncPayloadRecord.data; - (incrementalResult as IncrementalDeferResult).data = data ?? null; - } + streamItemQueue.push(currentStreamItem); - incrementalResult.path = asyncPayloadRecord.path; - if (asyncPayloadRecord.label) { - incrementalResult.label = asyncPayloadRecord.label; + iteration = iterator.next(); + currentIndex = initialIndex + 1; } - if (asyncPayloadRecord.errors.length > 0) { - incrementalResult.errors = asyncPayloadRecord.errors; - } - incrementalResults.push(incrementalResult); - } - return incrementalResults; -} -function yieldSubsequentPayloads( - exeContext: ExecutionContext, -): AsyncGenerator { - let isDone = false; - - const abortPromise = new Promise((_, reject) => { - exeContext.signal?.addEventListener('abort', () => { - isDone = true; - reject(exeContext.signal?.reason); - }); - }); + streamItemQueue.push(new BoxedPromiseOrValue({ path: streamPath })); - async function next(): Promise> { - if (isDone) { - return { value: undefined, done: true }; - } + return firstStreamItem.value; + }; - await Promise.race([ - abortPromise, - ...Array.from(exeContext.subsequentPayloads).map(p => p.promise), - ]); + streamItemQueue.push( + enableEarlyExecution + ? new BoxedPromiseOrValue(Promise.resolve().then(firstExecutor)) + : () => new BoxedPromiseOrValue(firstExecutor()), + ); - if (isDone) { - // a different call to next has exhausted all payloads - return { value: undefined, done: true }; - } + return streamItemQueue; +} - const incremental = getCompletedIncrementalResults(exeContext); - const hasNext = exeContext.subsequentPayloads.size > 0; +function buildAsyncStreamItemQueue( + initialIndex: number, + streamPath: Path, + asyncIterator: AsyncIterator, + exeContext: ExecutionContext, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + itemType: GraphQLOutputType, +): Array { + const streamItemQueue: Array = []; + const executor = () => + getNextAsyncStreamItemResult( + streamItemQueue, + streamPath, + initialIndex, + asyncIterator, + exeContext, + fieldGroup, + info, + itemType, + ); - if (!incremental.length && hasNext) { - return next(); - } + streamItemQueue.push( + exeContext.enableEarlyExecution + ? new BoxedPromiseOrValue(executor()) + : () => new BoxedPromiseOrValue(executor()), + ); - if (!hasNext) { - isDone = true; - } + return streamItemQueue; +} +async function getNextAsyncStreamItemResult( + streamItemQueue: Array, + streamPath: Path, + index: number, + asyncIterator: AsyncIterator, + exeContext: ExecutionContext, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + itemType: GraphQLOutputType, +): Promise { + let iteration; + try { + iteration = await asyncIterator.next(); + } catch (error) { return { - value: incremental.length ? 
{ incremental, hasNext } : { hasNext }, - done: false, + path: streamPath, + errors: [locatedError(coerceError(error), toNodes(fieldGroup), pathToArray(streamPath))], }; } - function returnStreamIterators() { - const promises: Array>> = []; - exeContext.subsequentPayloads.forEach(asyncPayloadRecord => { - if (isStreamPayload(asyncPayloadRecord) && asyncPayloadRecord.iterator?.return) { - promises.push(asyncPayloadRecord.iterator.return()); - } - }); - return Promise.all(promises); + if (iteration.done) { + return { path: streamPath }; } - return { - [Symbol.asyncIterator]() { - return this; - }, - next, - async return(): Promise> { - await returnStreamIterators(); - isDone = true; - return { value: undefined, done: true }; - }, - async throw(error?: unknown): Promise> { - await returnStreamIterators(); - isDone = true; - return Promise.reject(error); - }, - }; -} + const itemPath = addPath(streamPath, index, undefined); -class DeferredFragmentRecord { - type: 'defer'; - errors: Array; - label: string | undefined; - path: Array; - promise: Promise; - data: Record | null; - parentContext: AsyncPayloadRecord | undefined; - isCompleted: boolean; - _exeContext: ExecutionContext; - _resolve?: (arg: MaybePromise | null>) => void; - constructor(opts: { - label: string | undefined; - path: Path | undefined; - parentContext: AsyncPayloadRecord | undefined; - exeContext: ExecutionContext; - }) { - this.type = 'defer'; - this.label = opts.label; - this.path = pathToArray(opts.path); - this.parentContext = opts.parentContext; - this.errors = []; - this._exeContext = opts.exeContext; - this._exeContext.subsequentPayloads.add(this); - this.isCompleted = false; - this.data = null; - this.promise = new Promise | null>(resolve => { - this._resolve = MaybePromise => { - resolve(MaybePromise); - }; - }).then(data => { - this.data = data; - this.isCompleted = true; - }); - } + const result = completeStreamItem( + streamPath, + itemPath, + iteration.value, + exeContext, + { errors: undefined, incrementalDataRecords: undefined }, + fieldGroup, + info, + itemType, + ); - addData(data: MaybePromise | null>) { - const parentData = this.parentContext?.promise; - if (parentData) { - this._resolve?.(parentData.then(() => data)); - return; - } - this._resolve?.(data); - } + const executor = () => + getNextAsyncStreamItemResult( + streamItemQueue, + streamPath, + index, + asyncIterator, + exeContext, + fieldGroup, + info, + itemType, + ); + + streamItemQueue.push( + exeContext.enableEarlyExecution + ? 
new BoxedPromiseOrValue(executor()) + : () => new BoxedPromiseOrValue(executor()), + ); + + return result; } -class StreamRecord { - type: 'stream'; - errors: Array; - label: string | undefined; - path: Array; - items: Array | null; - promise: Promise; - parentContext: AsyncPayloadRecord | undefined; - iterator: AsyncIterator | undefined; - isCompletedIterator?: boolean; - isCompleted: boolean; - _exeContext: ExecutionContext; - _resolve?: (arg: MaybePromise | null>) => void; - constructor(opts: { - label: string | undefined; - path: Path | undefined; - iterator?: AsyncIterator; - parentContext: AsyncPayloadRecord | undefined; - exeContext: ExecutionContext; - }) { - this.type = 'stream'; - this.items = null; - this.label = opts.label; - this.path = pathToArray(opts.path); - this.parentContext = opts.parentContext; - this.iterator = opts.iterator; - this.errors = []; - this._exeContext = opts.exeContext; - this._exeContext.subsequentPayloads.add(this); - this.isCompleted = false; - this.items = null; - this.promise = new Promise | null>(resolve => { - this._resolve = MaybePromise => { - resolve(MaybePromise); - }; - }).then(items => { - this.items = items; - this.isCompleted = true; - }); +function completeStreamItem( + streamPath: Path, + itemPath: Path, + item: unknown, + exeContext: ExecutionContext, + incrementalContext: IncrementalContext, + fieldGroup: FieldGroup, + info: GraphQLResolveInfo, + itemType: GraphQLOutputType, +): MaybePromise { + if (isPromise(item)) { + return completePromisedValue( + exeContext, + itemType, + fieldGroup, + info, + itemPath, + item, + incrementalContext, + new Map(), + ).then( + resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), + error => ({ + path: streamPath, + errors: withError(incrementalContext.errors, error), + }), + ); } - addItems(items: MaybePromise | null>) { - const parentData = this.parentContext?.promise; - if (parentData) { - this._resolve?.(parentData.then(() => items)); - return; + let result: MaybePromise; + try { + try { + result = completeValue( + exeContext, + itemType, + fieldGroup, + info, + itemPath, + item, + incrementalContext, + new Map(), + ); + } catch (rawError) { + handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext); + result = null; } - this._resolve?.(items); + } catch (error: any) { + return { + path: streamPath, + errors: withError(incrementalContext.errors, error), + }; } - setIsCompletedIterator() { - this.isCompletedIterator = true; + if (isPromise(result)) { + return result + .then(undefined, rawError => { + handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext); + return null; + }) + .then( + resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), + error => ({ + path: streamPath, + errors: withError(incrementalContext.errors, error), + }), + ); } + + return buildStreamItemResult(incrementalContext, streamPath, result); } -type AsyncPayloadRecord = DeferredFragmentRecord | StreamRecord; +function buildStreamItemResult( + incrementalContext: IncrementalContext, + streamPath: Path, + item: unknown, +): StreamItemResult { + const { errors, incrementalDataRecords } = incrementalContext; + if (incrementalDataRecords === undefined) { + return { + path: streamPath, + item, + errors: errors === undefined ? 
undefined : [...flattenErrors(errors)], + incrementalDataRecords, + }; + } + + if (errors === undefined) { + return { + path: streamPath, + item, + errors, + incrementalDataRecords, + }; + } -function isStreamPayload(asyncPayload: AsyncPayloadRecord): asyncPayload is StreamRecord { - return asyncPayload.type === 'stream'; + return { + path: streamPath, + item, + errors: [...flattenErrors(errors)], + incrementalDataRecords: filterIncrementalDataRecords( + streamPath, + errors, + incrementalDataRecords, + ), + }; } - /** * This method looks up the field on the given type definition. * It has special casing for the three introspection fields, diff --git a/packages/executor/src/execution/getBySet.ts b/packages/executor/src/execution/getBySet.ts new file mode 100644 index 00000000000..4ddabd30021 --- /dev/null +++ b/packages/executor/src/execution/getBySet.ts @@ -0,0 +1,13 @@ +import { isSameSet } from './isSameSet.js'; + +export function getBySet( + map: ReadonlyMap, U>, + setToMatch: ReadonlySet, +): U | undefined { + for (const set of map.keys()) { + if (isSameSet(set, setToMatch)) { + return map.get(set); + } + } + return undefined; +} diff --git a/packages/executor/src/execution/isSameSet.ts b/packages/executor/src/execution/isSameSet.ts new file mode 100644 index 00000000000..f2837d848cd --- /dev/null +++ b/packages/executor/src/execution/isSameSet.ts @@ -0,0 +1,11 @@ +export function isSameSet(setA: ReadonlySet, setB: ReadonlySet): boolean { + if (setA.size !== setB.size) { + return false; + } + for (const item of setA) { + if (!setB.has(item)) { + return false; + } + } + return true; +} diff --git a/packages/executor/src/execution/types.ts b/packages/executor/src/execution/types.ts new file mode 100644 index 00000000000..828337c04cc --- /dev/null +++ b/packages/executor/src/execution/types.ts @@ -0,0 +1,296 @@ +import type { GraphQLError, GraphQLFormattedError } from 'graphql'; +import type { Path } from '@graphql-tools/utils'; +import type { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js'; + +/** + * The result of GraphQL execution. + * + * - `errors` is included when any errors occurred as a non-empty array. + * - `data` is the result of a successful execution of the query. + * - `hasNext` is true if a future payload is expected. + * - `extensions` is reserved for adding non-standard properties. + * - `incremental` is a list of the results from defer/stream directives. 
+ */ +export interface SingularExecutionResult { + errors?: ReadonlyArray; + data?: TData | null; + extensions?: TExtensions; +} + +export interface FormattedExecutionResult< + TData = Record, + TExtensions = Record, +> { + errors?: ReadonlyArray; + data?: TData | null; + extensions?: TExtensions; +} + +export interface IncrementalExecutionResults< + TData = unknown, + TExtensions = Record, +> { + initialResult: InitialIncrementalExecutionResult; + subsequentResults: AsyncGenerator< + SubsequentIncrementalExecutionResult, + void, + void + >; +} + +export interface InitialIncrementalExecutionResult< + TData = Record, + TExtensions = Record, +> extends SingularExecutionResult { + data: TData; + pending: ReadonlyArray; + hasNext: true; + extensions?: TExtensions; +} + +export interface FormattedInitialIncrementalExecutionResult< + TData = Record, + TExtensions = Record, +> extends FormattedExecutionResult { + data: TData; + pending: ReadonlyArray; + hasNext: boolean; + extensions?: TExtensions; +} + +export interface SubsequentIncrementalExecutionResult< + TData = unknown, + TExtensions = Record, +> { + pending?: ReadonlyArray; + incremental?: ReadonlyArray>; + completed?: ReadonlyArray; + hasNext: boolean; + extensions?: TExtensions; +} + +export interface FormattedSubsequentIncrementalExecutionResult< + TData = unknown, + TExtensions = Record, +> { + hasNext: boolean; + pending?: ReadonlyArray; + incremental?: ReadonlyArray>; + completed?: ReadonlyArray; + extensions?: TExtensions; +} + +interface ExecutionGroupResult> { + errors?: ReadonlyArray; + data: TData; +} + +export interface IncrementalDeferResult< + TData = Record, + TExtensions = Record, +> { + errors?: ReadonlyArray; + data: TData | null; + id: string; + path?: ReadonlyArray; + label?: string; + subPath?: ReadonlyArray; + extensions?: TExtensions; +} + +export interface FormattedIncrementalDeferResult< + TData = Record, + TExtensions = Record, +> { + errors?: ReadonlyArray; + data: TData | null; + id: string; + path?: ReadonlyArray; + label?: string; + subPath?: ReadonlyArray; + extensions?: TExtensions; +} + +interface StreamItemsRecordResult> { + errors?: ReadonlyArray; + items: TData; +} + +export interface IncrementalStreamResult< + TData = ReadonlyArray, + TExtensions = Record, +> { + errors?: ReadonlyArray; + items: TData | null; + id: string; + path?: ReadonlyArray; + label?: string; + extensions?: TExtensions; +} + +export interface FormattedIncrementalStreamResult< + TData = Array, + TExtensions = Record, +> { + errors?: ReadonlyArray; + items: TData | null; + id: string; + path?: ReadonlyArray; + label?: string; + extensions?: TExtensions; +} + +export type IncrementalResult> = + | IncrementalDeferResult + | IncrementalStreamResult; + +export type FormattedIncrementalResult> = + | FormattedIncrementalDeferResult + | FormattedIncrementalStreamResult; + +export interface PendingResult { + id: string; + path: ReadonlyArray; + label?: string; +} + +export interface CompletedResult { + id: string; + errors?: ReadonlyArray; +} + +export interface FormattedCompletedResult { + path: ReadonlyArray; + label?: string; + errors?: ReadonlyArray; +} + +export function isPendingExecutionGroup( + incrementalDataRecord: IncrementalDataRecord, +): incrementalDataRecord is PendingExecutionGroup { + return 'deferredFragmentRecords' in incrementalDataRecord; +} + +export type CompletedExecutionGroup = SuccessfulExecutionGroup | FailedExecutionGroup; + +export function isCompletedExecutionGroup( + incrementalDataRecordResult: 
IncrementalDataRecordResult, +): incrementalDataRecordResult is CompletedExecutionGroup { + return 'pendingExecutionGroup' in incrementalDataRecordResult; +} + +export interface SuccessfulExecutionGroup { + pendingExecutionGroup: PendingExecutionGroup; + path: Array; + result: ExecutionGroupResult; + incrementalDataRecords: ReadonlyArray | undefined; + errors?: never; +} + +interface FailedExecutionGroup { + pendingExecutionGroup: PendingExecutionGroup; + path: Array; + errors: ReadonlyArray; + result?: never; +} + +export function isFailedExecutionGroup( + completedExecutionGroup: CompletedExecutionGroup, +): completedExecutionGroup is FailedExecutionGroup { + return completedExecutionGroup.errors !== undefined; +} + +export interface PendingExecutionGroup { + path: Path | undefined; + deferredFragmentRecords: ReadonlyArray; + result: + | BoxedPromiseOrValue + | (() => BoxedPromiseOrValue); +} + +export type DeliveryGroup = DeferredFragmentRecord | StreamRecord; + +/** @internal */ +export class DeferredFragmentRecord { + path: Path | undefined; + label: string | undefined; + id?: string | undefined; + parent: DeferredFragmentRecord | undefined; + pendingExecutionGroups: Set; + successfulExecutionGroups: Set; + children: Set; + pending: boolean; + fns: Array<() => void>; + + constructor( + path: Path | undefined, + label: string | undefined, + parent: DeferredFragmentRecord | undefined, + ) { + this.path = path; + this.label = label; + this.parent = parent; + this.pendingExecutionGroups = new Set(); + this.successfulExecutionGroups = new Set(); + this.children = new Set(); + this.pending = false; + this.fns = []; + } + + onPending(fn: () => void): void { + this.fns.push(fn); + } + + setAsPending(): void { + this.pending = true; + for (const fn of this.fns) { + fn(); + } + } +} + +export function isDeferredFragmentRecord( + deliveryGroup: DeliveryGroup, +): deliveryGroup is DeferredFragmentRecord { + return deliveryGroup instanceof DeferredFragmentRecord; +} + +export interface StreamItemResult { + path: Path; + item?: unknown; + incrementalDataRecords?: ReadonlyArray | undefined; + errors?: ReadonlyArray | undefined; +} + +export type StreamItemRecord = + | BoxedPromiseOrValue + | (() => BoxedPromiseOrValue); + +export interface StreamRecord { + path: Path; + label: string | undefined; + index: number; + id?: string | undefined; + streamItemQueue: Array; +} + +export interface StreamItemsResult { + streamRecord: StreamRecord; + result?: StreamItemsRecordResult | undefined; + incrementalDataRecords?: ReadonlyArray | undefined; + errors?: ReadonlyArray | undefined; +} + +export interface CancellableStreamRecord extends StreamRecord { + earlyReturn: () => Promise; +} + +export function isCancellableStreamRecord( + deliveryGroup: DeliveryGroup, +): deliveryGroup is CancellableStreamRecord { + return 'earlyReturn' in deliveryGroup; +} + +export type IncrementalDataRecord = PendingExecutionGroup | StreamRecord; + +export type IncrementalDataRecordResult = CompletedExecutionGroup | StreamItemsResult; diff --git a/packages/federation/test/__snapshots__/defer.test.ts.snap b/packages/federation/test/__snapshots__/defer.test.ts.snap index c399c70c004..2840206a9e5 100644 --- a/packages/federation/test/__snapshots__/defer.test.ts.snap +++ b/packages/federation/test/__snapshots__/defer.test.ts.snap @@ -16,8 +16,39 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` ], }, "hasNext": true, + "pending": [ + { + "id": "0", + "path": [], + }, + { + "id": "1", + "path": [ + "users", + 0, 
+ ], + }, + { + "id": "2", + "path": [ + "users", + 1, + ], + }, + ], }, { + "completed": [ + { + "id": "0", + }, + { + "id": "3", + }, + { + "id": "4", + }, + ], "hasNext": true, "incremental": [ { @@ -51,12 +82,24 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` }, ], }, - "path": [], + "id": "0", }, { "data": { "name": "Ada Lovelace", }, + "id": "3", + }, + { + "data": { + "name": "Alan Turing", + }, + "id": "4", + }, + ], + "pending": [ + { + "id": "3", "path": [ "posts", 0, @@ -64,9 +107,7 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` ], }, { - "data": { - "name": "Alan Turing", - }, + "id": "4", "path": [ "posts", 1, @@ -76,7 +117,27 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` ], }, { - "hasNext": true, + "completed": [ + { + "id": "1", + }, + { + "id": "2", + }, + { + "id": "5", + }, + { + "id": "6", + }, + { + "id": "7", + }, + { + "id": "8", + }, + ], + "hasNext": false, "incremental": [ { "data": { @@ -89,10 +150,7 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` }, ], }, - "path": [ - "users", - 0, - ], + "id": "1", }, { "data": { @@ -105,20 +163,36 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` }, ], }, - "path": [ - "users", - 1, - ], + "id": "2", }, - ], - }, - { - "hasNext": false, - "incremental": [ { "data": { "title": "Hello, World!", }, + "id": "5", + }, + { + "data": { + "name": "Ada Lovelace", + }, + "id": "6", + }, + { + "data": { + "title": "My Story", + }, + "id": "7", + }, + { + "data": { + "name": "Alan Turing", + }, + "id": "8", + }, + ], + "pending": [ + { + "id": "5", "path": [ "users", 0, @@ -127,32 +201,26 @@ exports[`Defer defers the nested fields: defer-nested-fields 1`] = ` ], }, { - "data": { - "title": "My Story", - }, + "id": "6", "path": [ "users", - 1, + 0, "posts", 0, + "author", ], }, { - "data": { - "name": "Ada Lovelace", - }, + "id": "7", "path": [ "users", - 0, + 1, "posts", 0, - "author", ], }, { - "data": { - "name": "Alan Turing", - }, + "id": "8", "path": [ "users", 1, @@ -171,8 +239,23 @@ exports[`Defer defers the root fields: defer-root-fields 1`] = ` { "data": {}, "hasNext": true, + "pending": [ + { + "id": "0", + "path": [], + }, + { + "id": "1", + "path": [], + }, + ], }, { + "completed": [ + { + "id": "0", + }, + ], "hasNext": true, "incremental": [ { @@ -208,11 +291,16 @@ exports[`Defer defers the root fields: defer-root-fields 1`] = ` }, ], }, - "path": [], + "id": "0", }, ], }, { + "completed": [ + { + "id": "1", + }, + ], "hasNext": false, "incremental": [ { @@ -248,7 +336,7 @@ exports[`Defer defers the root fields: defer-root-fields 1`] = ` }, ], }, - "path": [], + "id": "1", }, ], }, diff --git a/packages/federation/test/defer.test.ts b/packages/federation/test/defer.test.ts index 69a31d1c07a..5a5fa66ff63 100644 --- a/packages/federation/test/defer.test.ts +++ b/packages/federation/test/defer.test.ts @@ -5,45 +5,14 @@ import { IntrospectAndCompose, LocalGraphQLDataSource } from '@apollo/gateway'; import { buildSubgraphSchema } from '@apollo/subgraph'; import { createDefaultExecutor } from '@graphql-tools/delegate'; import { normalizedExecutor } from '@graphql-tools/executor'; -import { ExecutionResult, mergeDeep } from '@graphql-tools/utils'; +import { ExecutionResult, mergeIncrementalResult } from '@graphql-tools/utils'; import { assertAsyncIterable } from '../../loaders/url/tests/test-utils'; import { getStitchedSchemaFromSupergraphSdl } from '../src/supergraph'; function mergeDeferredResults(values: 
ExecutionResult[]) {
   const result: ExecutionResult = {};
   for (const value of values) {
-    if (value.data) {
-      if (!result.data) {
-        result.data = value.data;
-      } else {
-        result.data = mergeDeep([result.data, value.data]);
-      }
-    }
-    if (value.errors) {
-      result.errors = result.errors || [];
-      result.errors = [...result.errors, ...value.errors];
-    }
-    if (value.incremental) {
-      for (const incremental of value.incremental) {
-        if (incremental.path) {
-          result.data = result.data || {};
-          if (!incremental.path.length) {
-            result.data = mergeDeep([result.data, incremental.data]);
-          } else {
-            const existingData = _.get(result.data, incremental.path);
-            if (!existingData) {
-              _.set(result.data, incremental.path, incremental.data);
-            } else {
-              _.set(result.data, incremental.path, mergeDeep([existingData, incremental.data]));
-            }
-          }
-        }
-        if (incremental.errors) {
-          result.errors = result.errors || [];
-          result.errors = [...result.errors, ...incremental.errors];
-        }
-      }
-    }
+    mergeIncrementalResult({ incrementalResult: value, executionResult: result });
   }
   return result;
 }
diff --git a/packages/utils/package.json b/packages/utils/package.json
index 956e3226435..bbb532c4641 100644
--- a/packages/utils/package.json
+++ b/packages/utils/package.json
@@ -53,11 +53,13 @@
   "dependencies": {
     "@graphql-typed-document-node/core": "^3.1.1",
     "cross-inspect": "1.0.0",
+    "dlv": "^1.1.3",
     "dset": "^3.1.2",
     "tslib": "^2.4.0"
   },
   "devDependencies": {
     "@types/dateformat": "3.0.1",
+    "@types/dlv": "^1.1.4",
     "dateformat": "4.6.3",
     "graphql-scalars": "1.23.0"
   },
diff --git a/packages/utils/src/Interfaces.ts b/packages/utils/src/Interfaces.ts
index 552cdb2da5a..9617b613d6d 100644
--- a/packages/utils/src/Interfaces.ts
+++ b/packages/utils/src/Interfaces.ts
@@ -66,6 +66,9 @@ export interface ExecutionResult {
   label?: string;
   path?: ReadonlyArray<string | number>;
   items?: TData | null;
+  id?: string;
+  pending?: ReadonlyArray<{ id: string; path: ReadonlyArray<string | number> }>;
+  completed?: ReadonlyArray<{ id: string; errors?: ReadonlyArray<GraphQLError> }>;
 }
 
 export interface ExecutionRequest<
diff --git a/packages/utils/src/createDeferred.ts b/packages/utils/src/createDeferred.ts
new file mode 100644
index 00000000000..417067a3ab0
--- /dev/null
+++ b/packages/utils/src/createDeferred.ts
@@ -0,0 +1,16 @@
+// TODO: Remove this after Node 22
+
+export type Deferred<T> = PromiseWithResolvers<T>;
+
+export function createDeferred<T>(): Deferred<T> {
+  if (Promise.withResolvers) {
+    return Promise.withResolvers();
+  }
+  let resolve: (value: T | PromiseLike<T>) => void;
+  let reject: (error: unknown) => void;
+  const promise = new Promise<T>((_resolve, _reject) => {
+    resolve = _resolve;
+    reject = _reject;
+  });
+  return { promise, resolve: resolve!, reject: reject! };
+}
diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts
index c2bd2e5ab05..3079a72efaa 100644
--- a/packages/utils/src/index.ts
+++ b/packages/utils/src/index.ts
@@ -54,3 +54,4 @@ export * from './jsutils.js';
 export * from './directives.js';
 export * from './mergeIncrementalResult.js';
 export * from './debugTimer.js';
+export * from './createDeferred.js';
diff --git a/packages/utils/src/mergeIncrementalResult.ts b/packages/utils/src/mergeIncrementalResult.ts
index 3851fddd0dc..16fe4f09d63 100644
--- a/packages/utils/src/mergeIncrementalResult.ts
+++ b/packages/utils/src/mergeIncrementalResult.ts
@@ -1,7 +1,10 @@
+import dlv from 'dlv';
 import { dset } from 'dset/merge';
 import { GraphQLError } from 'graphql';
 import { ExecutionResult } from './Interfaces.js';
 
+const pathsMap = new WeakMap<ExecutionResult, Map<string, ReadonlyArray<string | number>>>();
+
 export function mergeIncrementalResult({
   incrementalResult,
   executionResult,
@@ -9,17 +12,56 @@
   incrementalResult: ExecutionResult;
   executionResult: ExecutionResult;
 }) {
-  const path = ['data', ...(incrementalResult.path ?? [])];
+  let path: ReadonlyArray<string | number> | undefined = [
+    'data',
+    ...(incrementalResult.path ?? []),
+  ];
+
+  for (const result of [executionResult, incrementalResult]) {
+    if (result.pending) {
+      let paths = pathsMap.get(executionResult);
+      if (paths === undefined) {
+        paths = new Map();
+        pathsMap.set(executionResult, paths);
+      }
+
+      for (const { id, path } of result.pending) {
+        paths.set(id, ['data', ...path]);
+      }
+    }
+  }
 
   if (incrementalResult.items) {
-    for (const item of incrementalResult.items) {
-      dset(executionResult, path, item);
-      // Increment the last path segment (the array index) to merge the next item at the next index
-      (path[path.length - 1] as number)++;
+    if (incrementalResult.id) {
+      const id = incrementalResult.id;
+
+      path = pathsMap.get(executionResult)?.get(id);
+      if (path === undefined) {
+        throw new Error('Invalid incremental delivery format.');
+      }
+
+      const list = dlv(executionResult, path as Array<string | number>);
+      list.push(...incrementalResult.items);
+    } else {
+      const path = ['data', ...(incrementalResult.path ??
[])]; + for (const item of incrementalResult.items) { + dset(executionResult, path, item); + // Increment the last path segment (the array index) to merge the next item at the next index + (path[path.length - 1] as number)++; + } } } if (incrementalResult.data) { + if (incrementalResult.id) { + const id = incrementalResult.id; + if (id !== undefined) { + path = pathsMap.get(executionResult)?.get(id); + if (path === undefined) { + throw new Error('Invalid incremental delivery format.'); + } + } + } dset(executionResult, path, incrementalResult.data); } @@ -40,4 +82,16 @@ export function mergeIncrementalResult({ }); }); } + + if (incrementalResult.completed) { + // Remove tracking and add additional errors + for (const { id, errors } of incrementalResult.completed) { + pathsMap.get(executionResult)?.delete(id); + + if (errors) { + executionResult.errors = executionResult.errors || []; + (executionResult.errors as GraphQLError[]).push(...errors); + } + } + } } diff --git a/packages/utils/tests/mergeIncrementalResult.spec.ts b/packages/utils/tests/mergeIncrementalResult.spec.ts index 0313357a47b..378cfa34942 100644 --- a/packages/utils/tests/mergeIncrementalResult.spec.ts +++ b/packages/utils/tests/mergeIncrementalResult.spec.ts @@ -20,6 +20,15 @@ describe('mergeIncrementalResult', () => { expect(executionResult).toEqual({ data: { user: { age: 42, name: 'John' } } }); }); + it('should deep merge data with basic path with new format', () => { + const executionResult = { data: { user: { name: 'John' } }, pending: [{ id: '0', path: [] }] }; + const incrementalResult = { incremental: [{ id: '0', data: { user: { age: 42 } } }] }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult.data).toEqual({ user: { age: 42, name: 'John' } }); + }); + it('should merge data at path', () => { const executionResult = { data: { user: { name: 'John' } } }; const incrementalResult = { path: ['user'], data: { age: 42 } }; @@ -29,6 +38,18 @@ describe('mergeIncrementalResult', () => { expect(executionResult).toEqual({ data: { user: { age: 42, name: 'John' } } }); }); + it('should merge data at path with new format', () => { + const executionResult = { + data: { user: { name: 'John' } }, + pending: [{ id: '0', path: ['user'] }], + }; + const incrementalResult = { incremental: [{ id: '0', data: { age: 42 } }] }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult.data).toEqual({ user: { age: 42, name: 'John' } }); + }); + it('should push items', () => { const executionResult = { data: { user: { name: 'John' } } }; const incrementalResult = { @@ -69,6 +90,27 @@ describe('mergeIncrementalResult', () => { }); }); + it('should push items at path with new format', () => { + const executionResult = { + data: { + user: { name: 'John', comments: ['comment 1', 'comment 2'] }, + }, + pending: [{ id: '0', path: ['user', 'comments'] }], + }; + const incrementalResult = { + incremental: [{ id: '0', items: ['comment 3', 'comment 4'] }], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult.data).toEqual({ + user: { + name: 'John', + comments: ['comment 1', 'comment 2', 'comment 3', 'comment 4'], + }, + }); + }); + it('should merge items at path', () => { const executionResult = { data: { @@ -113,6 +155,38 @@ describe('mergeIncrementalResult', () => { }); }); + it('should add errors with new format', () => { + const executionResult = { data: { user: { name: 'John' } }, pending: [{ id: '0', path: [] }] }; + 
const incrementalResult = { + incremental: [ + { id: '0', errors: [new GraphQLError('error 1'), new GraphQLError('error 2')] }, + ], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + data: { user: { name: 'John' } }, + errors: [new GraphQLError('error 1'), new GraphQLError('error 2')], + pending: [{ id: '0', path: [] }], + }); + }); + + it('should add completion errors with new format', () => { + const executionResult = { data: { user: { name: 'John' } }, pending: [{ id: '0', path: [] }] }; + const incrementalResult = { + completed: [{ id: '0', errors: [new GraphQLError('error 1'), new GraphQLError('error 2')] }], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + data: { user: { name: 'John' } }, + errors: [new GraphQLError('error 1'), new GraphQLError('error 2')], + pending: [{ id: '0', path: [] }], + }); + }); + it('should keep errors', () => { const executionResult = { errors: [new GraphQLError('error 1')] }; const incrementalResult = { data: { user: { name: 'John' } }, path: [] }; @@ -125,6 +199,24 @@ describe('mergeIncrementalResult', () => { }); }); + it('should keep errors with new format', () => { + const executionResult = { + errors: [new GraphQLError('error 1')], + pending: [{ id: '0', path: [] }], + }; + const incrementalResult = { + incremental: [{ id: '0', data: { user: { name: 'John' } }, path: [] }], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + data: { user: { name: 'John' } }, + errors: [new GraphQLError('error 1')], + pending: [{ id: '0', path: [] }], + }); + }); + it('should merge errors', () => { const executionResult = { errors: [new GraphQLError('error 1')] }; @@ -143,6 +235,52 @@ describe('mergeIncrementalResult', () => { }); }); + it('should merge errors with new format', () => { + const executionResult = { + errors: [new GraphQLError('error 1')], + pending: [{ id: '0', path: [] }], + }; + + const incrementalResult = { + incremental: [ + { id: '0', errors: [new GraphQLError('error 2'), new GraphQLError('error 3')] }, + ], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + errors: [ + new GraphQLError('error 1'), + new GraphQLError('error 2'), + new GraphQLError('error 3'), + ], + pending: [{ id: '0', path: [] }], + }); + }); + + it('should merge completion errors with new format', () => { + const executionResult = { + errors: [new GraphQLError('error 1')], + pending: [{ id: '0', path: [] }], + }; + + const incrementalResult = { + completed: [{ id: '0', errors: [new GraphQLError('error 2'), new GraphQLError('error 3')] }], + }; + + mergeIncrementalResult({ incrementalResult, executionResult }); + + expect(executionResult).toEqual({ + errors: [ + new GraphQLError('error 1'), + new GraphQLError('error 2'), + new GraphQLError('error 3'), + ], + pending: [{ id: '0', path: [] }], + }); + }); + it('should keep extensions', () => { const exeuctionResult = { data: { user: { name: 'John' } }, extensions: { foo: 'bar' } }; const incrementalResult = { data: { user: { age: 42 } }, path: [] }; diff --git a/yarn.lock b/yarn.lock index c960ac526e5..cd5c26bcbf4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2660,6 +2660,11 @@ dependencies: "@types/ms" "*" +"@types/dlv@^1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@types/dlv/-/dlv-1.1.4.tgz#e92f76b78adf2b118b5a807956f36434baefbab0" + integrity 
sha512-m8KmImw4Jt+4rIgupwfivrWEOnj1LzkmKkqbh075uG13eTQ1ZxHWT6T0vIdSQhLIjQCiR0n0lZdtyDOPO1x2Mw== + "@types/eslint-scope@^3.7.3": version "3.7.3" resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224"
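
For reference, a minimal sketch of how a consumer can replay the new `pending` / `incremental` / `completed` payloads through `mergeIncrementalResult`, following the shape exercised by the specs above. The user/age values and the single pending id are invented for illustration only; they are not taken from the patch.

import { mergeIncrementalResult } from '@graphql-tools/utils';

// Initial payload: deferred work is announced up front via `pending` ids.
const executionResult = {
  data: { user: { name: 'John' } },
  pending: [{ id: '0', path: ['user'] }],
};

// Subsequent payload: each incremental entry references a pending id instead
// of repeating its path, and `completed` closes the id out (carrying any errors).
const subsequentResult = {
  incremental: [{ id: '0', data: { age: 42 } }],
  completed: [{ id: '0' }],
};

mergeIncrementalResult({ incrementalResult: subsequentResult, executionResult });
// executionResult.data is now { user: { name: 'John', age: 42 } }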