From 8b30aa2f3d046a8e8166e928ffdcc09534b74978 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B8rge=20N=C3=A6ss?=
Date: Thu, 22 Dec 2022 15:19:50 +0100
Subject: [PATCH] fix(base): wait for all chunked requests to arrive before
 continuing

Our previous approach of chunking document availability requests did not
wait for all the chunks to finish before returning the results.

The `debounceCollect` utility expects an observable that returns all
results in one go, but because we returned a stream that would emit the
chunks one-by-one as they arrived, we would only care about the first
emission, which would not include the whole expected result set.

This patch fixes the issue by reducing over each emission, successively
adding them to an array. The `reduce` operator will wait for the stream
to finish before emitting a single reduced value.
---
 packages/@sanity/base/src/preview/availability.ts | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/packages/@sanity/base/src/preview/availability.ts b/packages/@sanity/base/src/preview/availability.ts
index 0faff1324fd..b2f5dc7c3ee 100644
--- a/packages/@sanity/base/src/preview/availability.ts
+++ b/packages/@sanity/base/src/preview/availability.ts
@@ -1,6 +1,6 @@
 /* eslint-disable max-nested-callbacks */
 import {combineLatest, defer, from, Observable, of} from 'rxjs'
-import {distinctUntilChanged, map, mergeMap, switchMap} from 'rxjs/operators'
+import {distinctUntilChanged, map, mergeMap, reduce, switchMap} from 'rxjs/operators'
 import shallowEquals from 'shallow-equals'
 import {flatten, keyBy} from 'lodash'
 import {getDraftId, getPublishedId} from '../util/draftUtils'
@@ -99,12 +99,23 @@ function chunkDocumentIds(documentIds: string[]): string[][] {
   return chunks
 }
 
+/**
+ * Mutative concat
+ * @param array
+ * @param chunks
+ */
+function mutConcat<T>(array: T[], chunks: T[]) {
+  array.push(...chunks)
+  return array
+}
+
 const fetchDocumentReadability = debounceCollect(function fetchDocumentReadability(
   args: string[][]
 ): Observable<DocumentAvailability[]> {
   const uniqueIds = [...new Set(flatten(args))]
   return from(chunkDocumentIds(uniqueIds)).pipe(
     mergeMap(fetchDocumentReadabilityChunked, 10),
+    reduce(mutConcat, []),
     map((res) => args.map(([id]) => res[uniqueIds.indexOf(id)]))
   )
 },