Allow the removal of multiple optimistics at once #11962

Open · wants to merge 1 commit into base: main
5 changes: 5 additions & 0 deletions .changeset/new-snails-love.md
@@ -0,0 +1,5 @@
---
"@apollo/client": patch
---

Add `removeOptimistics` to remove multiple optimistic layers at once
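
For context, a minimal usage sketch of the new method, assuming an `InMemoryCache` instance; the query, data, and layer ids below are hypothetical:

```ts
import { InMemoryCache, gql } from "@apollo/client";

const cache = new InMemoryCache();
const query = gql`
  query GetCount {
    count
  }
`;

// Two hypothetical optimistic layers written on top of the root data.
cache.recordOptimisticTransaction((c) => {
  c.writeQuery({ query, data: { count: 1 } });
}, "layer-1");
cache.recordOptimisticTransaction((c) => {
  c.writeQuery({ query, data: { count: 2 } });
}, "layer-2");

// Remove both layers with a single call (and at most one broadcast),
// instead of one removeOptimistic(id) call per layer.
cache.removeOptimistics(["layer-1", "layer-2"]);
```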
113 changes: 72 additions & 41 deletions src/cache/inmemory/entityStore.ts
@@ -53,6 +53,8 @@ export abstract class EntityStore implements NormalizedCache {

public abstract removeLayer(layerId: string): EntityStore;

public abstract removeLayers(layerIds: string[]): EntityStore;

// Although the EntityStore class is abstract, it contains concrete
// implementations of the various NormalizedCache interface methods that
// are inherited by the Root and Layer subclasses.
@@ -721,6 +723,10 @@ export namespace EntityStore {
return this;
}

public removeLayers(): Root {
return this;
}

public readonly storageTrie = new Trie<StorageType>(canUseWeakMap);
public getStorage(): StorageType {
return this.storageTrie.lookupArray(arguments);
@@ -745,52 +751,73 @@ class Layer extends EntityStore {
return new Layer(layerId, this, replay, this.group);
}

private dirtyFields(newParent: EntityStore): void {
if (this.group.caching) {
// Dirty every ID we're removing. Technically we might be able to avoid
// dirtying fields that have values in higher layers, but we don't have
// easy access to higher layers here, and we're about to recreate those
// layers anyway (see parent.addLayer below).
Object.keys(this.data).forEach((dataId) => {
const ownStoreObject = this.data[dataId];
const parentStoreObject = newParent["lookup"](dataId);
if (!parentStoreObject) {
// The StoreObject identified by dataId was defined in this layer
// but will be undefined in the parent layer, so we can delete the
// whole entity using this.delete(dataId). Since we're about to
// throw this layer away, the only goal of this deletion is to dirty
// the removed fields.
this.delete(dataId);
} else if (!ownStoreObject) {
// This layer had an entry for dataId but it was undefined, which
// means the entity was deleted in this layer, and it's about to
// become undeleted when we remove this layer, so we need to dirty
// all fields that are about to be reexposed.
this.group.dirty(dataId, "__exists");
Object.keys(parentStoreObject).forEach((storeFieldName) => {
this.group.dirty(dataId, storeFieldName);
});
} else if (ownStoreObject !== parentStoreObject) {
// If ownStoreObject is not exactly the same as parentStoreObject,
// dirty any fields whose values will change as a result of this
// removal.
Object.keys(ownStoreObject).forEach((storeFieldName) => {
if (
!equal(
ownStoreObject[storeFieldName],
parentStoreObject[storeFieldName]
)
) {
this.group.dirty(dataId, storeFieldName);
}
});
}
});
}
}

public removeLayer(layerId: string): EntityStore {
// Remove all instances of the given id, not just the first one.
const parent = this.parent.removeLayer(layerId);

if (layerId === this.id) {
if (this.group.caching) {
// Dirty every ID we're removing. Technically we might be able to avoid
// dirtying fields that have values in higher layers, but we don't have
// easy access to higher layers here, and we're about to recreate those
// layers anyway (see parent.addLayer below).
Object.keys(this.data).forEach((dataId) => {
const ownStoreObject = this.data[dataId];
const parentStoreObject = parent["lookup"](dataId);
if (!parentStoreObject) {
// The StoreObject identified by dataId was defined in this layer
// but will be undefined in the parent layer, so we can delete the
// whole entity using this.delete(dataId). Since we're about to
// throw this layer away, the only goal of this deletion is to dirty
// the removed fields.
this.delete(dataId);
} else if (!ownStoreObject) {
// This layer had an entry for dataId but it was undefined, which
// means the entity was deleted in this layer, and it's about to
// become undeleted when we remove this layer, so we need to dirty
// all fields that are about to be reexposed.
this.group.dirty(dataId, "__exists");
Object.keys(parentStoreObject).forEach((storeFieldName) => {
this.group.dirty(dataId, storeFieldName);
});
} else if (ownStoreObject !== parentStoreObject) {
// If ownStoreObject is not exactly the same as parentStoreObject,
// dirty any fields whose values will change as a result of this
// removal.
Object.keys(ownStoreObject).forEach((storeFieldName) => {
if (
!equal(
ownStoreObject[storeFieldName],
parentStoreObject[storeFieldName]
)
) {
this.group.dirty(dataId, storeFieldName);
}
});
}
});
}
this.dirtyFields(parent);

return parent;
}

// No changes are necessary if the parent chain remains identical.
if (parent === this.parent) return this;

// Recreate this layer on top of the new parent.
return parent.addLayer(this.id, this.replay);
}

public removeLayers(layerIds: string[]): EntityStore {
// Remove all instances of the given ids, not just the first ones.
const parent = this.parent.removeLayers(layerIds);

if (layerIds.includes(this.id)) {
this.dirtyFields(parent);

return parent;
}
@@ -849,6 +876,10 @@ class Stump extends Layer {
return this;
}

public removeLayers() {
return this;
}

public merge(older: string | StoreObject, newer: string | StoreObject) {
// We never want to write any data into the Stump, so we forward any merge
// calls to the Root instead. Another option here would be to throw an
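The layer-removal logic above follows the same recursive pattern as the existing `removeLayer`: each `Layer` first asks its parent to remove the requested ids, drops itself if its own id is in the list, and otherwise recreates itself on top of whatever parent chain came back. A minimal sketch of that pattern, using simplified hypothetical types (the real `EntityStore`, `Layer`, and `Root` classes also track data, dependency groups, and replay functions):

```ts
interface Store {
  removeLayers(layerIds: string[]): Store;
}

class RootStore implements Store {
  public removeLayers(): Store {
    // The root is never removed; it terminates the recursion.
    return this;
  }
}

class LayerStore implements Store {
  constructor(
    public readonly id: string,
    private readonly parent: Store
  ) {}

  public removeLayers(layerIds: string[]): Store {
    // Recurse first, so every matching layer below this one is removed
    // in the same pass.
    const parent = this.parent.removeLayers(layerIds);

    if (layerIds.includes(this.id)) {
      // This layer is being discarded: expose the rebuilt parent chain.
      return parent;
    }

    if (parent === this.parent) {
      // Nothing below changed, so this layer can be reused as-is.
      return this;
    }

    // Recreate this layer on top of the new parent chain.
    return new LayerStore(this.id, parent);
  }
}
```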
8 changes: 8 additions & 0 deletions src/cache/inmemory/inMemoryCache.ts
@@ -411,6 +411,14 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> {
}
}

public removeOptimistics(idsToRemove: string[]) {
const newOptimisticData = this.optimisticData.removeLayers(idsToRemove);
if (newOptimisticData !== this.optimisticData) {
this.optimisticData = newOptimisticData;
this.broadcastWatches();
}
}

private txCount = 0;

public batch<TUpdateResult>(
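Mirroring the existing `removeOptimistic`, the new method only swaps in the returned store and broadcasts when a layer was actually removed; `removeLayers` hands back the same instance when none of the requested ids are present in the chain. A rough comparison, assuming a `cache` whose stale optimistic layer ids were collected into a hypothetical `staleLayerIds` array:

```ts
// Before: one walk over the layer stack and one broadcast per id.
for (const id of staleLayerIds) {
  cache.removeOptimistic(id);
}

// After: a single walk over the layer stack and at most one broadcast.
cache.removeOptimistics(staleLayerIds);
```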
1 change: 1 addition & 0 deletions src/core/QueryManager.ts
@@ -291,6 +291,7 @@ export class QueryManager<TStore> {
{
...context,
optimisticResponse: isOptimistic ? optimisticResponse : void 0,
mutationId,
},
variables,
false