diff --git a/.changeset/real-donuts-count.md b/.changeset/real-donuts-count.md new file mode 100644 index 00000000..685d4d48 --- /dev/null +++ b/.changeset/real-donuts-count.md @@ -0,0 +1,7 @@ +--- +"@supabase-cache-helpers/postgrest-react-query": minor +"@supabase-cache-helpers/postgrest-core": minor +"@supabase-cache-helpers/postgrest-swr": minor +--- + +feat: support for wildcard diff --git a/docs/pages/postgrest/queries.mdx b/docs/pages/postgrest/queries.mdx index 3adc32c0..7abe4ef5 100644 --- a/docs/pages/postgrest/queries.mdx +++ b/docs/pages/postgrest/queries.mdx @@ -2,14 +2,6 @@ import { Callout, Tabs, Tab } from "nextra-theme-docs"; # Queries - - Unfortunately, we require you to be explicit in the select statement fields, - and not use a wildcard ('*'), since mutations need to know the columns to - derive whether the query cache should be updated with new values. Note that - the wildcard selector is not allowed on any level, so you can't use it for - relations either. - - The cache helpers query hooks wrap the data fetching hooks of the cache libraries and pass both the cache key and the fetcher function from on the PostgREST query. This is enabled primarily by a parser that turns any Supabase PostgREST query into a definite cache key. For example, ```ts @@ -32,6 +24,13 @@ is parsed into + + Although you can use wildcards (`*`) in your query, their usage is only + recommended for `head: true` and `count: true` queries. For any other query, + you should be explicit about the columns you want to select. Only then is + cache helpers able to do granular cache updates without refetching. + + ## `useQuery` Wrapper around the default data fetching hook that returns the query including the count without any modification of the data. The config parameter of the respective library can be passed as the second argument. diff --git a/packages/postgrest-core/package.json b/packages/postgrest-core/package.json index 1f98b653..88be9641 100644 --- a/packages/postgrest-core/package.json +++ b/packages/postgrest-core/package.json @@ -13,17 +13,11 @@ "./package.json": "./package.json" }, "types": "./dist/index.d.ts", - "files": [ - "dist/**" - ], + "files": ["dist/**"], "publishConfig": { "access": "public" }, - "keywords": [ - "Supabase", - "PostgREST", - "Cache" - ], + "keywords": ["Supabase", "PostgREST", "Cache"], "repository": { "type": "git", "url": "git+https://github.com/psteinroe/supabase-cache-helpers.git", diff --git a/packages/postgrest-core/src/delete-item.ts b/packages/postgrest-core/src/delete-item.ts index 5a217933..58d17d8f 100644 --- a/packages/postgrest-core/src/delete-item.ts +++ b/packages/postgrest-core/src/delete-item.ts @@ -78,57 +78,61 @@ export const deleteItem = async >( const filter = getPostgrestFilter(key.queryKey); // parse input into expected target format if (key.schema === schema && key.table === table) { - const transformedInput = filter.denormalize(op.input); - if ( - // For delete, the input has to have a value for all primary keys - op.primaryKeys.every( - (pk) => typeof transformedInput[pk as string] !== 'undefined', - ) - ) { - const limit = key.limit ?? 
1000; - mutations.push( - mutate(k, (currentData) => { - // Return early if undefined or null - if (!currentData) return currentData; + if (key.isHead === true) { + mutations.push(revalidate(k)); + } else { + const transformedInput = filter.denormalize(op.input); + if ( + // For delete, the input has to have a value for all primary keys + op.primaryKeys.every( + (pk) => typeof transformedInput[pk as string] !== 'undefined', + ) + ) { + const limit = key.limit ?? 1000; + mutations.push( + mutate(k, (currentData) => { + // Return early if undefined or null + if (!currentData) return currentData; - if (isPostgrestHasMorePaginationCacheData(currentData)) { - return toHasMorePaginationCacheData( - filterByPks( - transformedInput, - currentData.flatMap((p) => p.data), - op.primaryKeys, - ), - currentData, - limit, - ); - } else if (isPostgrestPaginationCacheData(currentData)) { - return toPaginationCacheData( - filterByPks( + if (isPostgrestHasMorePaginationCacheData(currentData)) { + return toHasMorePaginationCacheData( + filterByPks( + transformedInput, + currentData.flatMap((p) => p.data), + op.primaryKeys, + ), + currentData, + limit, + ); + } else if (isPostgrestPaginationCacheData(currentData)) { + return toPaginationCacheData( + filterByPks( + transformedInput, + currentData.flat(), + op.primaryKeys, + ), + limit, + ); + } else if (isAnyPostgrestResponse(currentData)) { + const { data } = currentData; + if (!Array.isArray(data)) { + return { data: null }; + } + + const newData = filterByPks( transformedInput, - currentData.flat(), + data, op.primaryKeys, - ), - limit, - ); - } else if (isAnyPostgrestResponse(currentData)) { - const { data } = currentData; - if (!Array.isArray(data)) { - return { data: null }; - } - - const newData = filterByPks( - transformedInput, - data, - op.primaryKeys, - ); + ); - return { - data: newData, - count: newData.length, - }; - } - }), - ); + return { + data: newData, + count: newData.length, + }; + } + }), + ); + } } } diff --git a/packages/postgrest-core/src/fetch/build-mutation-fetcher-response.ts b/packages/postgrest-core/src/fetch/build-mutation-fetcher-response.ts index 21527fda..412c29f6 100644 --- a/packages/postgrest-core/src/fetch/build-mutation-fetcher-response.ts +++ b/packages/postgrest-core/src/fetch/build-mutation-fetcher-response.ts @@ -44,17 +44,51 @@ export const normalizeResponse = ( groups: (Path | NestedPath)[], obj: R, ): R => { + if (groups.some((p) => p.path === '*')) { + // if wildcard, add every non nested value + // for every nested value, check if groups contains a nested path for it. if not, also add it. + // reason is that the wildcard does not select relations + + Object.entries(obj as Record).forEach(([k, v]) => { + if (typeof v === 'object' || Array.isArray(v)) { + if (!groups.some((g) => isNestedPath(g) && g.path === k)) { + groups.push({ + path: k, + declaration: k, + }); + } + } else if (!groups.some((g) => g.path === k)) { + groups.push({ + path: k, + declaration: k, + }); + } + }); + } + return groups.reduce((prev, curr) => { // prefer alias over path because of dedupe alias const value = get(obj, curr.alias || curr.path); if (typeof value === 'undefined') return prev; - if (value === null || !isNestedPath(curr)) { + if (value === null) { return { ...prev, [curr.path]: value, }; } + if (!isNestedPath(curr)) { + return { + ...prev, + ...flatten({ + [curr.path]: + value !== null && + (typeof value === 'object' || Array.isArray(value)) + ? 
flatten(value) + : value, + }), + }; + } if (Array.isArray(value)) { return { ...prev, @@ -87,7 +121,52 @@ const buildUserQueryData = ( pathGroups: (Path | NestedPath)[], obj: R, ): R => { + if (pathGroups.some((p) => p.path === '*')) { + // if wildcard, add every non nested value + // for every nested value, check if pathGroups contains a nested path for it. if not, also add it. + // reason is that the wildcard does not select relations + + Object.entries(obj as Record).forEach(([k, v]) => { + if (typeof v === 'object' || Array.isArray(v)) { + if (!pathGroups.some((g) => isNestedPath(g) && g.path === k)) { + pathGroups.push({ + path: k, + declaration: k, + }); + } + } else if (!pathGroups.some((g) => g.path === k)) { + pathGroups.push({ + path: k, + declaration: k, + }); + } + }); + } + + if (userQueryGroups.some((p) => p.path === '*')) { + // if wildcard, add every non nested value + // for every nested value, check if pathGroups contains a nested path for it. if not, also add it. + // reason is that the wildcard does not select relations + + Object.entries(obj as Record).forEach(([k, v]) => { + if (typeof v === 'object' || Array.isArray(v)) { + if (!pathGroups.some((g) => isNestedPath(g) && g.path === k)) { + userQueryGroups.push({ + path: k, + declaration: k, + }); + } + } else if (!userQueryGroups.some((g) => g.path === k)) { + userQueryGroups.push({ + path: k, + declaration: k, + }); + } + }); + } + return userQueryGroups.reduce((prev, curr) => { + if (curr.path === '*') return prev; // paths is reflecting the obj const inputPath = pathGroups.find( (p) => p.path === curr.path && isNestedPath(p) === isNestedPath(curr), diff --git a/packages/postgrest-core/src/filter/denormalize.ts b/packages/postgrest-core/src/filter/denormalize.ts index 4220c2b4..d0702f90 100644 --- a/packages/postgrest-core/src/filter/denormalize.ts +++ b/packages/postgrest-core/src/filter/denormalize.ts @@ -8,7 +8,7 @@ import type { Path } from '../lib/query-types'; /** * Denormalize a normalized response object using the paths of the target query - * **/ + **/ export const denormalize = >( // the paths into which we need to transform paths: Path[], @@ -17,7 +17,33 @@ export const denormalize = >( ): R => { const groups = groupPathsRecursive(paths); + if (groups.some((g) => g.path === '*')) { + // if a wildcard path is present, we expand the groups with all values from the object that are not part of a nested path from `paths`. + // This will include also unwanted values, e.g. from a join on another relation because its impossible for us to distinguish between json columns and joins. 
+ Object.keys(obj).forEach((k) => { + const keyParts = k.split('.'); + if ( + keyParts.length > 1 && + groups.some((g) => isNestedPath(g) && g.path === keyParts[0]) + ) { + // skip if key is actually part of a nested path from the groups + return; + } + if (groups.some((g) => g.path === keyParts[0])) { + // skip if key is already part of the groups + return; + } + + groups.push({ + declaration: keyParts[0], + path: keyParts[0], + }); + }); + } + return groups.reduce((prev, curr) => { + // skip the wildcard since we already handled it above + if (curr.path === '*') return prev; let value = obj[curr.path]; if (!isNestedPath(curr)) { diff --git a/packages/postgrest-core/src/lib/parse-select-param.ts b/packages/postgrest-core/src/lib/parse-select-param.ts index ab7bb571..606ca334 100644 --- a/packages/postgrest-core/src/lib/parse-select-param.ts +++ b/packages/postgrest-core/src/lib/parse-select-param.ts @@ -48,7 +48,8 @@ export const parseSelectParam = (s: string, currentPath?: Path): Path[] => { .map(([table, selectedColumns]) => `${table}(${selectedColumns})` .replace(/\(/g, '\\(') - .replace(/\)/g, '\\)'), + .replace(/\)/g, '\\)') + .replace(/\*/g, '\\*'), ) .join('|')}`, 'g', @@ -75,9 +76,6 @@ export const parseSelectParam = (s: string, currentPath?: Path): Path[] => { }; }); - if (columns.find((c) => c.path.includes('*'))) - throw new Error('Wildcard selector is not supported'); - return [ ...columns, ...Object.entries(foreignTables).flatMap( diff --git a/packages/postgrest-core/src/mutate-item.ts b/packages/postgrest-core/src/mutate-item.ts index c32268b5..1436977e 100644 --- a/packages/postgrest-core/src/mutate-item.ts +++ b/packages/postgrest-core/src/mutate-item.ts @@ -85,6 +85,7 @@ export type MutateItemCache> = { | 'hasFiltersOnPaths' | 'applyFiltersOnPaths' | 'apply' + | 'hasWildcardPath' >; /** * Decode a key. Should return null if not a PostgREST key. @@ -121,9 +122,9 @@ export const mutateItem = async >( // Exit early if not a postgrest key if (!key) continue; const filter = getPostgrestFilter(key.queryKey); - // parse input into expected target format - const transformedInput = filter.denormalize(op.input); if (key.schema === schema && key.table === table) { + // parse input into expected target format + const transformedInput = filter.denormalize(op.input); if ( // For mutate, the input has to have a value for all primary keys op.primaryKeys.every( @@ -140,72 +141,78 @@ export const mutateItem = async >( const orderBy = key.orderByKey ? parseOrderByKey(key.orderByKey) : undefined; - mutations.push( - mutate(k, (currentData) => { - // Return early if undefined or null - if (!currentData) return currentData; - - if (isPostgrestHasMorePaginationCacheData(currentData)) { - return toHasMorePaginationCacheData( - mutateOperation( - transformedInput, - mutateInput, - currentData.flatMap((p) => p.data), - primaryKeys, - filter, - orderBy, - ), - currentData, - limit, - ); - } else if (isPostgrestPaginationCacheData(currentData)) { - return toPaginationCacheData( - mutateOperation( + if (key.isHead === true || filter.hasWildcardPath) { + // we cannot know whether the new item after mutating still has all paths required for a query if it contains a wildcard, + // because we do not know what columns a table has. we must always revalidate then. 
+ mutations.push(revalidate(k)); + } else { + mutations.push( + mutate(k, (currentData) => { + // Return early if undefined or null + if (!currentData) return currentData; + + if (isPostgrestHasMorePaginationCacheData(currentData)) { + return toHasMorePaginationCacheData( + mutateOperation( + transformedInput, + mutateInput, + currentData.flatMap((p) => p.data), + primaryKeys, + filter, + orderBy, + ), + currentData, + limit, + ); + } else if (isPostgrestPaginationCacheData(currentData)) { + return toPaginationCacheData( + mutateOperation( + transformedInput, + mutateInput, + currentData.flat(), + primaryKeys, + filter, + orderBy, + ), + limit, + ); + } else if (isAnyPostgrestResponse(currentData)) { + const { data } = currentData; + + if (!Array.isArray(data)) { + if (data === null) { + return { + data, + count: currentData.count, + }; + } + const newData = mutateInput(data); + return { + // Check if the new data is still valid given the key + data: filter.apply(newData) ? newData : null, + count: currentData.count, + }; + } + + const newData = mutateOperation( transformedInput, mutateInput, - currentData.flat(), + // deep copy data to avoid mutating the original + JSON.parse(JSON.stringify(data)), primaryKeys, filter, orderBy, - ), - limit, - ); - } else if (isAnyPostgrestResponse(currentData)) { - const { data } = currentData; - - if (!Array.isArray(data)) { - if (data === null) { - return { - data, - count: currentData.count, - }; - } - const newData = mutateInput(data); + ); + return { - // Check if the new data is still valid given the key - data: filter.apply(newData) ? newData : null, - count: currentData.count, + data: newData, + count: newData.length, }; } - - const newData = mutateOperation( - transformedInput, - mutateInput, - // deep copy data to avoid mutating the original - JSON.parse(JSON.stringify(data)), - primaryKeys, - filter, - orderBy, - ); - - return { - data: newData, - count: newData.length, - }; - } - return currentData; - }), - ); + return currentData; + }), + ); + } } } @@ -219,7 +226,7 @@ export const mutateItem = async >( if ( revalidateRelationsOpt && shouldRevalidateRelation(revalidateRelationsOpt, { - input: transformedInput, + input: op.input, getPostgrestFilter, decodedKey: key, }) diff --git a/packages/postgrest-core/src/postgrest-filter.ts b/packages/postgrest-core/src/postgrest-filter.ts index af927956..8fec3e4c 100644 --- a/packages/postgrest-core/src/postgrest-filter.ts +++ b/packages/postgrest-core/src/postgrest-filter.ts @@ -25,6 +25,7 @@ export class PostgrestFilter> { private _selectFn: FilterFn | undefined; private _filtersFn: FilterFn | undefined; private _filterPaths: Path[]; + public hasWildcardPath: boolean | undefined; constructor( public readonly params: { filters: FilterDefinitions; paths: Path[] }, @@ -33,6 +34,9 @@ export class PostgrestFilter> { this.params.filters, this.params.paths, ); + this.hasWildcardPath = this.params.paths.some((p) => + p.declaration.endsWith('*'), + ); } public static fromQuery(query: string, opts?: PostgrestQueryParserOptions) { diff --git a/packages/postgrest-core/src/upsert-item.ts b/packages/postgrest-core/src/upsert-item.ts index 7962b89a..54f2a6e3 100644 --- a/packages/postgrest-core/src/upsert-item.ts +++ b/packages/postgrest-core/src/upsert-item.ts @@ -92,6 +92,7 @@ export type UpsertItemCache> = { | 'hasFiltersOnPaths' | 'applyFiltersOnPaths' | 'apply' + | 'hasWildcardPath' >; /** * Decode a key. Should return null if not a PostgREST key. 
@@ -126,8 +127,8 @@ export const upsertItem = async >( // Exit early if not a postgrest key if (!key) continue; const filter = getPostgrestFilter(key.queryKey); - // parse input into expected target format if (key.schema === schema && key.table === table) { + // parse input into expected target format const transformedInput = filter.denormalize(op.input); if ( filter.applyFilters(transformedInput) || @@ -141,72 +142,79 @@ export const upsertItem = async >( const orderBy = key.orderByKey ? parseOrderByKey(key.orderByKey) : undefined; - mutations.push( - mutate(k, (currentData) => { - // Return early if undefined or null - if (!currentData) return currentData; - - if (isPostgrestHasMorePaginationCacheData(currentData)) { - return toHasMorePaginationCacheData( - upsert( - transformedInput, - currentData.flatMap((p) => p.data), - primaryKeys, - filter, - merge, - orderBy, - ), - currentData, - limit, - ); - } else if (isPostgrestPaginationCacheData(currentData)) { - return toPaginationCacheData( - upsert( + + if (key.isHead === true || filter.hasWildcardPath) { + // we cannot know whether the new item after merging still has all paths required for a query if it contains a wildcard, + // because we do not know what columns a table has. we must always revalidate then. + mutations.push(revalidate(k)); + } else { + mutations.push( + mutate(k, (currentData) => { + // Return early if undefined or null + if (!currentData) return currentData; + + if (isPostgrestHasMorePaginationCacheData(currentData)) { + return toHasMorePaginationCacheData( + upsert( + transformedInput, + currentData.flatMap((p) => p.data), + primaryKeys, + filter, + merge, + orderBy, + ), + currentData, + limit, + ); + } else if (isPostgrestPaginationCacheData(currentData)) { + return toPaginationCacheData( + upsert( + transformedInput, + currentData.flat(), + primaryKeys, + filter, + merge, + orderBy, + ), + limit, + ); + } else if (isAnyPostgrestResponse(currentData)) { + const { data } = currentData; + + if (!Array.isArray(data)) { + if (data === null) { + return { + data, + count: currentData.count, + }; + } + const newData = merge(data, transformedInput); + return { + // Check if the new data is still valid given the key + data: filter.apply(newData) ? newData : null, + count: currentData.count, + }; + } + + const newData = upsert( transformedInput, - currentData.flat(), + // deep copy data to avoid mutating the original + JSON.parse(JSON.stringify(data)), primaryKeys, filter, merge, orderBy, - ), - limit, - ); - } else if (isAnyPostgrestResponse(currentData)) { - const { data } = currentData; - - if (!Array.isArray(data)) { - if (data === null) { - return { - data, - count: currentData.count, - }; - } - const newData = merge(data, transformedInput); + ); + return { - // Check if the new data is still valid given the key - data: filter.apply(newData) ? 
newData : null, - count: currentData.count, + data: newData, + count: newData.length, }; } - - const newData = upsert( - transformedInput, - // deep copy data to avoid mutating the original - JSON.parse(JSON.stringify(data)), - primaryKeys, - filter, - merge, - orderBy, - ); - - return { - data: newData, - count: newData.length, - }; - } - return currentData; - }), - ); + return currentData; + }), + ); + } } } diff --git a/packages/postgrest-core/tests/delete-item.spec.ts b/packages/postgrest-core/tests/delete-item.spec.ts index 530acca0..a3124749 100644 --- a/packages/postgrest-core/tests/delete-item.spec.ts +++ b/packages/postgrest-core/tests/delete-item.spec.ts @@ -220,6 +220,15 @@ describe('deleteItem', () => { expect(revalidate).toHaveBeenCalledTimes(0); }); + it('should revalidate isHead query', async () => { + const { mutate, revalidate } = await mutateFnMock( + { id_1: '0', id_2: '0', value: 'test' }, + { isHead: true }, + ); + expect(mutate).toHaveBeenCalledTimes(0); + expect(revalidate).toHaveBeenCalledTimes(1); + }); + it('should delete item from paged cache data', async () => { expect( await mutateFnResult( diff --git a/packages/postgrest-core/tests/fetch/build-mutation-fetcher-response.spec.ts b/packages/postgrest-core/tests/fetch/build-mutation-fetcher-response.spec.ts index f00eb65f..da5fd40e 100644 --- a/packages/postgrest-core/tests/fetch/build-mutation-fetcher-response.spec.ts +++ b/packages/postgrest-core/tests/fetch/build-mutation-fetcher-response.spec.ts @@ -8,6 +8,52 @@ import { PostgrestParser } from '../../src/postgrest-parser'; const c = createClient('https://localhost', 'any'); describe('buildMutationFetcherResponse', () => { + it('should work with json columns', () => { + const q = c + .from('campaign') + .select('jsoncol,jsonarraycol,jsonarrayobjcol') + .eq('id', 'some-id'); + + const query = buildNormalizedQuery({ + query: 'jsoncol,jsonarraycol,jsonarrayobjcol', + queriesForTable: () => [new PostgrestParser(q)], + }); + + expect(query).toBeTruthy(); + + expect( + buildMutationFetcherResponse( + { + id: 'some-id', + jsoncol: { + test: '123', + }, + jsonarraycol: ['123'], + jsonarrayobjcol: [{ some: 'value' }, { some: 'other' }], + }, + { + groupedUserQueryPaths: query!.groupedUserQueryPaths, + groupedPaths: query!.groupedPaths, + }, + ), + ).toEqual({ + normalizedData: { + id: 'some-id', + 'jsoncol.test': '123', + 'jsonarraycol.0': '123', + 'jsonarrayobjcol.0.some': 'value', + 'jsonarrayobjcol.1.some': 'other', + }, + userQueryData: { + jsoncol: { + test: '123', + }, + jsonarraycol: ['123'], + jsonarrayobjcol: [{ some: 'value' }, { some: 'other' }], + }, + }); + }); + it('should work with dedupe alias on the same relation', () => { const q = c .from('campaign') @@ -48,6 +94,62 @@ describe('buildMutationFetcherResponse', () => { }); }); + it('should work with wildcard', () => { + const q = c + .from('contact') + .select('some,value,ishouldbetheretoo,*,note_id(id,test,*)') + .eq('test', 'value'); + + const query = buildNormalizedQuery({ + query: '*', + queriesForTable: () => [new PostgrestParser(q)], + }); + + expect(query).toBeTruthy(); + + expect( + buildMutationFetcherResponse( + { + some: '456', + value: '789', + ishouldbethere: '123', + ishouldbetheretoo: { some: 'object' }, + ishouldbetheretootoo: ['one'], + ishouldbetheretootootoo: [{ one: 'two' }], + note_id: { + id: 'id', + test: '123', + ishouldalsobethere: 'id', + }, + }, + { + groupedUserQueryPaths: query!.groupedUserQueryPaths, + groupedPaths: query!.groupedPaths, + }, + ), + ).toEqual({ + 
normalizedData: { + some: '456', + value: '789', + ishouldbethere: '123', + 'ishouldbetheretoo.some': 'object', + 'ishouldbetheretootoo.0': 'one', + 'ishouldbetheretootootoo.0.one': 'two', + 'note_id.id': 'id', + 'note_id.test': '123', + 'note_id.ishouldalsobethere': 'id', + }, + userQueryData: { + some: '456', + value: '789', + ishouldbethere: '123', + ishouldbetheretoo: { some: 'object' }, + ishouldbetheretootoo: ['one'], + ishouldbetheretootootoo: [{ one: 'two' }], + }, + }); + }); + it('should work with dedupe alias and user-defined alias', () => { const q = c.from('contact').select('some,value').eq('test', 'value'); diff --git a/packages/postgrest-core/tests/fetch/build-normalized-query.spec.ts b/packages/postgrest-core/tests/fetch/build-normalized-query.spec.ts index 616c76d8..806f81c6 100644 --- a/packages/postgrest-core/tests/fetch/build-normalized-query.spec.ts +++ b/packages/postgrest-core/tests/fetch/build-normalized-query.spec.ts @@ -148,6 +148,17 @@ describe('buildNormalizedQuery', () => { ); }); + it('should work with wildcard', () => { + const q = c.from('contact').select('some,value').eq('test', 'value'); + + expect( + buildNormalizedQuery({ + query: 'something,the,user,queries,*', + queriesForTable: () => [new PostgrestParser(q)], + })?.selectQuery, + ).toEqual('something,the,user,queries,*,test,some,value'); + }); + it('should add deduplication alias to nested alias', () => { const q = c.from('contact').select('some,value').eq('test', 'value'); diff --git a/packages/postgrest-core/tests/filter/denormalize.spec.ts b/packages/postgrest-core/tests/filter/denormalize.spec.ts index aaa3e927..66c990f5 100644 --- a/packages/postgrest-core/tests/filter/denormalize.spec.ts +++ b/packages/postgrest-core/tests/filter/denormalize.spec.ts @@ -3,6 +3,178 @@ import { denormalize } from '../../src/filter/denormalize'; import { parseSelectParam } from '../../src/lib/parse-select-param'; describe('denormalize', () => { + it('should work with wildcard', () => { + const paths = parseSelectParam( + 'note_id(test,relation_id,rel:relation_id(test,*))', + ); + + expect( + denormalize(paths, { + test: '123', + some: '456', + value: '789', + 'note_id.test': '123', + 'note_id.relation_id': 'id', + 'note_id.relation_id.test': '345', + 'note_id.relation_id.some': '678', + 'note_id.relation_id.other': '910', + 'note_id.relation_id.columns': '111', + }), + ).toEqual({ + note_id: { + test: '123', + relation_id: 'id', + rel: { + test: '345', + some: '678', + other: '910', + columns: '111', + }, + }, + }); + }); + + it('should work with wildcard on first level', () => { + const paths = parseSelectParam( + 'some,*,note_id(test,relation_id,rel:relation_id(test))', + ); + + expect( + denormalize(paths, { + ishouldbethere: '123', + some: '456', + value: '789', + 'note_id.test': '123', + 'note_id.relation_id': 'id', + 'note_id.relation_id.test': '345', + 'note_id.relation_id.some': '678', + 'note_id.relation_id.other': '910', + 'note_id.relation_id.columns': '111', + }), + ).toEqual({ + ishouldbethere: '123', + some: '456', + value: '789', + note_id: { + test: '123', + relation_id: 'id', + rel: { + test: '345', + }, + }, + }); + }); + + it('should work with json array of objects and wildcard', () => { + expect( + denormalize( + [ + { + declaration: 'id', + path: 'id', + }, + { + declaration: '*', + path: '*', + }, + { + declaration: 'template:template_id.id', + alias: 'template.id', + path: 'template_id.id', + }, + { + declaration: 'template:template_id.buttons', + alias: 'template.buttons', + path: 
'template_id.buttons', + }, + ], + { + id: '741c29ab-e03d-4b97-8d51-954579effa10', + value: '123', + ishouldbethere: '456', + 'inbox.id': '123', + 'template_id.id': 'da6d45ca-6644-437a-8a58-0da73ecda566', + 'template_id.buttons.0.url': 'https://hellomateo.de', + 'template_id.buttons.0.text': 'Visit us', + 'template_id.buttons.0.type': 'call_to_action', + 'template_id.buttons.0.subtype': 'url', + 'template_id.buttons.1.text': 'Call us', + 'template_id.buttons.1.type': 'call_to_action', + 'template_id.buttons.1.subtype': 'phone_number', + 'template_id.buttons.1.phone_number': '+420123456789', + }, + ), + ).toEqual({ + id: '741c29ab-e03d-4b97-8d51-954579effa10', + value: '123', + ishouldbethere: '456', + inbox: { id: '123' }, + template: { + id: 'da6d45ca-6644-437a-8a58-0da73ecda566', + buttons: [ + { + url: 'https://hellomateo.de', + text: 'Visit us', + type: 'call_to_action', + subtype: 'url', + }, + { + text: 'Call us', + type: 'call_to_action', + subtype: 'phone_number', + phone_number: '+420123456789', + }, + ], + }, + }); + }); + + it('should work with json column and wildcard', () => { + expect( + denormalize( + [ + { + declaration: 'id', + path: 'id', + }, + { + declaration: 'segments:segment.id', + alias: 'segments.id', + path: 'segment.id', + }, + { + declaration: 'segments:segment.*', + alias: 'segments.*', + path: 'segment.*', + }, + ], + { + id: '1c09d6c3-9a77-4e49-8193-8bb7430dd3f0', + 'segment.0.id': '85bc6e0a-2b24-4b2b-9563-9d8e59a13c31', + 'segment.0.name': 'Test Segment', + 'segment.0.counts.contacts_total': 4, + 'segment.0.counts.contacts_with_marketing_opt_in': 0, + 'segment.0.counts.contacts_with_transactional_opt_in': 1, + 'segment.0.channel_type': 'whatsapp', + }, + ), + ).toEqual({ + id: '1c09d6c3-9a77-4e49-8193-8bb7430dd3f0', + segments: [ + { + id: '85bc6e0a-2b24-4b2b-9563-9d8e59a13c31', + name: 'Test Segment', + channel_type: 'whatsapp', + counts: { + contacts_total: 4, + contacts_with_marketing_opt_in: 0, + contacts_with_transactional_opt_in: 1, + }, + }, + ], + }); + }); + it('should work with nested alias', () => { const paths = parseSelectParam( 'note_id(test,relation_id,rel:relation_id(test))', diff --git a/packages/postgrest-core/tests/mutate-item.spec.ts b/packages/postgrest-core/tests/mutate-item.spec.ts index edc4b340..8aa068a1 100644 --- a/packages/postgrest-core/tests/mutate-item.spec.ts +++ b/packages/postgrest-core/tests/mutate-item.spec.ts @@ -52,6 +52,11 @@ const mutateFnMock = async ( }, getPostgrestFilter() { return { + get hasWildcardPath(): boolean { + return typeof postgrestFilter.hasWildcardPath === 'boolean' + ? postgrestFilter.hasWildcardPath + : false; + }, denormalize(obj: ItemType): ItemType { return obj; }, @@ -132,6 +137,9 @@ const mutateRelationMock = async ( }, getPostgrestFilter() { return { + get hasWildcardPath(): boolean { + return false; + }, denormalize(obj: RelationType): RelationType { return obj; }, @@ -197,6 +205,11 @@ const mutateFnResult = async ( }, getPostgrestFilter() { return { + get hasWildcardPath(): boolean { + return typeof postgrestFilter.hasWildcardPath === 'boolean' + ? 
postgrestFilter.hasWildcardPath + : false; + }, denormalize(obj: ItemType): ItemType { return obj; }, @@ -316,6 +329,41 @@ describe('mutateItem', () => { expect(mutate).toHaveBeenCalledTimes(0); }); + it('should revalidate wildcard query', async () => { + const { mutate, revalidate } = await mutateFnMock( + { id_1: '0', id_2: '0' }, + (c) => c, + {}, + { + apply: false, + applyFilters: false, + hasPaths: false, + hasWildcardPath: true, + hasFiltersOnPaths: true, + applyFiltersOnPaths: true, + }, + ); + expect(revalidate).toHaveBeenCalledTimes(1); + expect(mutate).toHaveBeenCalledTimes(0); + }); + + it('should revalidate isHead query', async () => { + const { mutate, revalidate } = await mutateFnMock( + { id_1: '0', id_2: '0' }, + (c) => c, + { isHead: true }, + { + apply: false, + applyFilters: false, + hasPaths: false, + hasFiltersOnPaths: false, + applyFiltersOnPaths: false, + }, + ); + expect(mutate).toHaveBeenCalledTimes(0); + expect(revalidate).toHaveBeenCalledTimes(1); + }); + it('should apply mutation if key does have filters on pks, and input does match pk filters', async () => { const { mutate } = await mutateFnMock( { id_1: '0', id_2: '0' }, diff --git a/packages/postgrest-core/tests/postgrest-filter.spec.ts b/packages/postgrest-core/tests/postgrest-filter.spec.ts index beededff..52b549ad 100644 --- a/packages/postgrest-core/tests/postgrest-filter.spec.ts +++ b/packages/postgrest-core/tests/postgrest-filter.spec.ts @@ -42,6 +42,19 @@ describe('PostgrestFilter', () => { ).toEqual(true); }); + it('should set has wildcard paths', () => { + expect( + PostgrestFilter.fromQuery( + new PostgrestParser( + createClient('https://localhost', 'test') + .from('contact') + .select('some,*') + .eq('username', 'test'), + ).queryKey, + ).hasWildcardPath, + ).toEqual(true); + }); + describe('.transform', () => { it('should transform nested one-to-many relations', () => { expect( diff --git a/packages/postgrest-core/tests/postgrest-parser.spec.ts b/packages/postgrest-core/tests/postgrest-parser.spec.ts index 001c38a6..56c0c41e 100644 --- a/packages/postgrest-core/tests/postgrest-parser.spec.ts +++ b/packages/postgrest-core/tests/postgrest-parser.spec.ts @@ -291,7 +291,7 @@ describe('PostgrestParser', () => { ]); }); - it('should throw if wildcard is used', () => { + it('should not throw if wildcard is used', () => { expect.assertions(1); const query = c.from('test').select(` name, @@ -300,20 +300,41 @@ describe('PostgrestParser', () => { ), countries ( capital, - population + population, some_ref ( - * + id,* ) - ), - test (prop) - ,prop2,prop3 + ) `); - try { - new PostgrestParser(query).paths; - } catch (e) { - expect(e).toEqual(Error('Wildcard selector is not supported')); - } + expect(new PostgrestParser(query).paths).toEqual([ + { alias: undefined, path: 'name', declaration: 'name' }, + { + alias: undefined, + path: 'cities.name', + declaration: 'cities.name', + }, + { + alias: undefined, + path: 'countries.capital', + declaration: 'countries.capital', + }, + { + alias: undefined, + path: 'countries.population', + declaration: 'countries.population', + }, + { + alias: undefined, + path: 'countries.some_ref.id', + declaration: 'countries.some_ref.id', + }, + { + alias: undefined, + path: 'countries.some_ref.*', + declaration: 'countries.some_ref.*', + }, + ]); }); it('should work for mapped names', () => { diff --git a/packages/postgrest-core/tests/upsert-item.spec.ts b/packages/postgrest-core/tests/upsert-item.spec.ts index dc611a62..1f1f93bf 100644 --- 
a/packages/postgrest-core/tests/upsert-item.spec.ts +++ b/packages/postgrest-core/tests/upsert-item.spec.ts @@ -50,6 +50,11 @@ const mutateFnMock = async ( }, getPostgrestFilter() { return { + get hasWildcardPath(): boolean { + return typeof postgrestFilter.hasWildcardPath === 'boolean' + ? postgrestFilter.hasWildcardPath + : false; + }, denormalize(obj: ItemType): ItemType { return obj; }, @@ -129,6 +134,9 @@ const mutateRelationMock = async ( }, getPostgrestFilter() { return { + get hasWildcardPath(): boolean { + return false; + }, denormalize(obj: RelationType): RelationType { return obj; }, @@ -194,6 +202,11 @@ const mutateFnResult = async ( }, getPostgrestFilter() { return { + get hasWildcardPath(): boolean { + return typeof postgrestFilter.hasWildcardPath === 'boolean' + ? postgrestFilter.hasWildcardPath + : false; + }, denormalize(obj: ItemType): ItemType { return obj; }, @@ -311,6 +324,39 @@ describe('upsertItem', () => { expect(mutate).toHaveBeenCalledTimes(1); }); + it('should revalidate wildcard query', async () => { + const { mutate, revalidate } = await mutateFnMock( + { id_1: '0', id_2: '0', value: 'test' }, + {}, + { + hasWildcardPath: true, + apply: false, + applyFilters: false, + hasPaths: false, + hasFiltersOnPaths: true, + applyFiltersOnPaths: true, + }, + ); + expect(mutate).toHaveBeenCalledTimes(0); + expect(revalidate).toHaveBeenCalledTimes(1); + }); + + it('should revalidate isHead query', async () => { + const { mutate, revalidate } = await mutateFnMock( + { id_1: '0', id_2: '0', value: 'test' }, + { isHead: true }, + { + apply: false, + applyFilters: false, + hasPaths: false, + hasFiltersOnPaths: false, + applyFiltersOnPaths: false, + }, + ); + expect(mutate).toHaveBeenCalledTimes(0); + expect(revalidate).toHaveBeenCalledTimes(1); + }); + it('should apply mutation if key does not have filters on pks', async () => { const { mutate } = await mutateFnMock( { id_1: '0', value: 'test' } as ItemType, diff --git a/packages/postgrest-react-query/package.json b/packages/postgrest-react-query/package.json index 3d377ae3..17626004 100644 --- a/packages/postgrest-react-query/package.json +++ b/packages/postgrest-react-query/package.json @@ -10,9 +10,7 @@ "main": "./dist/index.js", "source": "./src/index.ts", "types": "./dist/index.d.ts", - "files": [ - "dist/**" - ], + "files": ["dist/**"], "publishConfig": { "access": "public" }, diff --git a/packages/postgrest-react-query/tests/mutate/use-update-mutation.integration.spec.tsx b/packages/postgrest-react-query/tests/mutate/use-update-mutation.integration.spec.tsx index 12de84b1..9f3220fc 100644 --- a/packages/postgrest-react-query/tests/mutate/use-update-mutation.integration.spec.tsx +++ b/packages/postgrest-react-query/tests/mutate/use-update-mutation.integration.spec.tsx @@ -87,4 +87,118 @@ describe('useUpdateMutation', () => { expect(screen.getByTestId('count').textContent).toEqual('count: 1'); await screen.findByText('success: true', {}, { timeout: 10000 }); }); + + it('should revalidate existing cache item with wildcard', async () => { + const queryClient = new QueryClient(); + const USERNAME_1 = `${testRunPrefix}-wildcard-1`; + const USERNAME_2 = `${testRunPrefix}-wildcard-2`; + function Page() { + const [success, setSuccess] = useState(false); + const { data, count } = useQuery( + client + .from('contact') + .select('*', { count: 'exact' }) + .in('username', [USERNAME_1, USERNAME_2]), + ); + const { mutateAsync: insert } = useInsertMutation( + client.from('contact'), + ['id'], + ); + const { mutateAsync: update } = 
useUpdateMutation( + client.from('contact'), + ['id'], + null, + { + onSuccess: () => setSuccess(true), + }, + ); + return ( +
<div> + <div + data-testid="insert" + onClick={async () => await insert([{ username: USERNAME_1 }])} + /> + <div + data-testid="update" + onClick={async () => + await update({ + id: (data ?? []).find((d) => d.username === USERNAME_1)?.id, + username: USERNAME_2, + }) + } + /> + <span> + { + data?.find((d) => + [USERNAME_1, USERNAME_2].includes(d.username ?? ''), + )?.username + } + </span> + <span data-testid="count">{`count: ${count}`}</span> + <span>{`success: ${success}`}</span> + </div>
+ ); + } + + renderWithConfig(<Page />, queryClient); + await screen.findByText('count: 0'); + fireEvent.click(screen.getByTestId('insert')); + await screen.findByText(USERNAME_1); + expect(screen.getByTestId('count').textContent).toEqual('count: 1'); + fireEvent.click(screen.getByTestId('update')); + await screen.findByText(USERNAME_2); + expect(screen.getByTestId('count').textContent).toEqual('count: 1'); + await screen.findByText('success: true'); + }); + + it('should revalidate existing cache item with count and head', async () => { + const queryClient = new QueryClient(); + const USERNAME_1 = `${testRunPrefix}-count-1`; + const USERNAME_2 = `${testRunPrefix}-count-2`; + function Page() { + const [success, setSuccess] = useState(false); + const { data, count } = useQuery( + client + .from('contact') + .select('*', { count: 'exact', head: true }) + .in('username', [USERNAME_1, USERNAME_2]), + ); + const { mutateAsync: insert } = useInsertMutation( + client.from('contact'), + ['id'], + ); + const { mutateAsync: update } = useUpdateMutation( + client.from('contact'), + ['id'], + null, + { + onSuccess: () => setSuccess(true), + }, + ); + return ( +
<div> + <div + data-testid="insert" + onClick={async () => await insert([{ username: USERNAME_1 }])} + /> + <div + data-testid="update" + onClick={async () => + await update({ + id: (data ?? []).find((d) => d.username === USERNAME_1)?.id, + username: USERNAME_2, + }) + } + /> + <span>{`count: ${count}`}</span> + <span>{`success: ${success}`}</span> + </div>
+ ); + } + + renderWithConfig(<Page />, queryClient); + await screen.findByText('count: 0'); + fireEvent.click(screen.getByTestId('insert')); + await screen.findByText('count: 1'); + }); + }); diff --git a/packages/postgrest-swr/package.json b/packages/postgrest-swr/package.json index b59648e5..786b83e6 100644 --- a/packages/postgrest-swr/package.json +++ b/packages/postgrest-swr/package.json @@ -23,9 +23,7 @@ "./package.json": "./package.json" }, "types": "./dist/index.d.ts", - "files": [ - "dist/**" - ], + "files": ["dist/**"], "publishConfig": { "access": "public" }, @@ -36,12 +34,7 @@ "clean": "rm -rf .turbo && rm -rf .nyc_output && rm -rf node_modules && rm -rf dist", "typecheck": "tsc --pretty --noEmit" }, - "keywords": [ - "Supabase", - "PostgREST", - "Cache", - "SWR" - ], + "keywords": ["Supabase", "PostgREST", "Cache", "SWR"], "repository": { "type": "git", "url": "git+https://github.com/psteinroe/supabase-cache-helpers.git", diff --git a/packages/postgrest-swr/tests/mutate/use-update-mutation.integration.spec.tsx b/packages/postgrest-swr/tests/mutate/use-update-mutation.integration.spec.tsx index d3881d7a..525cb1b3 100644 --- a/packages/postgrest-swr/tests/mutate/use-update-mutation.integration.spec.tsx +++ b/packages/postgrest-swr/tests/mutate/use-update-mutation.integration.spec.tsx @@ -164,6 +164,124 @@ describe('useUpdateMutation', () => { await screen.findByText('success: true', {}, { timeout: 10000 }); }); + it('should revalidate existing cache item with wildcard', async () => { + const USERNAME_1 = `${testRunPrefix}-wildcard-1`; + const USERNAME_2 = `${testRunPrefix}-wildcard-2`; + function Page() { + const [success, setSuccess] = useState(false); + const { data, count } = useQuery( + client + .from('contact') + .select('*', { count: 'exact' }) + .in('username', [USERNAME_1, USERNAME_2]), + { + revalidateOnFocus: false, + revalidateOnReconnect: false, + }, + ); + const { trigger: insert } = useInsertMutation(client.from('contact'), [ + 'id', + ]); + const { trigger: update } = useUpdateMutation( + client.from('contact'), + ['id'], + null, + { + onSuccess: () => setSuccess(true), + }, + ); + return ( +
<div> + <div + data-testid="insert" + onClick={async () => await insert([{ username: USERNAME_1 }])} + /> + <div + data-testid="update" + onClick={async () => + await update({ + id: (data ?? []).find((d) => d.username === USERNAME_1)?.id, + username: USERNAME_2, + }) + } + /> + <span> + { + data?.find((d) => + [USERNAME_1, USERNAME_2].includes(d.username ?? ''), + )?.username + } + </span> + <span data-testid="count">{`count: ${count}`}</span> + <span>{`success: ${success}`}</span> + </div>
+ ); + } + + renderWithConfig(<Page />, { provider: () => provider }); + await screen.findByText('count: 0'); + fireEvent.click(screen.getByTestId('insert')); + await screen.findByText(USERNAME_1, {}, { timeout: 2000 }); + expect(screen.getByTestId('count').textContent).toEqual('count: 1'); + fireEvent.click(screen.getByTestId('update')); + await screen.findByText(USERNAME_2); + expect(screen.getByTestId('count').textContent).toEqual('count: 1'); + await screen.findByText('success: true'); + }); + + it('should revalidate existing cache item with count and head', async () => { + const USERNAME_1 = `${testRunPrefix}-count-1`; + const USERNAME_2 = `${testRunPrefix}-count-2`; + function Page() { + const [success, setSuccess] = useState(false); + const { data, count } = useQuery( + client + .from('contact') + .select('*', { count: 'exact', head: true }) + .in('username', [USERNAME_1, USERNAME_2]), + { + revalidateOnFocus: false, + revalidateOnReconnect: false, + }, + ); + const { trigger: insert } = useInsertMutation(client.from('contact'), [ + 'id', + ]); + const { trigger: update } = useUpdateMutation( + client.from('contact'), + ['id'], + null, + { + onSuccess: () => setSuccess(true), + }, + ); + return ( +
<div> + <div + data-testid="insert" + onClick={async () => await insert([{ username: USERNAME_1 }])} + /> + <div + data-testid="update" + onClick={async () => + await update({ + id: (data ?? []).find((d) => d.username === USERNAME_1)?.id, + username: USERNAME_2, + }) + } + /> + <span>{`count: ${count}`}</span> + <span>{`success: ${success}`}</span> + </div>
+ ); + } + + renderWithConfig(<Page />, { provider: () => provider }); + await screen.findByText('count: 0'); + fireEvent.click(screen.getByTestId('insert')); + await screen.findByText('count: 1'); + }); + + it('revalidate relations should work', async () => { const USERNAME = `${testRunPrefix}-rev-relations`; const USERNAME_UPDATED = `${testRunPrefix}-rev-relations-updated`; diff --git a/packages/storage-core/package.json b/packages/storage-core/package.json index efd59b2d..5af0080e 100644 --- a/packages/storage-core/package.json +++ b/packages/storage-core/package.json @@ -13,18 +13,13 @@ "./package.json": "./package.json" }, "types": "./dist/index.d.ts", - "files": [ - "dist/**" - ], + "files": ["dist/**"], "repository": { "type": "git", "url": "git+https://github.com/psteinroe/supabase-cache-helpers.git", "directory": "packages/storage-fetcher" }, - "keywords": [ - "Supabase", - "Storage" - ], + "keywords": ["Supabase", "Storage"], "publishConfig": { "access": "public" }, diff --git a/packages/storage-react-query/package.json b/packages/storage-react-query/package.json index 73b92900..ed1ff159 100644 --- a/packages/storage-react-query/package.json +++ b/packages/storage-react-query/package.json @@ -18,9 +18,7 @@ "./package.json": "./package.json" }, "types": "./dist/index.d.ts", - "files": [ - "dist/**" - ], + "files": ["dist/**"], "publishConfig": { "access": "public" }, @@ -31,13 +29,7 @@ "clean": "rm -rf .turbo && rm -rf .nyc_output && rm -rf node_modules && rm -rf dist", "typecheck": "tsc --pretty --noEmit" }, - "keywords": [ - "Supabase", - "Storage", - "Cache", - "Tanstack Query", - "React Query" - ], + "keywords": ["Supabase", "Storage", "Cache", "Tanstack Query", "React Query"], "repository": { "type": "git", "url": "git+https://github.com/psteinroe/supabase-cache-helpers.git", diff --git a/packages/storage-swr/package.json b/packages/storage-swr/package.json index ca69ce03..68925006 100644 --- a/packages/storage-swr/package.json +++ b/packages/storage-swr/package.json @@ -18,9 +18,7 @@ "./package.json": "./package.json" }, "types": "./dist/index.d.ts", - "files": [ - "dist/**" - ], + "files": ["dist/**"], "publishConfig": { "access": "public" }, @@ -31,12 +29,7 @@ "clean": "rm -rf .turbo && rm -rf coverage && rm -rf .nyc_output && rm -rf node_modules && rm -rf dist", "typecheck": "tsc --pretty --noEmit" }, - "keywords": [ - "Supabase", - "Storage", - "Cache", - "SWR" - ], + "keywords": ["Supabase", "Storage", "Cache", "SWR"], "repository": { "type": "git", "url": "git+https://github.com/psteinroe/supabase-cache-helpers.git",
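
To make the behaviour introduced by this change concrete, the following consumer-side sketch (not part of the diff; the `contact` table, its `username` column, and the client setup are assumptions for illustration) shows how a wildcard select now interacts with a mutation: the select parses into a regular cache key instead of throwing, and because `hasWildcardPath` is true for it, a write triggers a revalidation of the query rather than a granular cache patch. The same revalidation path applies to `head: true` count queries.

```tsx
// Illustrative sketch only; mirrors the new tests above, not library internals.
import { createClient } from '@supabase/supabase-js';
import {
  useQuery,
  useUpdateMutation,
} from '@supabase-cache-helpers/postgrest-swr';

const client = createClient('https://project-ref.supabase.co', 'anon-key');

function Contacts({ prefix }: { prefix: string }) {
  // A wildcard select no longer throws 'Wildcard selector is not supported';
  // it is parsed into a definite cache key like an explicit column list.
  const { data, count } = useQuery(
    client
      .from('contact')
      .select('*', { count: 'exact' })
      .ilike('username', `${prefix}%`),
  );

  // Because the query above selects with a wildcard, cache helpers cannot know
  // the full column set of the updated row, so this mutation revalidates the
  // query instead of patching the cached entry in place.
  const { trigger: update } = useUpdateMutation(
    client.from('contact'),
    ['id'],
    null,
  );

  return (
    <ul>
      {(data ?? []).map((contact) => (
        <li
          key={contact.id}
          onClick={() => update({ id: contact.id, username: 'renamed' })}
        >
          {contact.username} ({count})
        </li>
      ))}
    </ul>
  );
}
```

Explicit column lists remain the recommended default: as the updated docs callout notes, only then can cache helpers apply granular cache updates without refetching.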