diff --git a/.github/workflows/node.js.yml b/.github/workflows/node.js.yml index 1b2e0ae..ed99abe 100644 --- a/.github/workflows/node.js.yml +++ b/.github/workflows/node.js.yml @@ -1,23 +1,23 @@ name: Run tests on: push: - branches: [ "main" ] + branches: ["main"] pull_request: - branches: [ "main" ] + branches: ["main"] jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - name: Use Node.js - uses: actions/setup-node@v4 - with: - cache-dependency-path: | - example/package-lock.json - package-lock.json - node-version: '18.x' - cache: 'npm' - - run: npm i - - run: npm ci - - run: cd example && npm i && cd .. - - run: npm test + - uses: actions/checkout@v4 + - name: Use Node.js + uses: actions/setup-node@v4 + with: + cache-dependency-path: | + example/package-lock.json + package-lock.json + node-version: "18.x" + cache: "npm" + - run: npm i + - run: npm ci + - run: cd example && npm i && cd .. + - run: npm test diff --git a/README.md b/README.md index 2728bdf..2b6d04e 100644 --- a/README.md +++ b/README.md @@ -13,20 +13,37 @@ Suppose you have a leaderboard of game scores. These are some operations that the Aggregate component makes easy and efficient: 1. Count the total number of scores: `aggregate.count(ctx)` -2. Count the number of scores greater than 65: `aggregate.count(ctx, { lower: { key: 65, inclusive: false } })` +2. Count the number of scores greater than 65: `aggregate.count(ctx, { bounds: { lower: { key: 65, inclusive: false } } })` 3. Find the p95 score: `aggregate.at(ctx, Math.floor(aggregate.count(ctx) * 0.95))` 4. Find the overall average score: `aggregate.sum(ctx) / aggregate.count(ctx)` 5. Find the ranking for a score of 65 in the leaderboard: `aggregate.indexOf(ctx, 65)` 6. Find the average score for an individual user. You can define another aggregate - partitioned by user and aggregate within each: + grouped by user and aggregate within each: ```ts -// aggregateScoreByUser is the leaderboard scores partitioned by username. 
+// aggregateScoreByUser is the leaderboard scores grouped by username. const bounds = { prefix: [username] }; -const highScoreForUser = aggregateScoreByUser.max(ctx, bounds); +const highScoreForUser = await aggregateScoreByUser.max(ctx, { bounds }); const avgScoreForUser = - aggregateScoreByUser.sum(ctx, bounds) / - aggregateScoreByUser.count(ctx, bounds); + (await aggregateScoreByUser.sum(ctx, { bounds })) / + (await aggregateScoreByUser.count(ctx, { bounds })); +// It still enables adding or averaging all scores across all usernames. +const globalAverageScore = + (await aggregateScoreByUser.sum(ctx)) / + (await aggregateScoreByUser.count(ctx)); +``` + +7. Alternatively, you can define an aggregate with separate namespaces, + and do the same query. This method increases throughput because a user's data + won't interfere with other users. However, you lose the ability to aggregate + over all users. + +```ts +const forUser = { namespace: username }; +const highScoreForUser = await aggregateScoreByUser.max(ctx, forUser); +const avgScoreForUser = + (await aggregateScoreByUser.sum(ctx, forUser)) / + (await aggregateScoreByUser.count(ctx, forUser)); ``` The Aggregate component provides `O(log(n))`-time lookups, instead of the `O(n)` @@ -51,10 +68,9 @@ The keys may be arbitrary Convex values, so you can choose to sort your data by: 4. Nothing, use `key=null` for everything if you just want [a total count, such as for random access](#total-count-and-randomization). -### Partitioning +### Grouping -You can use sorting to partition your data set, enabling namspacing, -multitenancy, sharding, and more. +You can use sorting to group your data set. If you want to keep track of multiple games with scores for each user, use a tuple of `[game, username, score]` as the key. @@ -76,6 +92,54 @@ would need to aggregate with key `[game, score]`. To support different sorting and partitioning keys, you can define multiple instances. 
See [below](#defining-multiple-aggregates) for details. +If you separate your data via the `sortKey` and `prefix` bounds, you can look at +your data from any altitude. You can do a global `count` to see how many total +data points there are, or you can zero in on an individual group of the data. + +However, there's a tradeoff: nearby data points can interfere with each other +in the internal data structure, reducing throughput. See +[below](#read-dependencies-and-writes) for more details. To avoid interference, +you can use Namespaces. + +### Namespacing + +If your data is separated into distinct partitions, and you don't need to +aggregate between partitions, then you can put each partition into its own +namespace. Each namespace gets its own internal data structure. + +If your app has multiple games, it's not useful to aggregate scores across +different games. The scoring system for chess isn't related to the scoring +system for football. So we can namespace our scores based on the game. + +Whenever we aggregate scores, we _must_ specify the namespace. +On the other hand, the internal aggregation data structure can keep the scores +separate and keep throughput high. + +Here's how you would create the aggregate we just described: + +```ts +const leaderboardByGame = new TableAggregate<{ + Namespace: Id<"games">; + Key: number; + DataModel: DataModel; + TableName: "scores"; +}>(components.leaderboardByGame, { + namespace: (doc) => doc.gameId, + sortKey: (doc) => doc.score, +}); +``` + +And whenever you use this aggregate, you specify the namespace. + +```ts +const footballHighScore = await leaderboardByGame.max(ctx, { + namespace: footballId, +}); +``` + +See an example of a namespaced aggregate in +[example/convex/photos.ts](./example/convex/photos.ts). 
+ ### More examples The Aggregate component can efficiently calculate all of these: @@ -149,13 +213,16 @@ import { DataModel } from "./_generated/dataModel"; import { mutation as rawMutation } from "./_generated/server"; import { TableAggregate } from "@convex-dev/aggregate"; -const aggregate = new TableAggregate( - components.aggregate, - { - sortKey: (doc) => doc._creationTime, // Allows querying across time ranges. - sumValue: (doc) => doc.value, // The value to be used in `.sum` calculations. - } -); +const aggregate = new TableAggregate<{ + Namespace: undefined; + Key: number; + DataModel: DataModel; + TableName: "mytable"; +}>(components.aggregate, { + namespace: (doc) => undefined, // disable namespacing. + sortKey: (doc) => doc._creationTime, // Allows querying across time ranges. + sumValue: (doc) => doc.value, // The value to be used in `.sum` calculations. +}); ``` Since these are happening in a @@ -167,12 +234,14 @@ here's how you might define `aggregateByGame`, as an aggregate on the "scores" table: ```ts -const aggregateByGame = new TableAggregate< - [Id<"games">, string, number], - DataModel, - "leaderboard" ->(components.aggregateByGame, { - sortKey: (doc) => [doc.gameId, doc.username, doc.score], +const aggregateByGame = new TableAggregate<{ + Namespace: Id<"games">; + Key: [string, number]; + DataModel: DataModel; + TableName: "leaderboard"; +}>(components.aggregateByGame, { + namespace: (doc) => doc.gameId, + sortKey: (doc) => [doc.username, doc.score], }); ``` @@ -237,31 +306,24 @@ To run the examples: ### Total Count and Randomization If you don't need the ordering, partitioning, or summing behavior of -`TableAggregate`, there's a simpler interface you can use: `Randomize`. +`TableAggregate`, you can set `namespace: undefined` and `sortKey: null`. 
```ts -import { components } from "./_generated/api"; -import { DataModel } from "./_generated/dataModel"; -import { mutation as rawMutation } from "./_generated/server"; -import { Randomize } from "@convex-dev/aggregate"; -import { customMutation } from "convex-helpers/server/customFunctions"; -// This is like TableAggregate but there's no key or sumValue. -const randomize = new Randomize(components.aggregate); - -// In a mutation, insert into the component when you insert into your table. -const id = await ctx.db.insert("mytable", data); -await randomize.insert(ctx, id); - -// As before, delete from the component when you delete from your table -await ctx.db.delete(id); -await randomize.delete(ctx, id); - -// in a query, get the total document count. -const totalCount = await randomize.count(ctx); -// get a random document's id. -const randomId = await randomize.random(ctx); +const randomize = new TableAggregate<{ + Namespace: undefined; + Key: null; + DataModel: DataModel; + TableName: "mytable"; +}>(components.aggregate, { + namespace: (doc) => undefined, + sortKey: (doc) => null, +}); ``` +Without sorting, all documents are ordered by their `_id` which is generally +random. And you can look up the document at any index to find one at random +or shuffle the whole table. + See more examples in [`example/convex/shuffle.ts`](example/convex/shuffle.ts), including a paginated random shuffle of some music. @@ -272,28 +334,34 @@ Convex supports infinite-scroll pagination which is to worry about items going missing from your list. But sometimes you want to display separate pages of results on separate pages of your app. -For this example, imagine you have a table of photos +For this example, imagine you have a table of photo albums. ```ts // convex/schema.ts defineSchema({ photos: defineTable({ + album: v.string(), url: v.string(), - }), + }).index("by_album_creation_time", ["album"]), }); ``` -And an aggregate defined with key as `_creationTime`. 
+And an aggregate defined with key as `_creationTime` and namespace as `album`. ```ts // convex/convex.config.ts app.use(aggregate, { name: "photos" }); // convex/photos.ts -const photos = new TableAggregate( - components.photos, - { sortKey: (doc) => doc._creationTime } -); +const photos = new TableAggregate<{ + Namespace: string; // album name + Key: number; // creation time + DataModel: DataModel; + TableName: "photos"; +}>(components.photos, { + namespace: (doc) => doc.album, + sortKey: (doc) => doc._creationTime, +}); ``` You can pick a page size and jump to any page once you have `TableAggregate` to @@ -301,15 +369,15 @@ map from offset to an index key. In this example, if `offset` is 100 and `numItems` is 10, we get the hundredth `_creationTime` (in ascending order) and starting there we get the next ten -documents. +documents. In this way we can paginate through the whole photo album. ```ts export const pageOfPhotos({ - args: { offset: v.number(), numItems: v.number() }, - handler: async (ctx, { offset, numItems }) => { - const { key } = await photos.at(ctx, offset); + args: { offset: v.number(), numItems: v.number(), album: v.string() }, + handler: async (ctx, { offset, numItems, album }) => { + const { key } = await photos.at(ctx, offset, { namespace: album }); return await ctx.db.query("photos") - .withIndex("by_creation_time", q=>q.gte("_creationTime", key)) + .withIndex("by_album_creation_time", q=>q.eq("album", album).gte("_creationTime", key)) .take(numItems); }, }); @@ -328,19 +396,22 @@ insert, delete, and replace operations yourself. import { components } from "./_generated/api"; import { DataModel } from "./_generated/dataModel"; import { DirectAggregate } from "@convex-dev/aggregate"; -// The first generic parameter (number in this case) is the key. -// The second generic parameter (string in this case) should be unique to -// be a tie-breaker in case two data points have the same key. 
-const aggregate = new DirectAggregate(components.aggregate); +// Note the `id` should be unique to be a tie-breaker in case two data points +// have the same key. +const aggregate = new DirectAggregate<{ + Namespace: undefined; + Key: number; + Id: string; +}>(components.aggregate); // within a mutation, add values to be aggregated -await aggregate.insert(ctx, key, id); +await aggregate.insert(ctx, { key, id }); // if you want to use `.sum` to aggregate sums of values, insert with a sumValue -await aggregate.insert(ctx, key, id, sumValue); +await aggregate.insert(ctx, { key, id, sumValue }); // or delete values that were previously added -await aggregate.delete(ctx, key, id); +await aggregate.delete(ctx, { key, id }); // or update values -await aggregate.replace(ctx, oldKey, newKey, id); +await aggregate.replace(ctx, { key: oldKey, id }, { key: newKey }); ``` See [`example/convex/stats.ts`](example/convex/stats.ts) for an example. diff --git a/example/convex/_generated/api.d.ts b/example/convex/_generated/api.d.ts index 89dfe7d..6900773 100644 --- a/example/convex/_generated/api.d.ts +++ b/example/convex/_generated/api.d.ts @@ -49,38 +49,37 @@ export declare const components: { aggregateBetween: FunctionReference< "query", "internal", - { k1?: any; k2?: any }, + { k1?: any; k2?: any; namespace?: any }, { count: number; sum: number } >; atNegativeOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; atOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; - count: FunctionReference<"query", "internal", {}, any>; get: FunctionReference< "query", "internal", - { key: any }, + { key: any; namespace?: any }, null | { k: any; s: number; v: any } >; offset: FunctionReference< "query", "internal", - { k1?: any; key: any 
}, + { k1?: any; key: any; namespace?: any }, number >; offsetUntil: FunctionReference< "query", "internal", - { k2?: any; key: any }, + { k2?: any; key: any; namespace?: any }, number >; paginate: FunctionReference< @@ -91,6 +90,7 @@ export declare const components: { k1?: any; k2?: any; limit: number; + namespace?: any; order: "asc" | "desc"; }, { @@ -99,16 +99,26 @@ export declare const components: { page: Array<{ k: any; s: number; v: any }>; } >; - sum: FunctionReference<"query", "internal", {}, number>; - validate: FunctionReference<"query", "internal", {}, any>; + paginateNamespaces: FunctionReference< + "query", + "internal", + { cursor?: string; limit: number }, + { cursor: string; isDone: boolean; page: Array } + >; + validate: FunctionReference< + "query", + "internal", + { namespace?: any }, + any + >; }; inspect: { - display: FunctionReference<"query", "internal", {}, any>; - dump: FunctionReference<"query", "internal", {}, string>; + display: FunctionReference<"query", "internal", { namespace?: any }, any>; + dump: FunctionReference<"query", "internal", { namespace?: any }, string>; inspectNode: FunctionReference< "query", "internal", - { node?: string }, + { namespace?: any; node?: string }, null >; }; @@ -116,39 +126,63 @@ export declare const components: { clear: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; deleteIfExists: FunctionReference< "mutation", "internal", - { key: any }, + { key: any; namespace?: any }, any >; - delete_: FunctionReference<"mutation", "internal", { key: any }, null>; + delete_: FunctionReference< + "mutation", + "internal", + { key: any; namespace?: any }, + null + >; init: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; insert: FunctionReference< "mutation", "internal", - { key: any; 
summand?: number; value: any }, + { key: any; namespace?: any; summand?: number; value: any }, + null + >; + makeRootLazy: FunctionReference< + "mutation", + "internal", + { namespace?: any }, null >; - makeRootLazy: FunctionReference<"mutation", "internal", {}, null>; replace: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, null >; replaceOrInsert: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, any >; }; @@ -158,38 +192,37 @@ export declare const components: { aggregateBetween: FunctionReference< "query", "internal", - { k1?: any; k2?: any }, + { k1?: any; k2?: any; namespace?: any }, { count: number; sum: number } >; atNegativeOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; atOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; - count: FunctionReference<"query", "internal", {}, any>; get: FunctionReference< "query", "internal", - { key: any }, + { key: any; namespace?: any }, null | { k: any; s: number; v: any } >; offset: FunctionReference< "query", "internal", - { k1?: any; key: any }, + { k1?: any; key: any; namespace?: any }, number >; offsetUntil: FunctionReference< "query", "internal", - { k2?: any; key: any }, + { k2?: any; key: any; namespace?: any }, number >; paginate: FunctionReference< @@ -200,6 +233,7 @@ export declare const components: { k1?: any; k2?: any; limit: number; + namespace?: any; order: "asc" | "desc"; }, { @@ -208,16 +242,26 
@@ export declare const components: { page: Array<{ k: any; s: number; v: any }>; } >; - sum: FunctionReference<"query", "internal", {}, number>; - validate: FunctionReference<"query", "internal", {}, any>; + paginateNamespaces: FunctionReference< + "query", + "internal", + { cursor?: string; limit: number }, + { cursor: string; isDone: boolean; page: Array } + >; + validate: FunctionReference< + "query", + "internal", + { namespace?: any }, + any + >; }; inspect: { - display: FunctionReference<"query", "internal", {}, any>; - dump: FunctionReference<"query", "internal", {}, string>; + display: FunctionReference<"query", "internal", { namespace?: any }, any>; + dump: FunctionReference<"query", "internal", { namespace?: any }, string>; inspectNode: FunctionReference< "query", "internal", - { node?: string }, + { namespace?: any; node?: string }, null >; }; @@ -225,39 +269,63 @@ export declare const components: { clear: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; deleteIfExists: FunctionReference< "mutation", "internal", - { key: any }, + { key: any; namespace?: any }, any >; - delete_: FunctionReference<"mutation", "internal", { key: any }, null>; + delete_: FunctionReference< + "mutation", + "internal", + { key: any; namespace?: any }, + null + >; init: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; insert: FunctionReference< "mutation", "internal", - { key: any; summand?: number; value: any }, + { key: any; namespace?: any; summand?: number; value: any }, + null + >; + makeRootLazy: FunctionReference< + "mutation", + "internal", + { namespace?: any }, null >; - makeRootLazy: FunctionReference<"mutation", "internal", {}, null>; replace: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; 
value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, null >; replaceOrInsert: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, any >; }; @@ -267,38 +335,37 @@ export declare const components: { aggregateBetween: FunctionReference< "query", "internal", - { k1?: any; k2?: any }, + { k1?: any; k2?: any; namespace?: any }, { count: number; sum: number } >; atNegativeOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; atOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; - count: FunctionReference<"query", "internal", {}, any>; get: FunctionReference< "query", "internal", - { key: any }, + { key: any; namespace?: any }, null | { k: any; s: number; v: any } >; offset: FunctionReference< "query", "internal", - { k1?: any; key: any }, + { k1?: any; key: any; namespace?: any }, number >; offsetUntil: FunctionReference< "query", "internal", - { k2?: any; key: any }, + { k2?: any; key: any; namespace?: any }, number >; paginate: FunctionReference< @@ -309,6 +376,7 @@ export declare const components: { k1?: any; k2?: any; limit: number; + namespace?: any; order: "asc" | "desc"; }, { @@ -317,16 +385,26 @@ export declare const components: { page: Array<{ k: any; s: number; v: any }>; } >; - sum: FunctionReference<"query", "internal", {}, number>; - validate: FunctionReference<"query", "internal", {}, any>; + paginateNamespaces: FunctionReference< + "query", + "internal", + { cursor?: string; limit: number }, + { cursor: string; isDone: boolean; page: Array } + >; + 
validate: FunctionReference< + "query", + "internal", + { namespace?: any }, + any + >; }; inspect: { - display: FunctionReference<"query", "internal", {}, any>; - dump: FunctionReference<"query", "internal", {}, string>; + display: FunctionReference<"query", "internal", { namespace?: any }, any>; + dump: FunctionReference<"query", "internal", { namespace?: any }, string>; inspectNode: FunctionReference< "query", "internal", - { node?: string }, + { namespace?: any; node?: string }, null >; }; @@ -334,39 +412,63 @@ export declare const components: { clear: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; deleteIfExists: FunctionReference< "mutation", "internal", - { key: any }, + { key: any; namespace?: any }, any >; - delete_: FunctionReference<"mutation", "internal", { key: any }, null>; + delete_: FunctionReference< + "mutation", + "internal", + { key: any; namespace?: any }, + null + >; init: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; insert: FunctionReference< "mutation", "internal", - { key: any; summand?: number; value: any }, + { key: any; namespace?: any; summand?: number; value: any }, + null + >; + makeRootLazy: FunctionReference< + "mutation", + "internal", + { namespace?: any }, null >; - makeRootLazy: FunctionReference<"mutation", "internal", {}, null>; replace: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, null >; replaceOrInsert: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + 
value: any; + }, any >; }; @@ -376,38 +478,37 @@ export declare const components: { aggregateBetween: FunctionReference< "query", "internal", - { k1?: any; k2?: any }, + { k1?: any; k2?: any; namespace?: any }, { count: number; sum: number } >; atNegativeOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; atOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; - count: FunctionReference<"query", "internal", {}, any>; get: FunctionReference< "query", "internal", - { key: any }, + { key: any; namespace?: any }, null | { k: any; s: number; v: any } >; offset: FunctionReference< "query", "internal", - { k1?: any; key: any }, + { k1?: any; key: any; namespace?: any }, number >; offsetUntil: FunctionReference< "query", "internal", - { k2?: any; key: any }, + { k2?: any; key: any; namespace?: any }, number >; paginate: FunctionReference< @@ -418,6 +519,7 @@ export declare const components: { k1?: any; k2?: any; limit: number; + namespace?: any; order: "asc" | "desc"; }, { @@ -426,16 +528,26 @@ export declare const components: { page: Array<{ k: any; s: number; v: any }>; } >; - sum: FunctionReference<"query", "internal", {}, number>; - validate: FunctionReference<"query", "internal", {}, any>; + paginateNamespaces: FunctionReference< + "query", + "internal", + { cursor?: string; limit: number }, + { cursor: string; isDone: boolean; page: Array } + >; + validate: FunctionReference< + "query", + "internal", + { namespace?: any }, + any + >; }; inspect: { - display: FunctionReference<"query", "internal", {}, any>; - dump: FunctionReference<"query", "internal", {}, string>; + display: FunctionReference<"query", "internal", { namespace?: any }, any>; + dump: FunctionReference<"query", "internal", { namespace?: any }, 
string>; inspectNode: FunctionReference< "query", "internal", - { node?: string }, + { namespace?: any; node?: string }, null >; }; @@ -443,39 +555,63 @@ export declare const components: { clear: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; deleteIfExists: FunctionReference< "mutation", "internal", - { key: any }, + { key: any; namespace?: any }, any >; - delete_: FunctionReference<"mutation", "internal", { key: any }, null>; + delete_: FunctionReference< + "mutation", + "internal", + { key: any; namespace?: any }, + null + >; init: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; insert: FunctionReference< "mutation", "internal", - { key: any; summand?: number; value: any }, + { key: any; namespace?: any; summand?: number; value: any }, + null + >; + makeRootLazy: FunctionReference< + "mutation", + "internal", + { namespace?: any }, null >; - makeRootLazy: FunctionReference<"mutation", "internal", {}, null>; replace: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, null >; replaceOrInsert: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, any >; }; @@ -485,38 +621,37 @@ export declare const components: { aggregateBetween: FunctionReference< "query", "internal", - { k1?: any; k2?: any }, + { k1?: any; k2?: any; namespace?: any }, { count: number; sum: number } >; atNegativeOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; 
namespace?: any; offset: number }, { k: any; s: number; v: any } >; atOffset: FunctionReference< "query", "internal", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; - count: FunctionReference<"query", "internal", {}, any>; get: FunctionReference< "query", "internal", - { key: any }, + { key: any; namespace?: any }, null | { k: any; s: number; v: any } >; offset: FunctionReference< "query", "internal", - { k1?: any; key: any }, + { k1?: any; key: any; namespace?: any }, number >; offsetUntil: FunctionReference< "query", "internal", - { k2?: any; key: any }, + { k2?: any; key: any; namespace?: any }, number >; paginate: FunctionReference< @@ -527,6 +662,7 @@ export declare const components: { k1?: any; k2?: any; limit: number; + namespace?: any; order: "asc" | "desc"; }, { @@ -535,16 +671,26 @@ export declare const components: { page: Array<{ k: any; s: number; v: any }>; } >; - sum: FunctionReference<"query", "internal", {}, number>; - validate: FunctionReference<"query", "internal", {}, any>; + paginateNamespaces: FunctionReference< + "query", + "internal", + { cursor?: string; limit: number }, + { cursor: string; isDone: boolean; page: Array } + >; + validate: FunctionReference< + "query", + "internal", + { namespace?: any }, + any + >; }; inspect: { - display: FunctionReference<"query", "internal", {}, any>; - dump: FunctionReference<"query", "internal", {}, string>; + display: FunctionReference<"query", "internal", { namespace?: any }, any>; + dump: FunctionReference<"query", "internal", { namespace?: any }, string>; inspectNode: FunctionReference< "query", "internal", - { node?: string }, + { namespace?: any; node?: string }, null >; }; @@ -552,39 +698,63 @@ export declare const components: { clear: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; deleteIfExists: 
FunctionReference< "mutation", "internal", - { key: any }, + { key: any; namespace?: any }, any >; - delete_: FunctionReference<"mutation", "internal", { key: any }, null>; + delete_: FunctionReference< + "mutation", + "internal", + { key: any; namespace?: any }, + null + >; init: FunctionReference< "mutation", "internal", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; insert: FunctionReference< "mutation", "internal", - { key: any; summand?: number; value: any }, + { key: any; namespace?: any; summand?: number; value: any }, + null + >; + makeRootLazy: FunctionReference< + "mutation", + "internal", + { namespace?: any }, null >; - makeRootLazy: FunctionReference<"mutation", "internal", {}, null>; replace: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, null >; replaceOrInsert: FunctionReference< "mutation", "internal", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, any >; }; diff --git a/example/convex/aggregate.test.ts b/example/convex/aggregate.test.ts index daef77c..fdacf87 100644 --- a/example/convex/aggregate.test.ts +++ b/example/convex/aggregate.test.ts @@ -1,7 +1,7 @@ /// import { convexTest } from "convex-test"; -import { afterEach, beforeEach, describe, expect, test, vi } from "vitest" +import { afterEach, beforeEach, describe, expect, test, vi } from "vitest"; import schema from "./schema"; import componentSchema from "../../src/component/schema"; import migrationsSchema from "../node_modules/@convex-dev/migrations/src/component/schema"; @@ -9,17 +9,27 @@ import { api, components, internal } from "./_generated/api"; const modules = import.meta.glob("./**/*.ts"); const 
componentModules = import.meta.glob("../../src/component/**/*.ts"); -const migrationsModules = import.meta.glob("../node_modules/@convex-dev/migrations/src/component/**/*.ts"); +const migrationsModules = import.meta.glob( + "../node_modules/@convex-dev/migrations/src/component/**/*.ts" +); describe("leaderboard", () => { async function setupTest() { const t = convexTest(schema, modules); t.registerComponent("aggregateByScore", componentSchema, componentModules); - t.registerComponent("aggregateScoreByUser", componentSchema, componentModules); + t.registerComponent( + "aggregateScoreByUser", + componentSchema, + componentModules + ); t.registerComponent("migrations", migrationsSchema, migrationsModules); // Reduce maxNodeSize so we can test complex trees with fewer items. - await t.mutation(components.aggregateByScore.public.clear, { maxNodeSize: 4 }); - await t.mutation(components.aggregateScoreByUser.public.clear, { maxNodeSize: 4 }); + await t.mutation(components.aggregateByScore.public.clear, { + maxNodeSize: 4, + }); + await t.mutation(components.aggregateScoreByUser.public.clear, { + maxNodeSize: 4, + }); return t; } @@ -45,14 +55,17 @@ describe("leaderboard", () => { await t.mutation(api.leaderboard.addScore, { name: "Lee", score: 15 }); await t.mutation(api.leaderboard.addScore, { name: "Lee", score: 25 }); await t.mutation(api.leaderboard.addScore, { name: "Lee", score: 30 }); - const highScoreId = await t.mutation(api.leaderboard.addScore, { name: "Sarah", score: 35 }); + const highScoreId = await t.mutation(api.leaderboard.addScore, { + name: "Sarah", + score: 35, + }); await t.mutation(api.leaderboard.addScore, { name: "Sarah", score: 5 }); expect(await t.query(api.leaderboard.countScores)).toStrictEqual(7); expect(await t.query(api.leaderboard.sumNumbers)).toStrictEqual(140); await t.mutation(api.leaderboard.removeScore, { id: highScoreId }); expect(await t.query(api.leaderboard.countScores)).toStrictEqual(6); expect(await 
t.query(api.leaderboard.sumNumbers)).toStrictEqual(105); - }) + }); test("score ranks", async () => { await t.mutation(api.leaderboard.addScore, { name: "Sujay", score: 10 }); @@ -63,17 +76,39 @@ describe("leaderboard", () => { await t.mutation(api.leaderboard.addScore, { name: "Sarah", score: 35 }); await t.mutation(api.leaderboard.addScore, { name: "Sarah", score: 5 }); - expect(await t.query(api.leaderboard.scoreAtRank, { rank: 0 })).toMatchObject({ name: "Sarah", score: 35 }); - expect(await t.query(api.leaderboard.scoreAtRank, { rank: 1 })).toMatchObject({ name: "Lee", score: 30 }); - expect(await t.query(api.leaderboard.scoreAtRank, { rank: 5 })).toMatchObject({ name: "Sujay", score: 10 }); + expect( + await t.query(api.leaderboard.scoreAtRank, { rank: 0 }) + ).toMatchObject({ name: "Sarah", score: 35 }); + expect( + await t.query(api.leaderboard.scoreAtRank, { rank: 1 }) + ).toMatchObject({ name: "Lee", score: 30 }); + expect( + await t.query(api.leaderboard.scoreAtRank, { rank: 5 }) + ).toMatchObject({ name: "Sujay", score: 10 }); - expect(await t.query(api.leaderboard.rankOfScore, { score: 35 })).toStrictEqual(0); - expect(await t.query(api.leaderboard.rankOfScore, { score: 30 })).toStrictEqual(1); - expect(await t.query(api.leaderboard.rankOfScore, { score: 10 })).toStrictEqual(5); - expect(await t.query(api.leaderboard.rankOfScore, { score: 33 })).toStrictEqual(1); + expect( + await t.query(api.leaderboard.rankOfScore, { score: 35 }) + ).toStrictEqual(0); + expect( + await t.query(api.leaderboard.rankOfScore, { score: 30 }) + ).toStrictEqual(1); + expect( + await t.query(api.leaderboard.rankOfScore, { score: 10 }) + ).toStrictEqual(5); + expect( + await t.query(api.leaderboard.rankOfScore, { score: 33 }) + ).toStrictEqual(1); const scoresInOrder = await t.query(api.leaderboard.scoresInOrder); - expect(scoresInOrder).toEqual(["Sarah: 35", "Lee: 30", "Lee: 25", "Sujay: 20", "Lee: 15", "Sujay: 10", "Sarah: 5"]); + expect(scoresInOrder).toEqual([ + "Sarah: 
35", + "Lee: 30", + "Lee: 25", + "Sujay: 20", + "Lee: 15", + "Sujay: 10", + "Sarah: 5", + ]); }); test("backfill", async () => { @@ -98,11 +133,17 @@ describe("leaderboard", () => { await t.mutation(api.leaderboard.addScore, { name: "Lee", score: 15 }); await t.mutation(api.leaderboard.addScore, { name: "Lee", score: 25 }); - const highScore1 = await t.query(api.leaderboard.userHighScore, { name: "Sujay" }); + const highScore1 = await t.query(api.leaderboard.userHighScore, { + name: "Sujay", + }); expect(highScore1).toEqual(20); - const highScore2 = await t.query(api.leaderboard.userHighScore, { name: "Lee" }); + const highScore2 = await t.query(api.leaderboard.userHighScore, { + name: "Lee", + }); expect(highScore2).toEqual(25); - const averageScore = await t.query(api.leaderboard.userAverageScore, { name: "Sujay" }); + const averageScore = await t.query(api.leaderboard.userAverageScore, { + name: "Sujay", + }); expect(averageScore).toEqual(15); }); }); @@ -126,14 +167,27 @@ describe("photos", () => { }); test("triggers and pagination", async () => { + const album = "birthday party"; for (let i = 0; i < 30; i++) { - await t.mutation(api.photos.addPhoto, { url: `photo${i}` }); + await t.mutation(api.photos.addPhoto, { album, url: `photo${i}` }); } - const page0 = await t.query(api.photos.pageOfPhotos, { offset: 0, numItems: 10 }); + const page0 = await t.query(api.photos.pageOfPhotos, { + album, + offset: 0, + numItems: 10, + }); expect(page0).toEqual(Array.from({ length: 10 }, (_, i) => `photo${i}`)); - const emptyPage = await t.query(api.photos.pageOfPhotos, { offset: 0, numItems: 0 }); + const emptyPage = await t.query(api.photos.pageOfPhotos, { + album, + offset: 0, + numItems: 0, + }); expect(emptyPage).toEqual([]); - const lastPage = await t.query(api.photos.pageOfPhotos, { offset: 28, numItems: 10 }); + const lastPage = await t.query(api.photos.pageOfPhotos, { + album, + offset: 28, + numItems: 10, + }); expect(lastPage).toEqual(["photo28", "photo29"]); }); 
}); @@ -160,7 +214,9 @@ describe("shuffle", () => { await t.mutation(api.shuffle.addMusic, { title: "Song1" }); await t.mutation(api.shuffle.addMusic, { title: "Song2" }); await t.mutation(api.shuffle.addMusic, { title: "Song3" }); - const idToDelete = await t.mutation(api.shuffle.addMusic, { title: "Song4" }); + const idToDelete = await t.mutation(api.shuffle.addMusic, { + title: "Song4", + }); await t.mutation(api.shuffle.addMusic, { title: "Song5" }); await t.mutation(api.shuffle.addMusic, { title: "Song6" }); await t.mutation(api.shuffle.removeMusic, { id: idToDelete }); @@ -169,15 +225,31 @@ describe("shuffle", () => { expect(randomSong).toMatch(/Song[12356]/); // With same seed, pagination should return unique songs - const shufflePage0 = await t.query(api.shuffle.shufflePaginated, { offset: 0, numItems: 3, seed: "" }); + const shufflePage0 = await t.query(api.shuffle.shufflePaginated, { + offset: 0, + numItems: 3, + seed: "", + }); expect(shufflePage0).toEqual(["Song6", "Song1", "Song3"]); - const shufflePage1 = await t.query(api.shuffle.shufflePaginated, { offset: 3, numItems: 3, seed: "" }); + const shufflePage1 = await t.query(api.shuffle.shufflePaginated, { + offset: 3, + numItems: 3, + seed: "", + }); expect(shufflePage1).toEqual(["Song5", "Song2"]); - + // With different seed, we should get a different shuffle - const shufflePage0Seed1 = await t.query(api.shuffle.shufflePaginated, { offset: 0, numItems: 3, seed: "x" }); + const shufflePage0Seed1 = await t.query(api.shuffle.shufflePaginated, { + offset: 0, + numItems: 3, + seed: "x", + }); expect(shufflePage0Seed1).toEqual(["Song1", "Song6", "Song5"]); - const shufflePage1Seed1 = await t.query(api.shuffle.shufflePaginated, { offset: 3, numItems: 3, seed: "x" }); + const shufflePage1Seed1 = await t.query(api.shuffle.shufflePaginated, { + offset: 3, + numItems: 3, + seed: "x", + }); expect(shufflePage1Seed1).toEqual(["Song3", "Song2"]); }); }); @@ -206,7 +278,7 @@ describe("stats", () => { await 
t.mutation(api.stats.reportLatency, { latency: 25 }); await t.mutation(api.stats.reportLatency, { latency: 30 }); await t.mutation(api.stats.reportLatency, { latency: 35 }); - + const stats = await t.query(api.stats.getStats); expect(stats.max).toEqual(35); expect(stats.min).toEqual(10); diff --git a/example/convex/leaderboard.ts b/example/convex/leaderboard.ts index 87e5b31..6a89ec4 100644 --- a/example/convex/leaderboard.ts +++ b/example/convex/leaderboard.ts @@ -12,15 +12,22 @@ import { Migrations } from "@convex-dev/migrations"; export const migrations = new Migrations(components.migrations); export const run = migrations.runner(); -const aggregateByScore = new TableAggregate( - components.aggregateByScore, - { sortKey: (doc) => doc.score } -); -const aggregateScoreByUser = new TableAggregate< - [string, number], - DataModel, - "leaderboard" ->(components.aggregateScoreByUser, { +const aggregateByScore = new TableAggregate<{ + Namespace: undefined; + Key: number; + DataModel: DataModel; + TableName: "leaderboard"; +}>(components.aggregateByScore, { + namespace: (_doc) => undefined, + sortKey: (doc) => doc.score, +}); +const aggregateScoreByUser = new TableAggregate<{ + Key: [string, number]; + DataModel: DataModel; + TableName: "leaderboard"; + Namespace: undefined; +}>(components.aggregateScoreByUser, { + namespace: (_doc) => undefined, sortKey: (doc) => [doc.name, doc.score], sumValue: (doc) => doc.score, }); @@ -99,11 +106,10 @@ export const scoresInOrder = query({ handler: async (ctx) => { let count = 0; const lines = []; - for await (const { id, key } of aggregateByScore.iter( - ctx, - undefined, - "desc" - )) { + for await (const { id, key } of aggregateByScore.iter(ctx, { + bounds: undefined, + order: "desc", + })) { if (count >= 200) { lines.push("..."); break; @@ -134,12 +140,14 @@ export const userAverageScore = query({ }, handler: async (ctx, args) => { const count = await aggregateScoreByUser.count(ctx, { - prefix: [args.name], + bounds: { prefix: 
[args.name] }, }); if (!count) { throw new ConvexError("no scores for " + args.name); } - const sum = await aggregateScoreByUser.sum(ctx, { prefix: [args.name] }); + const sum = await aggregateScoreByUser.sum(ctx, { + bounds: { prefix: [args.name] }, + }); return sum / count; }, }); @@ -151,7 +159,7 @@ export const userHighScore = query({ returns: v.number(), handler: async (ctx, args) => { const item = await aggregateScoreByUser.max(ctx, { - prefix: [args.name], + bounds: { prefix: [args.name] }, }); if (!item) { throw new ConvexError("no scores for " + args.name); diff --git a/example/convex/photos.ts b/example/convex/photos.ts index cc01f72..168481a 100644 --- a/example/convex/photos.ts +++ b/example/convex/photos.ts @@ -22,10 +22,15 @@ import { } from "convex-helpers/server/customFunctions"; import { Triggers } from "convex-helpers/server/triggers"; -const photos = new TableAggregate( - components.photos, - { sortKey: (doc) => doc._creationTime } -); +const photos = new TableAggregate<{ + Namespace: string; + Key: number; + DataModel: DataModel; + TableName: "photos"; +}>(components.photos, { + namespace: (doc) => doc.album, + sortKey: (doc) => doc._creationTime, +}); const triggers = new Triggers(); @@ -43,17 +48,21 @@ export const init = internalMutation({ // rootLazy can be false because the table doesn't change much, and this // makes aggregates faster (this is entirely optional). // Also reducing node size uses less bandwidth, since nodes are smaller. 
- await photos.clear(ctx, 4, false); + await photos.clearAll(ctx, { + maxNodeSize: 4, + rootLazy: false, + }); }, }); export const addPhoto = mutation({ args: { + album: v.string(), url: v.string(), }, returns: v.id("photos"), handler: async (ctx, args) => { - return await ctx.db.insert("photos", { url: args.url }); + return await ctx.db.insert("photos", { album: args.album, url: args.url }); }, }); @@ -63,16 +72,19 @@ export const addPhoto = mutation({ */ export const pageOfPhotos = query({ args: { + album: v.string(), offset: v.number(), numItems: v.number(), }, returns: v.array(v.string()), - handler: async (ctx, { offset, numItems }) => { - const { key: firstPhotoCreationTime } = await photos.at(ctx, offset); + handler: async (ctx, { offset, numItems, album }) => { + const { key: firstPhotoCreationTime } = await photos.at(ctx, offset, { + namespace: album, + }); const photoDocs = await ctx.db .query("photos") - .withIndex("by_creation_time", (q) => - q.gte("_creationTime", firstPhotoCreationTime) + .withIndex("by_album_creation_time", (q) => + q.eq("album", album).gte("_creationTime", firstPhotoCreationTime) ) .take(numItems); return photoDocs.map((doc) => doc.url); diff --git a/example/convex/schema.ts b/example/convex/schema.ts index 8b8d8fd..e198b0f 100644 --- a/example/convex/schema.ts +++ b/example/convex/schema.ts @@ -10,6 +10,7 @@ export default defineSchema({ title: v.string(), }), photos: defineTable({ + album: v.string(), url: v.string(), - }), + }).index("by_album_creation_time", ["album"]), }); diff --git a/example/convex/shuffle.ts b/example/convex/shuffle.ts index bf616db..dddc5b6 100644 --- a/example/convex/shuffle.ts +++ b/example/convex/shuffle.ts @@ -3,14 +3,22 @@ * implemented with Convex. 
*/ -import { Randomize } from "@convex-dev/aggregate"; +import { TableAggregate } from "@convex-dev/aggregate"; import { mutation, query } from "./_generated/server"; import { components } from "./_generated/api"; import { DataModel } from "./_generated/dataModel"; import { ConvexError, v } from "convex/values"; import Rand from "rand-seed"; -const randomize = new Randomize(components.music); +const randomize = new TableAggregate<{ + DataModel: DataModel; + TableName: "music"; + Namespace: undefined; + Key: null; +}>(components.music, { + namespace: () => undefined, + sortKey: () => null, +}); export const addMusic = mutation({ args: { @@ -19,7 +27,8 @@ export const addMusic = mutation({ returns: v.id("music"), handler: async (ctx, args) => { const id = await ctx.db.insert("music", { title: args.title }); - await randomize.insert(ctx, id); + const doc = (await ctx.db.get(id))!; + await randomize.insert(ctx, doc); return id; }, }); @@ -29,8 +38,9 @@ export const removeMusic = mutation({ id: v.id("music"), }, handler: async (ctx, { id }) => { + const doc = (await ctx.db.get(id))!; await ctx.db.delete(id); - await randomize.delete(ctx, id); + await randomize.delete(ctx, doc); }, }); @@ -40,11 +50,11 @@ export const getRandomMusicTitle = query({ }, returns: v.string(), handler: async (ctx) => { - const id = await randomize.random(ctx); - if (!id) { + const randomMusic = await randomize.random(ctx); + if (!randomMusic) { throw new ConvexError("no music"); } - const doc = (await ctx.db.get(id))!; + const doc = (await ctx.db.get(randomMusic.id))!; return doc.title; }, }); @@ -90,10 +100,13 @@ export const shufflePaginated = query({ const indexes = allIndexes.slice(offset, offset + numItems); + const atIndexes = await Promise.all( + indexes.map((i) => randomize.at(ctx, i)) + ); + return await Promise.all( - indexes.map(async (i) => { - const id = await randomize.at(ctx, i); - const doc = (await ctx.db.get(id))!; + atIndexes.map(async (atIndex) => { + const doc = (await 
ctx.db.get(atIndex.id))!; return doc.title; }) ); diff --git a/example/convex/stats.ts b/example/convex/stats.ts index da64c0c..cd5756f 100644 --- a/example/convex/stats.ts +++ b/example/convex/stats.ts @@ -7,7 +7,11 @@ import { v } from "convex/values"; import { DirectAggregate } from "@convex-dev/aggregate"; import { components } from "./_generated/api"; -const stats = new DirectAggregate(components.stats); +const stats = new DirectAggregate<{ + Namespace: undefined; + Key: number; + Id: string; +}>(components.stats); export const reportLatency = mutation({ args: { @@ -15,7 +19,11 @@ export const reportLatency = mutation({ }, returns: v.null(), handler: async (ctx, { latency }) => { - await stats.insert(ctx, latency, new Date().toISOString(), latency); + await stats.insert(ctx, { + key: latency, + id: new Date().toISOString(), + sumValue: latency, + }); }, }); diff --git a/example/package-lock.json b/example/package-lock.json index 284d9ab..b6a68cd 100644 --- a/example/package-lock.json +++ b/example/package-lock.json @@ -19,7 +19,7 @@ "@eslint/js": "^9.9.1", "@typescript-eslint/eslint-plugin": "^8.4.0", "@typescript-eslint/parser": "^8.4.0", - "convex-test": "^0.0.33", + "convex-test": "^0.0.34", "eslint": "^9.9.1", "globals": "^15.9.0", "typescript": "^5.5.0" @@ -27,14 +27,14 @@ }, "..": { "name": "@convex-dev/aggregate", - "version": "0.1.16-alpha.0", + "version": "0.1.16", "license": "Apache-2.0", "devDependencies": { "@eslint/js": "^9.9.1", "@fast-check/vitest": "^0.1.3", "@types/node": "^18.17.0", "@vitest/coverage-v8": "^2.1.1", - "convex-test": "^0.0.33", + "convex-test": "^0.0.34", "eslint": "^9.9.1", "globals": "^15.9.0", "prettier": "3.2.5", @@ -1032,9 +1032,9 @@ } }, "node_modules/convex-test": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/convex-test/-/convex-test-0.0.33.tgz", - "integrity": "sha512-UPhuHnXW84wOvZjwdtwfXqd4vsNchmjEJ2giO0BDCR+wIATHFoH/pzkh822GhE8ugrKddkxSxQjnGSt6nTnt5w==", + "version": "0.0.34", + "resolved": 
"https://registry.npmjs.org/convex-test/-/convex-test-0.0.34.tgz", + "integrity": "sha512-srntn1drKy0Aa8zzatvJ10ZJFVpBYvLWQrhTibdfnslQqNdzQV5Ui4XKFtmrEmqw+DjmeJATgvZHbLzhP7Dh4w==", "dev": true, "peerDependencies": { "convex": "^1.16.4" @@ -1999,7 +1999,7 @@ "@fast-check/vitest": "^0.1.3", "@types/node": "^18.17.0", "@vitest/coverage-v8": "^2.1.1", - "convex-test": "^0.0.33", + "convex-test": "^0.0.34", "eslint": "^9.9.1", "globals": "^15.9.0", "prettier": "3.2.5", @@ -2537,9 +2537,9 @@ "requires": {} }, "convex-test": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/convex-test/-/convex-test-0.0.33.tgz", - "integrity": "sha512-UPhuHnXW84wOvZjwdtwfXqd4vsNchmjEJ2giO0BDCR+wIATHFoH/pzkh822GhE8ugrKddkxSxQjnGSt6nTnt5w==", + "version": "0.0.34", + "resolved": "https://registry.npmjs.org/convex-test/-/convex-test-0.0.34.tgz", + "integrity": "sha512-srntn1drKy0Aa8zzatvJ10ZJFVpBYvLWQrhTibdfnslQqNdzQV5Ui4XKFtmrEmqw+DjmeJATgvZHbLzhP7Dh4w==", "dev": true, "requires": {} }, diff --git a/example/package.json b/example/package.json index 14c0121..c72952d 100644 --- a/example/package.json +++ b/example/package.json @@ -19,7 +19,7 @@ "@eslint/js": "^9.9.1", "@typescript-eslint/eslint-plugin": "^8.4.0", "@typescript-eslint/parser": "^8.4.0", - "convex-test": "^0.0.33", + "convex-test": "^0.0.34", "eslint": "^9.9.1", "globals": "^15.9.0", "typescript": "^5.5.0" diff --git a/package-lock.json b/package-lock.json index d9da060..fdea445 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,7 +13,7 @@ "@fast-check/vitest": "^0.1.3", "@types/node": "^18.17.0", "@vitest/coverage-v8": "^2.1.1", - "convex-test": "^0.0.33", + "convex-test": "^0.0.34", "eslint": "^9.9.1", "globals": "^15.9.0", "prettier": "3.2.5", @@ -1630,9 +1630,9 @@ } }, "node_modules/convex-test": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/convex-test/-/convex-test-0.0.33.tgz", - "integrity": 
"sha512-UPhuHnXW84wOvZjwdtwfXqd4vsNchmjEJ2giO0BDCR+wIATHFoH/pzkh822GhE8ugrKddkxSxQjnGSt6nTnt5w==", + "version": "0.0.34", + "resolved": "https://registry.npmjs.org/convex-test/-/convex-test-0.0.34.tgz", + "integrity": "sha512-srntn1drKy0Aa8zzatvJ10ZJFVpBYvLWQrhTibdfnslQqNdzQV5Ui4XKFtmrEmqw+DjmeJATgvZHbLzhP7Dh4w==", "dev": true, "peerDependencies": { "convex": "^1.16.4" @@ -4817,9 +4817,9 @@ } }, "convex-test": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/convex-test/-/convex-test-0.0.33.tgz", - "integrity": "sha512-UPhuHnXW84wOvZjwdtwfXqd4vsNchmjEJ2giO0BDCR+wIATHFoH/pzkh822GhE8ugrKddkxSxQjnGSt6nTnt5w==", + "version": "0.0.34", + "resolved": "https://registry.npmjs.org/convex-test/-/convex-test-0.0.34.tgz", + "integrity": "sha512-srntn1drKy0Aa8zzatvJ10ZJFVpBYvLWQrhTibdfnslQqNdzQV5Ui4XKFtmrEmqw+DjmeJATgvZHbLzhP7Dh4w==", "dev": true, "requires": {} }, diff --git a/package.json b/package.json index c7367dc..95220cf 100644 --- a/package.json +++ b/package.json @@ -58,7 +58,7 @@ "@fast-check/vitest": "^0.1.3", "@types/node": "^18.17.0", "@vitest/coverage-v8": "^2.1.1", - "convex-test": "^0.0.33", + "convex-test": "^0.0.34", "eslint": "^9.9.1", "globals": "^15.9.0", "prettier": "3.2.5", diff --git a/src/client/index.ts b/src/client/index.ts index 7db9bb9..3c176e0 100644 --- a/src/client/index.ts +++ b/src/client/index.ts @@ -17,7 +17,7 @@ import { Bounds, boundsToPositions, } from "./positions.js"; -import { GenericId } from "convex/values"; +import { GenericId, Value as ConvexValue } from "convex/values"; export type UsedAPI = UseApi; @@ -52,7 +52,11 @@ export type { Key, Bound }; * Once values have been added to the data structure, you can query for the * count and sum of items between a range of keys. */ -export class Aggregate { +export class Aggregate< + K extends Key, + ID extends string, + Namespace extends ConvexValue | undefined = undefined, +> { constructor(protected component: UsedAPI) {} /// Aggregate queries. 
@@ -60,21 +64,30 @@ export class Aggregate { /** * Counts items between the given bounds. */ - async count(ctx: RunQueryCtx, bounds?: Bounds): Promise { + async count( + ctx: RunQueryCtx, + ...opts: NamespacedOpts<{ bounds: Bounds }, Namespace> + ): Promise { const { count } = await ctx.runQuery( this.component.btree.aggregateBetween, - boundsToPositions(bounds) + { + ...boundsToPositions(opts[0]?.bounds), + namespace: namespaceFromOpts(opts), + } ); return count; } /** * Adds up the sumValue of items between the given bounds. */ - async sum(ctx: RunQueryCtx, bounds?: Bounds): Promise { - const { sum } = await ctx.runQuery( - this.component.btree.aggregateBetween, - boundsToPositions(bounds) - ); + async sum( + ctx: RunQueryCtx, + ...opts: NamespacedOpts<{ bounds: Bounds }, Namespace> + ): Promise { + const { sum } = await ctx.runQuery(this.component.btree.aggregateBetween, { + ...boundsToPositions(opts[0]?.bounds), + namespace: namespaceFromOpts(opts), + }); return sum; } /** @@ -88,18 +101,20 @@ export class Aggregate { async at( ctx: RunQueryCtx, offset: number, - bounds?: Bounds + ...opts: NamespacedOpts<{ bounds?: Bounds }, Namespace> ): Promise> { if (offset < 0) { const item = await ctx.runQuery(this.component.btree.atNegativeOffset, { offset: -offset - 1, - ...boundsToPositions(bounds), + namespace: namespaceFromOpts(opts), + ...boundsToPositions(opts[0]?.bounds), }); return btreeItemToAggregateItem(item); } const item = await ctx.runQuery(this.component.btree.atOffset, { offset, - ...boundsToPositions(bounds), + namespace: namespaceFromOpts(opts), + ...boundsToPositions(opts[0]?.bounds), }); return btreeItemToAggregateItem(item); } @@ -113,18 +128,27 @@ export class Aggregate { async indexOf( ctx: RunQueryCtx, key: K, - opts?: { id?: ID; bounds?: Bounds; order?: "asc" | "desc" } + ...opts: NamespacedOpts< + { id?: ID; bounds?: Bounds; order?: "asc" | "desc" }, + Namespace + > ): Promise { - const { k1, k2 } = boundsToPositions(opts?.bounds); - if 
(opts?.order === "desc") { + const { k1, k2 } = boundsToPositions(opts[0]?.bounds); + if (opts[0]?.order === "desc") { return await ctx.runQuery(this.component.btree.offsetUntil, { - key: boundToPosition("upper", { key, id: opts?.id, inclusive: true }), + key: boundToPosition("upper", { + key, + id: opts[0]?.id, + inclusive: true, + }), k2, + namespace: namespaceFromOpts(opts), }); } return await ctx.runQuery(this.component.btree.offset, { - key: boundToPosition("lower", { key, id: opts?.id, inclusive: true }), + key: boundToPosition("lower", { key, id: opts[0]?.id, inclusive: true }), k1, + namespace: namespaceFromOpts(opts), }); } /** @@ -133,10 +157,11 @@ export class Aggregate { async offsetOf( ctx: RunQueryCtx, key: K, + namespace: Namespace, id?: ID, bounds?: Bounds ): Promise { - return this.indexOf(ctx, key, { id, bounds }); + return this.indexOf(ctx, key, { id, bounds, order: "asc", namespace }); } /** * @deprecated Use `indexOf` instead. @@ -144,10 +169,11 @@ export class Aggregate { async offsetUntil( ctx: RunQueryCtx, key: K, + namespace: Namespace, id?: ID, bounds?: Bounds ): Promise { - return this.indexOf(ctx, key, { id, bounds, order: "desc" }); + return this.indexOf(ctx, key, { id, bounds, order: "desc", namespace }); } /** @@ -155,9 +181,14 @@ export class Aggregate { */ async min( ctx: RunQueryCtx, - bounds?: Bounds + ...opts: NamespacedOpts<{ bounds: Bounds }, Namespace> ): Promise | null> { - const { page } = await this.paginate(ctx, bounds, undefined, "asc", 1); + const { page } = await this.paginate(ctx, { + namespace: namespaceFromOpts(opts), + bounds: opts[0]?.bounds, + order: "asc", + pageSize: 1, + }); return page[0] ?? 
null; } /** @@ -165,9 +196,14 @@ export class Aggregate { */ async max( ctx: RunQueryCtx, - bounds?: Bounds + ...opts: NamespacedOpts<{ bounds: Bounds }, Namespace> ): Promise | null> { - const { page } = await this.paginate(ctx, bounds, undefined, "desc", 1); + const { page } = await this.paginate(ctx, { + namespace: namespaceFromOpts(opts), + bounds: opts[0]?.bounds, + order: "desc", + pageSize: 1, + }); return page[0] ?? null; } /** @@ -175,14 +211,14 @@ export class Aggregate { */ async random( ctx: RunQueryCtx, - bounds?: Bounds + ...opts: NamespacedOpts<{ bounds: Bounds }, Namespace> ): Promise | null> { - const count = await this.count(ctx, bounds); + const count = await this.count(ctx, ...opts); if (count === 0) { return null; } const index = Math.floor(Math.random() * count); - return await this.at(ctx, index, bounds); + return await this.at(ctx, index, ...opts); } /** * Get a page of items between the given bounds, with a cursor to paginate. @@ -190,18 +226,26 @@ export class Aggregate { */ async paginate( ctx: RunQueryCtx, - bounds?: Bounds, - cursor?: string, - order: "asc" | "desc" = "asc", - pageSize: number = 100 + ...opts: NamespacedOpts< + { + bounds?: Bounds; + cursor?: string; + order?: "asc" | "desc"; + pageSize?: number; + }, + Namespace + > ): Promise<{ page: Item[]; cursor: string; isDone: boolean }> { + const order = opts[0]?.order ?? "asc"; + const pageSize = opts[0]?.pageSize ?? 
100; const { page, cursor: newCursor, isDone, } = await ctx.runQuery(this.component.btree.paginate, { - ...boundsToPositions(bounds), - cursor, + namespace: namespaceFromOpts(opts), + ...boundsToPositions(opts[0]?.bounds), + cursor: opts[0]?.cursor, order, limit: pageSize, }); @@ -221,10 +265,15 @@ export class Aggregate { */ async *iter( ctx: RunQueryCtx, - bounds?: Bounds, - order: "asc" | "desc" = "asc", - pageSize: number = 100 + ...opts: NamespacedOpts< + { bounds?: Bounds; order?: "asc" | "desc"; pageSize?: number }, + Namespace + > ): AsyncGenerator, void, undefined> { + const order = opts[0]?.order ?? "asc"; + const pageSize = opts[0]?.pageSize ?? 100; + const bounds = opts[0]?.bounds; + const namespace = namespaceFromOpts(opts); let isDone = false; let cursor: string | undefined = undefined; while (!isDone) { @@ -232,7 +281,13 @@ export class Aggregate { page, cursor: newCursor, isDone: newIsDone, - } = await this.paginate(ctx, bounds, cursor, order, pageSize); + } = await this.paginate(ctx, { + namespace, + bounds, + cursor, + order, + pageSize, + }); for (const item of page) { yield item; } @@ -244,6 +299,7 @@ export class Aggregate { /** Write operations. See {@link DirectAggregate} for docstrings. 
*/ async _insert( ctx: RunMutationCtx, + namespace: Namespace, key: K, id: ID, summand?: number @@ -252,16 +308,25 @@ export class Aggregate { key: keyToPosition(key, id), summand, value: id, + namespace, }); } - async _delete(ctx: RunMutationCtx, key: K, id: ID): Promise { + async _delete( + ctx: RunMutationCtx, + namespace: Namespace, + key: K, + id: ID + ): Promise { await ctx.runMutation(this.component.public.delete_, { key: keyToPosition(key, id), + namespace, }); } async _replace( ctx: RunMutationCtx, + currentNamespace: Namespace, currentKey: K, + newNamespace: Namespace, newKey: K, id: ID, summand?: number @@ -271,24 +336,43 @@ export class Aggregate { newKey: keyToPosition(newKey, id), summand, value: id, + namespace: currentNamespace, + newNamespace, }); } async _insertIfDoesNotExist( ctx: RunMutationCtx, + namespace: Namespace, key: K, id: ID, summand?: number ): Promise { - await this._replaceOrInsert(ctx, key, key, id, summand); + await this._replaceOrInsert( + ctx, + namespace, + key, + namespace, + key, + id, + summand + ); } - async _deleteIfExists(ctx: RunMutationCtx, key: K, id: ID): Promise { + async _deleteIfExists( + ctx: RunMutationCtx, + namespace: Namespace, + key: K, + id: ID + ): Promise { await ctx.runMutation(this.component.public.deleteIfExists, { key: keyToPosition(key, id), + namespace, }); } async _replaceOrInsert( ctx: RunMutationCtx, + currentNamespace: Namespace, currentKey: K, + newNamespace: Namespace, newKey: K, id: ID, summand?: number @@ -298,6 +382,8 @@ export class Aggregate { newKey: keyToPosition(newKey, id), summand, value: id, + namespace: currentNamespace, + newNamespace, }); } @@ -317,12 +403,15 @@ export class Aggregate { */ async clear( ctx: RunMutationCtx, - maxNodeSize?: number, - rootLazy?: boolean + ...opts: NamespacedOpts< + { maxNodeSize?: number; rootLazy?: boolean }, + Namespace + > ): Promise { await ctx.runMutation(this.component.public.clear, { - maxNodeSize, - rootLazy, + maxNodeSize: 
opts[0]?.maxNodeSize, + rootLazy: opts[0]?.rootLazy, + namespace: namespaceFromOpts(opts), }); } /** @@ -336,11 +425,84 @@ export class Aggregate { * same shard of the tree. The number of shards is determined by maxNodeSize, * so larger maxNodeSize can also help. */ - async makeRootLazy(ctx: RunMutationCtx): Promise { - await ctx.runMutation(this.component.public.makeRootLazy); + async makeRootLazy(ctx: RunMutationCtx, namespace: Namespace): Promise { + await ctx.runMutation(this.component.public.makeRootLazy, { namespace }); + } + + async paginateNamespaces( + ctx: RunQueryCtx, + cursor?: string, + pageSize: number = 100 + ): Promise<{ page: Namespace[]; cursor: string; isDone: boolean }> { + const { + page, + cursor: newCursor, + isDone, + } = await ctx.runQuery(this.component.btree.paginateNamespaces, { + cursor, + limit: pageSize, + }); + return { + page: page as Namespace[], + cursor: newCursor, + isDone, + }; + } + + async *iterNamespaces( + ctx: RunQueryCtx, + pageSize: number = 100 + ): AsyncGenerator { + let isDone = false; + let cursor: string | undefined = undefined; + while (!isDone) { + const { + page, + cursor: newCursor, + isDone: newIsDone, + } = await this.paginateNamespaces(ctx, cursor, pageSize); + for (const item of page) { + yield item; + } + isDone = newIsDone; + cursor = newCursor; + } + } + + async clearAll( + ctx: RunMutationCtx & RunQueryCtx, + opts?: { maxNodeSize?: number; rootLazy?: boolean } + ): Promise { + for await (const namespace of this.iterNamespaces(ctx)) { + await this.clear(ctx, { ...opts, namespace }); + } + // In case there are no namespaces, make sure we create at least one tree, + // at namespace=undefined. This is where the default settings are stored. 
+ await this.clear(ctx, { ...opts, namespace: undefined as Namespace }); + } + + async makeAllRootsLazy(ctx: RunMutationCtx & RunQueryCtx): Promise { + for await (const namespace of this.iterNamespaces(ctx)) { + await this.makeRootLazy(ctx, namespace); + } } } +export type DirectAggregateType< + K extends Key, + ID extends string, + Namespace extends ConvexValue | undefined, +> = { + Key: K; + Id: ID; + Namespace: Namespace; +}; +type AnyDirectAggregateType = DirectAggregateType< + Key, + string, + ConvexValue | undefined +>; + /** * A DirectAggregate is an Aggregate where you can insert, delete, and replace * items directly, and keys and IDs can be customized. @@ -349,9 +511,8 @@ export class Aggregate { * computes keys and sumValues from the table's documents. */ export class DirectAggregate< - K extends Key, - ID extends string, -> extends Aggregate { + T extends AnyDirectAggregateType, +> extends Aggregate { /** * Insert a new key into the data structure. * The id should be unique. @@ -362,18 +523,28 @@ export class DirectAggregate< */ async insert( ctx: RunMutationCtx, - key: K, - id: ID, - sumValue?: number + args: NamespacedArgs< + { key: T["Key"]; id: T["Id"]; sumValue?: number }, + T["Namespace"] + > ): Promise { - await this._insert(ctx, key, id, sumValue); + await this._insert( + ctx, + namespaceFromArg(args), + args.key, + args.id, + args.sumValue + ); } /** * Delete the key with the given ID from the data structure. * Throws if the given key and ID do not exist. */ - async delete(ctx: RunMutationCtx, key: K, id: ID): Promise { - await this._delete(ctx, key, id); + async delete( + ctx: RunMutationCtx, + args: NamespacedArgs<{ key: T["Key"]; id: T["Id"] }, T["Namespace"]> + ): Promise { + await this._delete(ctx, namespaceFromArg(args), args.key, args.id); } /** * Update an existing item in the data structure. 
@@ -382,12 +553,21 @@ export class DirectAggregate< */ async replace( ctx: RunMutationCtx, - currentKey: K, - newKey: K, - id: ID, - sumValue?: number + currentItem: NamespacedArgs<{ key: T["Key"]; id: T["Id"] }, T["Namespace"]>, + newItem: NamespacedArgs< + { key: T["Key"]; sumValue?: number }, + T["Namespace"] + > ): Promise { - await this._replace(ctx, currentKey, newKey, id, sumValue); + await this._replace( + ctx, + namespaceFromArg(currentItem), + currentItem.key, + namespaceFromArg(newItem), + newItem.key, + currentItem.id, + newItem.sumValue + ); } /** * Equivalents to `insert`, `delete`, and `replace` where the item may or may not exist. @@ -399,36 +579,86 @@ export class DirectAggregate< */ async insertIfDoesNotExist( ctx: RunMutationCtx, - key: K, - id: ID, - sumValue?: number + args: NamespacedArgs< + { key: T["Key"]; id: T["Id"]; sumValue?: number }, + T["Namespace"] + > ): Promise { - await this._insertIfDoesNotExist(ctx, key, id, sumValue); + await this._insertIfDoesNotExist( + ctx, + namespaceFromArg(args), + args.key, + args.id, + args.sumValue + ); } - async deleteIfExists(ctx: RunMutationCtx, key: K, id: ID): Promise { - await this._deleteIfExists(ctx, key, id); + async deleteIfExists( + ctx: RunMutationCtx, + args: NamespacedArgs<{ key: T["Key"]; id: T["Id"] }, T["Namespace"]> + ): Promise { + await this._deleteIfExists(ctx, namespaceFromArg(args), args.key, args.id); } async replaceOrInsert( ctx: RunMutationCtx, - currentKey: K, - newKey: K, - id: ID, - sumValue?: number + currentItem: NamespacedArgs<{ key: T["Key"]; id: T["Id"] }, T["Namespace"]>, + newItem: NamespacedArgs< + { key: T["Key"]; sumValue?: number }, + T["Namespace"] + > ): Promise { - await this._replaceOrInsert(ctx, currentKey, newKey, id, sumValue); + await this._replaceOrInsert( + ctx, + namespaceFromArg(currentItem), + currentItem.key, + namespaceFromArg(newItem), + newItem.key, + currentItem.id, + newItem.sumValue + ); } } -export class TableAggregate< +export type 
TableAggregateType< K extends Key, DataModel extends GenericDataModel, TableName extends TableNamesInDataModel, -> extends Aggregate> { + Namespace extends ConvexValue | undefined, +> = { + Key: K; + DataModel: DataModel; + TableName: TableName; + Namespace: Namespace; +}; +type AnyTableAggregateType = TableAggregateType< + Key, + GenericDataModel, + TableNamesInDataModel, + ConvexValue | undefined +>; +type TableAggregateDocument = DocumentByName< + T["DataModel"], + T["TableName"] +>; +type TableAggregateId = GenericId< + T["TableName"] +>; +type TableAggregateTrigger = Trigger< + Ctx, + T["DataModel"], + T["TableName"] +>; + +export class TableAggregate extends Aggregate< + T["Key"], + GenericId, + T["Namespace"] +> { constructor( component: UsedAPI, private options: { - sortKey: (d: DocumentByName) => K; - sumValue?: (d: DocumentByName) => number; + namespace: (d: TableAggregateDocument) => T["Namespace"]; + sortKey: (d: TableAggregateDocument) => T["Key"]; + sumValue?: (d: TableAggregateDocument) => number; } ) { super(component); @@ -436,69 +666,77 @@ export class TableAggregate< async insert( ctx: RunMutationCtx, - doc: DocumentByName + doc: TableAggregateDocument ): Promise { await this._insert( ctx, + this.options.namespace(doc), this.options.sortKey(doc), - doc._id as GenericId, + doc._id as TableAggregateId, this.options.sumValue?.(doc) ); } async delete( ctx: RunMutationCtx, - doc: DocumentByName + doc: TableAggregateDocument ): Promise { await this._delete( ctx, + this.options.namespace(doc), this.options.sortKey(doc), - doc._id as GenericId + doc._id as TableAggregateId ); } async replace( ctx: RunMutationCtx, - oldDoc: DocumentByName, - newDoc: DocumentByName + oldDoc: TableAggregateDocument, + newDoc: TableAggregateDocument ): Promise { await this._replace( ctx, + this.options.namespace(oldDoc), this.options.sortKey(oldDoc), + this.options.namespace(newDoc), this.options.sortKey(newDoc), - newDoc._id as GenericId, + newDoc._id as TableAggregateId, 
this.options.sumValue?.(newDoc) ); } async insertIfDoesNotExist( ctx: RunMutationCtx, - doc: DocumentByName + doc: TableAggregateDocument ): Promise { await this._insertIfDoesNotExist( ctx, + this.options.namespace(doc), this.options.sortKey(doc), - doc._id as GenericId, + doc._id as TableAggregateId, this.options.sumValue?.(doc) ); } async deleteIfExists( ctx: RunMutationCtx, - doc: DocumentByName + doc: TableAggregateDocument ): Promise { await this._deleteIfExists( ctx, + this.options.namespace(doc), this.options.sortKey(doc), - doc._id as GenericId + doc._id as TableAggregateId ); } async replaceOrInsert( ctx: RunMutationCtx, - oldDoc: DocumentByName, - newDoc: DocumentByName + oldDoc: TableAggregateDocument, + newDoc: TableAggregateDocument ): Promise { await this._replaceOrInsert( ctx, + this.options.namespace(oldDoc), this.options.sortKey(oldDoc), + this.options.namespace(newDoc), this.options.sortKey(newDoc), - newDoc._id as GenericId, + newDoc._id as TableAggregateId, this.options.sumValue?.(newDoc) ); } @@ -512,18 +750,21 @@ export class TableAggregate< */ async indexOfDoc( ctx: RunQueryCtx, - doc: DocumentByName, + doc: TableAggregateDocument, opts?: { - id?: GenericId; - bounds?: Bounds>; + id?: TableAggregateId; + bounds?: Bounds>; order?: "asc" | "desc"; } ): Promise { const key = this.options.sortKey(doc); - return this.indexOf(ctx, key, opts); + return this.indexOf(ctx, key, { + namespace: this.options.namespace(doc), + ...opts, + }); } - trigger(): Trigger { + trigger(): TableAggregateTrigger { return async (ctx, change) => { if (change.operation === "insert") { await this.insert(ctx, change.newDoc); @@ -535,10 +776,9 @@ export class TableAggregate< }; } - idempotentTrigger(): Trigger< + idempotentTrigger(): TableAggregateTrigger< Ctx, - DataModel, - TableName + T > { return async (ctx, change) => { if (change.operation === "insert") { @@ -581,68 +821,6 @@ export type Change< } ); -/** - * Simplified TableAggregate API that doesn't have keys or 
sumValue, so it's - * simpler to use for counting all items or getting a random item. - * - * See docstrings on Aggregate for more details. - */ -export class Randomize< - DataModel extends GenericDataModel, - TableName extends TableNamesInDataModel, -> { - private aggregate: TableAggregate; - constructor(component: UsedAPI) { - this.aggregate = new TableAggregate(component, { sortKey: (_doc) => null }); - } - async count(ctx: RunQueryCtx): Promise { - return await this.aggregate.count(ctx); - } - async at(ctx: RunQueryCtx, offset: number): Promise> { - const item = await this.aggregate.at(ctx, offset); - return item.id; - } - async random(ctx: RunQueryCtx): Promise | null> { - const item = await this.aggregate.random(ctx); - return item ? item.id : null; - } - trigger(): Trigger { - return this.aggregate.trigger(); - } - idempotentTrigger(): Trigger< - Ctx, - DataModel, - TableName - > { - return this.aggregate.idempotentTrigger(); - } - async insert(ctx: RunMutationCtx, id: GenericId): Promise { - await this.aggregate.insert(ctx, { _id: id }); - } - async delete(ctx: RunMutationCtx, id: GenericId): Promise { - await this.aggregate.delete(ctx, { _id: id }); - } - async insertIfDoesNotExist( - ctx: RunMutationCtx, - id: GenericId - ): Promise { - await this.aggregate.insertIfDoesNotExist(ctx, { _id: id }); - } - async deleteIfExists( - ctx: RunMutationCtx, - id: GenericId - ): Promise { - await this.aggregate.deleteIfExists(ctx, { _id: id }); - } - async clear( - ctx: RunMutationCtx, - maxNodeSize?: number, - rootLazy?: boolean - ): Promise { - await this.aggregate.clear(ctx, maxNodeSize, rootLazy); - } -} - export function btreeItemToAggregateItem({ k, s, @@ -657,3 +835,29 @@ export function btreeItemToAggregateItem({ sumValue: s, }; } + +export type NamespacedArgs = + | (Args & { namespace: Namespace }) + | (Namespace extends undefined ? Args : never); +export type NamespacedOpts = + | [{ namespace: Namespace } & Opts] + | (Namespace extends undefined ? [Opts?] 
: never); + +function namespaceFromArg( + args: NamespacedArgs +): Namespace { + if ("namespace" in args) { + return args["namespace"]; + } + return undefined as Namespace; +} +function namespaceFromOpts( + opts: NamespacedOpts +): Namespace { + if (opts.length === 0) { + // Only possible if Namespace extends undefined, so undefined is the only valid namespace. + return undefined as Namespace; + } + const [{ namespace }] = opts as [{ namespace: Namespace }]; + return namespace; +} diff --git a/src/component/_generated/api.d.ts b/src/component/_generated/api.d.ts index b8111fc..3e55ed2 100644 --- a/src/component/_generated/api.d.ts +++ b/src/component/_generated/api.d.ts @@ -37,38 +37,37 @@ export type Mounts = { aggregateBetween: FunctionReference< "query", "public", - { k1?: any; k2?: any }, + { k1?: any; k2?: any; namespace?: any }, { count: number; sum: number } >; atNegativeOffset: FunctionReference< "query", "public", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; atOffset: FunctionReference< "query", "public", - { k1?: any; k2?: any; offset: number }, + { k1?: any; k2?: any; namespace?: any; offset: number }, { k: any; s: number; v: any } >; - count: FunctionReference<"query", "public", {}, any>; get: FunctionReference< "query", "public", - { key: any }, + { key: any; namespace?: any }, null | { k: any; s: number; v: any } >; offset: FunctionReference< "query", "public", - { k1?: any; key: any }, + { k1?: any; key: any; namespace?: any }, number >; offsetUntil: FunctionReference< "query", "public", - { k2?: any; key: any }, + { k2?: any; key: any; namespace?: any }, number >; paginate: FunctionReference< @@ -79,6 +78,7 @@ export type Mounts = { k1?: any; k2?: any; limit: number; + namespace?: any; order: "asc" | "desc"; }, { @@ -87,46 +87,85 @@ export type Mounts = { page: Array<{ k: any; s: number; v: any }>; } >; - sum: FunctionReference<"query", "public", {}, number>; - 
validate: FunctionReference<"query", "public", {}, any>; + paginateNamespaces: FunctionReference< + "query", + "public", + { cursor?: string; limit: number }, + { cursor: string; isDone: boolean; page: Array } + >; + validate: FunctionReference<"query", "public", { namespace?: any }, any>; }; inspect: { - display: FunctionReference<"query", "public", {}, any>; - dump: FunctionReference<"query", "public", {}, string>; - inspectNode: FunctionReference<"query", "public", { node?: string }, null>; + display: FunctionReference<"query", "public", { namespace?: any }, any>; + dump: FunctionReference<"query", "public", { namespace?: any }, string>; + inspectNode: FunctionReference< + "query", + "public", + { namespace?: any; node?: string }, + null + >; }; public: { clear: FunctionReference< "mutation", "public", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, + null + >; + deleteIfExists: FunctionReference< + "mutation", + "public", + { key: any; namespace?: any }, + any + >; + delete_: FunctionReference< + "mutation", + "public", + { key: any; namespace?: any }, null >; - deleteIfExists: FunctionReference<"mutation", "public", { key: any }, any>; - delete_: FunctionReference<"mutation", "public", { key: any }, null>; init: FunctionReference< "mutation", "public", - { maxNodeSize?: number; rootLazy?: boolean }, + { maxNodeSize?: number; namespace?: any; rootLazy?: boolean }, null >; insert: FunctionReference< "mutation", "public", - { key: any; summand?: number; value: any }, + { key: any; namespace?: any; summand?: number; value: any }, + null + >; + makeRootLazy: FunctionReference< + "mutation", + "public", + { namespace?: any }, null >; - makeRootLazy: FunctionReference<"mutation", "public", {}, null>; replace: FunctionReference< "mutation", "public", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: 
number; + value: any; + }, null >; replaceOrInsert: FunctionReference< "mutation", "public", - { currentKey: any; newKey: any; summand?: number; value: any }, + { + currentKey: any; + namespace?: any; + newKey: any; + newNamespace?: any; + summand?: number; + value: any; + }, any >; }; diff --git a/src/component/btree.test.ts b/src/component/btree.test.ts index 84562a0..783cc16 100644 --- a/src/component/btree.test.ts +++ b/src/component/btree.test.ts @@ -10,7 +10,6 @@ import { getHandler, insertHandler, offsetHandler, - sumHandler, validateTree, getOrCreateTree, Value, @@ -26,11 +25,11 @@ describe("btree", () => { test("insert", async () => { const t = convexTest(schema, modules); await t.run(async (ctx) => { - await getOrCreateTree(ctx.db, 4, false); + await getOrCreateTree(ctx.db, undefined, 4, false); // Insert lots of keys. At each stage, the tree is valid. async function insert(key: number, value: string) { await insertHandler(ctx, { key, value }); - await validateTree(ctx); + await validateTree(ctx, {}); const get = await getHandler(ctx, { key }); expect(get).toEqual({ k: key, @@ -56,10 +55,10 @@ describe("btree", () => { test("delete", async () => { const t = convexTest(schema, modules); await t.run(async (ctx) => { - await getOrCreateTree(ctx.db, 4, false); + await getOrCreateTree(ctx.db, undefined, 4, false); async function insert(key: number, value: string) { await insertHandler(ctx, { key, value }); - await validateTree(ctx); + await validateTree(ctx, {}); const get = await getHandler(ctx, { key }); expect(get).toEqual({ k: key, @@ -70,7 +69,7 @@ describe("btree", () => { // Delete keys. At each stage, the tree is valid. 
async function del(key: number) { await deleteHandler(ctx, { key }); - await validateTree(ctx); + await validateTree(ctx, {}); const get = await getHandler(ctx, { key }); expect(get).toBeNull(); } @@ -101,10 +100,10 @@ describe("btree", () => { test("atOffset and offsetOf", async () => { const t = convexTest(schema, modules); await t.run(async (ctx) => { - await getOrCreateTree(ctx.db, 4, false); + await getOrCreateTree(ctx.db, undefined, 4, false); async function insert(key: number, value: string) { await insertHandler(ctx, { key, value }); - await validateTree(ctx); + await validateTree(ctx, {}); const rank = await offsetHandler(ctx, { key }); expect(rank).not.toBeNull(); const atIndex = await atOffsetHandler(ctx, { @@ -143,10 +142,10 @@ describe("btree", () => { test("countBetween", async () => { const t = convexTest(schema, modules); await t.run(async (ctx) => { - await getOrCreateTree(ctx.db, 4, false); + await getOrCreateTree(ctx.db, undefined, 4, false); async function insert(key: number, value: string) { await insertHandler(ctx, { key, value }); - await validateTree(ctx); + await validateTree(ctx, {}); } async function countBetween( k1: number | undefined, @@ -180,21 +179,21 @@ describe("btree", () => { test("sums", async () => { const t = convexTest(schema, modules); await t.run(async (ctx) => { - await getOrCreateTree(ctx.db, 4, false); + await getOrCreateTree(ctx.db, undefined, 4, false); async function insert(key: number, value: string, summand: number) { - const sumBefore = await sumHandler(ctx); + const { sum: sumBefore } = await aggregateBetweenHandler(ctx, {}); await insertHandler(ctx, { key, value, summand }); - await validateTree(ctx); - const sumAfter = await sumHandler(ctx); + await validateTree(ctx, {}); + const { sum: sumAfter } = await aggregateBetweenHandler(ctx, {}); expect(sumAfter).toEqual(sumBefore + summand); } async function del(key: number) { - const sumBefore = await sumHandler(ctx); + const { sum: sumBefore } = await 
aggregateBetweenHandler(ctx, {}); const itemBefore = await getHandler(ctx, { key }); expect(itemBefore).not.toBeNull(); await deleteHandler(ctx, { key }); - await validateTree(ctx); - const sumAfter = await sumHandler(ctx); + await validateTree(ctx, {}); + const { sum: sumAfter } = await aggregateBetweenHandler(ctx, {}); expect(sumAfter).toEqual(sumBefore - itemBefore!.s); } await insert(1, "a", 1); @@ -212,6 +211,38 @@ describe("btree", () => { }); }); +describe("namespaced btree", () => { + test("counts", async () => { + const t = convexTest(schema, modules); + await t.run(async (ctx) => { + await getOrCreateTree(ctx.db, "a", 4, false); + await getOrCreateTree(ctx.db, "b", 4, false); + async function insert(namespace: string, key: number, value: string) { + await insertHandler(ctx, { key, value, namespace }); + await validateTree(ctx, { namespace }); + } + async function count(namespace: string, count: number) { + const c = await aggregateBetweenHandler(ctx, { namespace }); + expect(c).toEqual({ + count, + sum: 0, + }); + } + await insert("a", 1, "a"); + await insert("a", 4, "b"); + await insert("a", 3, "c"); + await insert("a", 2, "d"); + await insert("a", 5, "e"); + await insert("b", 6, "e"); + await insert("b", 7, "e"); + await insert("b", 10, "e"); + await insert("b", 0, "e"); + await count("a", 5); + await count("b", 4); + }); + }); +}); + class SimpleBTree { private items: Item[] = []; constructor() {} @@ -415,7 +446,7 @@ describe("btree matches simpler impl", () => { }; const t = convexTest(schema, modules); await t.run(async (ctx) => { - await getOrCreateTree(ctx.db, minNodeSize * 2, rootLazy); + await getOrCreateTree(ctx.db, undefined, minNodeSize * 2, rootLazy); const simple = new SimpleBTree(); // Do a bunch of writes. // If there are conflicts on insert and delete, assert they happen on @@ -447,7 +478,7 @@ describe("btree matches simpler impl", () => { ); } } - await validateTree(ctx); + await validateTree(ctx, {}); // Do a bunch of reads. 
for (const read of reads) { if (read.type === "atOffset") { diff --git a/src/component/btree.ts b/src/component/btree.ts index e7d94f4..933c370 100644 --- a/src/component/btree.ts +++ b/src/component/btree.ts @@ -21,6 +21,7 @@ export const DEFAULT_MAX_NODE_SIZE = 16; export type Key = ConvexValue; export type Value = ConvexValue; +export type Namespace = ConvexValue | undefined; export function p(v: ConvexValue): string { return v?.toString() ?? "undefined"; @@ -34,11 +35,16 @@ function log(s: string) { export async function insertHandler( ctx: { db: DatabaseWriter }, - args: { key: Key; value: Value; summand?: number } + args: { key: Key; value: Value; summand?: number; namespace?: Namespace } ) { - const tree = await getOrCreateTree(ctx.db, DEFAULT_MAX_NODE_SIZE, true); + const tree = await getOrCreateTree( + ctx.db, + args.namespace, + DEFAULT_MAX_NODE_SIZE, + true + ); const summand = args.summand ?? 0; - const pushUp = await insertIntoNode(ctx, tree.root, { + const pushUp = await insertIntoNode(ctx, args.namespace, tree.root, { k: args.key, v: args.value, s: summand, @@ -64,10 +70,15 @@ export async function insertHandler( export async function deleteHandler( ctx: { db: DatabaseWriter }, - args: { key: Key } + args: { key: Key; namespace?: Namespace } ) { - const tree = await getOrCreateTree(ctx.db, DEFAULT_MAX_NODE_SIZE, true); - await deleteFromNode(ctx, tree.root, args.key); + const tree = await getOrCreateTree( + ctx.db, + args.namespace, + DEFAULT_MAX_NODE_SIZE, + true + ); + await deleteFromNode(ctx, args.namespace, tree.root, args.key); const root = (await ctx.db.get(tree.root))!; if (root.items.length === 0 && root.subtrees.length === 1) { log( @@ -87,16 +98,19 @@ export async function deleteHandler( } export const validate = query({ - args: {}, + args: { namespace: v.optional(v.any()) }, handler: validateTree, }); -export async function validateTree(ctx: { db: DatabaseReader }) { - const tree = await getTree(ctx.db); +export async function 
validateTree( + ctx: { db: DatabaseReader }, + args: { namespace?: Namespace } +) { + const tree = await getTree(ctx.db, args.namespace); if (!tree) { return; } - await validateNode(ctx, tree.root, 0); + await validateNode(ctx, args.namespace, tree.root, 0); } type ValidationResult = { @@ -105,13 +119,19 @@ type ValidationResult = { height: number; }; -async function MAX_NODE_SIZE(ctx: { db: DatabaseReader }) { - const tree = await mustGetTree(ctx.db); +async function MAX_NODE_SIZE( + ctx: { db: DatabaseReader }, + namespace: Namespace +) { + const tree = await mustGetTree(ctx.db, namespace); return tree.maxNodeSize; } -async function MIN_NODE_SIZE(ctx: { db: DatabaseReader }) { - const max = await MAX_NODE_SIZE(ctx); +async function MIN_NODE_SIZE( + ctx: { db: DatabaseReader }, + namespace: Namespace +) { + const max = await MAX_NODE_SIZE(ctx, namespace); if (max % 2 !== 0 || max < 4) { throw new Error("MAX_NODE_SIZE must be even and at least 4"); } @@ -120,6 +140,7 @@ async function MIN_NODE_SIZE(ctx: { db: DatabaseReader }) { async function validateNode( ctx: { db: DatabaseReader }, + namespace: Namespace, node: Id<"btreeNode">, depth: number ): Promise { @@ -127,10 +148,10 @@ async function validateNode( if (!n) { throw new ConvexError(`missing node ${node}`); } - if (n.items.length > (await MAX_NODE_SIZE(ctx))) { + if (n.items.length > (await MAX_NODE_SIZE(ctx, namespace))) { throw new ConvexError(`node ${node} exceeds max size`); } - if (depth > 0 && n.items.length < (await MIN_NODE_SIZE(ctx))) { + if (depth > 0 && n.items.length < (await MIN_NODE_SIZE(ctx, namespace))) { throw new ConvexError(`non-root node ${node} has less than min-size`); } if (n.subtrees.length > 0 && n.items.length + 1 !== n.subtrees.length) { @@ -146,7 +167,9 @@ async function validateNode( } } const validatedSubtrees = await Promise.all( - n.subtrees.map((subtree) => validateNode(ctx, subtree, depth + 1)) + n.subtrees.map((subtree) => + validateNode(ctx, namespace, subtree, depth + 1) 
+ ) ); for (let i = 0; i < n.subtrees.length; i++) { // Each subtree's min is greater than the key at the prior index @@ -198,44 +221,13 @@ async function validateNode( return { min, max, height }; } -export const count = query({ - args: {}, - handler: countHandler, -}); - -export async function countHandler(ctx: { db: DatabaseReader }) { - const tree = await getTree(ctx.db); - if (!tree) { - return 0; - } - const root = (await ctx.db.get(tree.root))!; - const nAggregate = await nodeAggregate(ctx.db, root); - return nAggregate.count; -} - -export const sum = query({ - args: {}, - returns: v.number(), - handler: sumHandler, -}); - -export async function sumHandler(ctx: { db: DatabaseReader }) { - const tree = await getTree(ctx.db); - if (!tree) { - return 0; - } - const root = (await ctx.db.get(tree.root))!; - const nAggregate = await nodeAggregate(ctx.db, root); - return nAggregate.sum; -} - /// Count of keys that are *strictly* between k1 and k2. /// If k1 or k2 are undefined, that bound is unlimited. 
export async function aggregateBetweenHandler( ctx: { db: DatabaseReader }, - args: { k1?: Key; k2?: Key } + args: { k1?: Key; k2?: Key; namespace?: Namespace } ) { - const tree = await getTree(ctx.db); + const tree = await getTree(ctx.db, args.namespace); if (tree === null) { return { count: 0, sum: 0 }; } @@ -300,7 +292,11 @@ async function filterBetween( } export const aggregateBetween = query({ - args: { k1: v.optional(v.any()), k2: v.optional(v.any()) }, + args: { + k1: v.optional(v.any()), + k2: v.optional(v.any()), + namespace: v.optional(v.any()), + }, returns: aggregate, handler: aggregateBetweenHandler, }); @@ -326,14 +322,14 @@ async function aggregateBetweenInNode( export async function getHandler( ctx: { db: DatabaseReader }, - args: { key: Key } + args: { key: Key; namespace?: Namespace } ) { - const tree = (await getTree(ctx.db))!; + const tree = (await getTree(ctx.db, args.namespace))!; return await getInNode(ctx.db, tree.root, args.key); } export const get = query({ - args: { key: v.any() }, + args: { key: v.any(), namespace: v.optional(v.any()) }, returns: v.union(v.null(), itemValidator), handler: getHandler, }); @@ -366,6 +362,7 @@ export const atOffset = query({ offset: v.number(), k1: v.optional(v.any()), k2: v.optional(v.any()), + namespace: v.optional(v.any()), }, returns: itemValidator, handler: atOffsetHandler, @@ -373,9 +370,9 @@ export const atOffset = query({ export async function atOffsetHandler( ctx: { db: DatabaseReader }, - args: { offset: number; k1?: Key; k2?: Key } + args: { offset: number; k1?: Key; k2?: Key; namespace?: Namespace } ) { - const tree = await getTree(ctx.db); + const tree = await getTree(ctx.db, args.namespace); if (tree === null) { throw new ConvexError("tree is empty"); } @@ -387,6 +384,7 @@ export const atNegativeOffset = query({ offset: v.number(), k1: v.optional(v.any()), k2: v.optional(v.any()), + namespace: v.optional(v.any()), }, returns: itemValidator, handler: atNegativeOffsetHandler, @@ -394,9 +392,9 @@ 
export const atNegativeOffset = query({ export async function atNegativeOffsetHandler( ctx: { db: DatabaseReader }, - args: { offset: number; k1?: Key; k2?: Key } + args: { offset: number; k1?: Key; k2?: Key; namespace?: Namespace } ) { - const tree = await getTree(ctx.db); + const tree = await getTree(ctx.db, args.namespace); if (tree === null) { throw new ConvexError("tree is empty"); } @@ -411,36 +409,55 @@ export async function atNegativeOffsetHandler( export async function offsetHandler( ctx: { db: DatabaseReader }, - args: { key: Key; k1?: Key } + args: { key: Key; k1?: Key; namespace?: Namespace } ) { - return (await aggregateBetweenHandler(ctx, { k1: args.k1, k2: args.key })) - .count; + return ( + await aggregateBetweenHandler(ctx, { + k1: args.k1, + k2: args.key, + namespace: args.namespace, + }) + ).count; } // Returns the offset of the smallest key >= the given target key. export const offset = query({ - args: { key: v.any(), k1: v.optional(v.any()) }, + args: { + key: v.any(), + k1: v.optional(v.any()), + namespace: v.optional(v.any()), + }, returns: v.number(), handler: offsetHandler, }); export async function offsetUntilHandler( ctx: { db: DatabaseReader }, - args: { key: Key; k2?: Key } + args: { key: Key; k2?: Key; namespace?: Namespace } ) { - return (await aggregateBetweenHandler(ctx, { k1: args.key, k2: args.k2 })) - .count; + return ( + await aggregateBetweenHandler(ctx, { + k1: args.key, + k2: args.k2, + namespace: args.namespace, + }) + ).count; } // Returns the offset of the smallest key >= the given target key. 
export const offsetUntil = query({ - args: { key: v.any(), k2: v.optional(v.any()) }, + args: { + key: v.any(), + k2: v.optional(v.any()), + namespace: v.optional(v.any()), + }, returns: v.number(), handler: offsetUntilHandler, }); async function deleteFromNode( ctx: { db: DatabaseWriter }, + namespace: Namespace, node: Id<"btreeNode">, key: Key ): Promise { @@ -491,7 +508,7 @@ async function deleteFromNode( message: `key ${p(key)} not found in node ${n._id}`, }); } - const deleted = await deleteFromNode(ctx, n.subtrees[i], key); + const deleted = await deleteFromNode(ctx, namespace, n.subtrees[i], key); if (!deleted) { return null; } @@ -507,7 +524,7 @@ async function deleteFromNode( // Now we need to check if the subtree at index i is too small const deficientSubtree = (await ctx.db.get(n.subtrees[i]))!; - const minNodeSize = await MIN_NODE_SIZE(ctx); + const minNodeSize = await MIN_NODE_SIZE(ctx, namespace); if (deficientSubtree.items.length < minNodeSize) { log(`deficient subtree ${deficientSubtree._id}`); // If the subtree is too small, we need to rebalance @@ -764,6 +781,7 @@ type PushUp = { async function insertIntoNode( ctx: { db: DatabaseWriter }, + namespace: Namespace, node: Id<"btreeNode">, item: Item ): Promise { @@ -782,7 +800,7 @@ async function insertIntoNode( // insert key before index i if (n.subtrees.length > 0) { // insert into subtree - const pushUp = await insertIntoNode(ctx, n.subtrees[i], item); + const pushUp = await insertIntoNode(ctx, namespace, n.subtrees[i], item); if (pushUp) { await ctx.db.patch(node, { items: [...n.items.slice(0, i), pushUp.item, ...n.items.slice(i)], @@ -807,8 +825,8 @@ async function insertIntoNode( } const newN = (await ctx.db.get(node))!; - const maxNodeSize = await MAX_NODE_SIZE(ctx); - const minNodeSize = await MIN_NODE_SIZE(ctx); + const maxNodeSize = await MAX_NODE_SIZE(ctx, namespace); + const minNodeSize = await MIN_NODE_SIZE(ctx, namespace); if (newN.items.length > maxNodeSize) { if ( newN.items.length !== 
maxNodeSize + 1 || @@ -879,12 +897,15 @@ function compareKeys(k1: Key, k2: Key) { return compareValues(k1, k2); } -export async function getTree(db: DatabaseReader) { - return await db.query("btree").unique(); +export async function getTree(db: DatabaseReader, namespace: Namespace) { + return await db + .query("btree") + .withIndex("by_namespace", (q) => q.eq("namespace", namespace)) + .unique(); } -export async function mustGetTree(db: DatabaseReader) { - const tree = await getTree(db); +export async function mustGetTree(db: DatabaseReader, namespace: Namespace) { + const tree = await getTree(db, namespace); if (!tree) { throw new Error("btree not initialized"); } @@ -893,10 +914,11 @@ export async function mustGetTree(db: DatabaseReader) { export async function getOrCreateTree( db: DatabaseWriter, - maxNodeSize: number, - rootLazy: boolean + namespace: Namespace, + maxNodeSize?: number, + rootLazy?: boolean ): Promise> { - const originalTree = await getTree(db); + const originalTree = await getTree(db, namespace); if (originalTree) { return originalTree; } @@ -908,19 +930,37 @@ export async function getOrCreateTree( sum: 0, }, }); + const effectiveMaxNodeSize = + maxNodeSize ?? + (await MAX_NODE_SIZE({ db }, undefined)) ?? + DEFAULT_MAX_NODE_SIZE; + const effectiveRootLazy = + rootLazy ?? (await isRootLazy(db, undefined)) ?? true; const id = await db.insert("btree", { root, - maxNodeSize, + maxNodeSize: effectiveMaxNodeSize, + namespace, }); const newTree = await db.get(id); // Check the maxNodeSize is valid. 
- await MIN_NODE_SIZE({ db }); - if (rootLazy) { + await MIN_NODE_SIZE({ db }, namespace); + if (effectiveRootLazy) { await db.patch(root, { aggregate: undefined }); } return newTree!; } +async function isRootLazy( + db: DatabaseReader, + namespace: Namespace +): Promise { + const tree = await getTree(db, namespace); + if (!tree) { + return true; + } + return (await db.get(tree.root))?.aggregate === undefined; +} + export const deleteTreeNodes = internalMutation({ args: { node: v.id("btreeNode") }, returns: v.null(), @@ -942,6 +982,7 @@ export const paginate = query({ cursor: v.optional(v.string()), k1: v.optional(v.any()), k2: v.optional(v.any()), + namespace: v.optional(v.any()), }, returns: v.object({ page: v.array(itemValidator), @@ -959,9 +1000,10 @@ export async function paginateHandler( cursor?: string; k1?: Key; k2?: Key; + namespace?: Namespace; } ) { - const tree = await getTree(ctx.db); + const tree = await getTree(ctx.db, args.namespace); if (tree === null) { return { page: [], cursor: "", isDone: true }; } @@ -1051,3 +1093,44 @@ export async function paginateInNode( isDone: true, }; } + +export const paginateNamespaces = query({ + args: { + limit: v.number(), + cursor: v.optional(v.string()), + }, + returns: v.object({ + page: v.array(v.any()), + cursor: v.string(), + isDone: v.boolean(), + }), + handler: paginateNamespacesHandler, +}); + +export async function paginateNamespacesHandler( + ctx: { db: DatabaseReader }, + args: { limit: number; cursor?: string } +) { + if (args.cursor === "endcursor") { + return { + page: [], + cursor: "endcursor", + isDone: true, + }; + } + let trees = []; + if (args.cursor === undefined) { + trees = await ctx.db.query("btree").withIndex("by_id").take(args.limit); + } else { + trees = await ctx.db + .query("btree") + .withIndex("by_id", (q) => q.gt("_id", args.cursor as Id<"btree">)) + .take(args.limit); + } + const isDone = trees.length < args.limit; + return { + page: trees.map((t) => t.namespace), + cursor: isDone ? 
"endcursor" : trees[trees.length - 1]._id, + isDone, + }; +} diff --git a/src/component/inspect.ts b/src/component/inspect.ts index 289b418..b320158 100644 --- a/src/component/inspect.ts +++ b/src/component/inspect.ts @@ -1,12 +1,12 @@ import { v } from "convex/values"; import { Id } from "./_generated/dataModel.js"; import { DatabaseReader, query } from "./_generated/server.js"; -import { getTree, p } from "./btree.js"; +import { getTree, Namespace, p } from "./btree.js"; export const display = query({ - args: {}, - handler: async (ctx) => { - const tree = await getTree(ctx.db); + args: { namespace: v.optional(v.any()) }, + handler: async (ctx, args) => { + const tree = await getTree(ctx.db, args.namespace); if (!tree) { return "empty"; } @@ -32,15 +32,15 @@ async function displayNode( } export const dump = query({ - args: {}, + args: { namespace: v.optional(v.any()) }, returns: v.string(), - handler: async (ctx) => { - return await dumpTree(ctx.db); + handler: async (ctx, args) => { + return await dumpTree(ctx.db, args.namespace); }, }); -export async function dumpTree(db: DatabaseReader) { - const t = (await getTree(db))!; +export async function dumpTree(db: DatabaseReader, namespace: Namespace) { + const t = (await getTree(db, namespace))!; return dumpNode(db, t.root); } @@ -70,10 +70,10 @@ async function dumpNode( } export const inspectNode = query({ - args: { node: v.optional(v.string()) }, + args: { node: v.optional(v.string()), namespace: v.optional(v.any()) }, returns: v.null(), handler: async (ctx, args) => { - const tree = await getTree(ctx.db); + const tree = await getTree(ctx.db, args.namespace); if (!tree) { console.log("no tree"); return; diff --git a/src/component/public.ts b/src/component/public.ts index f8e9080..ccf27af 100644 --- a/src/component/public.ts +++ b/src/component/public.ts @@ -13,15 +13,17 @@ export const init = mutation({ args: { maxNodeSize: v.optional(v.number()), rootLazy: v.optional(v.boolean()), + namespace: v.optional(v.any()), 
}, returns: v.null(), - handler: async (ctx, { maxNodeSize, rootLazy }) => { - const existing = await getTree(ctx.db); + handler: async (ctx, { maxNodeSize, rootLazy, namespace }) => { + const existing = await getTree(ctx.db, namespace); if (existing) { throw new Error("tree already initialized"); } await getOrCreateTree( ctx.db, + namespace, maxNodeSize ?? DEFAULT_MAX_NODE_SIZE, rootLazy ?? true ); @@ -35,23 +37,33 @@ export const init = mutation({ * Lazy roots are the default; use `clear` to revert to eager roots. */ export const makeRootLazy = mutation({ - args: {}, + args: { namespace: v.optional(v.any()) }, returns: v.null(), - handler: async (ctx) => { - const tree = await getOrCreateTree(ctx.db, DEFAULT_MAX_NODE_SIZE, true); + handler: async (ctx, args) => { + const tree = await getOrCreateTree( + ctx.db, + args.namespace, + DEFAULT_MAX_NODE_SIZE, + true + ); await ctx.db.patch(tree.root, { aggregate: undefined }); }, }); export const insert = mutation({ - args: { key: v.any(), value: v.any(), summand: v.optional(v.number()) }, + args: { + key: v.any(), + value: v.any(), + summand: v.optional(v.number()), + namespace: v.optional(v.any()), + }, returns: v.null(), handler: insertHandler, }); // delete is a keyword, hence the underscore. 
export const delete_ = mutation({ - args: { key: v.any() }, + args: { key: v.any(), namespace: v.optional(v.any()) }, returns: v.null(), handler: deleteHandler, }); @@ -62,23 +74,29 @@ export const replace = mutation({ newKey: v.any(), value: v.any(), summand: v.optional(v.number()), + namespace: v.optional(v.any()), + newNamespace: v.optional(v.any()), }, returns: v.null(), handler: async (ctx, args) => { - await deleteHandler(ctx, { key: args.currentKey }); + await deleteHandler(ctx, { + key: args.currentKey, + namespace: args.namespace, + }); await insertHandler(ctx, { key: args.newKey, value: args.value, summand: args.summand, + namespace: args.newNamespace, }); }, }); export const deleteIfExists = mutation({ - args: { key: v.any() }, - handler: async (ctx, { key }) => { + args: { key: v.any(), namespace: v.optional(v.any()) }, + handler: async (ctx, { key, namespace }) => { try { - await deleteHandler(ctx, { key }); + await deleteHandler(ctx, { key, namespace }); } catch (e) { if (e instanceof ConvexError && e.data?.code === "DELETE_MISSING_KEY") { return; @@ -94,10 +112,15 @@ export const replaceOrInsert = mutation({ newKey: v.any(), value: v.any(), summand: v.optional(v.number()), + namespace: v.optional(v.any()), + newNamespace: v.optional(v.any()), }, handler: async (ctx, args) => { try { - await deleteHandler(ctx, { key: args.currentKey }); + await deleteHandler(ctx, { + key: args.currentKey, + namespace: args.namespace, + }); } catch (e) { if ( !(e instanceof ConvexError && e.data?.code === "DELETE_MISSING_KEY") @@ -109,6 +132,7 @@ export const replaceOrInsert = mutation({ key: args.newKey, value: args.value, summand: args.summand, + namespace: args.newNamespace, }); }, }); @@ -121,12 +145,13 @@ export const replaceOrInsert = mutation({ */ export const clear = mutation({ args: { + namespace: v.optional(v.any()), maxNodeSize: v.optional(v.number()), rootLazy: v.optional(v.boolean()), }, returns: v.null(), - handler: async (ctx, { maxNodeSize, rootLazy }) 
=> { - const tree = await getTree(ctx.db); + handler: async (ctx, { maxNodeSize, rootLazy, namespace }) => { + const tree = await getTree(ctx.db, namespace); let existingRootLazy = true; let existingMaxNodeSize = DEFAULT_MAX_NODE_SIZE; if (tree) { @@ -140,6 +165,7 @@ export const clear = mutation({ } await getOrCreateTree( ctx.db, + namespace, maxNodeSize ?? existingMaxNodeSize, rootLazy ?? existingRootLazy ); diff --git a/src/component/schema.ts b/src/component/schema.ts index 5a3517c..887549a 100644 --- a/src/component/schema.ts +++ b/src/component/schema.ts @@ -30,11 +30,12 @@ export const aggregate = v.object({ export type Aggregate = Infer<typeof aggregate>; export default defineSchema({ - // Singleton. + // One per namespace btree: defineTable({ root: v.id("btreeNode"), + namespace: v.optional(v.any()), maxNodeSize: v.number(), - }), + }).index("by_namespace", ["namespace"]), btreeNode: defineTable({ items: v.array(item), subtrees: v.array(v.id("btreeNode")), diff --git a/tsconfig.json b/tsconfig.json index e110334..91ec09c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -18,5 +18,5 @@ "outDir": "./dist", "skipLibCheck": true }, - "include": ["./src/**/*"], + "include": ["./src/**/*"] }