diff --git a/dev/src/aggregate.ts b/dev/src/aggregate.ts new file mode 100644 index 000000000..982ac6832 --- /dev/null +++ b/dev/src/aggregate.ts @@ -0,0 +1,166 @@ +/** + * @license + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as firestore from '@google-cloud/firestore'; + +import {FieldPath} from './path'; +import {google} from '../protos/firestore_v1_proto_api'; + +import IAggregation = google.firestore.v1.StructuredAggregationQuery.IAggregation; +import * as assert from 'assert'; + +/** + * Concrete implementation of the Aggregate type. + */ +export class Aggregate { + constructor( + readonly alias: string, + readonly aggregateType: AggregateType, + readonly fieldPath?: string | FieldPath + ) {} + + /** + * Converts this object to the proto representation of an Aggregate. + * @internal + */ + toProto(): IAggregation { + const proto: IAggregation = {}; + if (this.aggregateType === 'count') { + proto.count = {}; + } else if (this.aggregateType === 'sum') { + assert( + this.fieldPath !== undefined, + 'Missing field path for sum aggregation.' + ); + proto.sum = { + field: { + fieldPath: FieldPath.fromArgument(this.fieldPath!).formattedName, + }, + }; + } else if (this.aggregateType === 'avg') { + assert( + this.fieldPath !== undefined, + 'Missing field path for average aggregation.' + ); + proto.avg = { + field: { + fieldPath: FieldPath.fromArgument(this.fieldPath!).formattedName, + }, + }; + } else { + throw new Error(`Aggregate type ${this.aggregateType} unimplemented.`); + } + proto.alias = this.alias; + return proto; + } +} + +/** + * Represents an aggregation that can be performed by Firestore. + */ +export class AggregateField implements firestore.AggregateField { + /** A type string to uniquely identify instances of this class. */ + readonly type = 'AggregateField'; + + /** + * The field on which the aggregation is performed. + * @internal + **/ + public readonly _field?: string | FieldPath; + + /** + * Create a new AggregateField + * @param aggregateType Specifies the type of aggregation operation to perform. + * @param field Optionally specifies the field that is aggregated. + * @internal + */ + private constructor( + public readonly aggregateType: AggregateType, + field?: string | FieldPath + ) { + this._field = field; + } + + /** + * Compares this object with the given object for equality. + * + * This object is considered "equal" to the other object if and only if + * `other` performs the same kind of aggregation on the same field (if any). + * + * @param other The object to compare to this object for equality. + * @return `true` if this object is "equal" to the given object, as + * defined above, or `false` otherwise. 
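+   *
+   * For example, `AggregateField.sum('pages')` is equal to
+   * `AggregateField.sum(new FieldPath('pages'))` (string and `FieldPath`
+   * arguments are normalized before comparison), but is not equal to
+   * `AggregateField.average('pages')` or `AggregateField.sum('year')`.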
+ */ + isEqual(other: AggregateField): boolean { + return ( + other instanceof AggregateField && + this.aggregateType === other.aggregateType && + ((this._field === undefined && other._field === undefined) || + (this._field !== undefined && + other._field !== undefined && + FieldPath.fromArgument(this._field).isEqual( + FieldPath.fromArgument(other._field) + ))) + ); + } + + /** + * Create an AggregateField object that can be used to compute the count of + * documents in the result set of a query. + */ + static count(): AggregateField { + return new AggregateField('count'); + } + + /** + * Create an AggregateField object that can be used to compute the average of + * a specified field over a range of documents in the result set of a query. + * @param field Specifies the field to average across the result set. + */ + static average(field: string | FieldPath): AggregateField { + return new AggregateField('avg', field); + } + + /** + * Create an AggregateField object that can be used to compute the sum of + * a specified field over a range of documents in the result set of a query. + * @param field Specifies the field to sum across the result set. + */ + static sum(field: string | FieldPath): AggregateField { + return new AggregateField('sum', field); + } +} + +/** + * A type whose property values are all `AggregateField` objects. + */ +export interface AggregateSpec { + [field: string]: AggregateFieldType; +} + +/** + * The union of all `AggregateField` types that are supported by Firestore. + */ +export type AggregateFieldType = + | ReturnType + | ReturnType + | ReturnType; + +/** + * Union type representing the aggregate type to be performed. + */ +export type AggregateType = 'count' | 'avg' | 'sum'; diff --git a/dev/src/index.ts b/dev/src/index.ts index b8d0c3a00..1a95d689b 100644 --- a/dev/src/index.ts +++ b/dev/src/index.ts @@ -105,6 +105,7 @@ export {GeoPoint} from './geo-point'; export {CollectionGroup}; export {QueryPartition} from './query-partition'; export {setLogFunction} from './logger'; +export {AggregateField, Aggregate} from './aggregate'; const libVersion = require('../../package.json').version; setLibVersion(libVersion); diff --git a/dev/src/reference.ts b/dev/src/reference.ts index 89f6eace4..79626b479 100644 --- a/dev/src/reference.ts +++ b/dev/src/reference.ts @@ -15,6 +15,7 @@ */ import * as firestore from '@google-cloud/firestore'; +import * as assert from 'assert'; import {Duplex, Readable, Transform} from 'stream'; import * as deepEqual from 'fast-deep-equal'; import {GoogleError} from 'google-gax'; @@ -44,6 +45,7 @@ import { autoId, Deferred, isPermanentRpcError, + mapToArray, requestTag, wrapError, } from './util'; @@ -58,6 +60,7 @@ import {DocumentWatch, QueryWatch} from './watch'; import {validateDocumentData, WriteBatch, WriteResult} from './write-batch'; import api = protos.google.firestore.v1; import {CompositeFilter, Filter, UnaryFilter} from './filter'; +import {AggregateField, Aggregate, AggregateSpec} from './aggregate'; /** * The direction of a `Query.orderBy()` clause is specified as 'desc' or 'asc' @@ -1848,7 +1851,47 @@ export class Query< AppModelType, DbModelType > { - return new AggregateQuery(this, {count: {}}); + return this.aggregate({ + count: AggregateField.count(), + }); + } + + /** + * Returns a query that can perform the given aggregations. + * + * The returned query, when executed, calculates the specified aggregations + * over the documents in the result set of this query, without actually + * downloading the documents. 
+ * + * Using the returned query to perform aggregations is efficient because only + * the final aggregation values, not the documents' data, is downloaded. The + * returned query can even perform aggregations of the documents if the result set + * would be prohibitively large to download entirely (e.g. thousands of documents). + * + * @param aggregateSpec An `AggregateSpec` object that specifies the aggregates + * to perform over the result set. The AggregateSpec specifies aliases for each + * aggregate, which can be used to retrieve the aggregate result. + * @example + * ```typescript + * const aggregateQuery = col.aggregate(query, { + * countOfDocs: count(), + * totalHours: sum('hours'), + * averageScore: average('score') + * }); + * + * const aggregateSnapshot = await aggregateQuery.get(); + * const countOfDocs: number = aggregateSnapshot.data().countOfDocs; + * const totalHours: number = aggregateSnapshot.data().totalHours; + * const averageScore: number | null = aggregateSnapshot.data().averageScore; + * ``` + */ + aggregate( + aggregateSpec: T + ): AggregateQuery { + return new AggregateQuery( + this, + aggregateSpec + ); } /** @@ -3163,12 +3206,15 @@ export class CollectionReference< * A query that calculates aggregations over an underlying query. */ export class AggregateQuery< - AggregateSpecType extends firestore.AggregateSpec, + AggregateSpecType extends AggregateSpec, AppModelType = firestore.DocumentData, DbModelType extends firestore.DocumentData = firestore.DocumentData, > implements firestore.AggregateQuery { + private readonly clientAliasToServerAliasMap: Record = {}; + private readonly serverAliasToClientAliasMap: Record = {}; + /** * @private * @internal @@ -3181,7 +3227,19 @@ export class AggregateQuery< // eslint-disable-next-line @typescript-eslint/no-explicit-any private readonly _query: Query, private readonly _aggregates: AggregateSpecType - ) {} + ) { + // Client-side aliases may be too long and exceed the 1500-byte string size limit. + // Such long strings do not need to be transferred over the wire either. + // The client maps the user's alias to a short form alias and send that to the server. + let aggregationNum = 0; + for (const clientAlias in this._aggregates) { + if (Object.prototype.hasOwnProperty.call(this._aggregates, clientAlias)) { + const serverAlias = `aggregate_${aggregationNum++}`; + this.clientAliasToServerAliasMap[clientAlias] = serverAlias; + this.serverAliasToClientAliasMap[serverAlias] = clientAlias; + } + } + } /** The query whose aggregations will be calculated by this object. */ get query(): Query { @@ -3323,12 +3381,17 @@ export class AggregateQuery< if (fields) { const serializer = this._query.firestore._serializer!; for (const prop of Object.keys(fields)) { - if (this._aggregates[prop] === undefined) { + const alias = this.serverAliasToClientAliasMap[prop]; + assert( + alias !== null && alias !== undefined, + `'${prop}' not present in server-client alias mapping.` + ); + if (this._aggregates[alias] === undefined) { throw new Error( `Unexpected alias [${prop}] in result aggregate result` ); } - data[prop] = serializer.decodeValue(fields[prop]); + data[alias] = serializer.decodeValue(fields[prop]); } } return data; @@ -3344,18 +3407,22 @@ export class AggregateQuery< */ toProto(transactionId?: Uint8Array): api.IRunAggregationQueryRequest { const queryProto = this._query.toProto(); - //TODO(tomandersen) inspect _query to build request - this is just hard - // coded count right now. 
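+    // Build one aggregation per entry in the user's AggregateSpec, using the
+    // short server-side aliases ('aggregate_0', 'aggregate_1', ...) generated
+    // in the constructor; response decoding maps those aliases back to the
+    // user's aliases before results are returned from data().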
const runQueryRequest: api.IRunAggregationQueryRequest = { parent: queryProto.parent, structuredAggregationQuery: { structuredQuery: queryProto.structuredQuery, - aggregations: [ - { - alias: 'count', - count: {}, - }, - ], + aggregations: mapToArray(this._aggregates, (aggregate, clientAlias) => { + const serverAlias = this.clientAliasToServerAliasMap[clientAlias]; + assert( + serverAlias !== null && serverAlias !== undefined, + `'${clientAlias}' not present in client-server alias mapping.` + ); + return new Aggregate( + serverAlias, + aggregate.aggregateType, + aggregate._field + ).toProto(); + }), }, }; diff --git a/dev/src/util.ts b/dev/src/util.ts index cf91f4ee0..c68695f82 100644 --- a/dev/src/util.ts +++ b/dev/src/util.ts @@ -20,6 +20,7 @@ import {randomBytes} from 'crypto'; import type {CallSettings, ClientConfig, GoogleError} from 'google-gax'; import type {BackoffSettings} from 'google-gax/build/src/gax'; import * as gapicConfig from './v1/firestore_client_config.json'; +import Dict = NodeJS.Dict; /** * A Promise implementation that supports deferred resolution. @@ -246,3 +247,23 @@ export function tryGetPreferRestEnvironmentVariable(): boolean | undefined { return undefined; } } + +/** + * Returns an array of values that are calculated by performing the given `fn` + * on all keys in the given `obj` dictionary. + * + * @private + * @internal + */ +export function mapToArray( + obj: Dict, + fn: (element: V, key: string, obj: Dict) => R +): R[] { + const result: R[] = []; + for (const key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + result.push(fn(obj[key]!, key, obj)); + } + } + return result; +} diff --git a/dev/system-test/firestore.ts b/dev/system-test/firestore.ts index 21d496d0c..57161a433 100644 --- a/dev/system-test/firestore.ts +++ b/dev/system-test/firestore.ts @@ -28,6 +28,7 @@ import * as extend from 'extend'; import {firestore} from '../protos/firestore_v1_proto_api'; import { + AggregateField, CollectionReference, DocumentReference, DocumentSnapshot, @@ -2939,7 +2940,7 @@ describe('Query class', () => { }); }); -describe('Aggregates', () => { +describe('count queries', () => { let firestore: Firestore; let randomCol: CollectionReference; @@ -3069,6 +3070,1820 @@ describe('Aggregates', () => { } }); +describe('count queries using aggregate api', () => { + let firestore: Firestore; + let randomCol: CollectionReference; + + beforeEach(() => { + randomCol = getTestRoot(); + firestore = randomCol.firestore; + }); + + afterEach(() => verifyInstance(firestore)); + + describe('Run outside Transaction', () => { + countTests(async (q, n) => { + const res = await q.get(); + expect(res.data().count).to.equal(n); + }); + }); + + describe('Run within Transaction', () => { + countTests(async (q, n) => { + const res = await firestore.runTransaction(f => f.get(q)); + expect(res.data().count).to.equal(n); + }); + }); + + function countTests( + runQueryAndExpectCount: ( + query: FirebaseFirestore.AggregateQuery<{ + count: FirebaseFirestore.AggregateField; + }>, + expectedCount: number + ) => Promise + ) { + it('counts 0 document from non-existent collection', async () => { + const count = randomCol.aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count, 0); + }); + + it('counts 0 document from filtered empty collection', async () => { + await randomCol.doc('doc').set({foo: 'bar'}); + const count = randomCol + .where('foo', '==', 'notbar') + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count, 0); + }); + + 
it('counts 1 document', async () => { + await randomCol.doc('doc').set({foo: 'bar'}); + const count = randomCol.aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count, 1); + }); + + it('counts multiple documents with filter', async () => { + await randomCol.doc('doc1').set({foo: 'bar'}); + await randomCol.doc('doc2').set({foo: 'bar'}); + await randomCol.doc('doc3').set({foo: 'notbar'}); + await randomCol.doc('doc3').set({notfoo: 'bar'}); + const count = randomCol + .where('foo', '==', 'bar') + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count, 2); + }); + + it('counts up to limit', async () => { + await randomCol.doc('doc1').set({foo: 'bar'}); + await randomCol.doc('doc2').set({foo: 'bar'}); + await randomCol.doc('doc3').set({foo: 'bar'}); + await randomCol.doc('doc4').set({foo: 'bar'}); + await randomCol.doc('doc5').set({foo: 'bar'}); + await randomCol.doc('doc6').set({foo: 'bar'}); + await randomCol.doc('doc7').set({foo: 'bar'}); + await randomCol.doc('doc8').set({foo: 'bar'}); + const count = randomCol + .limit(5) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count, 5); + }); + + it('counts with orderBy', async () => { + await randomCol.doc('doc1').set({foo1: 'bar1'}); + await randomCol.doc('doc2').set({foo1: 'bar2'}); + await randomCol.doc('doc3').set({foo1: 'bar3'}); + await randomCol.doc('doc4').set({foo1: 'bar4'}); + await randomCol.doc('doc5').set({foo1: 'bar5'}); + await randomCol.doc('doc6').set({foo2: 'bar6'}); + await randomCol.doc('doc7').set({foo2: 'bar7'}); + await randomCol.doc('doc8').set({foo2: 'bar8'}); + + const count1 = randomCol + .orderBy('foo2') + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count1, 3); + + const count2 = randomCol + .orderBy('foo3') + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count2, 0); + }); + + it('counts with startAt, endAt and offset with DocumentReference cursor', async () => { + await randomCol.doc('doc1').set({foo: 'bar'}); + await randomCol.doc('doc2').set({foo: 'bar'}); + await randomCol.doc('doc3').set({foo: 'bar'}); + await randomCol.doc('doc4').set({foo: 'bar'}); + await randomCol.doc('doc5').set({foo: 'bar'}); + await randomCol.doc('doc6').set({foo: 'bar'}); + await randomCol.doc('doc7').set({foo: 'bar'}); + + const count1 = randomCol + .orderBy(FieldPath.documentId()) + .startAfter(randomCol.doc('doc3')) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count1, 4); + + const count2 = randomCol + .orderBy(FieldPath.documentId()) + .startAt(randomCol.doc('doc3')) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count2, 5); + + const count3 = randomCol + .orderBy(FieldPath.documentId()) + .endAt(randomCol.doc('doc3')) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count3, 3); + + const count4 = randomCol + .orderBy(FieldPath.documentId()) + .endBefore(randomCol.doc('doc3')) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count4, 2); + + const count5 = randomCol + .offset(6) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count5, 1); + }); + + it('counts with startAt, endAt and offset with DocumentSnapshot cursor', async () => { + await randomCol.doc('doc1').set({foo: 'bar'}); + await randomCol.doc('doc2').set({foo: 'bar'}); + await randomCol.doc('doc3').set({foo: 'bar'}); + await randomCol.doc('doc4').set({foo: 'bar'}); + await 
randomCol.doc('doc5').set({foo: 'bar'}); + await randomCol.doc('doc6').set({foo: 'bar'}); + await randomCol.doc('doc7').set({foo: 'bar'}); + const docSnap = await randomCol.doc('doc3').get(); + + const count1 = randomCol + .startAfter(docSnap) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count1, 4); + + const count2 = randomCol + .startAt(docSnap) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count2, 5); + + const count3 = randomCol + .endAt(docSnap) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count3, 3); + + const count4 = randomCol + .endBefore(docSnap) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count4, 2); + + const count5 = randomCol + .offset(6) + .aggregate({count: AggregateField.count()}); + await runQueryAndExpectCount(count5, 1); + }); + } +}); + +describe('Aggregation queries', () => { + let firestore: Firestore; + let col: CollectionReference; + + beforeEach(() => { + col = getTestRoot(); + firestore = col.firestore; + }); + + afterEach(() => verifyInstance(firestore)); + + async function addTestDocs(docs: { + [key: string]: DocumentData; + }): Promise[]> { + const sets: Array> = []; + Object.keys(docs).forEach(key => { + sets.push(col.doc(key).set(docs[key])); + }); + return Promise.all(sets); + } + + it('can run count query using aggregate api', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA'}, + b: {author: 'authorB', title: 'titleB'}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + count: AggregateField.count(), + }) + .get(); + expect(snapshot.data().count).to.equal(2); + }); + + it('can alias aggregations using aggregate api', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA'}, + b: {author: 'authorB', title: 'titleB'}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + foo: AggregateField.count(), + 'with.dots': AggregateField.count(), + }) + .get(); + expect(snapshot.data().foo).to.equal(2); + expect(snapshot.data()['with.dots']).to.equal(2); + }); + + it('allows special chars in aliases when using aggregate api', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA'}, + b: {author: 'authorB', title: 'titleB'}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + 'with-un/su+pp[or]ted': AggregateField.count(), + }) + .get(); + + expect(snapshot.data()['with-un/su+pp[or]ted']).to.equal(2); + }); + + it('allows backticks in aliases when using aggregate api', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA'}, + b: {author: 'authorB', title: 'titleB'}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + '`with-un/su+pp[or]ted`': AggregateField.count(), + }) + .get(); + + expect(snapshot.data()['`with-un/su+pp[or]ted`']).to.equal(2); + }); + + it('allows backslash in aliases when using aggregate api', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA'}, + b: {author: 'authorB', title: 'titleB'}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + 'with\\backshash\\es': AggregateField.count(), + }) + .get(); + + expect(snapshot.data()['with\\backshash\\es']).to.equal(2); + }); + + it('can get duplicate aggregations using aggregate api', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA'}, + b: {author: 'authorB', title: 'titleB'}, + }; + await 
addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + count: AggregateField.count(), + foo: AggregateField.count(), + }) + .get(); + expect(snapshot.data().foo).to.equal(2); + expect(snapshot.data().count).to.equal(2); + }); + + it("aggregate() doesn't use converter", async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA'}, + b: {author: 'authorB', title: 'titleB'}, + }; + const throwingConverter = { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + toFirestore(obj: never): DocumentData { + throw new Error('should never be called'); + }, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + fromFirestore(snapshot: QueryDocumentSnapshot): never { + throw new Error('should never be called'); + }, + }; + await addTestDocs(testDocs); + const query = col + .where('author', '==', 'authorA') + .withConverter(throwingConverter); + const snapshot = await query + .aggregate({ + count: AggregateField.count(), + }) + .get(); + expect(snapshot.data().count).to.equal(1); + }); + + it('aggregate query supports collection groups', async () => { + const collectionGroupId = autoId(); + const docPaths = [ + `${collectionGroupId}/cg-doc1`, + `abc/123/${collectionGroupId}/cg-doc2`, + `zzz${collectionGroupId}/cg-doc3`, + `abc/123/zzz${collectionGroupId}/cg-doc4`, + `abc/123/zzz/${collectionGroupId}`, + ]; + const batch = firestore.batch(); + for (const docPath of docPaths) { + batch.set(firestore.doc(docPath), {x: 1}); + } + await batch.commit(); + const snapshot = await firestore + .collectionGroup(collectionGroupId) + .aggregate({ + count: AggregateField.count(), + }) + .get(); + expect(snapshot.data().count).to.equal(2); + }); + + it('aggregate() fails if firestore is terminated', async () => { + await firestore.terminate(); + await expect( + col.aggregate({count: AggregateField.count()}).get() + ).to.eventually.be.rejectedWith('The client has already been terminated'); + }); + + it("terminate doesn't crash when there is aggregate query in flight", async () => { + col.aggregate({count: AggregateField.count()}).get(); + await firestore.terminate(); + }); + + // Only verify the error message for missing indexes when running against + // production, since the Firestore Emulator does not require index creation + // and will, therefore, never fail in this situation. + // eslint-disable-next-line no-restricted-properties + (process.env.FIRESTORE_EMULATOR_HOST === undefined ? 
it.skip : it)( + 'aggregate() error message is good if missing index', + async () => { + const query = col.where('key1', '==', 42).where('key2', '<', 42); + await expect( + query.aggregate({count: AggregateField.count()}).get() + ).to.be.eventually.rejectedWith( + /index.*https:\/\/console\.firebase\.google\.com/ + ); + } + ); + + describe('Aggregation queries - sum / average using aggregate() api', () => { + it('can run sum query', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA', pages: 100}, + b: {author: 'authorB', title: 'titleB', pages: 50}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({totalPages: AggregateField.sum('pages')}) + .get(); + expect(snapshot.data().totalPages).to.equal(150); + }); + + it('can run average query', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA', pages: 100}, + b: {author: 'authorB', title: 'titleB', pages: 50}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({averagePages: AggregateField.average('pages')}) + .get(); + expect(snapshot.data().averagePages).to.equal(75); + }); + + it('can get multiple aggregations', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA', pages: 100}, + b: {author: 'authorB', title: 'titleB', pages: 50}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalPages: AggregateField.sum('pages'), + averagePages: AggregateField.average('pages'), + count: AggregateField.count(), + }) + .get(); + expect(snapshot.data().totalPages).to.equal(150); + expect(snapshot.data().averagePages).to.equal(75); + expect(snapshot.data().count).to.equal(2); + }); + + it('can get duplicate aggregations', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA', pages: 100}, + b: {author: 'authorB', title: 'titleB', pages: 50}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalPages: AggregateField.sum('pages'), + averagePages: AggregateField.average('pages'), + totalPagesX: AggregateField.sum('pages'), + averagePagesY: AggregateField.average('pages'), + }) + .get(); + expect(snapshot.data().totalPages).to.equal(150); + expect(snapshot.data().averagePages).to.equal(75); + expect(snapshot.data().totalPagesX).to.equal(150); + expect(snapshot.data().averagePagesY).to.equal(75); + }); + + it('can perform max (5) aggregations', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA', pages: 100}, + b: {author: 'authorB', title: 'titleB', pages: 50}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalPages: AggregateField.sum('pages'), + averagePages: AggregateField.average('pages'), + count: AggregateField.count(), + totalPagesX: AggregateField.sum('pages'), + averagePagesY: AggregateField.average('pages'), + }) + .get(); + expect(snapshot.data().totalPages).to.equal(150); + expect(snapshot.data().averagePages).to.equal(75); + expect(snapshot.data().count).to.equal(2); + expect(snapshot.data().totalPagesX).to.equal(150); + expect(snapshot.data().averagePagesY).to.equal(75); + }); + + it('fails when exceeding the max (5) aggregations', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA', pages: 100}, + b: {author: 'authorB', title: 'titleB', pages: 50}, + }; + await addTestDocs(testDocs); + const aggregateQuery = await col.aggregate({ + totalPages: AggregateField.sum('pages'), + averagePages: AggregateField.average('pages'), + count: 
AggregateField.count(), + totalPagesX: AggregateField.sum('pages'), + averagePagesY: AggregateField.average('pages'), + countZ: AggregateField.count(), + }); + await expect(aggregateQuery.get()).to.eventually.be.rejectedWith( + /maximum number of aggregations/ + ); + }); + + it('returns undefined when getting the result of an unrequested aggregation', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: 3, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + averageRating: AggregateField.average('rating'), + }) + .get(); + // @ts-expect-error expected error as 'totalPages' is not in the AggregateSpec. + const totalPages = snapshot.data().totalPages; + expect(totalPages).to.equal(undefined); + }); + + it('performs sum that results in float', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4.5, + }, + c: { + author: 'authorB', + title: 'titleB', + pages: 150, + year: 2021, + rating: 3, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(12.5); + }); + + it('performs sum of ints and floats that results in an int', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4.5, + }, + c: { + author: 'authorB', + title: 'titleB', + pages: 150, + year: 2021, + rating: 3.5, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(13); + }); + + it('performs sum that overflows max int', async () => { + // A large value that will be represented as a Long on the server, but + // doubling (2x) this value must overflow Long and force the result to be + // represented as a Double type on the server. 
+ const maxLong = Math.pow(2, 63) - 1; + + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: maxLong, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: maxLong, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(maxLong + maxLong); + }); + + it('performs sum that can overflow integer values during accumulation', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: Number.MAX_SAFE_INTEGER, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 1, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 50, + year: 2020, + rating: -101, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal( + Number.MAX_SAFE_INTEGER - 100 + ); + }); + + it('performs sum that is negative', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: Number.MAX_SAFE_INTEGER, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: Number.MIN_SAFE_INTEGER, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 50, + year: 2020, + rating: -101, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: -10000, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(-10101); + }); + + it('performs sum that is positive infinity', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: Number.MAX_VALUE, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: Number.MAX_VALUE, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(Number.POSITIVE_INFINITY); + }); + + it('performs sum that is positive infinity v2', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: Number.MAX_VALUE, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 1e293, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(Number.POSITIVE_INFINITY); + }); + + it('performs sum that is negative infinity', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: -Number.MAX_VALUE, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: -Number.MAX_VALUE, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(Number.NEGATIVE_INFINITY); + }); + + it('performs sum that is valid but could overflow during aggregation', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + 
rating: Number.MAX_VALUE, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: Number.MAX_VALUE, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: -Number.MAX_VALUE, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: -Number.MAX_VALUE, + }, + e: { + author: 'authorE', + title: 'titleE', + pages: 100, + year: 1980, + rating: Number.MAX_VALUE, + }, + f: { + author: 'authorF', + title: 'titleF', + pages: 50, + year: 2020, + rating: -Number.MAX_VALUE, + }, + g: { + author: 'authorG', + title: 'titleG', + pages: 100, + year: 1980, + rating: -Number.MAX_VALUE, + }, + h: { + author: 'authorH', + title: 'titleDH', + pages: 50, + year: 2020, + rating: Number.MAX_VALUE, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.oneOf([ + 0, + Number.NEGATIVE_INFINITY, + Number.POSITIVE_INFINITY, + ]); + }); + + it('performs sum that includes NaN', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: Number.NaN, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.be.NaN; + }); + + it('performs sum over a result set of zero documents', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 4, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: 3, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .where('rating', '>', 4) + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(0); + }); + + it('performs sum only on numeric fields', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: '3', + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 1, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + countOfDocs: AggregateField.count(), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(10); + expect(snapshot.data().countOfDocs).to.equal(4); + }); + + it('performs sum of min IEEE754', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: Number.MIN_VALUE, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(Number.MIN_VALUE); + }); + + it('performs average of ints 
that results in an int', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 10, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 5, + }, + c: { + author: 'authorB', + title: 'titleB', + pages: 150, + year: 2021, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(5); + }); + + it('performs average of floats that results in an int', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 10.5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 9.5, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(10); + }); + + it('performs average of floats and ints that results in an int', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 10, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 9.5, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 150, + year: 2021, + rating: 10.5, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(10); + }); + + it('performs average of float that results in float', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5.5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4.5, + }, + c: { + author: 'authorB', + title: 'titleB', + pages: 150, + year: 2021, + rating: 3.5, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(4.5); + }); + + it('performs average of floats and ints that results in a float', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 8.6, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 9, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 150, + year: 2021, + rating: 10, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.be.approximately(9.2, 0.0000001); + }); + + it('performs average of ints that results in a float', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 10, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 9, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(9.5); + }); + + it('performs average causing underflow', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: Number.MIN_VALUE, + }, + b: { + author: 'authorB', + title: 'titleB', + 
pages: 50, + year: 2020, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(0); + }); + + it('performs average of min IEEE754', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: Number.MIN_VALUE, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(Number.MIN_VALUE); + }); + + it('performs average that overflows IEEE754 during accumulation', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: Number.MAX_VALUE, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: Number.MAX_VALUE, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(Number.POSITIVE_INFINITY); + }); + + it('performs average that includes NaN', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: Number.NaN, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.be.NaN; + }); + + it('performs average over a result set of zero documents', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 4, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: 3, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .where('rating', '>', 4) + .aggregate({ + averageRating: AggregateField.average('rating'), + }) + .get(); + expect(snapshot.data().averageRating).to.be.null; + }); + + it('performs average only on numeric fields', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: '3', + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 6, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + averageRating: AggregateField.average('rating'), + countOfDocs: AggregateField.count(), + }) + .get(); + expect(snapshot.data().averageRating).to.equal(5); + expect(snapshot.data().countOfDocs).to.equal(4); + }); + + it('allows aliases with length greater than 1500 bytes', async () => { + // Alias string length is bytes of UTF-8 encoded alias + 1; + let longAlias = ''; + for (let i = 0; i < 1500; i++) { + longAlias += '0123456789'; + } + + const 
longerAlias = longAlias + longAlias; + + const testDocs = { + a: {num: 3}, + b: {num: 5}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + [longAlias]: AggregateField.count(), + [longerAlias]: AggregateField.count(), + }) + .get(); + expect(snapshot.data()[longAlias]).to.equal(2); + expect(snapshot.data()[longerAlias]).to.equal(2); + }); + + it('performs aggregations on nested map values', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + metadata: {pages: 100, rating: {critic: 2, user: 5}}, + }, + b: { + author: 'authorB', + title: 'titleB', + metadata: {pages: 50, rating: {critic: 4, user: 4}}, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalPages: AggregateField.sum('metadata.pages'), + averagePages: AggregateField.average('metadata.pages'), + count: AggregateField.count(), + }) + .get(); + expect(snapshot.data().totalPages).to.equal(150); + expect(snapshot.data().averagePages).to.equal(75); + expect(snapshot.data().count).to.equal(2); + }); + + it('performs aggregates when using `in` operator', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: 3, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .where('rating', 'in', [5, 3]) + .aggregate({ + totalRating: AggregateField.sum('rating'), + averageRating: AggregateField.average('rating'), + countOfDocs: AggregateField.count(), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(8); + expect(snapshot.data().averageRating).to.equal(4); + expect(snapshot.data().countOfDocs).to.equal(2); + }); + }); + + // Only run tests that require indexes against the emulator, because we don't + // have a way to dynamically create the indexes when running the tests. + (process.env.FIRESTORE_EMULATOR_HOST ? 
describe : describe.skip)( + 'queries requiring indexes', + () => { + it('aggregate query supports collection groups', async () => { + const collectionGroupId = autoId(); + const docPaths = [ + `${collectionGroupId}/cg-doc1`, + `abc/123/${collectionGroupId}/cg-doc2`, + `zzz${collectionGroupId}/cg-doc3`, + `abc/123/zzz${collectionGroupId}/cg-doc4`, + `abc/123/zzz/${collectionGroupId}`, + ]; + const batch = firestore.batch(); + for (const docPath of docPaths) { + batch.set(firestore.doc(docPath), {x: 2}); + } + await batch.commit(); + const snapshot = await firestore + .collectionGroup(collectionGroupId) + .aggregate({ + count: AggregateField.count(), + sum: AggregateField.sum('x'), + avg: AggregateField.average('x'), + }) + .get(); + expect(snapshot.data().count).to.equal(2); + expect(snapshot.data().sum).to.equal(4); + expect(snapshot.data().avg).to.equal(2); + }); + + it('performs aggregations on documents with all aggregated fields', async () => { + const testDocs = { + a: {author: 'authorA', title: 'titleA', pages: 100, year: 1980}, + b: {author: 'authorB', title: 'titleB', pages: 50, year: 2020}, + c: {author: 'authorC', title: 'titleC', pages: 150, year: 2021}, + d: {author: 'authorD', title: 'titleD', pages: 50}, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalPages: AggregateField.sum('pages'), + averagePages: AggregateField.average('pages'), + averageYear: AggregateField.average('year'), + count: AggregateField.count(), + }) + .get(); + expect(snapshot.data().totalPages).to.equal(300); + expect(snapshot.data().averagePages).to.equal(100); + expect(snapshot.data().averageYear).to.equal(2007); + expect(snapshot.data().count).to.equal(3); + }); + + it('performs aggregates on multiple fields where one aggregate could cause short-circuit due to NaN', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: 5, + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: 4, + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: Number.NaN, + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: 0, + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .aggregate({ + totalRating: AggregateField.sum('rating'), + totalPages: AggregateField.sum('pages'), + averageYear: AggregateField.average('year'), + }) + .get(); + expect(snapshot.data().totalRating).to.be.NaN; + expect(snapshot.data().totalPages).to.equal(300); + expect(snapshot.data().averageYear).to.equal(2000); + }); + + it('performs aggregates when using `array-contains-any` operator', async () => { + const testDocs = { + a: { + author: 'authorA', + title: 'titleA', + pages: 100, + year: 1980, + rating: [5, 1000], + }, + b: { + author: 'authorB', + title: 'titleB', + pages: 50, + year: 2020, + rating: [4], + }, + c: { + author: 'authorC', + title: 'titleC', + pages: 100, + year: 1980, + rating: [2222, 3], + }, + d: { + author: 'authorD', + title: 'titleD', + pages: 50, + year: 2020, + rating: [0], + }, + }; + await addTestDocs(testDocs); + const snapshot = await col + .where('rating', 'array-contains-any', [5, 3]) + .aggregate({ + totalRating: AggregateField.sum('rating'), + averageRating: AggregateField.average('rating'), + totalPages: AggregateField.sum('pages'), + averagePages: AggregateField.average('pages'), + countOfDocs: AggregateField.count(), + }) + .get(); + expect(snapshot.data().totalRating).to.equal(0); + 
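+        // 'rating' holds array values, which are non-numeric, so the matching
+        // documents contribute no numeric values: the sum aggregates to 0 and
+        // the average to null.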
expect(snapshot.data().averageRating).to.be.null; + expect(snapshot.data().totalPages).to.equal(200); + expect(snapshot.data().averagePages).to.equal(100); + expect(snapshot.data().countOfDocs).to.equal(2); + }); + } + ); + + describe('Aggregation queries - orderBy Normalization Checks', () => { + async function addTwoDocs(): Promise { + const testDocs = { + a: {author: 'authorA', title: 'titleA', num: 5, foo: 1}, + b: {author: 'authorB', title: 'titleB', num: 7, foo: 2}, + }; + await addTestDocs(testDocs); + } + + it('no filter, no orderBy, no cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(12); + }); + + it('equality filter, no orderBy, no cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .where('num', '==', 5) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(5); + }); + + it('inequality filter, no orderBy, no cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .where('num', '>', 5) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + it('no filter, explicit orderBy, no cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .orderBy('num') + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(12); + }); + + it('equality filter, explicit orderBy, no cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .where('num', '==', 5) + .orderBy('num') + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(5); + }); + + it('inequality filter, explicit orderBy, no cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .where('num', '>', 5) + .orderBy('num') + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + it('no filter, explicit orderBy, field value cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .orderBy('num') + .startAfter(5) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // This is expected to fail because it requires the `__name__, num` index. + // SDK sends: orderBy __name__ + it.skip('no filter, explicit orderBy, document reference cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .orderBy(FieldPath.documentId()) + .startAfter(col.doc('a')) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // This is expected to fail because it requires the `__name__, num` index. + // SDK sends: orderBy __name__ + it.skip('no filter, no orderBy, document reference cursor', async () => { + await addTwoDocs(); + const docSnap = await col.doc('a').get(); + const snapshot = await col + .startAfter(docSnap) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // This is expected to fail because it requires the `foo, __name__, num` index. 
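+    // (The aggregated field 'num' is what adds the extra index requirement;
+    // compare with the passing variant below, where the orderBy field matches
+    // the aggregation field.)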
+ // SDK sends: orderBy foo, __name__ + it.skip('no filter, explicit orderBy, document reference cursor', async () => { + await addTwoDocs(); + const docSnap = await col.doc('a').get(); + const snapshot = await col + .orderBy('foo') + .startAfter(docSnap) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // This just happens to work because the orderBy field matches the aggregation field. + // SDK sends: orderBy num, __name__ + it('no filter, explicit orderBy, document reference cursor', async () => { + await addTwoDocs(); + const docSnap = await col.doc('a').get(); + const snapshot = await col + .orderBy('num') + .startAfter(docSnap) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + it('equality filter, explicit orderBy, field value cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .where('num', '==', 5) + .orderBy('num') + .startAt(5) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(5); + }); + + it('inequality filter, explicit orderBy, field value cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .where('num', '>', 5) + .orderBy('num') + .startAt(5) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // This is expected to fail because it requires the `__name__, num` index. + // SDK sends: orderBy __name__ + it.skip('equality filter, explicit orderBy, document reference cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .where('num', '==', 7) + .orderBy(FieldPath.documentId()) + .startAfter(col.doc('a')) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // Full orderBy is provided. + // SDK sends: orderBy num, __name__ + it('inequality filter, explicit orderBy, document reference cursor', async () => { + await addTwoDocs(); + const snapshot = await col + .where('num', '>', 5) + .orderBy('num') + .orderBy(FieldPath.documentId()) + .startAfter(5, col.doc('a')) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // This is expected to fail because it requires the `__name__, num` index. + // SDK sends: orderBy __name__ + it.skip('equality filter, no orderBy, document reference cursor', async () => { + await addTwoDocs(); + const docSnap = await col.doc('a').get(); + const snapshot = await col + .where('num', '==', 7) + .startAfter(docSnap) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // This just happens to work because the orderBy field matches the aggregation field. + // SDK sends: orderBy num, __name__ + it('inequality filter, no orderBy, document reference cursor', async () => { + await addTwoDocs(); + const docSnap = await col.doc('a').get(); + const snapshot = await col + .where('num', '>', 0) + .startAfter(docSnap) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + + // This is expected to fail because it requires the `foo, __name__, num` index. 
+ // SDK sends: orderBy foo, __name__ + it.skip('inequality filter, no orderBy, document reference cursor 2', async () => { + await addTwoDocs(); + const docSnap = await col.doc('a').get(); + const snapshot = await col + .where('foo', '>', 0) + .startAfter(docSnap) + .aggregate({sum: AggregateField.sum('num')}) + .get(); + expect(snapshot.data().sum).to.equal(7); + }); + }); +}); + describe('Transaction class', () => { let firestore: Firestore; let randomCol: CollectionReference; diff --git a/dev/test/aggregate.ts b/dev/test/aggregate.ts new file mode 100644 index 000000000..fd2d89485 --- /dev/null +++ b/dev/test/aggregate.ts @@ -0,0 +1,47 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {expect, use} from 'chai'; +import * as chaiAsPromised from 'chai-as-promised'; +import {AggregateField} from '../src/aggregate'; + +use(chaiAsPromised); + +describe('aggregate field equality checks', () => { + it('equates two equal aggregate fields', () => { + expect(AggregateField.count().isEqual(AggregateField.count())).to.be.true; + expect(AggregateField.sum('foo').isEqual(AggregateField.sum('foo'))).to.be + .true; + expect(AggregateField.average('bar').isEqual(AggregateField.average('bar'))) + .to.be.true; + expect(AggregateField.sum('foo.bar').isEqual(AggregateField.sum('foo.bar'))) + .to.be.true; + expect( + AggregateField.average('bar.baz').isEqual( + AggregateField.average('bar.baz') + ) + ).to.be.true; + }); + + it('differentiates two different aggregate fields', () => { + expect(AggregateField.sum('foo').isEqual(AggregateField.sum('bar'))).to.be + .false; + expect(AggregateField.average('foo').isEqual(AggregateField.average('bar'))) + .to.be.false; + expect(AggregateField.average('foo').isEqual(AggregateField.sum('foo'))).to + .be.false; + expect(AggregateField.sum('foo').isEqual(AggregateField.average('foo'))).to + .be.false; + }); +}); diff --git a/dev/test/aggregateQuery.ts b/dev/test/aggregateQuery.ts index db0eb7c82..ff81a254d 100644 --- a/dev/test/aggregateQuery.ts +++ b/dev/test/aggregateQuery.ts @@ -77,10 +77,12 @@ describe('aggregate query interface', () => { }); it('returns results', async () => { + // Here we are mocking the response from the server. The client uses + // `aggregate_$i` aliases in requests and will receive these in responses. const result: api.IRunAggregationQueryResponse = { result: { aggregateFields: { - count: {integerValue: '99'}, + aggregate_0: {integerValue: '99'}, }, }, readTime: {seconds: 5, nanos: 6}, @@ -100,10 +102,12 @@ describe('aggregate query interface', () => { }); it('successful return without ending the stream on get()', async () => { + // Here we are mocking the response from the server. The client uses + // `aggregate_$i` aliases in requests and will receive these in responses. 
const result: api.IRunAggregationQueryResponse = { result: { aggregateFields: { - count: {integerValue: '99'}, + aggregate_0: {integerValue: '99'}, }, }, readTime: {seconds: 5, nanos: 6}, diff --git a/types/firestore.d.ts b/types/firestore.d.ts index 0992f4145..ce6c9adb2 100644 --- a/types/firestore.d.ts +++ b/types/firestore.d.ts @@ -1801,6 +1801,39 @@ declare namespace FirebaseFirestore { DbModelType >; + /** + * Returns a query that can perform the given aggregations. + * + * The returned query, when executed, calculates the specified aggregations + * over the documents in the result set of this query, without actually + * downloading the documents. + * + * Using the returned query to perform aggregations is efficient because only + * the final aggregation values, not the documents' data, is downloaded. The + * returned query can even perform aggregations of the documents if the result set + * would be prohibitively large to download entirely (e.g. thousands of documents). + * + * @param aggregateSpec An `AggregateSpec` object that specifies the aggregates + * to perform over the result set. The AggregateSpec specifies aliases for each + * aggregate, which can be used to retrieve the aggregate result. + * @example + * ```typescript + * const aggregateQuery = col.aggregate(query, { + * countOfDocs: count(), + * totalHours: sum('hours'), + * averageScore: average('score') + * }); + * + * const aggregateSnapshot = await aggregateQuery.get(); + * const countOfDocs: number = aggregateSnapshot.data().countOfDocs; + * const totalHours: number = aggregateSnapshot.data().totalHours; + * const averageScore: number | null = aggregateSnapshot.data().averageScore; + * ``` + */ + aggregate( + aggregateSpec: T + ): AggregateQuery; + /** * Returns true if this `Query` is equal to the provided one. * @@ -2157,18 +2190,64 @@ declare namespace FirebaseFirestore { toQuery(): Query; } + /** + * Union type representing the aggregate type to be performed. + */ + export type AggregateType = 'count' | 'avg' | 'sum'; + + /** + * The union of all `AggregateField` types that are supported by Firestore. + */ + export type AggregateFieldType = + | ReturnType + | ReturnType + | ReturnType; + /** * Represents an aggregation that can be performed by Firestore. */ // eslint-disable-next-line @typescript-eslint/no-unused-vars export class AggregateField { private constructor(); - } - /** - * The union of all `AggregateField` types that are supported by Firestore. - */ - export type AggregateFieldType = AggregateField; + /** A type string to uniquely identify instances of this class. */ + readonly type = 'AggregateField'; + + /** The kind of aggregation performed by this AggregateField. */ + public readonly aggregateType: AggregateType; + + /** + * Compares this object with the given object for equality. + * + * This object is considered "equal" to the other object if and only if + * `other` performs the same kind of aggregation on the same field (if any). + * + * @param other The object to compare to this object for equality. + * @return `true` if this object is "equal" to the given object, as + * defined above, or `false` otherwise. + */ + isEqual(other: AggregateField): boolean; + + /** + * Create an AggregateField object that can be used to compute the count of + * documents in the result set of a query. + */ + static count(): AggregateField; + + /** + * Create an AggregateField object that can be used to compute the average of + * a specified field over a range of documents in the result set of a query. 
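+     *
+     * The result of an average aggregation is `null` when none of the
+     * aggregated documents contain a numeric value for the specified field
+     * (for example, when the query matches zero documents).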
+ * @param field Specifies the field to average across the result set. + */ + static average(field: string | FieldPath): AggregateField; + + /** + * Create an AggregateField object that can be used to compute the sum of + * a specified field over a range of documents in the result set of a query. + * @param field Specifies the field to sum across the result set. + */ + static sum(field: string | FieldPath): AggregateField; + } /** * A type whose property values are all `AggregateField` objects.