} struct.\n */\nfunction __PRIVATE_toInt32Proto(e, t) {\n return e.useProto3Json || __PRIVATE_isNullOrUndefined(t) ? t : {\n value: t\n };\n}\n\n/**\n * Returns a number (or null) from a google.protobuf.Int32Value proto.\n */\n/**\n * Returns a value for a Date that's appropriate to put into a proto.\n */\nfunction toTimestamp(e, t) {\n if (e.useProto3Json) {\n return `${new Date(1e3 * t.seconds).toISOString().replace(/\\.\\d*/, \"\").replace(\"Z\", \"\")}.${(\"000000000\" + t.nanoseconds).slice(-9)}Z`;\n }\n return {\n seconds: \"\" + t.seconds,\n nanos: t.nanoseconds\n };\n}\n\n/**\n * Returns a value for bytes that's appropriate to put in a proto.\n *\n * Visible for testing.\n */\nfunction __PRIVATE_toBytes(e, t) {\n return e.useProto3Json ? t.toBase64() : t.toUint8Array();\n}\n\n/**\n * Returns a ByteString based on the proto string value.\n */\nfunction __PRIVATE_toVersion(e, t) {\n return toTimestamp(e, t.toTimestamp());\n}\nfunction __PRIVATE_fromVersion(e) {\n return __PRIVATE_hardAssert(!!e), SnapshotVersion.fromTimestamp(function fromTimestamp(e) {\n const t = __PRIVATE_normalizeTimestamp(e);\n return new Timestamp(t.seconds, t.nanos);\n }(e));\n}\nfunction __PRIVATE_toResourceName(e, t) {\n return __PRIVATE_toResourcePath(e, t).canonicalString();\n}\nfunction __PRIVATE_toResourcePath(e, t) {\n const n = function __PRIVATE_fullyQualifiedPrefixPath(e) {\n return new ResourcePath([\"projects\", e.projectId, \"databases\", e.database]);\n }(e).child(\"documents\");\n return void 0 === t ? n : n.child(t);\n}\nfunction __PRIVATE_fromResourceName(e) {\n const t = ResourcePath.fromString(e);\n return __PRIVATE_hardAssert(__PRIVATE_isValidResourceName(t)), t;\n}\nfunction __PRIVATE_toName(e, t) {\n return __PRIVATE_toResourceName(e.databaseId, t.path);\n}\nfunction fromName(e, t) {\n const n = __PRIVATE_fromResourceName(t);\n if (n.get(1) !== e.databaseId.projectId) throw new FirestoreError(C.INVALID_ARGUMENT, \"Tried to deserialize key from different project: \" + n.get(1) + \" vs \" + e.databaseId.projectId);\n if (n.get(3) !== e.databaseId.database) throw new FirestoreError(C.INVALID_ARGUMENT, \"Tried to deserialize key from different database: \" + n.get(3) + \" vs \" + e.databaseId.database);\n return new DocumentKey(__PRIVATE_extractLocalPathFromResourceName(n));\n}\nfunction __PRIVATE_toQueryPath(e, t) {\n return __PRIVATE_toResourceName(e.databaseId, t);\n}\nfunction __PRIVATE_fromQueryPath(e) {\n const t = __PRIVATE_fromResourceName(e);\n // In v1beta1 queries for collections at the root did not have a trailing\n // \"/documents\". In v1 all resource paths contain \"/documents\". Preserve the\n // ability to read the v1beta1 form for compatibility with queries persisted\n // in the local target cache.\n return 4 === t.length ? ResourcePath.emptyPath() : __PRIVATE_extractLocalPathFromResourceName(t);\n}\nfunction __PRIVATE_getEncodedDatabaseId(e) {\n return new ResourcePath([\"projects\", e.databaseId.projectId, \"databases\", e.databaseId.database]).canonicalString();\n}\nfunction __PRIVATE_extractLocalPathFromResourceName(e) {\n return __PRIVATE_hardAssert(e.length > 4 && \"documents\" === e.get(4)), e.popFirst(5);\n}\n\n/** Creates a Document proto from key and fields (but no create/update time) */\nfunction __PRIVATE_toMutationDocument(e, t, n) {\n return {\n name: __PRIVATE_toName(e, t),\n fields: n.value.mapValue.fields\n };\n}\nfunction __PRIVATE_fromDocument(e, t, n) {\n const r = fromName(e, t.name),\n i = __PRIVATE_fromVersion(t.updateTime),\n s = t.createTime ? 
__PRIVATE_fromVersion(t.createTime) : SnapshotVersion.min(),\n o = new ObjectValue({\n mapValue: {\n fields: t.fields\n }\n }),\n _ = MutableDocument.newFoundDocument(r, i, s, o);\n return n && _.setHasCommittedMutations(), n ? _.setHasCommittedMutations() : _;\n}\nfunction __PRIVATE_fromBatchGetDocumentsResponse(e, t) {\n return \"found\" in t ? function __PRIVATE_fromFound(e, t) {\n __PRIVATE_hardAssert(!!t.found), t.found.name, t.found.updateTime;\n const n = fromName(e, t.found.name),\n r = __PRIVATE_fromVersion(t.found.updateTime),\n i = t.found.createTime ? __PRIVATE_fromVersion(t.found.createTime) : SnapshotVersion.min(),\n s = new ObjectValue({\n mapValue: {\n fields: t.found.fields\n }\n });\n return MutableDocument.newFoundDocument(n, r, i, s);\n }(e, t) : \"missing\" in t ? function __PRIVATE_fromMissing(e, t) {\n __PRIVATE_hardAssert(!!t.missing), __PRIVATE_hardAssert(!!t.readTime);\n const n = fromName(e, t.missing),\n r = __PRIVATE_fromVersion(t.readTime);\n return MutableDocument.newNoDocument(n, r);\n }(e, t) : fail();\n}\nfunction __PRIVATE_fromWatchChange(e, t) {\n let n;\n if (\"targetChange\" in t) {\n t.targetChange;\n // proto3 default value is unset in JSON (undefined), so use 'NO_CHANGE'\n // if unset\n const r = function __PRIVATE_fromWatchTargetChangeState(e) {\n return \"NO_CHANGE\" === e ? 0 /* WatchTargetChangeState.NoChange */ : \"ADD\" === e ? 1 /* WatchTargetChangeState.Added */ : \"REMOVE\" === e ? 2 /* WatchTargetChangeState.Removed */ : \"CURRENT\" === e ? 3 /* WatchTargetChangeState.Current */ : \"RESET\" === e ? 4 /* WatchTargetChangeState.Reset */ : fail();\n }(t.targetChange.targetChangeType || \"NO_CHANGE\"),\n i = t.targetChange.targetIds || [],\n s = function __PRIVATE_fromBytes(e, t) {\n return e.useProto3Json ? (__PRIVATE_hardAssert(void 0 === t || \"string\" == typeof t), ByteString.fromBase64String(t || \"\")) : (__PRIVATE_hardAssert(void 0 === t ||\n // Check if the value is an instance of both Buffer and Uint8Array,\n // despite the fact that Buffer extends Uint8Array. In some\n // environments, such as jsdom, the prototype chain of Buffer\n // does not indicate that it extends Uint8Array.\n t instanceof Buffer || t instanceof Uint8Array), ByteString.fromUint8Array(t || new Uint8Array()));\n }(e, t.targetChange.resumeToken),\n o = t.targetChange.cause,\n _ = o && function __PRIVATE_fromRpcStatus(e) {\n const t = void 0 === e.code ? C.UNKNOWN : __PRIVATE_mapCodeFromRpcCode(e.code);\n return new FirestoreError(t, e.message || \"\");\n }(o);\n n = new __PRIVATE_WatchTargetChange(r, i, s, _ || null);\n } else if (\"documentChange\" in t) {\n t.documentChange;\n const r = t.documentChange;\n r.document, r.document.name, r.document.updateTime;\n const i = fromName(e, r.document.name),\n s = __PRIVATE_fromVersion(r.document.updateTime),\n o = r.document.createTime ? __PRIVATE_fromVersion(r.document.createTime) : SnapshotVersion.min(),\n _ = new ObjectValue({\n mapValue: {\n fields: r.document.fields\n }\n }),\n a = MutableDocument.newFoundDocument(i, s, o, _),\n u = r.targetIds || [],\n c = r.removedTargetIds || [];\n n = new __PRIVATE_DocumentWatchChange(u, c, a.key, a);\n } else if (\"documentDelete\" in t) {\n t.documentDelete;\n const r = t.documentDelete;\n r.document;\n const i = fromName(e, r.document),\n s = r.readTime ? 
__PRIVATE_fromVersion(r.readTime) : SnapshotVersion.min(),\n o = MutableDocument.newNoDocument(i, s),\n _ = r.removedTargetIds || [];\n n = new __PRIVATE_DocumentWatchChange([], _, o.key, o);\n } else if (\"documentRemove\" in t) {\n t.documentRemove;\n const r = t.documentRemove;\n r.document;\n const i = fromName(e, r.document),\n s = r.removedTargetIds || [];\n n = new __PRIVATE_DocumentWatchChange([], s, i, null);\n } else {\n if (!(\"filter\" in t)) return fail();\n {\n t.filter;\n const e = t.filter;\n e.targetId;\n const {\n count: r = 0,\n unchangedNames: i\n } = e,\n s = new ExistenceFilter(r, i),\n o = e.targetId;\n n = new __PRIVATE_ExistenceFilterChange(o, s);\n }\n }\n return n;\n}\nfunction toMutation(e, t) {\n let n;\n if (t instanceof __PRIVATE_SetMutation) n = {\n update: __PRIVATE_toMutationDocument(e, t.key, t.value)\n };else if (t instanceof __PRIVATE_DeleteMutation) n = {\n delete: __PRIVATE_toName(e, t.key)\n };else if (t instanceof __PRIVATE_PatchMutation) n = {\n update: __PRIVATE_toMutationDocument(e, t.key, t.data),\n updateMask: __PRIVATE_toDocumentMask(t.fieldMask)\n };else {\n if (!(t instanceof __PRIVATE_VerifyMutation)) return fail();\n n = {\n verify: __PRIVATE_toName(e, t.key)\n };\n }\n return t.fieldTransforms.length > 0 && (n.updateTransforms = t.fieldTransforms.map(e => function __PRIVATE_toFieldTransform(e, t) {\n const n = t.transform;\n if (n instanceof __PRIVATE_ServerTimestampTransform) return {\n fieldPath: t.field.canonicalString(),\n setToServerValue: \"REQUEST_TIME\"\n };\n if (n instanceof __PRIVATE_ArrayUnionTransformOperation) return {\n fieldPath: t.field.canonicalString(),\n appendMissingElements: {\n values: n.elements\n }\n };\n if (n instanceof __PRIVATE_ArrayRemoveTransformOperation) return {\n fieldPath: t.field.canonicalString(),\n removeAllFromArray: {\n values: n.elements\n }\n };\n if (n instanceof __PRIVATE_NumericIncrementTransformOperation) return {\n fieldPath: t.field.canonicalString(),\n increment: n.Pe\n };\n throw fail();\n }(0, e))), t.precondition.isNone || (n.currentDocument = function __PRIVATE_toPrecondition(e, t) {\n return void 0 !== t.updateTime ? {\n updateTime: __PRIVATE_toVersion(e, t.updateTime)\n } : void 0 !== t.exists ? {\n exists: t.exists\n } : fail();\n }(e, t.precondition)), n;\n}\nfunction __PRIVATE_fromMutation(e, t) {\n const n = t.currentDocument ? function __PRIVATE_fromPrecondition(e) {\n return void 0 !== e.updateTime ? Precondition.updateTime(__PRIVATE_fromVersion(e.updateTime)) : void 0 !== e.exists ? Precondition.exists(e.exists) : Precondition.none();\n }(t.currentDocument) : Precondition.none(),\n r = t.updateTransforms ? t.updateTransforms.map(t => function __PRIVATE_fromFieldTransform(e, t) {\n let n = null;\n if (\"setToServerValue\" in t) __PRIVATE_hardAssert(\"REQUEST_TIME\" === t.setToServerValue), n = new __PRIVATE_ServerTimestampTransform();else if (\"appendMissingElements\" in t) {\n const e = t.appendMissingElements.values || [];\n n = new __PRIVATE_ArrayUnionTransformOperation(e);\n } else if (\"removeAllFromArray\" in t) {\n const e = t.removeAllFromArray.values || [];\n n = new __PRIVATE_ArrayRemoveTransformOperation(e);\n } else \"increment\" in t ? 
n = new __PRIVATE_NumericIncrementTransformOperation(e, t.increment) : fail();\n const r = FieldPath$1.fromServerFormat(t.fieldPath);\n return new FieldTransform(r, n);\n }(e, t)) : [];\n if (t.update) {\n t.update.name;\n const i = fromName(e, t.update.name),\n s = new ObjectValue({\n mapValue: {\n fields: t.update.fields\n }\n });\n if (t.updateMask) {\n const e = function __PRIVATE_fromDocumentMask(e) {\n const t = e.fieldPaths || [];\n return new FieldMask(t.map(e => FieldPath$1.fromServerFormat(e)));\n }(t.updateMask);\n return new __PRIVATE_PatchMutation(i, s, e, n, r);\n }\n return new __PRIVATE_SetMutation(i, s, n, r);\n }\n if (t.delete) {\n const r = fromName(e, t.delete);\n return new __PRIVATE_DeleteMutation(r, n);\n }\n if (t.verify) {\n const r = fromName(e, t.verify);\n return new __PRIVATE_VerifyMutation(r, n);\n }\n return fail();\n}\nfunction __PRIVATE_fromWriteResults(e, t) {\n return e && e.length > 0 ? (__PRIVATE_hardAssert(void 0 !== t), e.map(e => function __PRIVATE_fromWriteResult(e, t) {\n // NOTE: Deletes don't have an updateTime.\n let n = e.updateTime ? __PRIVATE_fromVersion(e.updateTime) : __PRIVATE_fromVersion(t);\n return n.isEqual(SnapshotVersion.min()) && (\n // The Firestore Emulator currently returns an update time of 0 for\n // deletes of non-existing documents (rather than null). This breaks the\n // test \"get deleted doc while offline with source=cache\" as NoDocuments\n // with version 0 are filtered by IndexedDb's RemoteDocumentCache.\n // TODO(#2149): Remove this when Emulator is fixed\n n = __PRIVATE_fromVersion(t)), new MutationResult(n, e.transformResults || []);\n }(e, t))) : [];\n}\nfunction __PRIVATE_toDocumentsTarget(e, t) {\n return {\n documents: [__PRIVATE_toQueryPath(e, t.path)]\n };\n}\nfunction __PRIVATE_toQueryTarget(e, t) {\n // Dissect the path into parent, collectionId, and optional key filter.\n const n = {\n structuredQuery: {}\n },\n r = t.path;\n let i;\n null !== t.collectionGroup ? (i = r, n.structuredQuery.from = [{\n collectionId: t.collectionGroup,\n allDescendants: !0\n }]) : (i = r.popLast(), n.structuredQuery.from = [{\n collectionId: r.lastSegment()\n }]), n.parent = __PRIVATE_toQueryPath(e, i);\n const s = function __PRIVATE_toFilters(e) {\n if (0 === e.length) return;\n return __PRIVATE_toFilter(CompositeFilter.create(e, \"and\" /* CompositeOperator.AND */));\n }(t.filters);\n s && (n.structuredQuery.where = s);\n const o = function __PRIVATE_toOrder(e) {\n if (0 === e.length) return;\n return e.map(e =>\n // visible for testing\n function __PRIVATE_toPropertyOrder(e) {\n return {\n field: __PRIVATE_toFieldPathReference(e.field),\n direction: __PRIVATE_toDirection(e.dir)\n };\n }(e));\n }(t.orderBy);\n o && (n.structuredQuery.orderBy = o);\n const _ = __PRIVATE_toInt32Proto(e, t.limit);\n return null !== _ && (n.structuredQuery.limit = _), t.startAt && (n.structuredQuery.startAt = function __PRIVATE_toStartAtCursor(e) {\n return {\n before: e.inclusive,\n values: e.position\n };\n }(t.startAt)), t.endAt && (n.structuredQuery.endAt = function __PRIVATE_toEndAtCursor(e) {\n return {\n before: !e.inclusive,\n values: e.position\n };\n }(t.endAt)), {\n _t: n,\n parent: i\n };\n}\nfunction __PRIVATE_toRunAggregationQueryRequest(e, t, n, r) {\n const {\n _t: i,\n parent: s\n } = __PRIVATE_toQueryTarget(e, t),\n o = {},\n _ = [];\n let a = 0;\n return n.forEach(e => {\n // Map all client-side aliases to a unique short-form\n // alias. 
This avoids issues with client-side aliases that\n // exceed the 1500-byte string size limit.\n const t = r ? e.alias : \"aggregate_\" + a++;\n o[t] = e.alias, \"count\" === e.aggregateType ? _.push({\n alias: t,\n count: {}\n }) : \"avg\" === e.aggregateType ? _.push({\n alias: t,\n avg: {\n field: __PRIVATE_toFieldPathReference(e.fieldPath)\n }\n }) : \"sum\" === e.aggregateType && _.push({\n alias: t,\n sum: {\n field: __PRIVATE_toFieldPathReference(e.fieldPath)\n }\n });\n }), {\n request: {\n structuredAggregationQuery: {\n aggregations: _,\n structuredQuery: i.structuredQuery\n },\n parent: i.parent\n },\n ut: o,\n parent: s\n };\n}\nfunction __PRIVATE_convertQueryTargetToQuery(e) {\n let t = __PRIVATE_fromQueryPath(e.parent);\n const n = e.structuredQuery,\n r = n.from ? n.from.length : 0;\n let i = null;\n if (r > 0) {\n __PRIVATE_hardAssert(1 === r);\n const e = n.from[0];\n e.allDescendants ? i = e.collectionId : t = t.child(e.collectionId);\n }\n let s = [];\n n.where && (s = function __PRIVATE_fromFilters(e) {\n const t = __PRIVATE_fromFilter(e);\n if (t instanceof CompositeFilter && __PRIVATE_compositeFilterIsFlatConjunction(t)) return t.getFilters();\n return [t];\n }(n.where));\n let o = [];\n n.orderBy && (o = function __PRIVATE_fromOrder(e) {\n return e.map(e => function __PRIVATE_fromPropertyOrder(e) {\n return new OrderBy(__PRIVATE_fromFieldPathReference(e.field),\n // visible for testing\n function __PRIVATE_fromDirection(e) {\n switch (e) {\n case \"ASCENDING\":\n return \"asc\" /* Direction.ASCENDING */;\n case \"DESCENDING\":\n return \"desc\" /* Direction.DESCENDING */;\n default:\n return;\n }\n }\n // visible for testing\n (e.direction));\n }\n // visible for testing\n (e));\n }(n.orderBy));\n let _ = null;\n n.limit && (_ = function __PRIVATE_fromInt32Proto(e) {\n let t;\n return t = \"object\" == typeof e ? e.value : e, __PRIVATE_isNullOrUndefined(t) ? null : t;\n }(n.limit));\n let a = null;\n n.startAt && (a = function __PRIVATE_fromStartAtCursor(e) {\n const t = !!e.before,\n n = e.values || [];\n return new Bound(n, t);\n }(n.startAt));\n let u = null;\n return n.endAt && (u = function __PRIVATE_fromEndAtCursor(e) {\n const t = !e.before,\n n = e.values || [];\n return new Bound(n, t);\n }\n // visible for testing\n (n.endAt)), __PRIVATE_newQuery(t, i, o, s, _, \"F\" /* LimitType.First */, a, u);\n}\nfunction __PRIVATE_toListenRequestLabels(e, t) {\n const n = function __PRIVATE_toLabel(e) {\n switch (e) {\n case \"TargetPurposeListen\" /* TargetPurpose.Listen */:\n return null;\n case \"TargetPurposeExistenceFilterMismatch\" /* TargetPurpose.ExistenceFilterMismatch */:\n return \"existence-filter-mismatch\";\n case \"TargetPurposeExistenceFilterMismatchBloom\" /* TargetPurpose.ExistenceFilterMismatchBloom */:\n return \"existence-filter-mismatch-bloom\";\n case \"TargetPurposeLimboResolution\" /* TargetPurpose.LimboResolution */:\n return \"limbo-document\";\n default:\n return fail();\n }\n }(t.purpose);\n return null == n ? null : {\n \"goog-listen-tags\": n\n };\n}\nfunction __PRIVATE_fromFilter(e) {\n return void 0 !== e.unaryFilter ? 
function __PRIVATE_fromUnaryFilter(e) {\n switch (e.unaryFilter.op) {\n case \"IS_NAN\":\n const t = __PRIVATE_fromFieldPathReference(e.unaryFilter.field);\n return FieldFilter.create(t, \"==\" /* Operator.EQUAL */, {\n doubleValue: NaN\n });\n case \"IS_NULL\":\n const n = __PRIVATE_fromFieldPathReference(e.unaryFilter.field);\n return FieldFilter.create(n, \"==\" /* Operator.EQUAL */, {\n nullValue: \"NULL_VALUE\"\n });\n case \"IS_NOT_NAN\":\n const r = __PRIVATE_fromFieldPathReference(e.unaryFilter.field);\n return FieldFilter.create(r, \"!=\" /* Operator.NOT_EQUAL */, {\n doubleValue: NaN\n });\n case \"IS_NOT_NULL\":\n const i = __PRIVATE_fromFieldPathReference(e.unaryFilter.field);\n return FieldFilter.create(i, \"!=\" /* Operator.NOT_EQUAL */, {\n nullValue: \"NULL_VALUE\"\n });\n default:\n return fail();\n }\n }(e) : void 0 !== e.fieldFilter ? function __PRIVATE_fromFieldFilter(e) {\n return FieldFilter.create(__PRIVATE_fromFieldPathReference(e.fieldFilter.field), function __PRIVATE_fromOperatorName(e) {\n switch (e) {\n case \"EQUAL\":\n return \"==\" /* Operator.EQUAL */;\n case \"NOT_EQUAL\":\n return \"!=\" /* Operator.NOT_EQUAL */;\n case \"GREATER_THAN\":\n return \">\" /* Operator.GREATER_THAN */;\n case \"GREATER_THAN_OR_EQUAL\":\n return \">=\" /* Operator.GREATER_THAN_OR_EQUAL */;\n case \"LESS_THAN\":\n return \"<\" /* Operator.LESS_THAN */;\n case \"LESS_THAN_OR_EQUAL\":\n return \"<=\" /* Operator.LESS_THAN_OR_EQUAL */;\n case \"ARRAY_CONTAINS\":\n return \"array-contains\" /* Operator.ARRAY_CONTAINS */;\n case \"IN\":\n return \"in\" /* Operator.IN */;\n case \"NOT_IN\":\n return \"not-in\" /* Operator.NOT_IN */;\n case \"ARRAY_CONTAINS_ANY\":\n return \"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */;\n default:\n return fail();\n }\n }(e.fieldFilter.op), e.fieldFilter.value);\n }(e) : void 0 !== e.compositeFilter ? function __PRIVATE_fromCompositeFilter(e) {\n return CompositeFilter.create(e.compositeFilter.filters.map(e => __PRIVATE_fromFilter(e)), function __PRIVATE_fromCompositeOperatorName(e) {\n switch (e) {\n case \"AND\":\n return \"and\" /* CompositeOperator.AND */;\n case \"OR\":\n return \"or\" /* CompositeOperator.OR */;\n default:\n return fail();\n }\n }(e.compositeFilter.op));\n }(e) : fail();\n}\nfunction __PRIVATE_toDirection(e) {\n return Ie[e];\n}\nfunction __PRIVATE_toOperatorName(e) {\n return Te[e];\n}\nfunction __PRIVATE_toCompositeOperatorName(e) {\n return Ee[e];\n}\nfunction __PRIVATE_toFieldPathReference(e) {\n return {\n fieldPath: e.canonicalString()\n };\n}\nfunction __PRIVATE_fromFieldPathReference(e) {\n return FieldPath$1.fromServerFormat(e.fieldPath);\n}\nfunction __PRIVATE_toFilter(e) {\n return e instanceof FieldFilter ? 
function __PRIVATE_toUnaryOrFieldFilter(e) {\n if (\"==\" /* Operator.EQUAL */ === e.op) {\n if (__PRIVATE_isNanValue(e.value)) return {\n unaryFilter: {\n field: __PRIVATE_toFieldPathReference(e.field),\n op: \"IS_NAN\"\n }\n };\n if (__PRIVATE_isNullValue(e.value)) return {\n unaryFilter: {\n field: __PRIVATE_toFieldPathReference(e.field),\n op: \"IS_NULL\"\n }\n };\n } else if (\"!=\" /* Operator.NOT_EQUAL */ === e.op) {\n if (__PRIVATE_isNanValue(e.value)) return {\n unaryFilter: {\n field: __PRIVATE_toFieldPathReference(e.field),\n op: \"IS_NOT_NAN\"\n }\n };\n if (__PRIVATE_isNullValue(e.value)) return {\n unaryFilter: {\n field: __PRIVATE_toFieldPathReference(e.field),\n op: \"IS_NOT_NULL\"\n }\n };\n }\n return {\n fieldFilter: {\n field: __PRIVATE_toFieldPathReference(e.field),\n op: __PRIVATE_toOperatorName(e.op),\n value: e.value\n }\n };\n }(e) : e instanceof CompositeFilter ? function __PRIVATE_toCompositeFilter(e) {\n const t = e.getFilters().map(e => __PRIVATE_toFilter(e));\n if (1 === t.length) return t[0];\n return {\n compositeFilter: {\n op: __PRIVATE_toCompositeOperatorName(e.op),\n filters: t\n }\n };\n }(e) : fail();\n}\nfunction __PRIVATE_toDocumentMask(e) {\n const t = [];\n return e.fields.forEach(e => t.push(e.canonicalString())), {\n fieldPaths: t\n };\n}\nfunction __PRIVATE_isValidResourceName(e) {\n // Resource names have at least 4 components (project ID, database ID)\n return e.length >= 4 && \"projects\" === e.get(0) && \"databases\" === e.get(2);\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * An immutable set of metadata that the local store tracks for each target.\n */\nclass TargetData {\n constructor(/** The target being listened to. */\n e,\n /**\n * The target ID to which the target corresponds; Assigned by the\n * LocalStore for user listens and by the SyncEngine for limbo watches.\n */\n t, /** The purpose of the target. */\n n,\n /**\n * The sequence number of the last transaction during which this target data\n * was modified.\n */\n r, /** The latest snapshot version seen for this target. */\n i = SnapshotVersion.min()\n /**\n * The maximum snapshot version at which the associated view\n * contained no limbo documents.\n */, s = SnapshotVersion.min()\n /**\n * An opaque, server-assigned token that allows watching a target to be\n * resumed after disconnecting without retransmitting all the data that\n * matches the target. The resume token essentially identifies a point in\n * time from which the server should resume sending results.\n */, o = ByteString.EMPTY_BYTE_STRING\n /**\n * The number of documents that last matched the query at the resume token or\n * read time. 
Documents are counted only when making a listen request with\n * resume token or read time, otherwise, keep it null.\n */, _ = null) {\n this.target = e, this.targetId = t, this.purpose = n, this.sequenceNumber = r, this.snapshotVersion = i, this.lastLimboFreeSnapshotVersion = s, this.resumeToken = o, this.expectedCount = _;\n }\n /** Creates a new target data instance with an updated sequence number. */\n withSequenceNumber(e) {\n return new TargetData(this.target, this.targetId, this.purpose, e, this.snapshotVersion, this.lastLimboFreeSnapshotVersion, this.resumeToken, this.expectedCount);\n }\n /**\n * Creates a new target data instance with an updated resume token and\n * snapshot version.\n */\n withResumeToken(e, t) {\n return new TargetData(this.target, this.targetId, this.purpose, this.sequenceNumber, t, this.lastLimboFreeSnapshotVersion, e, /* expectedCount= */null);\n }\n /**\n * Creates a new target data instance with an updated expected count.\n */\n withExpectedCount(e) {\n return new TargetData(this.target, this.targetId, this.purpose, this.sequenceNumber, this.snapshotVersion, this.lastLimboFreeSnapshotVersion, this.resumeToken, e);\n }\n /**\n * Creates a new target data instance with an updated last limbo free\n * snapshot version number.\n */\n withLastLimboFreeSnapshotVersion(e) {\n return new TargetData(this.target, this.targetId, this.purpose, this.sequenceNumber, this.snapshotVersion, e, this.resumeToken, this.expectedCount);\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/** Serializer for values stored in the LocalStore. */\nclass __PRIVATE_LocalSerializer {\n constructor(e) {\n this.ct = e;\n }\n}\n\n/** Decodes a remote document from storage locally to a Document. */\nfunction __PRIVATE_fromDbRemoteDocument(e, t) {\n let n;\n if (t.document) n = __PRIVATE_fromDocument(e.ct, t.document, !!t.hasCommittedMutations);else if (t.noDocument) {\n const e = DocumentKey.fromSegments(t.noDocument.path),\n r = __PRIVATE_fromDbTimestamp(t.noDocument.readTime);\n n = MutableDocument.newNoDocument(e, r), t.hasCommittedMutations && n.setHasCommittedMutations();\n } else {\n if (!t.unknownDocument) return fail();\n {\n const e = DocumentKey.fromSegments(t.unknownDocument.path),\n r = __PRIVATE_fromDbTimestamp(t.unknownDocument.version);\n n = MutableDocument.newUnknownDocument(e, r);\n }\n }\n return t.readTime && n.setReadTime(function __PRIVATE_fromDbTimestampKey(e) {\n const t = new Timestamp(e[0], e[1]);\n return SnapshotVersion.fromTimestamp(t);\n }(t.readTime)), n;\n}\n\n/** Encodes a document for storage locally. 
*/\nfunction __PRIVATE_toDbRemoteDocument(e, t) {\n const n = t.key,\n r = {\n prefixPath: n.getCollectionPath().popLast().toArray(),\n collectionGroup: n.collectionGroup,\n documentId: n.path.lastSegment(),\n readTime: __PRIVATE_toDbTimestampKey(t.readTime),\n hasCommittedMutations: t.hasCommittedMutations\n };\n if (t.isFoundDocument()) r.document = function __PRIVATE_toDocument(e, t) {\n return {\n name: __PRIVATE_toName(e, t.key),\n fields: t.data.value.mapValue.fields,\n updateTime: toTimestamp(e, t.version.toTimestamp()),\n createTime: toTimestamp(e, t.createTime.toTimestamp())\n };\n }(e.ct, t);else if (t.isNoDocument()) r.noDocument = {\n path: n.path.toArray(),\n readTime: __PRIVATE_toDbTimestamp(t.version)\n };else {\n if (!t.isUnknownDocument()) return fail();\n r.unknownDocument = {\n path: n.path.toArray(),\n version: __PRIVATE_toDbTimestamp(t.version)\n };\n }\n return r;\n}\nfunction __PRIVATE_toDbTimestampKey(e) {\n const t = e.toTimestamp();\n return [t.seconds, t.nanoseconds];\n}\nfunction __PRIVATE_toDbTimestamp(e) {\n const t = e.toTimestamp();\n return {\n seconds: t.seconds,\n nanoseconds: t.nanoseconds\n };\n}\nfunction __PRIVATE_fromDbTimestamp(e) {\n const t = new Timestamp(e.seconds, e.nanoseconds);\n return SnapshotVersion.fromTimestamp(t);\n}\n\n/** Encodes a batch of mutations into a DbMutationBatch for local storage. */\n/** Decodes a DbMutationBatch into a MutationBatch */\nfunction __PRIVATE_fromDbMutationBatch(e, t) {\n const n = (t.baseMutations || []).map(t => __PRIVATE_fromMutation(e.ct, t));\n // Squash old transform mutations into existing patch or set mutations.\n // The replacement of representing `transforms` with `update_transforms`\n // on the SDK means that old `transform` mutations stored in IndexedDB need\n // to be updated to `update_transforms`.\n // TODO(b/174608374): Remove this code once we perform a schema migration.\n for (let e = 0; e < t.mutations.length - 1; ++e) {\n const n = t.mutations[e];\n if (e + 1 < t.mutations.length && void 0 !== t.mutations[e + 1].transform) {\n const r = t.mutations[e + 1];\n n.updateTransforms = r.transform.fieldTransforms, t.mutations.splice(e + 1, 1), ++e;\n }\n }\n const r = t.mutations.map(t => __PRIVATE_fromMutation(e.ct, t)),\n i = Timestamp.fromMillis(t.localWriteTimeMs);\n return new MutationBatch(t.batchId, i, n, r);\n}\n\n/** Decodes a DbTarget into TargetData */\nfunction __PRIVATE_fromDbTarget(e) {\n const t = __PRIVATE_fromDbTimestamp(e.readTime),\n n = void 0 !== e.lastLimboFreeSnapshotVersion ? __PRIVATE_fromDbTimestamp(e.lastLimboFreeSnapshotVersion) : SnapshotVersion.min();\n let r;\n return r =\n /**\n * A helper function for figuring out what kind of query has been stored.\n */\n function __PRIVATE_isDocumentQuery(e) {\n return void 0 !== e.documents;\n }\n /** Encodes a DbBundle to a BundleMetadata object. */(e.query) ? function __PRIVATE_fromDocumentsTarget(e) {\n return __PRIVATE_hardAssert(1 === e.documents.length), __PRIVATE_queryToTarget(__PRIVATE_newQueryForPath(__PRIVATE_fromQueryPath(e.documents[0])));\n }(e.query) : function __PRIVATE_fromQueryTarget(e) {\n return __PRIVATE_queryToTarget(__PRIVATE_convertQueryTargetToQuery(e));\n }(e.query), new TargetData(r, e.targetId, \"TargetPurposeListen\" /* TargetPurpose.Listen */, e.lastListenSequenceNumber, t, n, ByteString.fromBase64String(e.resumeToken));\n}\n\n/** Encodes TargetData into a DbTarget for storage locally. 
*/\nfunction __PRIVATE_toDbTarget(e, t) {\n const n = __PRIVATE_toDbTimestamp(t.snapshotVersion),\n r = __PRIVATE_toDbTimestamp(t.lastLimboFreeSnapshotVersion);\n let i;\n i = __PRIVATE_targetIsDocumentTarget(t.target) ? __PRIVATE_toDocumentsTarget(e.ct, t.target) : __PRIVATE_toQueryTarget(e.ct, t.target)._t;\n // We can't store the resumeToken as a ByteString in IndexedDb, so we\n // convert it to a base64 string for storage.\n const s = t.resumeToken.toBase64();\n // lastListenSequenceNumber is always 0 until we do real GC.\n return {\n targetId: t.targetId,\n canonicalId: __PRIVATE_canonifyTarget(t.target),\n readTime: n,\n resumeToken: s,\n lastListenSequenceNumber: t.sequenceNumber,\n lastLimboFreeSnapshotVersion: r,\n query: i\n };\n}\n\n/**\n * Encodes a `BundledQuery` from bundle proto to a Query object.\n *\n * This reconstructs the original query used to build the bundle being loaded,\n * including features exists only in SDKs (for example: limit-to-last).\n */\nfunction __PRIVATE_fromBundledQuery(e) {\n const t = __PRIVATE_convertQueryTargetToQuery({\n parent: e.parent,\n structuredQuery: e.structuredQuery\n });\n return \"LAST\" === e.limitType ? __PRIVATE_queryWithLimit(t, t.limit, \"L\" /* LimitType.Last */) : t;\n}\n\n/** Encodes a NamedQuery proto object to a NamedQuery model object. */\n/** Encodes a DbDocumentOverlay object to an Overlay model object. */\nfunction __PRIVATE_fromDbDocumentOverlay(e, t) {\n return new Overlay(t.largestBatchId, __PRIVATE_fromMutation(e.ct, t.overlayMutation));\n}\n\n/** Decodes an Overlay model object into a DbDocumentOverlay object. */\n/**\n * Returns the DbDocumentOverlayKey corresponding to the given user and\n * document key.\n */\nfunction __PRIVATE_toDbDocumentOverlayKey(e, t) {\n const n = t.path.lastSegment();\n return [e, __PRIVATE_encodeResourcePath(t.path.popLast()), n];\n}\nfunction __PRIVATE_toDbIndexState(e, t, n, r) {\n return {\n indexId: e,\n uid: t,\n sequenceNumber: n,\n readTime: __PRIVATE_toDbTimestamp(r.readTime),\n documentKey: __PRIVATE_encodeResourcePath(r.documentKey.path),\n largestBatchId: r.largestBatchId\n };\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_IndexedDbBundleCache {\n getBundleMetadata(e, t) {\n return __PRIVATE_bundlesStore(e).get(t).next(e => {\n if (e) return function __PRIVATE_fromDbBundle(e) {\n return {\n id: e.bundleId,\n createTime: __PRIVATE_fromDbTimestamp(e.createTime),\n version: e.version\n };\n }\n /** Encodes a BundleMetadata to a DbBundle. */(e);\n });\n }\n saveBundleMetadata(e, t) {\n return __PRIVATE_bundlesStore(e).put(function __PRIVATE_toDbBundle(e) {\n return {\n bundleId: e.id,\n createTime: __PRIVATE_toDbTimestamp(__PRIVATE_fromVersion(e.createTime)),\n version: e.version\n };\n }\n /** Encodes a DbNamedQuery to a NamedQuery. 
*/(t));\n }\n getNamedQuery(e, t) {\n return __PRIVATE_namedQueriesStore(e).get(t).next(e => {\n if (e) return function __PRIVATE_fromDbNamedQuery(e) {\n return {\n name: e.name,\n query: __PRIVATE_fromBundledQuery(e.bundledQuery),\n readTime: __PRIVATE_fromDbTimestamp(e.readTime)\n };\n }\n /** Encodes a NamedQuery from a bundle proto to a DbNamedQuery. */(e);\n });\n }\n saveNamedQuery(e, t) {\n return __PRIVATE_namedQueriesStore(e).put(function __PRIVATE_toDbNamedQuery(e) {\n return {\n name: e.name,\n readTime: __PRIVATE_toDbTimestamp(__PRIVATE_fromVersion(e.readTime)),\n bundledQuery: e.bundledQuery\n };\n }(t));\n }\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the bundles object store.\n */\nfunction __PRIVATE_bundlesStore(e) {\n return __PRIVATE_getStore(e, \"bundles\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the namedQueries object store.\n */\nfunction __PRIVATE_namedQueriesStore(e) {\n return __PRIVATE_getStore(e, \"namedQueries\");\n}\n\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Implementation of DocumentOverlayCache using IndexedDb.\n */\nclass __PRIVATE_IndexedDbDocumentOverlayCache {\n /**\n * @param serializer - The document serializer.\n * @param userId - The userId for which we are accessing overlays.\n */\n constructor(e, t) {\n this.serializer = e, this.userId = t;\n }\n static lt(e, t) {\n const n = t.uid || \"\";\n return new __PRIVATE_IndexedDbDocumentOverlayCache(e, n);\n }\n getOverlay(e, t) {\n return __PRIVATE_documentOverlayStore(e).get(__PRIVATE_toDbDocumentOverlayKey(this.userId, t)).next(e => e ? 
__PRIVATE_fromDbDocumentOverlay(this.serializer, e) : null);\n }\n getOverlays(e, t) {\n const n = __PRIVATE_newOverlayMap();\n return PersistencePromise.forEach(t, t => this.getOverlay(e, t).next(e => {\n null !== e && n.set(t, e);\n })).next(() => n);\n }\n saveOverlays(e, t, n) {\n const r = [];\n return n.forEach((n, i) => {\n const s = new Overlay(t, i);\n r.push(this.ht(e, s));\n }), PersistencePromise.waitFor(r);\n }\n removeOverlaysForBatchId(e, t, n) {\n const r = new Set();\n // Get the set of unique collection paths.\n t.forEach(e => r.add(__PRIVATE_encodeResourcePath(e.getCollectionPath())));\n const i = [];\n return r.forEach(t => {\n const r = IDBKeyRange.bound([this.userId, t, n], [this.userId, t, n + 1], /*lowerOpen=*/!1, /*upperOpen=*/!0);\n i.push(__PRIVATE_documentOverlayStore(e).j(\"collectionPathOverlayIndex\", r));\n }), PersistencePromise.waitFor(i);\n }\n getOverlaysForCollection(e, t, n) {\n const r = __PRIVATE_newOverlayMap(),\n i = __PRIVATE_encodeResourcePath(t),\n s = IDBKeyRange.bound([this.userId, i, n], [this.userId, i, Number.POSITIVE_INFINITY], /*lowerOpen=*/!0);\n return __PRIVATE_documentOverlayStore(e).U(\"collectionPathOverlayIndex\", s).next(e => {\n for (const t of e) {\n const e = __PRIVATE_fromDbDocumentOverlay(this.serializer, t);\n r.set(e.getKey(), e);\n }\n return r;\n });\n }\n getOverlaysForCollectionGroup(e, t, n, r) {\n const i = __PRIVATE_newOverlayMap();\n let s;\n // We want batch IDs larger than `sinceBatchId`, and so the lower bound\n // is not inclusive.\n const o = IDBKeyRange.bound([this.userId, t, n], [this.userId, t, Number.POSITIVE_INFINITY], /*lowerOpen=*/!0);\n return __PRIVATE_documentOverlayStore(e).J({\n index: \"collectionGroupOverlayIndex\",\n range: o\n }, (e, t, n) => {\n // We do not want to return partial batch overlays, even if the size\n // of the result set exceeds the given `count` argument. Therefore, we\n // continue to aggregate results even after the result size exceeds\n // `count` if there are more overlays from the `currentBatchId`.\n const o = __PRIVATE_fromDbDocumentOverlay(this.serializer, t);\n i.size() < r || o.largestBatchId === s ? (i.set(o.getKey(), o), s = o.largestBatchId) : n.done();\n }).next(() => i);\n }\n ht(e, t) {\n return __PRIVATE_documentOverlayStore(e).put(function __PRIVATE_toDbDocumentOverlay(e, t, n) {\n const [r, i, s] = __PRIVATE_toDbDocumentOverlayKey(t, n.mutation.key);\n return {\n userId: t,\n collectionPath: i,\n documentId: s,\n collectionGroup: n.mutation.key.getCollectionGroup(),\n largestBatchId: n.largestBatchId,\n overlayMutation: toMutation(e.ct, n.mutation)\n };\n }(this.serializer, this.userId, t));\n }\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the document overlay object store.\n */\nfunction __PRIVATE_documentOverlayStore(e) {\n return __PRIVATE_getStore(e, \"documentOverlays\");\n}\n\n/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n// Note: This code is copied from the backend. 
Code that is not used by\n// Firestore was removed.\n/** Firestore index value writer. */\nclass __PRIVATE_FirestoreIndexValueWriter {\n constructor() {}\n // The write methods below short-circuit writing terminators for values\n // containing a (terminating) truncated value.\n // As an example, consider the resulting encoding for:\n // [\"bar\", [2, \"foo\"]] -> (STRING, \"bar\", TERM, ARRAY, NUMBER, 2, STRING, \"foo\", TERM, TERM, TERM)\n // [\"bar\", [2, truncated(\"foo\")]] -> (STRING, \"bar\", TERM, ARRAY, NUMBER, 2, STRING, \"foo\", TRUNC)\n // [\"bar\", truncated([\"foo\"])] -> (STRING, \"bar\", TERM, ARRAY. STRING, \"foo\", TERM, TRUNC)\n /** Writes an index value. */\n Pt(e, t) {\n this.It(e, t),\n // Write separator to split index values\n // (see go/firestore-storage-format#encodings).\n t.Tt();\n }\n It(e, t) {\n if (\"nullValue\" in e) this.Et(t, 5);else if (\"booleanValue\" in e) this.Et(t, 10), t.dt(e.booleanValue ? 1 : 0);else if (\"integerValue\" in e) this.Et(t, 15), t.dt(__PRIVATE_normalizeNumber(e.integerValue));else if (\"doubleValue\" in e) {\n const n = __PRIVATE_normalizeNumber(e.doubleValue);\n isNaN(n) ? this.Et(t, 13) : (this.Et(t, 15), __PRIVATE_isNegativeZero(n) ?\n // -0.0, 0 and 0.0 are all considered the same\n t.dt(0) : t.dt(n));\n } else if (\"timestampValue\" in e) {\n let n = e.timestampValue;\n this.Et(t, 20), \"string\" == typeof n && (n = __PRIVATE_normalizeTimestamp(n)), t.At(`${n.seconds || \"\"}`), t.dt(n.nanos || 0);\n } else if (\"stringValue\" in e) this.Rt(e.stringValue, t), this.Vt(t);else if (\"bytesValue\" in e) this.Et(t, 30), t.ft(__PRIVATE_normalizeByteString(e.bytesValue)), this.Vt(t);else if (\"referenceValue\" in e) this.gt(e.referenceValue, t);else if (\"geoPointValue\" in e) {\n const n = e.geoPointValue;\n this.Et(t, 45), t.dt(n.latitude || 0), t.dt(n.longitude || 0);\n } else \"mapValue\" in e ? __PRIVATE_isMaxValue(e) ? this.Et(t, Number.MAX_SAFE_INTEGER) : (this.yt(e.mapValue, t), this.Vt(t)) : \"arrayValue\" in e ? (this.wt(e.arrayValue, t), this.Vt(t)) : fail();\n }\n Rt(e, t) {\n this.Et(t, 25), this.St(e, t);\n }\n St(e, t) {\n t.At(e);\n }\n yt(e, t) {\n const n = e.fields || {};\n this.Et(t, 55);\n for (const e of Object.keys(n)) this.Rt(e, t), this.It(n[e], t);\n }\n wt(e, t) {\n const n = e.values || [];\n this.Et(t, 50);\n for (const e of n) this.It(e, t);\n }\n gt(e, t) {\n this.Et(t, 37);\n DocumentKey.fromName(e).path.forEach(e => {\n this.Et(t, 60), this.St(e, t);\n });\n }\n Et(e, t) {\n e.dt(t);\n }\n Vt(e) {\n // While the SDK does not implement truncation, the truncation marker is\n // used to terminate all variable length values (which are strings, bytes,\n // references, arrays and maps).\n e.dt(2);\n }\n}\n__PRIVATE_FirestoreIndexValueWriter.bt = new __PRIVATE_FirestoreIndexValueWriter();\n\n/**\n * Counts the number of zeros in a byte.\n *\n * Visible for testing.\n */\nfunction __PRIVATE_numberOfLeadingZerosInByte(e) {\n if (0 === e) return 8;\n let t = 0;\n return e >> 4 == 0 && (\n // Test if the first four bits are zero.\n t += 4, e <<= 4), e >> 6 == 0 && (\n // Test if the first two (or next two) bits are zero.\n t += 2, e <<= 2), e >> 7 == 0 && (\n // Test if the remaining bit is zero.\n t += 1), t;\n}\n\n/** Counts the number of leading zeros in the given byte array. */\n/**\n * Returns the number of bytes required to store \"value\". 
Leading zero bytes\n * are skipped.\n */\nfunction __PRIVATE_unsignedNumLength(e) {\n // This is just the number of bytes for the unsigned representation of the number.\n const t = 64 - function __PRIVATE_numberOfLeadingZeros(e) {\n let t = 0;\n for (let n = 0; n < 8; ++n) {\n const r = __PRIVATE_numberOfLeadingZerosInByte(255 & e[n]);\n if (t += r, 8 !== r) break;\n }\n return t;\n }(e);\n return Math.ceil(t / 8);\n}\n\n/**\n * OrderedCodeWriter is a minimal-allocation implementation of the writing\n * behavior defined by the backend.\n *\n * The code is ported from its Java counterpart.\n */\nclass __PRIVATE_OrderedCodeWriter {\n constructor() {\n this.buffer = new Uint8Array(1024), this.position = 0;\n }\n Dt(e) {\n const t = e[Symbol.iterator]();\n let n = t.next();\n for (; !n.done;) this.Ct(n.value), n = t.next();\n this.vt();\n }\n Ft(e) {\n const t = e[Symbol.iterator]();\n let n = t.next();\n for (; !n.done;) this.Mt(n.value), n = t.next();\n this.xt();\n }\n /** Writes utf8 bytes into this byte sequence, ascending. */\n Ot(e) {\n for (const t of e) {\n const e = t.charCodeAt(0);\n if (e < 128) this.Ct(e);else if (e < 2048) this.Ct(960 | e >>> 6), this.Ct(128 | 63 & e);else if (t < \"\\ud800\" || \"\\udbff\" < t) this.Ct(480 | e >>> 12), this.Ct(128 | 63 & e >>> 6), this.Ct(128 | 63 & e);else {\n const e = t.codePointAt(0);\n this.Ct(240 | e >>> 18), this.Ct(128 | 63 & e >>> 12), this.Ct(128 | 63 & e >>> 6), this.Ct(128 | 63 & e);\n }\n }\n this.vt();\n }\n /** Writes utf8 bytes into this byte sequence, descending */\n Nt(e) {\n for (const t of e) {\n const e = t.charCodeAt(0);\n if (e < 128) this.Mt(e);else if (e < 2048) this.Mt(960 | e >>> 6), this.Mt(128 | 63 & e);else if (t < \"\\ud800\" || \"\\udbff\" < t) this.Mt(480 | e >>> 12), this.Mt(128 | 63 & e >>> 6), this.Mt(128 | 63 & e);else {\n const e = t.codePointAt(0);\n this.Mt(240 | e >>> 18), this.Mt(128 | 63 & e >>> 12), this.Mt(128 | 63 & e >>> 6), this.Mt(128 | 63 & e);\n }\n }\n this.xt();\n }\n Lt(e) {\n // Values are encoded with a single byte length prefix, followed by the\n // actual value in big-endian format with leading 0 bytes dropped.\n const t = this.Bt(e),\n n = __PRIVATE_unsignedNumLength(t);\n this.kt(1 + n), this.buffer[this.position++] = 255 & n;\n // Write the length\n for (let e = t.length - n; e < t.length; ++e) this.buffer[this.position++] = 255 & t[e];\n }\n qt(e) {\n // Values are encoded with a single byte length prefix, followed by the\n // inverted value in big-endian format with leading 0 bytes dropped.\n const t = this.Bt(e),\n n = __PRIVATE_unsignedNumLength(t);\n this.kt(1 + n), this.buffer[this.position++] = ~(255 & n);\n // Write the length\n for (let e = t.length - n; e < t.length; ++e) this.buffer[this.position++] = ~(255 & t[e]);\n }\n /**\n * Writes the \"infinity\" byte sequence that sorts after all other byte\n * sequences written in ascending order.\n */\n Qt() {\n this.Kt(255), this.Kt(255);\n }\n /**\n * Writes the \"infinity\" byte sequence that sorts before all other byte\n * sequences written in descending order.\n */\n $t() {\n this.Ut(255), this.Ut(255);\n }\n /**\n * Resets the buffer such that it is the same as when it was newly\n * constructed.\n */\n reset() {\n this.position = 0;\n }\n seed(e) {\n this.kt(e.length), this.buffer.set(e, this.position), this.position += e.length;\n }\n /** Makes a copy of the encoded bytes in this buffer. 
*/\n Wt() {\n return this.buffer.slice(0, this.position);\n }\n /**\n * Encodes `val` into an encoding so that the order matches the IEEE 754\n * floating-point comparison results with the following exceptions:\n * -0.0 < 0.0\n * all non-NaN < NaN\n * NaN = NaN\n */\n Bt(e) {\n const t = /** Converts a JavaScript number to a byte array (using big endian encoding). */\n function __PRIVATE_doubleToLongBits(e) {\n const t = new DataView(new ArrayBuffer(8));\n return t.setFloat64(0, e, /* littleEndian= */!1), new Uint8Array(t.buffer);\n }(e),\n n = 0 != (128 & t[0]);\n // Check if the first bit is set. We use a bit mask since value[0] is\n // encoded as a number from 0 to 255.\n // Revert the two complement to get natural ordering\n t[0] ^= n ? 255 : 128;\n for (let e = 1; e < t.length; ++e) t[e] ^= n ? 255 : 0;\n return t;\n }\n /** Writes a single byte ascending to the buffer. */\n Ct(e) {\n const t = 255 & e;\n 0 === t ? (this.Kt(0), this.Kt(255)) : 255 === t ? (this.Kt(255), this.Kt(0)) : this.Kt(t);\n }\n /** Writes a single byte descending to the buffer. */\n Mt(e) {\n const t = 255 & e;\n 0 === t ? (this.Ut(0), this.Ut(255)) : 255 === t ? (this.Ut(255), this.Ut(0)) : this.Ut(e);\n }\n vt() {\n this.Kt(0), this.Kt(1);\n }\n xt() {\n this.Ut(0), this.Ut(1);\n }\n Kt(e) {\n this.kt(1), this.buffer[this.position++] = e;\n }\n Ut(e) {\n this.kt(1), this.buffer[this.position++] = ~e;\n }\n kt(e) {\n const t = e + this.position;\n if (t <= this.buffer.length) return;\n // Try doubling.\n let n = 2 * this.buffer.length;\n // Still not big enough? Just allocate the right size.\n n < t && (n = t);\n // Create the new buffer.\n const r = new Uint8Array(n);\n r.set(this.buffer),\n // copy old data\n this.buffer = r;\n }\n}\nclass __PRIVATE_AscendingIndexByteEncoder {\n constructor(e) {\n this.Gt = e;\n }\n ft(e) {\n this.Gt.Dt(e);\n }\n At(e) {\n this.Gt.Ot(e);\n }\n dt(e) {\n this.Gt.Lt(e);\n }\n Tt() {\n this.Gt.Qt();\n }\n}\nclass __PRIVATE_DescendingIndexByteEncoder {\n constructor(e) {\n this.Gt = e;\n }\n ft(e) {\n this.Gt.Ft(e);\n }\n At(e) {\n this.Gt.Nt(e);\n }\n dt(e) {\n this.Gt.qt(e);\n }\n Tt() {\n this.Gt.$t();\n }\n}\n\n/**\n * Implements `DirectionalIndexByteEncoder` using `OrderedCodeWriter` for the\n * actual encoding.\n */\nclass __PRIVATE_IndexByteEncoder {\n constructor() {\n this.Gt = new __PRIVATE_OrderedCodeWriter(), this.zt = new __PRIVATE_AscendingIndexByteEncoder(this.Gt), this.jt = new __PRIVATE_DescendingIndexByteEncoder(this.Gt);\n }\n seed(e) {\n this.Gt.seed(e);\n }\n Ht(e) {\n return 0 /* IndexKind.ASCENDING */ === e ? this.zt : this.jt;\n }\n Wt() {\n return this.Gt.Wt();\n }\n reset() {\n this.Gt.reset();\n }\n}\n\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/** Represents an index entry saved by the SDK in persisted storage. 
*/\nclass __PRIVATE_IndexEntry {\n constructor(e, t, n, r) {\n this.indexId = e, this.documentKey = t, this.arrayValue = n, this.directionalValue = r;\n }\n /**\n * Returns an IndexEntry entry that sorts immediately after the current\n * directional value.\n */\n Jt() {\n const e = this.directionalValue.length,\n t = 0 === e || 255 === this.directionalValue[e - 1] ? e + 1 : e,\n n = new Uint8Array(t);\n return n.set(this.directionalValue, 0), t !== e ? n.set([0], this.directionalValue.length) : ++n[n.length - 1], new __PRIVATE_IndexEntry(this.indexId, this.documentKey, this.arrayValue, n);\n }\n}\nfunction __PRIVATE_indexEntryComparator(e, t) {\n let n = e.indexId - t.indexId;\n return 0 !== n ? n : (n = __PRIVATE_compareByteArrays(e.arrayValue, t.arrayValue), 0 !== n ? n : (n = __PRIVATE_compareByteArrays(e.directionalValue, t.directionalValue), 0 !== n ? n : DocumentKey.comparator(e.documentKey, t.documentKey)));\n}\nfunction __PRIVATE_compareByteArrays(e, t) {\n for (let n = 0; n < e.length && n < t.length; ++n) {\n const r = e[n] - t[n];\n if (0 !== r) return r;\n }\n return e.length - t.length;\n}\n\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A light query planner for Firestore.\n *\n * This class matches a `FieldIndex` against a Firestore Query `Target`. It\n * determines whether a given index can be used to serve the specified target.\n *\n * The following table showcases some possible index configurations:\n *\n * Query | Index\n * -----------------------------------------------------------------------------\n * where('a', '==', 'a').where('b', '==', 'b') | a ASC, b DESC\n * where('a', '==', 'a').where('b', '==', 'b') | a ASC\n * where('a', '==', 'a').where('b', '==', 'b') | b DESC\n * where('a', '>=', 'a').orderBy('a') | a ASC\n * where('a', '>=', 'a').orderBy('a', 'desc') | a DESC\n * where('a', '>=', 'a').orderBy('a').orderBy('b') | a ASC, b ASC\n * where('a', '>=', 'a').orderBy('a').orderBy('b') | a ASC\n * where('a', 'array-contains', 'a').orderBy('b') | a CONTAINS, b ASCENDING\n * where('a', 'array-contains', 'a').orderBy('b') | a CONTAINS\n */\nclass __PRIVATE_TargetIndexMatcher {\n constructor(e) {\n // The inequality filters of the target (if it exists).\n // Note: The sort on FieldFilters is not required. Using SortedSet here just to utilize the custom\n // comparator.\n this.Yt = new SortedSet((e, t) => FieldPath$1.comparator(e.field, t.field)), this.collectionId = null != e.collectionGroup ? e.collectionGroup : e.path.lastSegment(), this.Zt = e.orderBy, this.Xt = [];\n for (const t of e.filters) {\n const e = t;\n e.isInequality() ? 
this.Yt = this.Yt.add(e) : this.Xt.push(e);\n }\n }\n get en() {\n return this.Yt.size > 1;\n }\n /**\n * Returns whether the index can be used to serve the TargetIndexMatcher's\n * target.\n *\n * An index is considered capable of serving the target when:\n * - The target uses all index segments for its filters and orderBy clauses.\n * The target can have additional filter and orderBy clauses, but not\n * fewer.\n * - If an ArrayContains/ArrayContainsAnyfilter is used, the index must also\n * have a corresponding `CONTAINS` segment.\n * - All directional index segments can be mapped to the target as a series of\n * equality filters, a single inequality filter and a series of orderBy\n * clauses.\n * - The segments that represent the equality filters may appear out of order.\n * - The optional segment for the inequality filter must appear after all\n * equality segments.\n * - The segments that represent that orderBy clause of the target must appear\n * in order after all equality and inequality segments. Single orderBy\n * clauses cannot be skipped, but a continuous orderBy suffix may be\n * omitted.\n */\n tn(e) {\n if (__PRIVATE_hardAssert(e.collectionGroup === this.collectionId), this.en)\n // Only single inequality is supported for now.\n // TODO(Add support for multiple inequality query): b/298441043\n return !1;\n // If there is an array element, find a matching filter.\n const t = __PRIVATE_fieldIndexGetArraySegment(e);\n if (void 0 !== t && !this.nn(t)) return !1;\n const n = __PRIVATE_fieldIndexGetDirectionalSegments(e);\n let r = new Set(),\n i = 0,\n s = 0;\n // Process all equalities first. Equalities can appear out of order.\n for (; i < n.length && this.nn(n[i]); ++i) r = r.add(n[i].fieldPath.canonicalString());\n // If we already have processed all segments, all segments are used to serve\n // the equality filters and we do not need to map any segments to the\n // target's inequality and orderBy clauses.\n if (i === n.length) return !0;\n if (this.Yt.size > 0) {\n // Only a single inequality is currently supported. Get the only entry in the set.\n const e = this.Yt.getIterator().getNext();\n // If there is an inequality filter and the field was not in one of the\n // equality filters above, the next segment must match both the filter\n // and the first orderBy clause.\n if (!r.has(e.field.canonicalString())) {\n const t = n[i];\n if (!this.rn(e, t) || !this.sn(this.Zt[s++], t)) return !1;\n }\n ++i;\n }\n // All remaining segments need to represent the prefix of the target's\n // orderBy.\n for (; i < n.length; ++i) {\n const e = n[i];\n if (s >= this.Zt.length || !this.sn(this.Zt[s++], e)) return !1;\n }\n return !0;\n }\n /**\n * Returns a full matched field index for this target. Currently multiple\n * inequality query is not supported so function returns null.\n */\n on() {\n if (this.en) return null;\n // We want to make sure only one segment created for one field. 
For example,\n // in case like a == 3 and a > 2, Index {a ASCENDING} will only be created\n // once.\n let e = new SortedSet(FieldPath$1.comparator);\n const t = [];\n for (const n of this.Xt) {\n if (n.field.isKeyField()) continue;\n if (\"array-contains\" /* Operator.ARRAY_CONTAINS */ === n.op || \"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */ === n.op) t.push(new IndexSegment(n.field, 2 /* IndexKind.CONTAINS */));else {\n if (e.has(n.field)) continue;\n e = e.add(n.field), t.push(new IndexSegment(n.field, 0 /* IndexKind.ASCENDING */));\n }\n }\n // Note: We do not explicitly check `this.inequalityFilter` but rather rely\n // on the target defining an appropriate \"order by\" to ensure that the\n // required index segment is added. The query engine would reject a query\n // with an inequality filter that lacks the required order-by clause.\n for (const n of this.Zt)\n // Stop adding more segments if we see a order-by on key. Typically this\n // is the default implicit order-by which is covered in the index_entry\n // table as a separate column. If it is not the default order-by, the\n // generated index will be missing some segments optimized for order-bys,\n // which is probably fine.\n n.field.isKeyField() || e.has(n.field) || (e = e.add(n.field), t.push(new IndexSegment(n.field, \"asc\" /* Direction.ASCENDING */ === n.dir ? 0 /* IndexKind.ASCENDING */ : 1 /* IndexKind.DESCENDING */)));\n return new FieldIndex(FieldIndex.UNKNOWN_ID, this.collectionId, t, IndexState.empty());\n }\n nn(e) {\n for (const t of this.Xt) if (this.rn(t, e)) return !0;\n return !1;\n }\n rn(e, t) {\n if (void 0 === e || !e.field.isEqual(t.fieldPath)) return !1;\n const n = \"array-contains\" /* Operator.ARRAY_CONTAINS */ === e.op || \"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */ === e.op;\n return 2 /* IndexKind.CONTAINS */ === t.kind === n;\n }\n sn(e, t) {\n return !!e.field.isEqual(t.fieldPath) && (0 /* IndexKind.ASCENDING */ === t.kind && \"asc\" /* Direction.ASCENDING */ === e.dir || 1 /* IndexKind.DESCENDING */ === t.kind && \"desc\" /* Direction.DESCENDING */ === e.dir);\n }\n}\n\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Provides utility functions that help with boolean logic transformations needed for handling\n * complex filters used in queries.\n */\n/**\n * The `in` filter is only a syntactic sugar over a disjunction of equalities. For instance: `a in\n * [1,2,3]` is in fact `a==1 || a==2 || a==3`. This method expands any `in` filter in the given\n * input into a disjunction of equality filters and returns the expanded filter.\n */\nfunction __PRIVATE_computeInExpansion(e) {\n var t, n;\n if (__PRIVATE_hardAssert(e instanceof FieldFilter || e instanceof CompositeFilter), e instanceof FieldFilter) {\n if (e instanceof __PRIVATE_InFilter) {\n const r = (null === (n = null === (t = e.value.arrayValue) || void 0 === t ? void 0 : t.values) || void 0 === n ? 
void 0 : n.map(t => FieldFilter.create(e.field, \"==\" /* Operator.EQUAL */, t))) || [];\n return CompositeFilter.create(r, \"or\" /* CompositeOperator.OR */);\n }\n // We have reached other kinds of field filters.\n return e;\n }\n // We have a composite filter.\n const r = e.filters.map(e => __PRIVATE_computeInExpansion(e));\n return CompositeFilter.create(r, e.op);\n}\n\n/**\n * Given a composite filter, returns the list of terms in its disjunctive normal form.\n *\n * Each element in the return value is one term of the resulting DNF. For instance: For the\n * input: (A || B) && C, the DNF form is: (A && C) || (B && C), and the return value is a list\n * with two elements: a composite filter that performs (A && C), and a composite filter that\n * performs (B && C).\n *\n * @param filter the composite filter to calculate DNF transform for.\n * @return the terms in the DNF transform.\n */\nfunction __PRIVATE_getDnfTerms(e) {\n if (0 === e.getFilters().length) return [];\n const t = __PRIVATE_computeDistributedNormalForm(__PRIVATE_computeInExpansion(e));\n return __PRIVATE_hardAssert(__PRIVATE_isDisjunctiveNormalForm(t)), __PRIVATE_isSingleFieldFilter(t) || __PRIVATE_isFlatConjunction(t) ? [t] : t.getFilters();\n}\n\n/** Returns true if the given filter is a single field filter. e.g. (a == 10). */\nfunction __PRIVATE_isSingleFieldFilter(e) {\n return e instanceof FieldFilter;\n}\n\n/**\n * Returns true if the given filter is the conjunction of one or more field filters. e.g. (a == 10\n * && b == 20)\n */\nfunction __PRIVATE_isFlatConjunction(e) {\n return e instanceof CompositeFilter && __PRIVATE_compositeFilterIsFlatConjunction(e);\n}\n\n/**\n * Returns whether or not the given filter is in disjunctive normal form (DNF).\n *\n *
In boolean logic, a disjunctive normal form (DNF) is a canonical normal form of a logical\n * formula consisting of a disjunction of conjunctions; it can also be described as an OR of ANDs.\n *\n *
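For example, `(a == 1 && b == 2) || c == 3` is in DNF, whereas `(a == 1 || b == 2) && c == 3` is not.\n *\n * 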
For more info, visit: https://en.wikipedia.org/wiki/Disjunctive_normal_form\n */\nfunction __PRIVATE_isDisjunctiveNormalForm(e) {\n return __PRIVATE_isSingleFieldFilter(e) || __PRIVATE_isFlatConjunction(e) ||\n /**\n * Returns true if the given filter is the disjunction of one or more \"flat conjunctions\" and\n * field filters. e.g. (a == 10) || (b==20 && c==30)\n */\n function __PRIVATE_isDisjunctionOfFieldFiltersAndFlatConjunctions(e) {\n if (e instanceof CompositeFilter && __PRIVATE_compositeFilterIsDisjunction(e)) {\n for (const t of e.getFilters()) if (!__PRIVATE_isSingleFieldFilter(t) && !__PRIVATE_isFlatConjunction(t)) return !1;\n return !0;\n }\n return !1;\n }(e);\n}\nfunction __PRIVATE_computeDistributedNormalForm(e) {\n if (__PRIVATE_hardAssert(e instanceof FieldFilter || e instanceof CompositeFilter), e instanceof FieldFilter) return e;\n if (1 === e.filters.length) return __PRIVATE_computeDistributedNormalForm(e.filters[0]);\n // Compute DNF for each of the subfilters first\n const t = e.filters.map(e => __PRIVATE_computeDistributedNormalForm(e));\n let n = CompositeFilter.create(t, e.op);\n return n = __PRIVATE_applyAssociation(n), __PRIVATE_isDisjunctiveNormalForm(n) ? n : (__PRIVATE_hardAssert(n instanceof CompositeFilter), __PRIVATE_hardAssert(__PRIVATE_compositeFilterIsConjunction(n)), __PRIVATE_hardAssert(n.filters.length > 1), n.filters.reduce((e, t) => __PRIVATE_applyDistribution(e, t)));\n}\nfunction __PRIVATE_applyDistribution(e, t) {\n let n;\n return __PRIVATE_hardAssert(e instanceof FieldFilter || e instanceof CompositeFilter), __PRIVATE_hardAssert(t instanceof FieldFilter || t instanceof CompositeFilter),\n // FieldFilter FieldFilter\n n = e instanceof FieldFilter ? t instanceof FieldFilter ? function __PRIVATE_applyDistributionFieldFilters(e, t) {\n // Conjunction distribution for two field filters is the conjunction of them.\n return CompositeFilter.create([e, t], \"and\" /* CompositeOperator.AND */);\n }(e, t) : __PRIVATE_applyDistributionFieldAndCompositeFilters(e, t) : t instanceof FieldFilter ? __PRIVATE_applyDistributionFieldAndCompositeFilters(t, e) : function __PRIVATE_applyDistributionCompositeFilters(e, t) {\n // There are four cases:\n // (A & B) & (C & D) --> (A & B & C & D)\n // (A & B) & (C | D) --> (A & B & C) | (A & B & D)\n // (A | B) & (C & D) --> (C & D & A) | (C & D & B)\n // (A | B) & (C | D) --> (A & C) | (A & D) | (B & C) | (B & D)\n // Case 1 is a merge.\n if (__PRIVATE_hardAssert(e.filters.length > 0 && t.filters.length > 0), __PRIVATE_compositeFilterIsConjunction(e) && __PRIVATE_compositeFilterIsConjunction(t)) return __PRIVATE_compositeFilterWithAddedFilters(e, t.getFilters());\n // Case 2,3,4 all have at least one side (lhs or rhs) that is a disjunction. In all three cases\n // we should take each element of the disjunction and distribute it over the other side, and\n // return the disjunction of the distribution results.\n const n = __PRIVATE_compositeFilterIsDisjunction(e) ? e : t,\n r = __PRIVATE_compositeFilterIsDisjunction(e) ? 
t : e,\n i = n.filters.map(e => __PRIVATE_applyDistribution(e, r));\n return CompositeFilter.create(i, \"or\" /* CompositeOperator.OR */);\n }(e, t), __PRIVATE_applyAssociation(n);\n}\nfunction __PRIVATE_applyDistributionFieldAndCompositeFilters(e, t) {\n // There are two cases:\n // A & (B & C) --> (A & B & C)\n // A & (B | C) --> (A & B) | (A & C)\n if (__PRIVATE_compositeFilterIsConjunction(t))\n // Case 1\n return __PRIVATE_compositeFilterWithAddedFilters(t, e.getFilters());\n {\n // Case 2\n const n = t.filters.map(t => __PRIVATE_applyDistribution(e, t));\n return CompositeFilter.create(n, \"or\" /* CompositeOperator.OR */);\n }\n}\n\n/**\n * Applies the associativity property to the given filter and returns the resulting filter.\n *\n *
\n * A | (B | C) == (A | B) | C == (A | B | C)\n * A & (B & C) == (A & B) & C == (A & B & C)\n * \n *\n * For more info, visit: https://en.wikipedia.org/wiki/Associative_property#Propositional_logic\n */\nfunction __PRIVATE_applyAssociation(e) {\n if (__PRIVATE_hardAssert(e instanceof FieldFilter || e instanceof CompositeFilter), e instanceof FieldFilter) return e;\n const t = e.getFilters();\n // If the composite filter only contains 1 filter, apply associativity to it.\n if (1 === t.length) return __PRIVATE_applyAssociation(t[0]);\n // Associativity applied to a flat composite filter results in itself.\n if (__PRIVATE_compositeFilterIsFlat(e)) return e;\n // First apply associativity to all subfilters. This will in turn recursively apply\n // associativity to all nested composite filters and field filters.\n const n = t.map(e => __PRIVATE_applyAssociation(e)),\n r = [];\n // For composite subfilters that perform the same kind of logical operation as `compositeFilter`,\n // take out their filters and add them to `compositeFilter`. For example:\n // compositeFilter = (A | (B | C | D))\n // compositeSubfilter = (B | C | D)\n // Result: (A | B | C | D)\n // Note that the `compositeSubfilter` has been eliminated, and its filters (B, C, D) have been\n // added to the top-level \"compositeFilter\".\n return n.forEach(t => {\n t instanceof FieldFilter ? r.push(t) : t instanceof CompositeFilter && (t.op === e.op ?\n // compositeFilter: (A | (B | C))\n // compositeSubfilter: (B | C)\n // Result: (A | B | C)\n r.push(...t.filters) :\n // compositeFilter: (A | (B & C))\n // compositeSubfilter: (B & C)\n // Result: (A | (B & C))\n r.push(t));\n }), 1 === r.length ? r[0] : CompositeFilter.create(r, e.op);\n}\n\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * An in-memory implementation of IndexManager.\n */\nclass __PRIVATE_MemoryIndexManager {\n constructor() {\n this._n = new __PRIVATE_MemoryCollectionParentIndex();\n }\n addToCollectionParentIndex(e, t) {\n return this._n.add(t), PersistencePromise.resolve();\n }\n getCollectionParents(e, t) {\n return PersistencePromise.resolve(this._n.getEntries(t));\n }\n addFieldIndex(e, t) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve();\n }\n deleteFieldIndex(e, t) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve();\n }\n deleteAllFieldIndexes(e) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve();\n }\n createTargetIndexes(e, t) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve();\n }\n getDocumentsMatchingTarget(e, t) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve(null);\n }\n getIndexType(e, t) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve(0 /* IndexType.NONE */);\n }\n 
getFieldIndexes(e, t) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve([]);\n }\n getNextCollectionGroupToUpdate(e) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve(null);\n }\n getMinOffset(e, t) {\n return PersistencePromise.resolve(IndexOffset.min());\n }\n getMinOffsetFromCollectionGroup(e, t) {\n return PersistencePromise.resolve(IndexOffset.min());\n }\n updateCollectionGroup(e, t, n) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve();\n }\n updateIndexEntries(e, t) {\n // Field indices are not supported with memory persistence.\n return PersistencePromise.resolve();\n }\n}\n\n/**\n * Internal implementation of the collection-parent index exposed by MemoryIndexManager.\n * Also used for in-memory caching by IndexedDbIndexManager and initial index population\n * in indexeddb_schema.ts\n */\nclass __PRIVATE_MemoryCollectionParentIndex {\n constructor() {\n this.index = {};\n }\n // Returns false if the entry already existed.\n add(e) {\n const t = e.lastSegment(),\n n = e.popLast(),\n r = this.index[t] || new SortedSet(ResourcePath.comparator),\n i = !r.has(n);\n return this.index[t] = r.add(n), i;\n }\n has(e) {\n const t = e.lastSegment(),\n n = e.popLast(),\n r = this.index[t];\n return r && r.has(n);\n }\n getEntries(e) {\n return (this.index[e] || new SortedSet(ResourcePath.comparator)).toArray();\n }\n}\n\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst de = new Uint8Array(0);\n\n/**\n * A persisted implementation of IndexManager.\n *\n * PORTING NOTE: Unlike iOS and Android, the Web SDK does not memoize index\n * data as it supports multi-tab access.\n */\nclass __PRIVATE_IndexedDbIndexManager {\n constructor(e, t) {\n this.databaseId = t,\n /**\n * An in-memory copy of the index entries we've already written since the SDK\n * launched. Used to avoid re-writing the same entry repeatedly.\n *\n * This is *NOT* a complete cache of what's in persistence and so can never be\n * used to satisfy reads.\n */\n this.an = new __PRIVATE_MemoryCollectionParentIndex(),\n /**\n * Maps from a target to its equivalent list of sub-targets. 
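(The list is computed lazily by `cn` and cached in this map.) 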
Each sub-target\n * contains only one term from the target's disjunctive normal form (DNF).\n */\n this.un = new ObjectMap(e => __PRIVATE_canonifyTarget(e), (e, t) => __PRIVATE_targetEquals(e, t)), this.uid = e.uid || \"\";\n }\n /**\n * Adds a new entry to the collection parent index.\n *\n * Repeated calls for the same collectionPath should be avoided within a\n * transaction as IndexedDbIndexManager only caches writes once a transaction\n * has been committed.\n */\n addToCollectionParentIndex(e, t) {\n if (!this.an.has(t)) {\n const n = t.lastSegment(),\n r = t.popLast();\n e.addOnCommittedListener(() => {\n // Add the collection to the in memory cache only if the transaction was\n // successfully committed.\n this.an.add(t);\n });\n const i = {\n collectionId: n,\n parent: __PRIVATE_encodeResourcePath(r)\n };\n return __PRIVATE_collectionParentsStore(e).put(i);\n }\n return PersistencePromise.resolve();\n }\n getCollectionParents(e, t) {\n const n = [],\n r = IDBKeyRange.bound([t, \"\"], [__PRIVATE_immediateSuccessor(t), \"\"], /*lowerOpen=*/!1, /*upperOpen=*/!0);\n return __PRIVATE_collectionParentsStore(e).U(r).next(e => {\n for (const r of e) {\n // This collectionId guard shouldn't be necessary (and isn't as long\n // as we're running in a real browser), but there's a bug in\n // indexeddbshim that breaks our range in our tests running in node:\n // https://github.com/axemclion/IndexedDBShim/issues/334\n if (r.collectionId !== t) break;\n n.push(__PRIVATE_decodeResourcePath(r.parent));\n }\n return n;\n });\n }\n addFieldIndex(e, t) {\n // TODO(indexing): Verify that the auto-incrementing index ID works in\n // Safari & Firefox.\n const n = __PRIVATE_indexConfigurationStore(e),\n r = function __PRIVATE_toDbIndexConfiguration(e) {\n return {\n indexId: e.indexId,\n collectionGroup: e.collectionGroup,\n fields: e.fields.map(e => [e.fieldPath.canonicalString(), e.kind])\n };\n }(t);\n delete r.indexId;\n // `indexId` is auto-populated by IndexedDb\n const i = n.add(r);\n if (t.indexState) {\n const n = __PRIVATE_indexStateStore(e);\n return i.next(e => {\n n.put(__PRIVATE_toDbIndexState(e, this.uid, t.indexState.sequenceNumber, t.indexState.offset));\n });\n }\n return i.next();\n }\n deleteFieldIndex(e, t) {\n const n = __PRIVATE_indexConfigurationStore(e),\n r = __PRIVATE_indexStateStore(e),\n i = __PRIVATE_indexEntriesStore(e);\n return n.delete(t.indexId).next(() => r.delete(IDBKeyRange.bound([t.indexId], [t.indexId + 1], /*lowerOpen=*/!1, /*upperOpen=*/!0))).next(() => i.delete(IDBKeyRange.bound([t.indexId], [t.indexId + 1], /*lowerOpen=*/!1, /*upperOpen=*/!0)));\n }\n deleteAllFieldIndexes(e) {\n const t = __PRIVATE_indexConfigurationStore(e),\n n = __PRIVATE_indexEntriesStore(e),\n r = __PRIVATE_indexStateStore(e);\n return t.j().next(() => n.j()).next(() => r.j());\n }\n createTargetIndexes(e, t) {\n return PersistencePromise.forEach(this.cn(t), t => this.getIndexType(e, t).next(n => {\n if (0 /* IndexType.NONE */ === n || 1 /* IndexType.PARTIAL */ === n) {\n const n = new __PRIVATE_TargetIndexMatcher(t).on();\n if (null != n) return this.addFieldIndex(e, n);\n }\n }));\n }\n getDocumentsMatchingTarget(e, t) {\n const n = __PRIVATE_indexEntriesStore(e);\n let r = !0;\n const i = new Map();\n return PersistencePromise.forEach(this.cn(t), t => this.ln(e, t).next(e => {\n r && (r = !!e), i.set(t, e);\n })).next(() => {\n if (r) {\n let e = __PRIVATE_documentKeySet();\n const r = [];\n return PersistencePromise.forEach(i, (i, s) => {\n 
__PRIVATE_logDebug(\"IndexedDbIndexManager\", `Using index ${function __PRIVATE_fieldIndexToString(e) {\n return `id=${e.indexId}|cg=${e.collectionGroup}|f=${e.fields.map(e => `${e.fieldPath}:${e.kind}`).join(\",\")}`;\n }(i)} to execute ${__PRIVATE_canonifyTarget(t)}`);\n const o = function __PRIVATE_targetGetArrayValues(e, t) {\n const n = __PRIVATE_fieldIndexGetArraySegment(t);\n if (void 0 === n) return null;\n for (const t of __PRIVATE_targetGetFieldFiltersForPath(e, n.fieldPath)) switch (t.op) {\n case \"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */:\n return t.value.arrayValue.values || [];\n case \"array-contains\" /* Operator.ARRAY_CONTAINS */:\n return [t.value];\n // Remaining filters are not array filters.\n }\n return null;\n }\n /**\n * Returns the list of values that are used in != or NOT_IN filters. Returns\n * `null` if there are no such filters.\n */(s, i),\n _ = function __PRIVATE_targetGetNotInValues(e, t) {\n const n = new Map();\n for (const r of __PRIVATE_fieldIndexGetDirectionalSegments(t)) for (const t of __PRIVATE_targetGetFieldFiltersForPath(e, r.fieldPath)) switch (t.op) {\n case \"==\" /* Operator.EQUAL */:\n case \"in\" /* Operator.IN */:\n // Encode equality prefix, which is encoded in the index value before\n // the inequality (e.g. `a == 'a' && b != 'b'` is encoded to\n // `value != 'ab'`).\n n.set(r.fieldPath.canonicalString(), t.value);\n break;\n case \"not-in\" /* Operator.NOT_IN */:\n case \"!=\" /* Operator.NOT_EQUAL */:\n // NotIn/NotEqual is always a suffix. There cannot be any remaining\n // segments and hence we can return early here.\n return n.set(r.fieldPath.canonicalString(), t.value), Array.from(n.values());\n // Remaining filters cannot be used as notIn bounds.\n }\n return null;\n }\n /**\n * Returns a lower bound of field values that can be used as a starting point to\n * scan the index defined by `fieldIndex`. Returns `MIN_VALUE` if no lower bound\n * exists.\n */(s, i),\n a = function __PRIVATE_targetGetLowerBound(e, t) {\n const n = [];\n let r = !0;\n // For each segment, retrieve a lower bound if there is a suitable filter or\n // startAt.\n for (const i of __PRIVATE_fieldIndexGetDirectionalSegments(t)) {\n const t = 0 /* IndexKind.ASCENDING */ === i.kind ? __PRIVATE_targetGetAscendingBound(e, i.fieldPath, e.startAt) : __PRIVATE_targetGetDescendingBound(e, i.fieldPath, e.startAt);\n n.push(t.value), r && (r = t.inclusive);\n }\n return new Bound(n, r);\n }\n /**\n * Returns an upper bound of field values that can be used as an ending point\n * when scanning the index defined by `fieldIndex`. Returns `MAX_VALUE` if no\n * upper bound exists.\n */(s, i),\n u = function __PRIVATE_targetGetUpperBound(e, t) {\n const n = [];\n let r = !0;\n // For each segment, retrieve an upper bound if there is a suitable filter or\n // endAt.\n for (const i of __PRIVATE_fieldIndexGetDirectionalSegments(t)) {\n const t = 0 /* IndexKind.ASCENDING */ === i.kind ? 
__PRIVATE_targetGetDescendingBound(e, i.fieldPath, e.endAt) : __PRIVATE_targetGetAscendingBound(e, i.fieldPath, e.endAt);\n n.push(t.value), r && (r = t.inclusive);\n }\n return new Bound(n, r);\n }(s, i),\n c = this.hn(i, s, a),\n l = this.hn(i, s, u),\n h = this.Pn(i, s, _),\n P = this.In(i.indexId, o, c, a.inclusive, l, u.inclusive, h);\n return PersistencePromise.forEach(P, i => n.G(i, t.limit).next(t => {\n t.forEach(t => {\n const n = DocumentKey.fromSegments(t.documentKey);\n e.has(n) || (e = e.add(n), r.push(n));\n });\n }));\n }).next(() => r);\n }\n return PersistencePromise.resolve(null);\n });\n }\n cn(e) {\n let t = this.un.get(e);\n if (t) return t;\n if (0 === e.filters.length) t = [e];else {\n t = __PRIVATE_getDnfTerms(CompositeFilter.create(e.filters, \"and\" /* CompositeOperator.AND */)).map(t => __PRIVATE_newTarget(e.path, e.collectionGroup, e.orderBy, t.getFilters(), e.limit, e.startAt, e.endAt));\n }\n return this.un.set(e, t), t;\n }\n /**\n * Constructs a key range query on `DbIndexEntryStore` that unions all\n * bounds.\n */\n In(e, t, n, r, i, s, o) {\n // The number of total index scans we union together. This is similar to a\n // distributed normal form, but adapted for array values. We create a single\n // index range per value in an ARRAY_CONTAINS or ARRAY_CONTAINS_ANY filter\n // combined with the values from the query bounds.\n const _ = (null != t ? t.length : 1) * Math.max(n.length, i.length),\n a = _ / (null != t ? t.length : 1),\n u = [];\n for (let c = 0; c < _; ++c) {\n const _ = t ? this.Tn(t[c / a]) : de,\n l = this.En(e, _, n[c % a], r),\n h = this.dn(e, _, i[c % a], s),\n P = o.map(t => this.En(e, _, t, /* inclusive= */!0));\n u.push(...this.createRange(l, h, P));\n }\n return u;\n }\n /** Generates the lower bound for `arrayValue` and `directionalValue`. */\n En(e, t, n, r) {\n const i = new __PRIVATE_IndexEntry(e, DocumentKey.empty(), t, n);\n return r ? i : i.Jt();\n }\n /** Generates the upper bound for `arrayValue` and `directionalValue`. */\n dn(e, t, n, r) {\n const i = new __PRIVATE_IndexEntry(e, DocumentKey.empty(), t, n);\n return r ? i.Jt() : i;\n }\n ln(e, t) {\n const n = new __PRIVATE_TargetIndexMatcher(t),\n r = null != t.collectionGroup ? t.collectionGroup : t.path.lastSegment();\n return this.getFieldIndexes(e, r).next(e => {\n // Return the index with the most number of segments.\n let t = null;\n for (const r of e) {\n n.tn(r) && (!t || r.fields.length > t.fields.length) && (t = r);\n }\n return t;\n });\n }\n getIndexType(e, t) {\n let n = 2 /* IndexType.FULL */;\n const r = this.cn(t);\n return PersistencePromise.forEach(r, t => this.ln(e, t).next(e => {\n e ? 0 /* IndexType.NONE */ !== n && e.fields.length < function __PRIVATE_targetGetSegmentCount(e) {\n let t = new SortedSet(FieldPath$1.comparator),\n n = !1;\n for (const r of e.filters) for (const e of r.getFlattenedFilters())\n // __name__ is not an explicit segment of any index, so we don't need to\n // count it.\n e.field.isKeyField() || (\n // ARRAY_CONTAINS or ARRAY_CONTAINS_ANY filters must be counted separately.\n // For instance, it is possible to have an index for \"a ARRAY a ASC\". Even\n // though these are on the same field, they should be counted as two\n // separate segments in an index.\n \"array-contains\" /* Operator.ARRAY_CONTAINS */ === e.op || \"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */ === e.op ? 
n = !0 : t = t.add(e.field));\n for (const n of e.orderBy)\n // __name__ is not an explicit segment of any index, so we don't need to\n // count it.\n n.field.isKeyField() || (t = t.add(n.field));\n return t.size + (n ? 1 : 0);\n }(t) && (n = 1 /* IndexType.PARTIAL */) : n = 0 /* IndexType.NONE */;\n })).next(() =>\n // OR queries have more than one sub-target (one sub-target per DNF term). We currently consider\n // OR queries that have a `limit` to have a partial index. For such queries we perform sorting\n // and apply the limit in memory as a post-processing step.\n function __PRIVATE_targetHasLimit(e) {\n return null !== e.limit;\n }(t) && r.length > 1 && 2 /* IndexType.FULL */ === n ? 1 /* IndexType.PARTIAL */ : n);\n }\n /**\n * Returns the byte encoded form of the directional values in the field index.\n * Returns `null` if the document does not have all fields specified in the\n * index.\n */\n An(e, t) {\n const n = new __PRIVATE_IndexByteEncoder();\n for (const r of __PRIVATE_fieldIndexGetDirectionalSegments(e)) {\n const e = t.data.field(r.fieldPath);\n if (null == e) return null;\n const i = n.Ht(r.kind);\n __PRIVATE_FirestoreIndexValueWriter.bt.Pt(e, i);\n }\n return n.Wt();\n }\n /** Encodes a single value to the ascending index format. */\n Tn(e) {\n const t = new __PRIVATE_IndexByteEncoder();\n return __PRIVATE_FirestoreIndexValueWriter.bt.Pt(e, t.Ht(0 /* IndexKind.ASCENDING */)), t.Wt();\n }\n /**\n * Returns an encoded form of the document key that sorts based on the key\n * ordering of the field index.\n */\n Rn(e, t) {\n const n = new __PRIVATE_IndexByteEncoder();\n return __PRIVATE_FirestoreIndexValueWriter.bt.Pt(__PRIVATE_refValue(this.databaseId, t), n.Ht(function __PRIVATE_fieldIndexGetKeyOrder(e) {\n const t = __PRIVATE_fieldIndexGetDirectionalSegments(e);\n return 0 === t.length ? 0 /* IndexKind.ASCENDING */ : t[t.length - 1].kind;\n }(e))), n.Wt();\n }\n /**\n * Encodes the given field values according to the specification in `target`.\n * For IN queries, a list of possible values is returned.\n */\n Pn(e, t, n) {\n if (null === n) return [];\n let r = [];\n r.push(new __PRIVATE_IndexByteEncoder());\n let i = 0;\n for (const s of __PRIVATE_fieldIndexGetDirectionalSegments(e)) {\n const e = n[i++];\n for (const n of r) if (this.Vn(t, s.fieldPath) && isArray(e)) r = this.mn(r, s, e);else {\n const t = n.Ht(s.kind);\n __PRIVATE_FirestoreIndexValueWriter.bt.Pt(e, t);\n }\n }\n return this.fn(r);\n }\n /**\n * Encodes the given bounds according to the specification in `target`. For IN\n * queries, a list of possible values is returned.\n */\n hn(e, t, n) {\n return this.Pn(e, t, n.position);\n }\n /** Returns the byte representation for the provided encoders. */\n fn(e) {\n const t = [];\n for (let n = 0; n < e.length; ++n) t[n] = e[n].Wt();\n return t;\n }\n /**\n * Creates a separate encoder for each element of an array.\n *\n * The method appends each value to all existing encoders (e.g. filter(\"a\",\n * \"==\", \"a1\").filter(\"b\", \"in\", [\"b1\", \"b2\"]) becomes [\"a1,b1\", \"a1,b2\"]). 
A\n * list of new encoders is returned.\n */\n mn(e, t, n) {\n const r = [...e],\n i = [];\n for (const e of n.arrayValue.values || []) for (const n of r) {\n const r = new __PRIVATE_IndexByteEncoder();\n r.seed(n.Wt()), __PRIVATE_FirestoreIndexValueWriter.bt.Pt(e, r.Ht(t.kind)), i.push(r);\n }\n return i;\n }\n Vn(e, t) {\n return !!e.filters.find(e => e instanceof FieldFilter && e.field.isEqual(t) && (\"in\" /* Operator.IN */ === e.op || \"not-in\" /* Operator.NOT_IN */ === e.op));\n }\n getFieldIndexes(e, t) {\n const n = __PRIVATE_indexConfigurationStore(e),\n r = __PRIVATE_indexStateStore(e);\n return (t ? n.U(\"collectionGroupIndex\", IDBKeyRange.bound(t, t)) : n.U()).next(e => {\n const t = [];\n return PersistencePromise.forEach(e, e => r.get([e.indexId, this.uid]).next(n => {\n t.push(function __PRIVATE_fromDbIndexConfiguration(e, t) {\n const n = t ? new IndexState(t.sequenceNumber, new IndexOffset(__PRIVATE_fromDbTimestamp(t.readTime), new DocumentKey(__PRIVATE_decodeResourcePath(t.documentKey)), t.largestBatchId)) : IndexState.empty(),\n r = e.fields.map(([e, t]) => new IndexSegment(FieldPath$1.fromServerFormat(e), t));\n return new FieldIndex(e.indexId, e.collectionGroup, r, n);\n }(e, n));\n })).next(() => t);\n });\n }\n getNextCollectionGroupToUpdate(e) {\n return this.getFieldIndexes(e).next(e => 0 === e.length ? null : (e.sort((e, t) => {\n const n = e.indexState.sequenceNumber - t.indexState.sequenceNumber;\n return 0 !== n ? n : __PRIVATE_primitiveComparator(e.collectionGroup, t.collectionGroup);\n }), e[0].collectionGroup));\n }\n updateCollectionGroup(e, t, n) {\n const r = __PRIVATE_indexConfigurationStore(e),\n i = __PRIVATE_indexStateStore(e);\n return this.gn(e).next(e => r.U(\"collectionGroupIndex\", IDBKeyRange.bound(t, t)).next(t => PersistencePromise.forEach(t, t => i.put(__PRIVATE_toDbIndexState(t.indexId, this.uid, e, n)))));\n }\n updateIndexEntries(e, t) {\n // Porting Note: `getFieldIndexes()` on Web does not cache index lookups as\n // it could be used across different IndexedDB transactions. As any cached\n // data might be invalidated by other multi-tab clients, we can only trust\n // data within a single IndexedDB transaction. We therefore add a cache\n // here.\n const n = new Map();\n return PersistencePromise.forEach(t, (t, r) => {\n const i = n.get(t.collectionGroup);\n return (i ? PersistencePromise.resolve(i) : this.getFieldIndexes(e, t.collectionGroup)).next(i => (n.set(t.collectionGroup, i), PersistencePromise.forEach(i, n => this.pn(e, t, n).next(t => {\n const i = this.yn(r, n);\n return t.isEqual(i) ? PersistencePromise.resolve() : this.wn(e, r, n, t, i);\n }))));\n });\n }\n Sn(e, t, n, r) {\n return __PRIVATE_indexEntriesStore(e).put({\n indexId: r.indexId,\n uid: this.uid,\n arrayValue: r.arrayValue,\n directionalValue: r.directionalValue,\n orderedDocumentKey: this.Rn(n, t.key),\n documentKey: t.key.path.toArray()\n });\n }\n bn(e, t, n, r) {\n return __PRIVATE_indexEntriesStore(e).delete([r.indexId, this.uid, r.arrayValue, r.directionalValue, this.Rn(n, t.key), t.key.path.toArray()]);\n }\n pn(e, t, n) {\n const r = __PRIVATE_indexEntriesStore(e);\n let i = new SortedSet(__PRIVATE_indexEntryComparator);\n return r.J({\n index: \"documentKeyIndex\",\n range: IDBKeyRange.only([n.indexId, this.uid, this.Rn(n, t)])\n }, (e, r) => {\n i = i.add(new __PRIVATE_IndexEntry(n.indexId, t, r.arrayValue, r.directionalValue));\n }).next(() => i);\n }\n /** Creates the index entries for the given document. 
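For an index with an array segment, one entry is added per element of the document's array value; otherwise a single entry with an empty array component is added. 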
*/\n yn(e, t) {\n let n = new SortedSet(__PRIVATE_indexEntryComparator);\n const r = this.An(t, e);\n if (null == r) return n;\n const i = __PRIVATE_fieldIndexGetArraySegment(t);\n if (null != i) {\n const s = e.data.field(i.fieldPath);\n if (isArray(s)) for (const i of s.arrayValue.values || []) n = n.add(new __PRIVATE_IndexEntry(t.indexId, e.key, this.Tn(i), r));\n } else n = n.add(new __PRIVATE_IndexEntry(t.indexId, e.key, de, r));\n return n;\n }\n /**\n * Updates the index entries for the provided document by deleting entries\n * that are no longer referenced in `newEntries` and adding all newly added\n * entries.\n */\n wn(e, t, n, r, i) {\n __PRIVATE_logDebug(\"IndexedDbIndexManager\", \"Updating index entries for document '%s'\", t.key);\n const s = [];\n return function __PRIVATE_diffSortedSets(e, t, n, r, i) {\n const s = e.getIterator(),\n o = t.getIterator();\n let _ = __PRIVATE_advanceIterator(s),\n a = __PRIVATE_advanceIterator(o);\n // Walk through the two sets at the same time, using the ordering defined by\n // `comparator`.\n for (; _ || a;) {\n let e = !1,\n t = !1;\n if (_ && a) {\n const r = n(_, a);\n r < 0 ?\n // The element was removed if the next element in our ordered\n // walkthrough is only in `before`.\n t = !0 : r > 0 && (\n // The element was added if the next element in our ordered walkthrough\n // is only in `after`.\n e = !0);\n } else null != _ ? t = !0 : e = !0;\n e ? (r(a), a = __PRIVATE_advanceIterator(o)) : t ? (i(_), _ = __PRIVATE_advanceIterator(s)) : (_ = __PRIVATE_advanceIterator(s), a = __PRIVATE_advanceIterator(o));\n }\n }(r, i, __PRIVATE_indexEntryComparator, /* onAdd= */r => {\n s.push(this.Sn(e, t, n, r));\n }, /* onRemove= */r => {\n s.push(this.bn(e, t, n, r));\n }), PersistencePromise.waitFor(s);\n }\n gn(e) {\n let t = 1;\n return __PRIVATE_indexStateStore(e).J({\n index: \"sequenceNumberIndex\",\n reverse: !0,\n range: IDBKeyRange.upperBound([this.uid, Number.MAX_SAFE_INTEGER])\n }, (e, n, r) => {\n r.done(), t = n.sequenceNumber + 1;\n }).next(() => t);\n }\n /**\n * Returns a new set of IDB ranges that splits the existing range and excludes\n * any values that match the `notInValue` from these ranges. As an example,\n * '[foo > 2 && foo != 3]` becomes `[foo > 2 && < 3, foo > 3]`.\n */\n createRange(e, t, n) {\n // The notIn values need to be sorted and unique so that we can return a\n // sorted set of non-overlapping ranges.\n n = n.sort((e, t) => __PRIVATE_indexEntryComparator(e, t)).filter((e, t, n) => !t || 0 !== __PRIVATE_indexEntryComparator(e, n[t - 1]));\n const r = [];\n r.push(e);\n for (const i of n) {\n const n = __PRIVATE_indexEntryComparator(i, e),\n s = __PRIVATE_indexEntryComparator(i, t);\n if (0 === n)\n // `notInValue` is the lower bound. 
We therefore need to raise the bound\n // to the next value.\n r[0] = e.Jt();else if (n > 0 && s < 0)\n // `notInValue` is in the middle of the range\n r.push(i), r.push(i.Jt());else if (s > 0)\n // `notInValue` (and all following values) are out of the range\n break;\n }\n r.push(t);\n const i = [];\n for (let e = 0; e < r.length; e += 2) {\n // If we encounter two bounds that will create an unmatchable key range,\n // then we return an empty set of key ranges.\n if (this.Dn(r[e], r[e + 1])) return [];\n const t = [r[e].indexId, this.uid, r[e].arrayValue, r[e].directionalValue, de, []],\n n = [r[e + 1].indexId, this.uid, r[e + 1].arrayValue, r[e + 1].directionalValue, de, []];\n i.push(IDBKeyRange.bound(t, n));\n }\n return i;\n }\n Dn(e, t) {\n // If lower bound is greater than the upper bound, then the key\n // range can never be matched.\n return __PRIVATE_indexEntryComparator(e, t) > 0;\n }\n getMinOffsetFromCollectionGroup(e, t) {\n return this.getFieldIndexes(e, t).next(__PRIVATE_getMinOffsetFromFieldIndexes);\n }\n getMinOffset(e, t) {\n return PersistencePromise.mapArray(this.cn(t), t => this.ln(e, t).next(e => e || fail())).next(__PRIVATE_getMinOffsetFromFieldIndexes);\n }\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the collectionParents\n * document store.\n */\nfunction __PRIVATE_collectionParentsStore(e) {\n return __PRIVATE_getStore(e, \"collectionParents\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the index entry object store.\n */\nfunction __PRIVATE_indexEntriesStore(e) {\n return __PRIVATE_getStore(e, \"indexEntries\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the index configuration object store.\n */\nfunction __PRIVATE_indexConfigurationStore(e) {\n return __PRIVATE_getStore(e, \"indexConfiguration\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the index state object store.\n */\nfunction __PRIVATE_indexStateStore(e) {\n return __PRIVATE_getStore(e, \"indexState\");\n}\nfunction __PRIVATE_getMinOffsetFromFieldIndexes(e) {\n __PRIVATE_hardAssert(0 !== e.length);\n let t = e[0].indexState.offset,\n n = t.largestBatchId;\n for (let r = 1; r < e.length; r++) {\n const i = e[r].indexState.offset;\n __PRIVATE_indexOffsetComparator(i, t) < 0 && (t = i), n < i.largestBatchId && (n = i.largestBatchId);\n }\n return new IndexOffset(t.readTime, t.documentKey, n);\n}\n\n/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst Ae = {\n didRun: !1,\n sequenceNumbersCollected: 0,\n targetsRemoved: 0,\n documentsRemoved: 0\n};\nclass LruParams {\n constructor(\n // When we attempt to collect, we will only do so if the cache size is greater than this\n // threshold. Passing `COLLECTION_DISABLED` here will cause collection to always be skipped.\n e,\n // The percentage of sequence numbers that we will attempt to collect\n t,\n // A cap on the total number of sequence numbers that will be collected. 
This prevents\n // us from collecting a huge number of sequence numbers if the cache has grown very large.\n n) {\n this.cacheSizeCollectionThreshold = e, this.percentileToCollect = t, this.maximumSequenceNumbersToCollect = n;\n }\n static withCacheSize(e) {\n return new LruParams(e, LruParams.DEFAULT_COLLECTION_PERCENTILE, LruParams.DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT);\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Delete a mutation batch and the associated document mutations.\n * @returns A PersistencePromise of the document mutations that were removed.\n */\nfunction removeMutationBatch(e, t, n) {\n const r = e.store(\"mutations\"),\n i = e.store(\"documentMutations\"),\n s = [],\n o = IDBKeyRange.only(n.batchId);\n let _ = 0;\n const a = r.J({\n range: o\n }, (e, t, n) => (_++, n.delete()));\n s.push(a.next(() => {\n __PRIVATE_hardAssert(1 === _);\n }));\n const u = [];\n for (const e of n.mutations) {\n const r = __PRIVATE_newDbDocumentMutationKey(t, e.key.path, n.batchId);\n s.push(i.delete(r)), u.push(e.key);\n }\n return PersistencePromise.waitFor(s).next(() => u);\n}\n\n/**\n * Returns an approximate size for the given document.\n */\nfunction __PRIVATE_dbDocumentSize(e) {\n if (!e) return 0;\n let t;\n if (e.document) t = e.document;else if (e.unknownDocument) t = e.unknownDocument;else {\n if (!e.noDocument) throw fail();\n t = e.noDocument;\n }\n return JSON.stringify(t).length;\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/** A mutation queue for a specific user, backed by IndexedDB. */\nLruParams.DEFAULT_COLLECTION_PERCENTILE = 10, LruParams.DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT = 1e3, LruParams.DEFAULT = new LruParams(41943040, LruParams.DEFAULT_COLLECTION_PERCENTILE, LruParams.DEFAULT_MAX_SEQUENCE_NUMBERS_TO_COLLECT), LruParams.DISABLED = new LruParams(-1, 0, 0);\nclass __PRIVATE_IndexedDbMutationQueue {\n constructor(\n /**\n * The normalized userId (e.g. null UID => \"\" userId) used to store /\n * retrieve mutations.\n */\n e, t, n, r) {\n this.userId = e, this.serializer = t, this.indexManager = n, this.referenceDelegate = r,\n /**\n * Caches the document keys for pending mutation batches. If the mutation\n * has been removed from IndexedDb, the cached value may continue to\n * be used to retrieve the batch's document keys. 
To remove a cached value\n * locally, `removeCachedMutationKeys()` should be invoked either directly\n * or through `removeMutationBatches()`.\n *\n * With multi-tab, when the primary client acknowledges or rejects a mutation,\n * this cache is used by secondary clients to invalidate the local\n * view of the documents that were previously affected by the mutation.\n */\n // PORTING NOTE: Multi-tab only.\n this.Cn = {};\n }\n /**\n * Creates a new mutation queue for the given user.\n * @param user - The user for which to create a mutation queue.\n * @param serializer - The serializer to use when persisting to IndexedDb.\n */\n static lt(e, t, n, r) {\n // TODO(mcg): Figure out what constraints there are on userIDs\n // In particular, are there any reserved characters? are empty ids allowed?\n // For the moment store these together in the same mutations table assuming\n // that empty userIDs aren't allowed.\n __PRIVATE_hardAssert(\"\" !== e.uid);\n const i = e.isAuthenticated() ? e.uid : \"\";\n return new __PRIVATE_IndexedDbMutationQueue(i, t, n, r);\n }\n checkEmpty(e) {\n let t = !0;\n const n = IDBKeyRange.bound([this.userId, Number.NEGATIVE_INFINITY], [this.userId, Number.POSITIVE_INFINITY]);\n return __PRIVATE_mutationsStore(e).J({\n index: \"userMutationsIndex\",\n range: n\n }, (e, n, r) => {\n t = !1, r.done();\n }).next(() => t);\n }\n addMutationBatch(e, t, n, r) {\n const i = __PRIVATE_documentMutationsStore(e),\n s = __PRIVATE_mutationsStore(e);\n // The IndexedDb implementation in Chrome (and Firefox) does not handle\n // compound indices that include auto-generated keys correctly. To ensure\n // that the index entry is added correctly in all browsers, we perform two\n // writes: The first write is used to retrieve the next auto-generated Batch\n // ID, and the second write populates the index and stores the actual\n // mutation batch.\n // See: https://bugs.chromium.org/p/chromium/issues/detail?id=701972\n // We write an empty object to obtain key\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return s.add({}).next(o => {\n __PRIVATE_hardAssert(\"number\" == typeof o);\n const _ = new MutationBatch(o, t, n, r),\n a = function __PRIVATE_toDbMutationBatch(e, t, n) {\n const r = n.baseMutations.map(t => toMutation(e.ct, t)),\n i = n.mutations.map(t => toMutation(e.ct, t));\n return {\n userId: t,\n batchId: n.batchId,\n localWriteTimeMs: n.localWriteTime.toMillis(),\n baseMutations: r,\n mutations: i\n };\n }(this.serializer, this.userId, _),\n u = [];\n let c = new SortedSet((e, t) => __PRIVATE_primitiveComparator(e.canonicalString(), t.canonicalString()));\n for (const e of r) {\n const t = __PRIVATE_newDbDocumentMutationKey(this.userId, e.key.path, o);\n c = c.add(e.key.path.popLast()), u.push(s.put(a)), u.push(i.put(t, O));\n }\n return c.forEach(t => {\n u.push(this.indexManager.addToCollectionParentIndex(e, t));\n }), e.addOnCommittedListener(() => {\n this.Cn[o] = _.keys();\n }), PersistencePromise.waitFor(u).next(() => _);\n });\n }\n lookupMutationBatch(e, t) {\n return __PRIVATE_mutationsStore(e).get(t).next(e => e ? (__PRIVATE_hardAssert(e.userId === this.userId), __PRIVATE_fromDbMutationBatch(this.serializer, e)) : null);\n }\n /**\n * Returns the document keys for the mutation batch with the given batchId.\n * For primary clients, this method returns `null` after\n * `removeMutationBatches()` has been called. 
Secondary clients return a\n * cached result until `removeCachedMutationKeys()` is invoked.\n */\n // PORTING NOTE: Multi-tab only.\n vn(e, t) {\n return this.Cn[t] ? PersistencePromise.resolve(this.Cn[t]) : this.lookupMutationBatch(e, t).next(e => {\n if (e) {\n const n = e.keys();\n return this.Cn[t] = n, n;\n }\n return null;\n });\n }\n getNextMutationBatchAfterBatchId(e, t) {\n const n = t + 1,\n r = IDBKeyRange.lowerBound([this.userId, n]);\n let i = null;\n return __PRIVATE_mutationsStore(e).J({\n index: \"userMutationsIndex\",\n range: r\n }, (e, t, r) => {\n t.userId === this.userId && (__PRIVATE_hardAssert(t.batchId >= n), i = __PRIVATE_fromDbMutationBatch(this.serializer, t)), r.done();\n }).next(() => i);\n }\n getHighestUnacknowledgedBatchId(e) {\n const t = IDBKeyRange.upperBound([this.userId, Number.POSITIVE_INFINITY]);\n let n = -1;\n return __PRIVATE_mutationsStore(e).J({\n index: \"userMutationsIndex\",\n range: t,\n reverse: !0\n }, (e, t, r) => {\n n = t.batchId, r.done();\n }).next(() => n);\n }\n getAllMutationBatches(e) {\n const t = IDBKeyRange.bound([this.userId, -1], [this.userId, Number.POSITIVE_INFINITY]);\n return __PRIVATE_mutationsStore(e).U(\"userMutationsIndex\", t).next(e => e.map(e => __PRIVATE_fromDbMutationBatch(this.serializer, e)));\n }\n getAllMutationBatchesAffectingDocumentKey(e, t) {\n // Scan the document-mutation index starting with a prefix starting with\n // the given documentKey.\n const n = __PRIVATE_newDbDocumentMutationPrefixForPath(this.userId, t.path),\n r = IDBKeyRange.lowerBound(n),\n i = [];\n return __PRIVATE_documentMutationsStore(e).J({\n range: r\n }, (n, r, s) => {\n const [o, _, a] = n,\n u = __PRIVATE_decodeResourcePath(_);\n // Only consider rows matching exactly the specific key of\n // interest. Note that because we order by path first, and we\n // order terminators before path separators, we'll encounter all\n // the index rows for documentKey contiguously. In particular, all\n // the rows for documentKey will occur before any rows for\n // documents nested in a subcollection beneath documentKey so we\n // can stop as soon as we hit any such row.\n if (o === this.userId && t.path.isEqual(u))\n // Look up the mutation batch in the store.\n return __PRIVATE_mutationsStore(e).get(a).next(e => {\n if (!e) throw fail();\n __PRIVATE_hardAssert(e.userId === this.userId), i.push(__PRIVATE_fromDbMutationBatch(this.serializer, e));\n });\n s.done();\n }).next(() => i);\n }\n getAllMutationBatchesAffectingDocumentKeys(e, t) {\n let n = new SortedSet(__PRIVATE_primitiveComparator);\n const r = [];\n return t.forEach(t => {\n const i = __PRIVATE_newDbDocumentMutationPrefixForPath(this.userId, t.path),\n s = IDBKeyRange.lowerBound(i),\n o = __PRIVATE_documentMutationsStore(e).J({\n range: s\n }, (e, r, i) => {\n const [s, o, _] = e,\n a = __PRIVATE_decodeResourcePath(o);\n // Only consider rows matching exactly the specific key of\n // interest. Note that because we order by path first, and we\n // order terminators before path separators, we'll encounter all\n // the index rows for documentKey contiguously. In particular, all\n // the rows for documentKey will occur before any rows for\n // documents nested in a subcollection beneath documentKey so we\n // can stop as soon as we hit any such row.\n s === this.userId && t.path.isEqual(a) ? 
n = n.add(_) : i.done();\n });\n r.push(o);\n }), PersistencePromise.waitFor(r).next(() => this.Fn(e, n));\n }\n getAllMutationBatchesAffectingQuery(e, t) {\n const n = t.path,\n r = n.length + 1,\n i = __PRIVATE_newDbDocumentMutationPrefixForPath(this.userId, n),\n s = IDBKeyRange.lowerBound(i);\n // Collect up unique batchIDs encountered during a scan of the index. Use a\n // SortedSet to accumulate batch IDs so they can be traversed in order in a\n // scan of the main table.\n let o = new SortedSet(__PRIVATE_primitiveComparator);\n return __PRIVATE_documentMutationsStore(e).J({\n range: s\n }, (e, t, i) => {\n const [s, _, a] = e,\n u = __PRIVATE_decodeResourcePath(_);\n s === this.userId && n.isPrefixOf(u) ?\n // Rows with document keys more than one segment longer than the\n // query path can't be matches. For example, a query on 'rooms'\n // can't match the document /rooms/abc/messages/xyx.\n // TODO(mcg): we'll need a different scanner when we implement\n // ancestor queries.\n u.length === r && (o = o.add(a)) : i.done();\n }).next(() => this.Fn(e, o));\n }\n Fn(e, t) {\n const n = [],\n r = [];\n // TODO(rockwood): Implement this using iterate.\n return t.forEach(t => {\n r.push(__PRIVATE_mutationsStore(e).get(t).next(e => {\n if (null === e) throw fail();\n __PRIVATE_hardAssert(e.userId === this.userId), n.push(__PRIVATE_fromDbMutationBatch(this.serializer, e));\n }));\n }), PersistencePromise.waitFor(r).next(() => n);\n }\n removeMutationBatch(e, t) {\n return removeMutationBatch(e._e, this.userId, t).next(n => (e.addOnCommittedListener(() => {\n this.Mn(t.batchId);\n }), PersistencePromise.forEach(n, t => this.referenceDelegate.markPotentiallyOrphaned(e, t))));\n }\n /**\n * Clears the cached keys for a mutation batch. This method should be\n * called by secondary clients after they process mutation updates.\n *\n * Note that this method does not have to be called from primary clients as\n * the corresponding cache entries are cleared when an acknowledged or\n * rejected batch is removed from the mutation queue.\n */\n // PORTING NOTE: Multi-tab only\n Mn(e) {\n delete this.Cn[e];\n }\n performConsistencyCheck(e) {\n return this.checkEmpty(e).next(t => {\n if (!t) return PersistencePromise.resolve();\n // Verify that there are no entries in the documentMutations index if\n // the queue is empty.\n const n = IDBKeyRange.lowerBound(\n /**\n * Creates a [userId] key for use in the DbDocumentMutations index to iterate\n * over all of a user's document mutations.\n */\n function __PRIVATE_newDbDocumentMutationPrefixForUser(e) {\n return [e];\n }(this.userId)),\n r = [];\n return __PRIVATE_documentMutationsStore(e).J({\n range: n\n }, (e, t, n) => {\n if (e[0] === this.userId) {\n const t = __PRIVATE_decodeResourcePath(e[1]);\n r.push(t);\n } else n.done();\n }).next(() => {\n __PRIVATE_hardAssert(0 === r.length);\n });\n });\n }\n containsKey(e, t) {\n return __PRIVATE_mutationQueueContainsKey(e, this.userId, t);\n }\n // PORTING NOTE: Multi-tab only (state is held in memory in other clients).\n /** Returns the mutation queue's metadata from IndexedDb. 
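If no metadata has been persisted for this user yet, a default entry with `lastAcknowledgedBatchId` of -1 and an empty `lastStreamToken` is returned. 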
*/\n xn(e) {\n return __PRIVATE_mutationQueuesStore(e).get(this.userId).next(e => e || {\n userId: this.userId,\n lastAcknowledgedBatchId: -1,\n lastStreamToken: \"\"\n });\n }\n}\n\n/**\n * @returns true if the mutation queue for the given user contains a pending\n * mutation for the given key.\n */\nfunction __PRIVATE_mutationQueueContainsKey(e, t, n) {\n const r = __PRIVATE_newDbDocumentMutationPrefixForPath(t, n.path),\n i = r[1],\n s = IDBKeyRange.lowerBound(r);\n let o = !1;\n return __PRIVATE_documentMutationsStore(e).J({\n range: s,\n H: !0\n }, (e, n, r) => {\n const [s, _, /*batchID*/a] = e;\n s === t && _ === i && (o = !0), r.done();\n }).next(() => o);\n}\n\n/** Returns true if any mutation queue contains the given document. */\n/**\n * Helper to get a typed SimpleDbStore for the mutations object store.\n */\nfunction __PRIVATE_mutationsStore(e) {\n return __PRIVATE_getStore(e, \"mutations\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the documentMutations object store.\n */\nfunction __PRIVATE_documentMutationsStore(e) {\n return __PRIVATE_getStore(e, \"documentMutations\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the mutationQueues object store.\n */\nfunction __PRIVATE_mutationQueuesStore(e) {\n return __PRIVATE_getStore(e, \"mutationQueues\");\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/** Offset to ensure non-overlapping target ids. */\n/**\n * Generates monotonically increasing target IDs for sending targets to the\n * watch stream.\n *\n * The client constructs two generators, one for the target cache, and one for\n * the sync engine (to generate limbo document targets). These\n * generators produce non-overlapping IDs (by using even and odd IDs\n * respectively).\n *\n * By separating the target ID space, the query cache can generate target IDs\n * that persist across client restarts, while the sync engine can independently\n * generate in-memory target IDs that are transient and can be reused after a\n * restart.\n */\nclass __PRIVATE_TargetIdGenerator {\n constructor(e) {\n this.On = e;\n }\n next() {\n return this.On += 2, this.On;\n }\n static Nn() {\n // The target cache generator must return '2' in its first call to `next()`\n // as there is no differentiation in the protocol layer between an unset\n // number and the number '0'. 
If we were to send a target with target ID\n // '0', the backend would consider it unset and replace it with its own ID.\n return new __PRIVATE_TargetIdGenerator(0);\n }\n static Ln() {\n // Sync engine assigns target IDs for limbo document detection.\n return new __PRIVATE_TargetIdGenerator(-1);\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_IndexedDbTargetCache {\n constructor(e, t) {\n this.referenceDelegate = e, this.serializer = t;\n }\n // PORTING NOTE: We don't cache global metadata for the target cache, since\n // some of it (in particular `highestTargetId`) can be modified by secondary\n // tabs. We could perhaps be more granular (and e.g. still cache\n // `lastRemoteSnapshotVersion` in memory) but for simplicity we currently go\n // to IndexedDb whenever we need to read metadata. We can revisit if it turns\n // out to have a meaningful performance impact.\n allocateTargetId(e) {\n return this.Bn(e).next(t => {\n const n = new __PRIVATE_TargetIdGenerator(t.highestTargetId);\n return t.highestTargetId = n.next(), this.kn(e, t).next(() => t.highestTargetId);\n });\n }\n getLastRemoteSnapshotVersion(e) {\n return this.Bn(e).next(e => SnapshotVersion.fromTimestamp(new Timestamp(e.lastRemoteSnapshotVersion.seconds, e.lastRemoteSnapshotVersion.nanoseconds)));\n }\n getHighestSequenceNumber(e) {\n return this.Bn(e).next(e => e.highestListenSequenceNumber);\n }\n setTargetsMetadata(e, t, n) {\n return this.Bn(e).next(r => (r.highestListenSequenceNumber = t, n && (r.lastRemoteSnapshotVersion = n.toTimestamp()), t > r.highestListenSequenceNumber && (r.highestListenSequenceNumber = t), this.kn(e, r)));\n }\n addTargetData(e, t) {\n return this.qn(e, t).next(() => this.Bn(e).next(n => (n.targetCount += 1, this.Qn(t, n), this.kn(e, n))));\n }\n updateTargetData(e, t) {\n return this.qn(e, t);\n }\n removeTargetData(e, t) {\n return this.removeMatchingKeysForTargetId(e, t.targetId).next(() => __PRIVATE_targetsStore(e).delete(t.targetId)).next(() => this.Bn(e)).next(t => (__PRIVATE_hardAssert(t.targetCount > 0), t.targetCount -= 1, this.kn(e, t)));\n }\n /**\n * Drops any targets with sequence number less than or equal to the upper bound, excepting those\n * present in `activeTargetIds`. 
Document associations for the removed targets are also removed.\n * Returns the number of targets removed.\n */\n removeTargets(e, t, n) {\n let r = 0;\n const i = [];\n return __PRIVATE_targetsStore(e).J((s, o) => {\n const _ = __PRIVATE_fromDbTarget(o);\n _.sequenceNumber <= t && null === n.get(_.targetId) && (r++, i.push(this.removeTargetData(e, _)));\n }).next(() => PersistencePromise.waitFor(i)).next(() => r);\n }\n /**\n * Call provided function with each `TargetData` that we have cached.\n */\n forEachTarget(e, t) {\n return __PRIVATE_targetsStore(e).J((e, n) => {\n const r = __PRIVATE_fromDbTarget(n);\n t(r);\n });\n }\n Bn(e) {\n return __PRIVATE_globalTargetStore(e).get(\"targetGlobalKey\").next(e => (__PRIVATE_hardAssert(null !== e), e));\n }\n kn(e, t) {\n return __PRIVATE_globalTargetStore(e).put(\"targetGlobalKey\", t);\n }\n qn(e, t) {\n return __PRIVATE_targetsStore(e).put(__PRIVATE_toDbTarget(this.serializer, t));\n }\n /**\n * In-place updates the provided metadata to account for values in the given\n * TargetData. Saving is done separately. Returns true if there were any\n * changes to the metadata.\n */\n Qn(e, t) {\n let n = !1;\n return e.targetId > t.highestTargetId && (t.highestTargetId = e.targetId, n = !0), e.sequenceNumber > t.highestListenSequenceNumber && (t.highestListenSequenceNumber = e.sequenceNumber, n = !0), n;\n }\n getTargetCount(e) {\n return this.Bn(e).next(e => e.targetCount);\n }\n getTargetData(e, t) {\n // Iterating by the canonicalId may yield more than one result because\n // canonicalId values are not required to be unique per target. This query\n // depends on the queryTargets index to be efficient.\n const n = __PRIVATE_canonifyTarget(t),\n r = IDBKeyRange.bound([n, Number.NEGATIVE_INFINITY], [n, Number.POSITIVE_INFINITY]);\n let i = null;\n return __PRIVATE_targetsStore(e).J({\n range: r,\n index: \"queryTargetsIndex\"\n }, (e, n, r) => {\n const s = __PRIVATE_fromDbTarget(n);\n // After finding a potential match, check that the target is\n // actually equal to the requested target.\n __PRIVATE_targetEquals(t, s.target) && (i = s, r.done());\n }).next(() => i);\n }\n addMatchingKeys(e, t, n) {\n // PORTING NOTE: The reverse index (documentsTargets) is maintained by\n // IndexedDb.\n const r = [],\n i = __PRIVATE_documentTargetStore(e);\n return t.forEach(t => {\n const s = __PRIVATE_encodeResourcePath(t.path);\n r.push(i.put({\n targetId: n,\n path: s\n })), r.push(this.referenceDelegate.addReference(e, n, t));\n }), PersistencePromise.waitFor(r);\n }\n removeMatchingKeys(e, t, n) {\n // PORTING NOTE: The reverse index (documentsTargets) is maintained by\n // IndexedDb.\n const r = __PRIVATE_documentTargetStore(e);\n return PersistencePromise.forEach(t, t => {\n const i = __PRIVATE_encodeResourcePath(t.path);\n return PersistencePromise.waitFor([r.delete([n, i]), this.referenceDelegate.removeReference(e, n, t)]);\n });\n }\n removeMatchingKeysForTargetId(e, t) {\n const n = __PRIVATE_documentTargetStore(e),\n r = IDBKeyRange.bound([t], [t + 1], /*lowerOpen=*/!1, /*upperOpen=*/!0);\n return n.delete(r);\n }\n getMatchingKeysForTargetId(e, t) {\n const n = IDBKeyRange.bound([t], [t + 1], /*lowerOpen=*/!1, /*upperOpen=*/!0),\n r = __PRIVATE_documentTargetStore(e);\n let i = __PRIVATE_documentKeySet();\n return r.J({\n range: n,\n H: !0\n }, (e, t, n) => {\n const r = __PRIVATE_decodeResourcePath(e[1]),\n s = new DocumentKey(r);\n i = i.add(s);\n }).next(() => i);\n }\n containsKey(e, t) {\n const n = __PRIVATE_encodeResourcePath(t.path),\n r = 
IDBKeyRange.bound([n], [__PRIVATE_immediateSuccessor(n)], /*lowerOpen=*/!1, /*upperOpen=*/!0);\n let i = 0;\n return __PRIVATE_documentTargetStore(e).J({\n index: \"documentTargetsIndex\",\n H: !0,\n range: r\n }, ([e, t], n, r) => {\n // Having a sentinel row for a document does not count as containing that document;\n // For the target cache, containing the document means the document is part of some\n // target.\n 0 !== e && (i++, r.done());\n }).next(() => i > 0);\n }\n /**\n * Looks up a TargetData entry by target ID.\n *\n * @param targetId - The target ID of the TargetData entry to look up.\n * @returns The cached TargetData entry, or null if the cache has no entry for\n * the target.\n */\n // PORTING NOTE: Multi-tab only.\n ot(e, t) {\n return __PRIVATE_targetsStore(e).get(t).next(e => e ? __PRIVATE_fromDbTarget(e) : null);\n }\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the queries object store.\n */\nfunction __PRIVATE_targetsStore(e) {\n return __PRIVATE_getStore(e, \"targets\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the target globals object store.\n */\nfunction __PRIVATE_globalTargetStore(e) {\n return __PRIVATE_getStore(e, \"targetGlobal\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the document target object store.\n */\nfunction __PRIVATE_documentTargetStore(e) {\n return __PRIVATE_getStore(e, \"targetDocuments\");\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nfunction __PRIVATE_bufferEntryComparator([e, t], [n, r]) {\n const i = __PRIVATE_primitiveComparator(e, n);\n return 0 === i ? __PRIVATE_primitiveComparator(t, r) : i;\n}\n\n/**\n * Used to calculate the nth sequence number. Keeps a rolling buffer of the\n * lowest n values passed to `addElement`, and finally reports the largest of\n * them in `maxValue`.\n */\nclass __PRIVATE_RollingSequenceNumberBuffer {\n constructor(e) {\n this.Kn = e, this.buffer = new SortedSet(__PRIVATE_bufferEntryComparator), this.$n = 0;\n }\n Un() {\n return ++this.$n;\n }\n Wn(e) {\n const t = [e, this.Un()];\n if (this.buffer.size < this.Kn) this.buffer = this.buffer.add(t);else {\n const e = this.buffer.last();\n __PRIVATE_bufferEntryComparator(t, e) < 0 && (this.buffer = this.buffer.delete(e).add(t));\n }\n }\n get maxValue() {\n // Guaranteed to be non-empty. If we decide we are not collecting any\n // sequence numbers, nthSequenceNumber below short-circuits. If we have\n // decided that we are collecting n sequence numbers, it's because n is some\n // percentage of the existing sequence numbers. That means we should never\n // be in a situation where we are collecting sequence numbers but don't\n // actually have any.\n return this.buffer.last()[0];\n }\n}\n\n/**\n * This class is responsible for the scheduling of LRU garbage collection. 
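As an illustrative sketch only (not code in this bundle; `persistence`, `lruParams`, `asyncQueue` and `localStore` are placeholders for objects the SDK wires up elsewhere), the scheduler is driven roughly like this:\n *\n *   const delegate = new __PRIVATE_IndexedDbLruDelegateImpl(persistence, lruParams);\n *   const scheduler = new __PRIVATE_LruScheduler(delegate.garbageCollector, asyncQueue, localStore);\n *   scheduler.start(); // if GC is enabled, the first pass is scheduled ~60s out, later passes every ~5 minutes\n *   scheduler.stop();  // cancels any pending pass, so `started` reports false\n *\n * 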
It handles checking\n * whether or not GC is enabled, as well as which delay to use before the next run.\n */\nclass __PRIVATE_LruScheduler {\n constructor(e, t, n) {\n this.garbageCollector = e, this.asyncQueue = t, this.localStore = n, this.Gn = null;\n }\n start() {\n -1 !== this.garbageCollector.params.cacheSizeCollectionThreshold && this.zn(6e4);\n }\n stop() {\n this.Gn && (this.Gn.cancel(), this.Gn = null);\n }\n get started() {\n return null !== this.Gn;\n }\n zn(e) {\n __PRIVATE_logDebug(\"LruGarbageCollector\", `Garbage collection scheduled in ${e}ms`), this.Gn = this.asyncQueue.enqueueAfterDelay(\"lru_garbage_collection\" /* TimerId.LruGarbageCollection */, e, async () => {\n this.Gn = null;\n try {\n await this.localStore.collectGarbage(this.garbageCollector);\n } catch (e) {\n __PRIVATE_isIndexedDbTransactionError(e) ? __PRIVATE_logDebug(\"LruGarbageCollector\", \"Ignoring IndexedDB error during garbage collection: \", e) : await __PRIVATE_ignoreIfPrimaryLeaseLoss(e);\n }\n await this.zn(3e5);\n });\n }\n}\n\n/**\n * Implements the steps for LRU garbage collection.\n */\nclass __PRIVATE_LruGarbageCollectorImpl {\n constructor(e, t) {\n this.jn = e, this.params = t;\n }\n calculateTargetCount(e, t) {\n return this.jn.Hn(e).next(e => Math.floor(t / 100 * e));\n }\n nthSequenceNumber(e, t) {\n if (0 === t) return PersistencePromise.resolve(__PRIVATE_ListenSequence.oe);\n const n = new __PRIVATE_RollingSequenceNumberBuffer(t);\n return this.jn.forEachTarget(e, e => n.Wn(e.sequenceNumber)).next(() => this.jn.Jn(e, e => n.Wn(e))).next(() => n.maxValue);\n }\n removeTargets(e, t, n) {\n return this.jn.removeTargets(e, t, n);\n }\n removeOrphanedDocuments(e, t) {\n return this.jn.removeOrphanedDocuments(e, t);\n }\n collect(e, t) {\n return -1 === this.params.cacheSizeCollectionThreshold ? (__PRIVATE_logDebug(\"LruGarbageCollector\", \"Garbage collection skipped; disabled\"), PersistencePromise.resolve(Ae)) : this.getCacheSize(e).next(n => n < this.params.cacheSizeCollectionThreshold ? (__PRIVATE_logDebug(\"LruGarbageCollector\", `Garbage collection skipped; Cache size ${n} is lower than threshold ${this.params.cacheSizeCollectionThreshold}`), Ae) : this.Yn(e, t));\n }\n getCacheSize(e) {\n return this.jn.getCacheSize(e);\n }\n Yn(e, t) {\n let n, r, i, s, o, a, u;\n const c = Date.now();\n return this.calculateTargetCount(e, this.params.percentileToCollect).next(t => (\n // Cap at the configured max\n t > this.params.maximumSequenceNumbersToCollect ? 
(__PRIVATE_logDebug(\"LruGarbageCollector\", `Capping sequence numbers to collect down to the maximum of ${this.params.maximumSequenceNumbersToCollect} from ${t}`), r = this.params.maximumSequenceNumbersToCollect) : r = t, s = Date.now(), this.nthSequenceNumber(e, r))).next(r => (n = r, o = Date.now(), this.removeTargets(e, n, t))).next(t => (i = t, a = Date.now(), this.removeOrphanedDocuments(e, n))).next(e => {\n if (u = Date.now(), __PRIVATE_getLogLevel() <= LogLevel.DEBUG) {\n __PRIVATE_logDebug(\"LruGarbageCollector\", `LRU Garbage Collection\\n\\tCounted targets in ${s - c}ms\\n\\tDetermined least recently used ${r} in ` + (o - s) + \"ms\\n\" + `\\tRemoved ${i} targets in ` + (a - o) + \"ms\\n\" + `\\tRemoved ${e} documents in ` + (u - a) + \"ms\\n\" + `Total Duration: ${u - c}ms`);\n }\n return PersistencePromise.resolve({\n didRun: !0,\n sequenceNumbersCollected: r,\n targetsRemoved: i,\n documentsRemoved: e\n });\n });\n }\n}\nfunction __PRIVATE_newLruGarbageCollector(e, t) {\n return new __PRIVATE_LruGarbageCollectorImpl(e, t);\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/** Provides LRU functionality for IndexedDB persistence. */\nclass __PRIVATE_IndexedDbLruDelegateImpl {\n constructor(e, t) {\n this.db = e, this.garbageCollector = __PRIVATE_newLruGarbageCollector(this, t);\n }\n Hn(e) {\n const t = this.Zn(e);\n return this.db.getTargetCache().getTargetCount(e).next(e => t.next(t => e + t));\n }\n Zn(e) {\n let t = 0;\n return this.Jn(e, e => {\n t++;\n }).next(() => t);\n }\n forEachTarget(e, t) {\n return this.db.getTargetCache().forEachTarget(e, t);\n }\n Jn(e, t) {\n return this.Xn(e, (e, n) => t(n));\n }\n addReference(e, t, n) {\n return __PRIVATE_writeSentinelKey(e, n);\n }\n removeReference(e, t, n) {\n return __PRIVATE_writeSentinelKey(e, n);\n }\n removeTargets(e, t, n) {\n return this.db.getTargetCache().removeTargets(e, t, n);\n }\n markPotentiallyOrphaned(e, t) {\n return __PRIVATE_writeSentinelKey(e, t);\n }\n /**\n * Returns true if anything would prevent this document from being garbage\n * collected, given that the document in question is not present in any\n * targets and has a sequence number less than or equal to the upper bound for\n * the collection run.\n */\n er(e, t) {\n return function __PRIVATE_mutationQueuesContainKey(e, t) {\n let n = !1;\n return __PRIVATE_mutationQueuesStore(e).Y(r => __PRIVATE_mutationQueueContainsKey(e, r, t).next(e => (e && (n = !0), PersistencePromise.resolve(!e)))).next(() => n);\n }(e, t);\n }\n removeOrphanedDocuments(e, t) {\n const n = this.db.getRemoteDocumentCache().newChangeBuffer(),\n r = [];\n let i = 0;\n return this.Xn(e, (s, o) => {\n if (o <= t) {\n const t = this.er(e, s).next(t => {\n if (!t)\n // Our size accounting requires us to read all documents before\n // removing them.\n return i++, n.getEntry(e, s).next(() => (n.removeEntry(s, SnapshotVersion.min()), __PRIVATE_documentTargetStore(e).delete(function __PRIVATE_sentinelKey$1(e) 
{\n return [0, __PRIVATE_encodeResourcePath(e.path)];\n }\n /**\n * @returns A value suitable for writing a sentinel row in the target-document\n * store.\n */(s))));\n });\n r.push(t);\n }\n }).next(() => PersistencePromise.waitFor(r)).next(() => n.apply(e)).next(() => i);\n }\n removeTarget(e, t) {\n const n = t.withSequenceNumber(e.currentSequenceNumber);\n return this.db.getTargetCache().updateTargetData(e, n);\n }\n updateLimboDocument(e, t) {\n return __PRIVATE_writeSentinelKey(e, t);\n }\n /**\n * Call provided function for each document in the cache that is 'orphaned'. Orphaned\n * means not a part of any target, so the only entry in the target-document index for\n * that document will be the sentinel row (targetId 0), which will also have the sequence\n * number for the last time the document was accessed.\n */\n Xn(e, t) {\n const n = __PRIVATE_documentTargetStore(e);\n let r,\n i = __PRIVATE_ListenSequence.oe;\n return n.J({\n index: \"documentTargetsIndex\"\n }, ([e, n], {\n path: s,\n sequenceNumber: o\n }) => {\n 0 === e ? (\n // if nextToReport is valid, report it, this is a new key so the\n // last one must not be a member of any targets.\n i !== __PRIVATE_ListenSequence.oe && t(new DocumentKey(__PRIVATE_decodeResourcePath(r)), i),\n // set nextToReport to be this sequence number. It's the next one we\n // might report, if we don't find any targets for this document.\n // Note that the sequence number must be defined when the targetId\n // is 0.\n i = o, r = s) :\n // set nextToReport to be invalid, we know we don't need to report\n // this one since we found a target for it.\n i = __PRIVATE_ListenSequence.oe;\n }).next(() => {\n // Since we report sequence numbers after getting to the next key, we\n // need to check if the last key we iterated over was an orphaned\n // document and report it.\n i !== __PRIVATE_ListenSequence.oe && t(new DocumentKey(__PRIVATE_decodeResourcePath(r)), i);\n });\n }\n getCacheSize(e) {\n return this.db.getRemoteDocumentCache().getSize(e);\n }\n}\nfunction __PRIVATE_writeSentinelKey(e, t) {\n return __PRIVATE_documentTargetStore(e).put(function __PRIVATE_sentinelRow(e, t) {\n return {\n targetId: 0,\n path: __PRIVATE_encodeResourcePath(e.path),\n sequenceNumber: t\n };\n }(t, e.currentSequenceNumber));\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * An in-memory buffer of entries to be written to a RemoteDocumentCache.\n * It can be used to batch up a set of changes to be written to the cache, but\n * additionally supports reading entries back with the `getEntry()` method,\n * falling back to the underlying RemoteDocumentCache if no entry is\n * buffered.\n *\n * Entries added to the cache *must* be read first. 
This is to facilitate\n * calculating the size delta of the pending changes.\n *\n * PORTING NOTE: This class was implemented then removed from other platforms.\n * If byte-counting ends up being needed on the other platforms, consider\n * porting this class as part of that implementation work.\n */\nclass RemoteDocumentChangeBuffer {\n constructor() {\n // A mapping of document key to the new cache entry that should be written.\n this.changes = new ObjectMap(e => e.toString(), (e, t) => e.isEqual(t)), this.changesApplied = !1;\n }\n /**\n * Buffers a `RemoteDocumentCache.addEntry()` call.\n *\n * You can only modify documents that have already been retrieved via\n * `getEntry()/getEntries()` (enforced via IndexedDbs `apply()`).\n */\n addEntry(e) {\n this.assertNotApplied(), this.changes.set(e.key, e);\n }\n /**\n * Buffers a `RemoteDocumentCache.removeEntry()` call.\n *\n * You can only remove documents that have already been retrieved via\n * `getEntry()/getEntries()` (enforced via IndexedDbs `apply()`).\n */\n removeEntry(e, t) {\n this.assertNotApplied(), this.changes.set(e, MutableDocument.newInvalidDocument(e).setReadTime(t));\n }\n /**\n * Looks up an entry in the cache. The buffered changes will first be checked,\n * and if no buffered change applies, this will forward to\n * `RemoteDocumentCache.getEntry()`.\n *\n * @param transaction - The transaction in which to perform any persistence\n * operations.\n * @param documentKey - The key of the entry to look up.\n * @returns The cached document or an invalid document if we have nothing\n * cached.\n */\n getEntry(e, t) {\n this.assertNotApplied();\n const n = this.changes.get(t);\n return void 0 !== n ? PersistencePromise.resolve(n) : this.getFromCache(e, t);\n }\n /**\n * Looks up several entries in the cache, forwarding to\n * `RemoteDocumentCache.getEntry()`.\n *\n * @param transaction - The transaction in which to perform any persistence\n * operations.\n * @param documentKeys - The keys of the entries to look up.\n * @returns A map of cached documents, indexed by key. If an entry cannot be\n * found, the corresponding key will be mapped to an invalid document.\n */\n getEntries(e, t) {\n return this.getAllFromCache(e, t);\n }\n /**\n * Applies buffered changes to the underlying RemoteDocumentCache, using\n * the provided transaction.\n */\n apply(e) {\n return this.assertNotApplied(), this.changesApplied = !0, this.applyChanges(e);\n }\n /** Helper to assert this.changes is not null */\n assertNotApplied() {}\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * The RemoteDocumentCache for IndexedDb. 
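Every write goes through a RemoteDocumentChangeBuffer so the cache's size metadata stays consistent; a minimal, hedged sketch of that flow (`cache`, `txn` and `key` are placeholders for the cache instance, a persistence transaction and a DocumentKey, none of which are constructed here):\n *\n *   const buffer = cache.newChangeBuffer({ trackRemovals: true });\n *   buffer.getEntry(txn, key) // entries must be read before they may be changed\n *     .next(() => { buffer.removeEntry(key, SnapshotVersion.min()); return buffer.apply(txn); });\n *\n * 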
To construct, invoke\n * `newIndexedDbRemoteDocumentCache()`.\n */\nclass __PRIVATE_IndexedDbRemoteDocumentCacheImpl {\n constructor(e) {\n this.serializer = e;\n }\n setIndexManager(e) {\n this.indexManager = e;\n }\n /**\n * Adds the supplied entries to the cache.\n *\n * All calls of `addEntry` are required to go through the RemoteDocumentChangeBuffer\n * returned by `newChangeBuffer()` to ensure proper accounting of metadata.\n */\n addEntry(e, t, n) {\n return __PRIVATE_remoteDocumentsStore(e).put(n);\n }\n /**\n * Removes a document from the cache.\n *\n * All calls of `removeEntry` are required to go through the RemoteDocumentChangeBuffer\n * returned by `newChangeBuffer()` to ensure proper accounting of metadata.\n */\n removeEntry(e, t, n) {\n return __PRIVATE_remoteDocumentsStore(e).delete(\n /**\n * Returns a key that can be used for document lookups via the primary key of\n * the DbRemoteDocument object store.\n */\n function __PRIVATE_dbReadTimeKey(e, t) {\n const n = e.path.toArray();\n return [/* prefix path */n.slice(0, n.length - 2), /* collection id */n[n.length - 2], __PRIVATE_toDbTimestampKey(t), /* document id */n[n.length - 1]];\n }\n /**\n * Returns a key that can be used for document lookups on the\n * `DbRemoteDocumentDocumentCollectionGroupIndex` index.\n */(t, n));\n }\n /**\n * Updates the current cache size.\n *\n * Callers to `addEntry()` and `removeEntry()` *must* call this afterwards to update the\n * cache's metadata.\n */\n updateMetadata(e, t) {\n return this.getMetadata(e).next(n => (n.byteSize += t, this.tr(e, n)));\n }\n getEntry(e, t) {\n let n = MutableDocument.newInvalidDocument(t);\n return __PRIVATE_remoteDocumentsStore(e).J({\n index: \"documentKeyIndex\",\n range: IDBKeyRange.only(__PRIVATE_dbKey(t))\n }, (e, r) => {\n n = this.nr(t, r);\n }).next(() => n);\n }\n /**\n * Looks up an entry in the cache.\n *\n * @param documentKey - The key of the entry to look up.\n * @returns The cached document entry and its size.\n */\n rr(e, t) {\n let n = {\n size: 0,\n document: MutableDocument.newInvalidDocument(t)\n };\n return __PRIVATE_remoteDocumentsStore(e).J({\n index: \"documentKeyIndex\",\n range: IDBKeyRange.only(__PRIVATE_dbKey(t))\n }, (e, r) => {\n n = {\n document: this.nr(t, r),\n size: __PRIVATE_dbDocumentSize(r)\n };\n }).next(() => n);\n }\n getEntries(e, t) {\n let n = __PRIVATE_mutableDocumentMap();\n return this.ir(e, t, (e, t) => {\n const r = this.nr(e, t);\n n = n.insert(e, r);\n }).next(() => n);\n }\n /**\n * Looks up several entries in the cache.\n *\n * @param documentKeys - The set of keys entries to look up.\n * @returns A map of documents indexed by key and a map of sizes indexed by\n * key (zero if the document does not exist).\n */\n sr(e, t) {\n let n = __PRIVATE_mutableDocumentMap(),\n r = new SortedMap(DocumentKey.comparator);\n return this.ir(e, t, (e, t) => {\n const i = this.nr(e, t);\n n = n.insert(e, i), r = r.insert(e, __PRIVATE_dbDocumentSize(t));\n }).next(() => ({\n documents: n,\n _r: r\n }));\n }\n ir(e, t, n) {\n if (t.isEmpty()) return PersistencePromise.resolve();\n let r = new SortedSet(__PRIVATE_dbKeyComparator);\n t.forEach(e => r = r.add(e));\n const i = IDBKeyRange.bound(__PRIVATE_dbKey(r.first()), __PRIVATE_dbKey(r.last())),\n s = r.getIterator();\n let o = s.getNext();\n return __PRIVATE_remoteDocumentsStore(e).J({\n index: \"documentKeyIndex\",\n range: i\n }, (e, t, r) => {\n const i = DocumentKey.fromSegments([...t.prefixPath, t.collectionGroup, t.documentId]);\n // Go through keys not found in 
cache.\n for (; o && __PRIVATE_dbKeyComparator(o, i) < 0;) n(o, null), o = s.getNext();\n o && o.isEqual(i) && (\n // Key found in cache.\n n(o, t), o = s.hasNext() ? s.getNext() : null),\n // Skip to the next key (if there is one).\n o ? r.$(__PRIVATE_dbKey(o)) : r.done();\n }).next(() => {\n // The rest of the keys are not in the cache. One case where `iterate`\n // above won't go through them is when the cache is empty.\n for (; o;) n(o, null), o = s.hasNext() ? s.getNext() : null;\n });\n }\n getDocumentsMatchingQuery(e, t, n, r, i) {\n const s = t.path,\n o = [s.popLast().toArray(), s.lastSegment(), __PRIVATE_toDbTimestampKey(n.readTime), n.documentKey.path.isEmpty() ? \"\" : n.documentKey.path.lastSegment()],\n _ = [s.popLast().toArray(), s.lastSegment(), [Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER], \"\"];\n return __PRIVATE_remoteDocumentsStore(e).U(IDBKeyRange.bound(o, _, !0)).next(e => {\n null == i || i.incrementDocumentReadCount(e.length);\n let n = __PRIVATE_mutableDocumentMap();\n for (const i of e) {\n const e = this.nr(DocumentKey.fromSegments(i.prefixPath.concat(i.collectionGroup, i.documentId)), i);\n e.isFoundDocument() && (__PRIVATE_queryMatches(t, e) || r.has(e.key)) && (\n // Either the document matches the given query, or it is mutated.\n n = n.insert(e.key, e));\n }\n return n;\n });\n }\n getAllFromCollectionGroup(e, t, n, r) {\n let i = __PRIVATE_mutableDocumentMap();\n const s = __PRIVATE_dbCollectionGroupKey(t, n),\n o = __PRIVATE_dbCollectionGroupKey(t, IndexOffset.max());\n return __PRIVATE_remoteDocumentsStore(e).J({\n index: \"collectionGroupIndex\",\n range: IDBKeyRange.bound(s, o, !0)\n }, (e, t, n) => {\n const s = this.nr(DocumentKey.fromSegments(t.prefixPath.concat(t.collectionGroup, t.documentId)), t);\n i = i.insert(s.key, s), i.size === r && n.done();\n }).next(() => i);\n }\n newChangeBuffer(e) {\n return new __PRIVATE_IndexedDbRemoteDocumentChangeBuffer(this, !!e && e.trackRemovals);\n }\n getSize(e) {\n return this.getMetadata(e).next(e => e.byteSize);\n }\n getMetadata(e) {\n return __PRIVATE_documentGlobalStore(e).get(\"remoteDocumentGlobalKey\").next(e => (__PRIVATE_hardAssert(!!e), e));\n }\n tr(e, t) {\n return __PRIVATE_documentGlobalStore(e).put(\"remoteDocumentGlobalKey\", t);\n }\n /**\n * Decodes `dbRemoteDoc` and returns the document (or an invalid document if\n * the document corresponds to the format used for sentinel deletes).\n */\n nr(e, t) {\n if (t) {\n const e = __PRIVATE_fromDbRemoteDocument(this.serializer, t);\n // Whether the document is a sentinel removal and should only be used in the\n // `getNewDocumentChanges()`\n if (!(e.isNoDocument() && e.version.isEqual(SnapshotVersion.min()))) return e;\n }\n return MutableDocument.newInvalidDocument(e);\n }\n}\n\n/** Creates a new IndexedDbRemoteDocumentCache. */\nfunction __PRIVATE_newIndexedDbRemoteDocumentCache(e) {\n return new __PRIVATE_IndexedDbRemoteDocumentCacheImpl(e);\n}\n\n/**\n * Handles the details of adding and updating documents in the IndexedDbRemoteDocumentCache.\n *\n * Unlike the MemoryRemoteDocumentChangeBuffer, the IndexedDb implementation computes the size\n * delta for all submitted changes. 
This avoids having to re-read all documents from IndexedDb\n * when we apply the changes.\n */\nclass __PRIVATE_IndexedDbRemoteDocumentChangeBuffer extends RemoteDocumentChangeBuffer {\n /**\n * @param documentCache - The IndexedDbRemoteDocumentCache to apply the changes to.\n * @param trackRemovals - Whether to create sentinel deletes that can be tracked by\n * `getNewDocumentChanges()`.\n */\n constructor(e, t) {\n super(), this.ar = e, this.trackRemovals = t,\n // A map of document sizes and read times prior to applying the changes in\n // this buffer.\n this.ur = new ObjectMap(e => e.toString(), (e, t) => e.isEqual(t));\n }\n applyChanges(e) {\n const t = [];\n let n = 0,\n r = new SortedSet((e, t) => __PRIVATE_primitiveComparator(e.canonicalString(), t.canonicalString()));\n return this.changes.forEach((i, s) => {\n const o = this.ur.get(i);\n if (t.push(this.ar.removeEntry(e, i, o.readTime)), s.isValidDocument()) {\n const _ = __PRIVATE_toDbRemoteDocument(this.ar.serializer, s);\n r = r.add(i.path.popLast());\n const a = __PRIVATE_dbDocumentSize(_);\n n += a - o.size, t.push(this.ar.addEntry(e, i, _));\n } else if (n -= o.size, this.trackRemovals) {\n // In order to track removals, we store a \"sentinel delete\" in the\n // RemoteDocumentCache. This entry is represented by a NoDocument\n // with a version of 0 and ignored by `maybeDecodeDocument()` but\n // preserved in `getNewDocumentChanges()`.\n const n = __PRIVATE_toDbRemoteDocument(this.ar.serializer, s.convertToNoDocument(SnapshotVersion.min()));\n t.push(this.ar.addEntry(e, i, n));\n }\n }), r.forEach(n => {\n t.push(this.ar.indexManager.addToCollectionParentIndex(e, n));\n }), t.push(this.ar.updateMetadata(e, n)), PersistencePromise.waitFor(t);\n }\n getFromCache(e, t) {\n // Record the size of everything we load from the cache so we can compute a delta later.\n return this.ar.rr(e, t).next(e => (this.ur.set(t, {\n size: e.size,\n readTime: e.document.readTime\n }), e.document));\n }\n getAllFromCache(e, t) {\n // Record the size of everything we load from the cache so we can compute\n // a delta later.\n return this.ar.sr(e, t).next(({\n documents: e,\n _r: t\n }) => (\n // Note: `getAllFromCache` returns two maps instead of a single map from\n // keys to `DocumentSizeEntry`s. This is to allow returning the\n // `MutableDocumentMap` directly, without a conversion.\n t.forEach((t, n) => {\n this.ur.set(t, {\n size: n,\n readTime: e.get(t).readTime\n });\n }), e));\n }\n}\nfunction __PRIVATE_documentGlobalStore(e) {\n return __PRIVATE_getStore(e, \"remoteDocumentGlobal\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the remoteDocuments object store.\n */\nfunction __PRIVATE_remoteDocumentsStore(e) {\n return __PRIVATE_getStore(e, \"remoteDocumentsV14\");\n}\n\n/**\n * Returns a key that can be used for document lookups on the\n * `DbRemoteDocumentDocumentKeyIndex` index.\n */\nfunction __PRIVATE_dbKey(e) {\n const t = e.path.toArray();\n return [/* prefix path */t.slice(0, t.length - 2), /* collection id */t[t.length - 2], /* document id */t[t.length - 1]];\n}\nfunction __PRIVATE_dbCollectionGroupKey(e, t) {\n const n = t.documentKey.path.toArray();\n return [/* collection id */e, __PRIVATE_toDbTimestampKey(t.readTime), /* prefix path */n.slice(0, n.length - 2), /* document id */n.length > 0 ? 
n[n.length - 1] : \"\"];\n}\n\n/**\n * Comparator that compares document keys according to the primary key sorting\n * used by the `DbRemoteDocumentDocument` store (by prefix path, collection id\n * and then document ID).\n *\n * Visible for testing.\n */\nfunction __PRIVATE_dbKeyComparator(e, t) {\n const n = e.path.toArray(),\n r = t.path.toArray();\n // The ordering is based on https://chromium.googlesource.com/chromium/blink/+/fe5c21fef94dae71c1c3344775b8d8a7f7e6d9ec/Source/modules/indexeddb/IDBKey.cpp#74\n let i = 0;\n for (let e = 0; e < n.length - 2 && e < r.length - 2; ++e) if (i = __PRIVATE_primitiveComparator(n[e], r[e]), i) return i;\n return i = __PRIVATE_primitiveComparator(n.length, r.length), i || (i = __PRIVATE_primitiveComparator(n[n.length - 2], r[r.length - 2]), i || __PRIVATE_primitiveComparator(n[n.length - 1], r[r.length - 1]));\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Schema Version for the Web client:\n * 1. Initial version including Mutation Queue, Query Cache, and Remote\n * Document Cache\n * 2. Used to ensure a targetGlobal object exists and add targetCount to it. No\n * longer required because migration 3 unconditionally clears it.\n * 3. Dropped and re-created Query Cache to deal with cache corruption related\n * to limbo resolution. Addresses\n * https://github.com/firebase/firebase-ios-sdk/issues/1548\n * 4. Multi-Tab Support.\n * 5. Removal of held write acks.\n * 6. Create document global for tracking document cache size.\n * 7. Ensure every cached document has a sentinel row with a sequence number.\n * 8. Add collection-parent index for Collection Group queries.\n * 9. Change RemoteDocumentChanges store to be keyed by readTime rather than\n * an auto-incrementing ID. This is required for Index-Free queries.\n * 10. Rewrite the canonical IDs to the explicit Protobuf-based format.\n * 11. Add bundles and named_queries for bundle support.\n * 12. Add document overlays.\n * 13. Rewrite the keys of the remote document cache to allow for efficient\n * document lookup via `getAll()`.\n * 14. Add overlays.\n * 15. Add indexing support.\n * 16. 
Parse timestamp strings before creating index entries.\n */\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Represents a local view (overlay) of a document, and the fields that are\n * locally mutated.\n */\nclass OverlayedDocument {\n constructor(e,\n /**\n * The fields that are locally mutated by patch mutations.\n *\n * If the overlayed\tdocument is from set or delete mutations, this is `null`.\n * If there is no overlay (mutation) for the document, this is an empty `FieldMask`.\n */\n t) {\n this.overlayedDocument = e, this.mutatedFields = t;\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A readonly view of the local state of all documents we're tracking (i.e. we\n * have a cached version in remoteDocumentCache or local mutations for the\n * document). The view is computed by applying the mutations in the\n * MutationQueue to the RemoteDocumentCache.\n */\nclass LocalDocumentsView {\n constructor(e, t, n, r) {\n this.remoteDocumentCache = e, this.mutationQueue = t, this.documentOverlayCache = n, this.indexManager = r;\n }\n /**\n * Get the local view of the document identified by `key`.\n *\n * @returns Local view of the document or null if we don't have any cached\n * state for it.\n */\n getDocument(e, t) {\n let n = null;\n return this.documentOverlayCache.getOverlay(e, t).next(r => (n = r, this.remoteDocumentCache.getEntry(e, t))).next(e => (null !== n && __PRIVATE_mutationApplyToLocalView(n.mutation, e, FieldMask.empty(), Timestamp.now()), e));\n }\n /**\n * Gets the local view of the documents identified by `keys`.\n *\n * If we don't have cached state for a document in `keys`, a NoDocument will\n * be stored for that key in the resulting set.\n */\n getDocuments(e, t) {\n return this.remoteDocumentCache.getEntries(e, t).next(t => this.getLocalViewOfDocuments(e, t, __PRIVATE_documentKeySet()).next(() => t));\n }\n /**\n * Similar to `getDocuments`, but creates the local view from the given\n * `baseDocs` without retrieving documents from the local store.\n *\n * @param transaction - The transaction this operation is scoped to.\n * @param docs - The documents to apply local mutations to get the local views.\n * @param existenceStateChanged - The set of document keys whose existence state\n * is changed. 
This is useful to determine if some documents overlay needs\n * to be recalculated.\n */\n getLocalViewOfDocuments(e, t, n = __PRIVATE_documentKeySet()) {\n const r = __PRIVATE_newOverlayMap();\n return this.populateOverlays(e, r, t).next(() => this.computeViews(e, t, r, n).next(e => {\n let t = documentMap();\n return e.forEach((e, n) => {\n t = t.insert(e, n.overlayedDocument);\n }), t;\n }));\n }\n /**\n * Gets the overlayed documents for the given document map, which will include\n * the local view of those documents and a `FieldMask` indicating which fields\n * are mutated locally, `null` if overlay is a Set or Delete mutation.\n */\n getOverlayedDocuments(e, t) {\n const n = __PRIVATE_newOverlayMap();\n return this.populateOverlays(e, n, t).next(() => this.computeViews(e, t, n, __PRIVATE_documentKeySet()));\n }\n /**\n * Fetches the overlays for {@code docs} and adds them to provided overlay map\n * if the map does not already contain an entry for the given document key.\n */\n populateOverlays(e, t, n) {\n const r = [];\n return n.forEach(e => {\n t.has(e) || r.push(e);\n }), this.documentOverlayCache.getOverlays(e, r).next(e => {\n e.forEach((e, n) => {\n t.set(e, n);\n });\n });\n }\n /**\n * Computes the local view for the given documents.\n *\n * @param docs - The documents to compute views for. It also has the base\n * version of the documents.\n * @param overlays - The overlays that need to be applied to the given base\n * version of the documents.\n * @param existenceStateChanged - A set of documents whose existence states\n * might have changed. This is used to determine if we need to re-calculate\n * overlays from mutation queues.\n * @return A map represents the local documents view.\n */\n computeViews(e, t, n, r) {\n let i = __PRIVATE_mutableDocumentMap();\n const s = __PRIVATE_newDocumentKeyMap(),\n o = function __PRIVATE_newOverlayedDocumentMap() {\n return __PRIVATE_newDocumentKeyMap();\n }();\n return t.forEach((e, t) => {\n const o = n.get(t.key);\n // Recalculate an overlay if the document's existence state changed due to\n // a remote event *and* the overlay is a PatchMutation. This is because\n // document existence state can change if some patch mutation's\n // preconditions are met.\n // NOTE: we recalculate when `overlay` is undefined as well, because there\n // might be a patch mutation whose precondition does not match before the\n // change (hence overlay is undefined), but would now match.\n r.has(t.key) && (void 0 === o || o.mutation instanceof __PRIVATE_PatchMutation) ? i = i.insert(t.key, t) : void 0 !== o ? (s.set(t.key, o.mutation.getFieldMask()), __PRIVATE_mutationApplyToLocalView(o.mutation, t, o.mutation.getFieldMask(), Timestamp.now())) :\n // no overlay exists\n // Using EMPTY to indicate there is no overlay for the document.\n s.set(t.key, FieldMask.empty());\n }), this.recalculateAndSaveOverlays(e, i).next(e => (e.forEach((e, t) => s.set(e, t)), t.forEach((e, t) => {\n var n;\n return o.set(e, new OverlayedDocument(t, null !== (n = s.get(e)) && void 0 !== n ? 
n : null));\n }), o));\n }\n recalculateAndSaveOverlays(e, t) {\n const n = __PRIVATE_newDocumentKeyMap();\n // A reverse lookup map from batch id to the documents within that batch.\n let r = new SortedMap((e, t) => e - t),\n i = __PRIVATE_documentKeySet();\n return this.mutationQueue.getAllMutationBatchesAffectingDocumentKeys(e, t).next(e => {\n for (const i of e) i.keys().forEach(e => {\n const s = t.get(e);\n if (null === s) return;\n let o = n.get(e) || FieldMask.empty();\n o = i.applyToLocalView(s, o), n.set(e, o);\n const _ = (r.get(i.batchId) || __PRIVATE_documentKeySet()).add(e);\n r = r.insert(i.batchId, _);\n });\n }).next(() => {\n const s = [],\n o = r.getReverseIterator();\n // Iterate in descending order of batch IDs, and skip documents that are\n // already saved.\n for (; o.hasNext();) {\n const r = o.getNext(),\n _ = r.key,\n a = r.value,\n u = __PRIVATE_newMutationMap();\n a.forEach(e => {\n if (!i.has(e)) {\n const r = __PRIVATE_calculateOverlayMutation(t.get(e), n.get(e));\n null !== r && u.set(e, r), i = i.add(e);\n }\n }), s.push(this.documentOverlayCache.saveOverlays(e, _, u));\n }\n return PersistencePromise.waitFor(s);\n }).next(() => n);\n }\n /**\n * Recalculates overlays by reading the documents from remote document cache\n * first, and saves them after they are calculated.\n */\n recalculateAndSaveOverlaysForDocumentKeys(e, t) {\n return this.remoteDocumentCache.getEntries(e, t).next(t => this.recalculateAndSaveOverlays(e, t));\n }\n /**\n * Performs a query against the local view of all documents.\n *\n * @param transaction - The persistence transaction.\n * @param query - The query to match documents against.\n * @param offset - Read time and key to start scanning by (exclusive).\n * @param context - A optional tracker to keep a record of important details\n * during database local query execution.\n */\n getDocumentsMatchingQuery(e, t, n, r) {\n /**\n * Returns whether the query matches a single document by path (rather than a\n * collection).\n */\n return function __PRIVATE_isDocumentQuery$1(e) {\n return DocumentKey.isDocumentKey(e.path) && null === e.collectionGroup && 0 === e.filters.length;\n }(t) ? this.getDocumentsMatchingDocumentQuery(e, t.path) : __PRIVATE_isCollectionGroupQuery(t) ? this.getDocumentsMatchingCollectionGroupQuery(e, t, n, r) : this.getDocumentsMatchingCollectionQuery(e, t, n, r);\n }\n /**\n * Given a collection group, returns the next documents that follow the provided offset, along\n * with an updated batch ID.\n *\n *
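 Before the detailed contract below, an illustrative call shape (here `localDocuments`, `txn` and `offset` are placeholders, and the consumer named in the trailing comment is only an example):\n *\n *   localDocuments.getNextDocuments(txn, 'comments', offset, 100)\n *     .next(({ batchId, changes }) => changes); // batchId is used to build the next offset, e.g. by the index backfiller\n *\n * 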
The documents returned by this method are ordered by remote version from the provided\n * offset. If there are no more remote documents after the provided offset, documents with\n * mutations in order of batch id from the offset are returned. Since all documents in a batch are\n * returned together, the total number of documents returned can exceed {@code count}.\n *\n * @param transaction\n * @param collectionGroup The collection group for the documents.\n * @param offset The offset to index into.\n * @param count The number of documents to return\n * @return A LocalWriteResult with the documents that follow the provided offset and the last processed batch id.\n */\n getNextDocuments(e, t, n, r) {\n return this.remoteDocumentCache.getAllFromCollectionGroup(e, t, n, r).next(i => {\n const s = r - i.size > 0 ? this.documentOverlayCache.getOverlaysForCollectionGroup(e, t, n.largestBatchId, r - i.size) : PersistencePromise.resolve(__PRIVATE_newOverlayMap());\n // The callsite will use the largest batch ID together with the latest read time to create\n // a new index offset. Since we only process batch IDs if all remote documents have been read,\n // no overlay will increase the overall read time. This is why we only need to special case\n // the batch id.\n let o = -1,\n _ = i;\n return s.next(t => PersistencePromise.forEach(t, (t, n) => (o < n.largestBatchId && (o = n.largestBatchId), i.get(t) ? PersistencePromise.resolve() : this.remoteDocumentCache.getEntry(e, t).next(e => {\n _ = _.insert(t, e);\n }))).next(() => this.populateOverlays(e, t, i)).next(() => this.computeViews(e, _, t, __PRIVATE_documentKeySet())).next(e => ({\n batchId: o,\n changes: __PRIVATE_convertOverlayedDocumentMapToDocumentMap(e)\n })));\n });\n }\n getDocumentsMatchingDocumentQuery(e, t) {\n // Just do a simple document lookup.\n return this.getDocument(e, new DocumentKey(t)).next(e => {\n let t = documentMap();\n return e.isFoundDocument() && (t = t.insert(e.key, e)), t;\n });\n }\n getDocumentsMatchingCollectionGroupQuery(e, t, n, r) {\n const i = t.collectionGroup;\n let s = documentMap();\n return this.indexManager.getCollectionParents(e, i).next(o => PersistencePromise.forEach(o, o => {\n const _ = function __PRIVATE_asCollectionQueryAtPath(e, t) {\n return new __PRIVATE_QueryImpl(t, /*collectionGroup=*/null, e.explicitOrderBy.slice(), e.filters.slice(), e.limit, e.limitType, e.startAt, e.endAt);\n }(t, o.child(i));\n return this.getDocumentsMatchingCollectionQuery(e, _, n, r).next(e => {\n e.forEach((e, t) => {\n s = s.insert(e, t);\n });\n });\n }).next(() => s));\n }\n getDocumentsMatchingCollectionQuery(e, t, n, r) {\n // Query the remote documents and overlay mutations.\n let i;\n return this.documentOverlayCache.getOverlaysForCollection(e, t.path, n.largestBatchId).next(s => (i = s, this.remoteDocumentCache.getDocumentsMatchingQuery(e, t, n, i, r))).next(e => {\n // As documents might match the query because of their overlay we need to\n // include documents for all overlays in the initial document set.\n i.forEach((t, n) => {\n const r = n.getKey();\n null === e.get(r) && (e = e.insert(r, MutableDocument.newInvalidDocument(r)));\n });\n // Apply the overlays and match against the query.\n let n = documentMap();\n return e.forEach((e, r) => {\n const s = i.get(e);\n void 0 !== s && __PRIVATE_mutationApplyToLocalView(s.mutation, r, FieldMask.empty(), Timestamp.now()),\n // Finally, insert the documents that still match the query\n __PRIVATE_queryMatches(t, r) && (n = n.insert(e, r));\n }), n;\n });\n 
}\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_MemoryBundleCache {\n constructor(e) {\n this.serializer = e, this.cr = new Map(), this.lr = new Map();\n }\n getBundleMetadata(e, t) {\n return PersistencePromise.resolve(this.cr.get(t));\n }\n saveBundleMetadata(e, t) {\n return this.cr.set(t.id, /** Decodes a BundleMetadata proto into a BundleMetadata object. */\n function __PRIVATE_fromBundleMetadata(e) {\n return {\n id: e.id,\n version: e.version,\n createTime: __PRIVATE_fromVersion(e.createTime)\n };\n }(t)), PersistencePromise.resolve();\n }\n getNamedQuery(e, t) {\n return PersistencePromise.resolve(this.lr.get(t));\n }\n saveNamedQuery(e, t) {\n return this.lr.set(t.name, function __PRIVATE_fromProtoNamedQuery(e) {\n return {\n name: e.name,\n query: __PRIVATE_fromBundledQuery(e.bundledQuery),\n readTime: __PRIVATE_fromVersion(e.readTime)\n };\n }(t)), PersistencePromise.resolve();\n }\n}\n\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * An in-memory implementation of DocumentOverlayCache.\n */\nclass __PRIVATE_MemoryDocumentOverlayCache {\n constructor() {\n // A map sorted by DocumentKey, whose value is a pair of the largest batch id\n // for the overlay and the overlay itself.\n this.overlays = new SortedMap(DocumentKey.comparator), this.hr = new Map();\n }\n getOverlay(e, t) {\n return PersistencePromise.resolve(this.overlays.get(t));\n }\n getOverlays(e, t) {\n const n = __PRIVATE_newOverlayMap();\n return PersistencePromise.forEach(t, t => this.getOverlay(e, t).next(e => {\n null !== e && n.set(t, e);\n })).next(() => n);\n }\n saveOverlays(e, t, n) {\n return n.forEach((n, r) => {\n this.ht(e, t, r);\n }), PersistencePromise.resolve();\n }\n removeOverlaysForBatchId(e, t, n) {\n const r = this.hr.get(n);\n return void 0 !== r && (r.forEach(e => this.overlays = this.overlays.remove(e)), this.hr.delete(n)), PersistencePromise.resolve();\n }\n getOverlaysForCollection(e, t, n) {\n const r = __PRIVATE_newOverlayMap(),\n i = t.length + 1,\n s = new DocumentKey(t.child(\"\")),\n o = this.overlays.getIteratorFrom(s);\n for (; o.hasNext();) {\n const e = o.getNext().value,\n s = e.getKey();\n if (!t.isPrefixOf(s.path)) break;\n // Documents from sub-collections\n s.path.length === i && e.largestBatchId > n && r.set(e.getKey(), e);\n }\n return PersistencePromise.resolve(r);\n }\n getOverlaysForCollectionGroup(e, t, n, r) 
{\n let i = new SortedMap((e, t) => e - t);\n const s = this.overlays.getIterator();\n for (; s.hasNext();) {\n const e = s.getNext().value;\n if (e.getKey().getCollectionGroup() === t && e.largestBatchId > n) {\n let t = i.get(e.largestBatchId);\n null === t && (t = __PRIVATE_newOverlayMap(), i = i.insert(e.largestBatchId, t)), t.set(e.getKey(), e);\n }\n }\n const o = __PRIVATE_newOverlayMap(),\n _ = i.getIterator();\n for (; _.hasNext();) {\n if (_.getNext().value.forEach((e, t) => o.set(e, t)), o.size() >= r) break;\n }\n return PersistencePromise.resolve(o);\n }\n ht(e, t, n) {\n // Remove the association of the overlay to its batch id.\n const r = this.overlays.get(n.key);\n if (null !== r) {\n const e = this.hr.get(r.largestBatchId).delete(n.key);\n this.hr.set(r.largestBatchId, e);\n }\n this.overlays = this.overlays.insert(n.key, new Overlay(t, n));\n // Create the association of this overlay to the given largestBatchId.\n let i = this.hr.get(t);\n void 0 === i && (i = __PRIVATE_documentKeySet(), this.hr.set(t, i)), this.hr.set(t, i.add(n.key));\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A collection of references to a document from some kind of numbered entity\n * (either a target ID or batch ID). As references are added to or removed from\n * the set corresponding events are emitted to a registered garbage collector.\n *\n * Each reference is represented by a DocumentReference object. Each of them\n * contains enough information to uniquely identify the reference. They are all\n * stored primarily in a set sorted by key. A document is considered garbage if\n * there's no references in that set (this can be efficiently checked thanks to\n * sorting by key).\n *\n * ReferenceSet also keeps a secondary set that contains references sorted by\n * IDs. This one is used to efficiently implement removal of all references by\n * some target ID.\n */\nclass __PRIVATE_ReferenceSet {\n constructor() {\n // A set of outstanding references to a document sorted by key.\n this.Pr = new SortedSet(__PRIVATE_DocReference.Ir),\n // A set of outstanding references to a document sorted by target id.\n this.Tr = new SortedSet(__PRIVATE_DocReference.Er);\n }\n /** Returns true if the reference set contains no references. */\n isEmpty() {\n return this.Pr.isEmpty();\n }\n /** Adds a reference to the given document key for the given ID. */\n addReference(e, t) {\n const n = new __PRIVATE_DocReference(e, t);\n this.Pr = this.Pr.add(n), this.Tr = this.Tr.add(n);\n }\n /** Add references to the given document keys for the given ID. */\n dr(e, t) {\n e.forEach(e => this.addReference(e, t));\n }\n /**\n * Removes a reference to the given document key for the given\n * ID.\n */\n removeReference(e, t) {\n this.Ar(new __PRIVATE_DocReference(e, t));\n }\n Rr(e, t) {\n e.forEach(e => this.removeReference(e, t));\n }\n /**\n * Clears all references with a given ID. 
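As a hedged illustration of the reference set as a whole (the keys and the target id 2 below are arbitrary):\n *\n *   const refs = new __PRIVATE_ReferenceSet();\n *   refs.addReference(keyA, 2); refs.addReference(keyB, 2);\n *   refs.containsKey(keyA);      // true\n *   const removed = refs.Vr(2);  // both keys are returned and the set is empty again\n *\n * 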
Calls removeRef() for each key\n * removed.\n */\n Vr(e) {\n const t = new DocumentKey(new ResourcePath([])),\n n = new __PRIVATE_DocReference(t, e),\n r = new __PRIVATE_DocReference(t, e + 1),\n i = [];\n return this.Tr.forEachInRange([n, r], e => {\n this.Ar(e), i.push(e.key);\n }), i;\n }\n mr() {\n this.Pr.forEach(e => this.Ar(e));\n }\n Ar(e) {\n this.Pr = this.Pr.delete(e), this.Tr = this.Tr.delete(e);\n }\n gr(e) {\n const t = new DocumentKey(new ResourcePath([])),\n n = new __PRIVATE_DocReference(t, e),\n r = new __PRIVATE_DocReference(t, e + 1);\n let i = __PRIVATE_documentKeySet();\n return this.Tr.forEachInRange([n, r], e => {\n i = i.add(e.key);\n }), i;\n }\n containsKey(e) {\n const t = new __PRIVATE_DocReference(e, 0),\n n = this.Pr.firstAfterOrEqual(t);\n return null !== n && e.isEqual(n.key);\n }\n}\nclass __PRIVATE_DocReference {\n constructor(e, t) {\n this.key = e, this.pr = t;\n }\n /** Compare by key then by ID */\n static Ir(e, t) {\n return DocumentKey.comparator(e.key, t.key) || __PRIVATE_primitiveComparator(e.pr, t.pr);\n }\n /** Compare by ID then by key */\n static Er(e, t) {\n return __PRIVATE_primitiveComparator(e.pr, t.pr) || DocumentKey.comparator(e.key, t.key);\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_MemoryMutationQueue {\n constructor(e, t) {\n this.indexManager = e, this.referenceDelegate = t,\n /**\n * The set of all mutations that have been sent but not yet been applied to\n * the backend.\n */\n this.mutationQueue = [], /** Next value to use when assigning sequential IDs to each mutation batch. */\n this.yr = 1, /** An ordered mapping between documents and the mutations batch IDs. */\n this.wr = new SortedSet(__PRIVATE_DocReference.Ir);\n }\n checkEmpty(e) {\n return PersistencePromise.resolve(0 === this.mutationQueue.length);\n }\n addMutationBatch(e, t, n, r) {\n const i = this.yr;\n this.yr++, this.mutationQueue.length > 0 && this.mutationQueue[this.mutationQueue.length - 1];\n const s = new MutationBatch(i, t, n, r);\n this.mutationQueue.push(s);\n // Track references by document key and index collection parents.\n for (const t of r) this.wr = this.wr.add(new __PRIVATE_DocReference(t.key, i)), this.indexManager.addToCollectionParentIndex(e, t.key.path.popLast());\n return PersistencePromise.resolve(s);\n }\n lookupMutationBatch(e, t) {\n return PersistencePromise.resolve(this.Sr(t));\n }\n getNextMutationBatchAfterBatchId(e, t) {\n const n = t + 1,\n r = this.br(n),\n i = r < 0 ? 0 : r;\n // The requested batchId may still be out of range so normalize it to the\n // start of the queue.\n return PersistencePromise.resolve(this.mutationQueue.length > i ? this.mutationQueue[i] : null);\n }\n getHighestUnacknowledgedBatchId() {\n return PersistencePromise.resolve(0 === this.mutationQueue.length ? 
-1 : this.yr - 1);\n }\n getAllMutationBatches(e) {\n return PersistencePromise.resolve(this.mutationQueue.slice());\n }\n getAllMutationBatchesAffectingDocumentKey(e, t) {\n const n = new __PRIVATE_DocReference(t, 0),\n r = new __PRIVATE_DocReference(t, Number.POSITIVE_INFINITY),\n i = [];\n return this.wr.forEachInRange([n, r], e => {\n const t = this.Sr(e.pr);\n i.push(t);\n }), PersistencePromise.resolve(i);\n }\n getAllMutationBatchesAffectingDocumentKeys(e, t) {\n let n = new SortedSet(__PRIVATE_primitiveComparator);\n return t.forEach(e => {\n const t = new __PRIVATE_DocReference(e, 0),\n r = new __PRIVATE_DocReference(e, Number.POSITIVE_INFINITY);\n this.wr.forEachInRange([t, r], e => {\n n = n.add(e.pr);\n });\n }), PersistencePromise.resolve(this.Dr(n));\n }\n getAllMutationBatchesAffectingQuery(e, t) {\n // Use the query path as a prefix for testing if a document matches the\n // query.\n const n = t.path,\n r = n.length + 1;\n // Construct a document reference for actually scanning the index. Unlike\n // the prefix the document key in this reference must have an even number of\n // segments. The empty segment can be used a suffix of the query path\n // because it precedes all other segments in an ordered traversal.\n let i = n;\n DocumentKey.isDocumentKey(i) || (i = i.child(\"\"));\n const s = new __PRIVATE_DocReference(new DocumentKey(i), 0);\n // Find unique batchIDs referenced by all documents potentially matching the\n // query.\n let o = new SortedSet(__PRIVATE_primitiveComparator);\n return this.wr.forEachWhile(e => {\n const t = e.key.path;\n return !!n.isPrefixOf(t) && (\n // Rows with document keys more than one segment longer than the query\n // path can't be matches. For example, a query on 'rooms' can't match\n // the document /rooms/abc/messages/xyx.\n // TODO(mcg): we'll need a different scanner when we implement\n // ancestor queries.\n t.length === r && (o = o.add(e.pr)), !0);\n }, s), PersistencePromise.resolve(this.Dr(o));\n }\n Dr(e) {\n // Construct an array of matching batches, sorted by batchID to ensure that\n // multiple mutations affecting the same document key are applied in order.\n const t = [];\n return e.forEach(e => {\n const n = this.Sr(e);\n null !== n && t.push(n);\n }), t;\n }\n removeMutationBatch(e, t) {\n __PRIVATE_hardAssert(0 === this.Cr(t.batchId, \"removed\")), this.mutationQueue.shift();\n let n = this.wr;\n return PersistencePromise.forEach(t.mutations, r => {\n const i = new __PRIVATE_DocReference(r.key, t.batchId);\n return n = n.delete(i), this.referenceDelegate.markPotentiallyOrphaned(e, r.key);\n }).next(() => {\n this.wr = n;\n });\n }\n Mn(e) {\n // No-op since the memory mutation queue does not maintain a separate cache.\n }\n containsKey(e, t) {\n const n = new __PRIVATE_DocReference(t, 0),\n r = this.wr.firstAfterOrEqual(n);\n return PersistencePromise.resolve(t.isEqual(r && r.key));\n }\n performConsistencyCheck(e) {\n return this.mutationQueue.length, PersistencePromise.resolve();\n }\n /**\n * Finds the index of the given batchId in the mutation queue and asserts that\n * the resulting index is within the bounds of the queue.\n *\n * @param batchId - The batchId to search for\n * @param action - A description of what the caller is doing, phrased in passive\n * form (e.g. \"acknowledged\" in a routine that acknowledges batches).\n */\n Cr(e, t) {\n return this.br(e);\n }\n /**\n * Finds the index of the given batchId in the mutation queue. 
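As an illustrative reading of the arithmetic below (numbers are arbitrary): with queued batchIds [3, 4, 5], br(4) is 4 - 3 = 1, br(2) is -1 (already removed), and br(7) is 4 (past the end of the queue). 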
This operation\n * is O(1).\n *\n * @returns The computed index of the batch with the given batchId, based on\n * the state of the queue. Note this index can be negative if the requested\n * batchId has already been remvoed from the queue or past the end of the\n * queue if the batchId is larger than the last added batch.\n */\n br(e) {\n if (0 === this.mutationQueue.length)\n // As an index this is past the end of the queue\n return 0;\n // Examine the front of the queue to figure out the difference between the\n // batchId and indexes in the array. Note that since the queue is ordered\n // by batchId, if the first batch has a larger batchId then the requested\n // batchId doesn't exist in the queue.\n return e - this.mutationQueue[0].batchId;\n }\n /**\n * A version of lookupMutationBatch that doesn't return a promise, this makes\n * other functions that uses this code easier to read and more efficent.\n */\n Sr(e) {\n const t = this.br(e);\n if (t < 0 || t >= this.mutationQueue.length) return null;\n return this.mutationQueue[t];\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * The memory-only RemoteDocumentCache for IndexedDb. To construct, invoke\n * `newMemoryRemoteDocumentCache()`.\n */\nclass __PRIVATE_MemoryRemoteDocumentCacheImpl {\n /**\n * @param sizer - Used to assess the size of a document. For eager GC, this is\n * expected to just return 0 to avoid unnecessarily doing the work of\n * calculating the size.\n */\n constructor(e) {\n this.vr = e, /** Underlying cache of documents and their read times. */\n this.docs = function __PRIVATE_documentEntryMap() {\n return new SortedMap(DocumentKey.comparator);\n }(), /** Size of all cached documents. */\n this.size = 0;\n }\n setIndexManager(e) {\n this.indexManager = e;\n }\n /**\n * Adds the supplied entry to the cache and updates the cache size as appropriate.\n *\n * All calls of `addEntry` are required to go through the RemoteDocumentChangeBuffer\n * returned by `newChangeBuffer()`.\n */\n addEntry(e, t) {\n const n = t.key,\n r = this.docs.get(n),\n i = r ? r.size : 0,\n s = this.vr(t);\n return this.docs = this.docs.insert(n, {\n document: t.mutableCopy(),\n size: s\n }), this.size += s - i, this.indexManager.addToCollectionParentIndex(e, n.path.popLast());\n }\n /**\n * Removes the specified entry from the cache and updates the cache size as appropriate.\n *\n * All calls of `removeEntry` are required to go through the RemoteDocumentChangeBuffer\n * returned by `newChangeBuffer()`.\n */\n removeEntry(e) {\n const t = this.docs.get(e);\n t && (this.docs = this.docs.remove(e), this.size -= t.size);\n }\n getEntry(e, t) {\n const n = this.docs.get(t);\n return PersistencePromise.resolve(n ? n.document.mutableCopy() : MutableDocument.newInvalidDocument(t));\n }\n getEntries(e, t) {\n let n = __PRIVATE_mutableDocumentMap();\n return t.forEach(e => {\n const t = this.docs.get(e);\n n = n.insert(e, t ? 
t.document.mutableCopy() : MutableDocument.newInvalidDocument(e));\n }), PersistencePromise.resolve(n);\n }\n getDocumentsMatchingQuery(e, t, n, r) {\n let i = __PRIVATE_mutableDocumentMap();\n // Documents are ordered by key, so we can use a prefix scan to narrow down\n // the documents we need to match the query against.\n const s = t.path,\n o = new DocumentKey(s.child(\"\")),\n _ = this.docs.getIteratorFrom(o);\n for (; _.hasNext();) {\n const {\n key: e,\n value: {\n document: o\n }\n } = _.getNext();\n if (!s.isPrefixOf(e.path)) break;\n e.path.length > s.length + 1 || __PRIVATE_indexOffsetComparator(__PRIVATE_newIndexOffsetFromDocument(o), n) <= 0 || (r.has(o.key) || __PRIVATE_queryMatches(t, o)) && (i = i.insert(o.key, o.mutableCopy()));\n }\n return PersistencePromise.resolve(i);\n }\n getAllFromCollectionGroup(e, t, n, r) {\n // This method should only be called from the IndexBackfiller if persistence\n // is enabled.\n fail();\n }\n Fr(e, t) {\n return PersistencePromise.forEach(this.docs, e => t(e));\n }\n newChangeBuffer(e) {\n // `trackRemovals` is ignores since the MemoryRemoteDocumentCache keeps\n // a separate changelog and does not need special handling for removals.\n return new __PRIVATE_MemoryRemoteDocumentChangeBuffer(this);\n }\n getSize(e) {\n return PersistencePromise.resolve(this.size);\n }\n}\n\n/**\n * Creates a new memory-only RemoteDocumentCache.\n *\n * @param sizer - Used to assess the size of a document. For eager GC, this is\n * expected to just return 0 to avoid unnecessarily doing the work of\n * calculating the size.\n */\n/**\n * Handles the details of adding and updating documents in the MemoryRemoteDocumentCache.\n */\nclass __PRIVATE_MemoryRemoteDocumentChangeBuffer extends RemoteDocumentChangeBuffer {\n constructor(e) {\n super(), this.ar = e;\n }\n applyChanges(e) {\n const t = [];\n return this.changes.forEach((n, r) => {\n r.isValidDocument() ? t.push(this.ar.addEntry(e, r)) : this.ar.removeEntry(n);\n }), PersistencePromise.waitFor(t);\n }\n getFromCache(e, t) {\n return this.ar.getEntry(e, t);\n }\n getAllFromCache(e, t) {\n return this.ar.getEntries(e, t);\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_MemoryTargetCache {\n constructor(e) {\n this.persistence = e,\n /**\n * Maps a target to the data about that target\n */\n this.Mr = new ObjectMap(e => __PRIVATE_canonifyTarget(e), __PRIVATE_targetEquals), /** The last received snapshot version. */\n this.lastRemoteSnapshotVersion = SnapshotVersion.min(), /** The highest numbered target ID encountered. */\n this.highestTargetId = 0, /** The highest sequence number encountered. 
*/\n this.Or = 0,\n /**\n * A ordered bidirectional mapping between documents and the remote target\n * IDs.\n */\n this.Nr = new __PRIVATE_ReferenceSet(), this.targetCount = 0, this.Lr = __PRIVATE_TargetIdGenerator.Nn();\n }\n forEachTarget(e, t) {\n return this.Mr.forEach((e, n) => t(n)), PersistencePromise.resolve();\n }\n getLastRemoteSnapshotVersion(e) {\n return PersistencePromise.resolve(this.lastRemoteSnapshotVersion);\n }\n getHighestSequenceNumber(e) {\n return PersistencePromise.resolve(this.Or);\n }\n allocateTargetId(e) {\n return this.highestTargetId = this.Lr.next(), PersistencePromise.resolve(this.highestTargetId);\n }\n setTargetsMetadata(e, t, n) {\n return n && (this.lastRemoteSnapshotVersion = n), t > this.Or && (this.Or = t), PersistencePromise.resolve();\n }\n qn(e) {\n this.Mr.set(e.target, e);\n const t = e.targetId;\n t > this.highestTargetId && (this.Lr = new __PRIVATE_TargetIdGenerator(t), this.highestTargetId = t), e.sequenceNumber > this.Or && (this.Or = e.sequenceNumber);\n }\n addTargetData(e, t) {\n return this.qn(t), this.targetCount += 1, PersistencePromise.resolve();\n }\n updateTargetData(e, t) {\n return this.qn(t), PersistencePromise.resolve();\n }\n removeTargetData(e, t) {\n return this.Mr.delete(t.target), this.Nr.Vr(t.targetId), this.targetCount -= 1, PersistencePromise.resolve();\n }\n removeTargets(e, t, n) {\n let r = 0;\n const i = [];\n return this.Mr.forEach((s, o) => {\n o.sequenceNumber <= t && null === n.get(o.targetId) && (this.Mr.delete(s), i.push(this.removeMatchingKeysForTargetId(e, o.targetId)), r++);\n }), PersistencePromise.waitFor(i).next(() => r);\n }\n getTargetCount(e) {\n return PersistencePromise.resolve(this.targetCount);\n }\n getTargetData(e, t) {\n const n = this.Mr.get(t) || null;\n return PersistencePromise.resolve(n);\n }\n addMatchingKeys(e, t, n) {\n return this.Nr.dr(t, n), PersistencePromise.resolve();\n }\n removeMatchingKeys(e, t, n) {\n this.Nr.Rr(t, n);\n const r = this.persistence.referenceDelegate,\n i = [];\n return r && t.forEach(t => {\n i.push(r.markPotentiallyOrphaned(e, t));\n }), PersistencePromise.waitFor(i);\n }\n removeMatchingKeysForTargetId(e, t) {\n return this.Nr.Vr(t), PersistencePromise.resolve();\n }\n getMatchingKeysForTargetId(e, t) {\n const n = this.Nr.gr(t);\n return PersistencePromise.resolve(n);\n }\n containsKey(e, t) {\n return PersistencePromise.resolve(this.Nr.containsKey(t));\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A memory-backed instance of Persistence. Data is stored only in RAM and\n * not persisted across sessions.\n */\nclass __PRIVATE_MemoryPersistence {\n /**\n * The constructor accepts a factory for creating a reference delegate. 
This\n * allows both the delegate and this instance to have strong references to\n * each other without having nullable fields that would then need to be\n * checked or asserted on every access.\n */\n constructor(e, t) {\n this.Br = {}, this.overlays = {}, this.kr = new __PRIVATE_ListenSequence(0), this.qr = !1, this.qr = !0, this.referenceDelegate = e(this), this.Qr = new __PRIVATE_MemoryTargetCache(this);\n this.indexManager = new __PRIVATE_MemoryIndexManager(), this.remoteDocumentCache = function __PRIVATE_newMemoryRemoteDocumentCache(e) {\n return new __PRIVATE_MemoryRemoteDocumentCacheImpl(e);\n }(e => this.referenceDelegate.Kr(e)), this.serializer = new __PRIVATE_LocalSerializer(t), this.$r = new __PRIVATE_MemoryBundleCache(this.serializer);\n }\n start() {\n return Promise.resolve();\n }\n shutdown() {\n // No durable state to ensure is closed on shutdown.\n return this.qr = !1, Promise.resolve();\n }\n get started() {\n return this.qr;\n }\n setDatabaseDeletedListener() {\n // No op.\n }\n setNetworkEnabled() {\n // No op.\n }\n getIndexManager(e) {\n // We do not currently support indices for memory persistence, so we can\n // return the same shared instance of the memory index manager.\n return this.indexManager;\n }\n getDocumentOverlayCache(e) {\n let t = this.overlays[e.toKey()];\n return t || (t = new __PRIVATE_MemoryDocumentOverlayCache(), this.overlays[e.toKey()] = t), t;\n }\n getMutationQueue(e, t) {\n let n = this.Br[e.toKey()];\n return n || (n = new __PRIVATE_MemoryMutationQueue(t, this.referenceDelegate), this.Br[e.toKey()] = n), n;\n }\n getTargetCache() {\n return this.Qr;\n }\n getRemoteDocumentCache() {\n return this.remoteDocumentCache;\n }\n getBundleCache() {\n return this.$r;\n }\n runTransaction(e, t, n) {\n __PRIVATE_logDebug(\"MemoryPersistence\", \"Starting transaction:\", e);\n const r = new __PRIVATE_MemoryTransaction(this.kr.next());\n return this.referenceDelegate.Ur(), n(r).next(e => this.referenceDelegate.Wr(r).next(() => e)).toPromise().then(e => (r.raiseOnCommittedEvent(), e));\n }\n Gr(e, t) {\n return PersistencePromise.or(Object.values(this.Br).map(n => () => n.containsKey(e, t)));\n }\n}\n\n/**\n * Memory persistence is not actually transactional, but future implementations\n * may have transaction-scoped state.\n */\nclass __PRIVATE_MemoryTransaction extends PersistenceTransaction {\n constructor(e) {\n super(), this.currentSequenceNumber = e;\n }\n}\nclass __PRIVATE_MemoryEagerDelegate {\n constructor(e) {\n this.persistence = e, /** Tracks all documents that are active in Query views. */\n this.zr = new __PRIVATE_ReferenceSet(), /** The list of documents that are potentially GCed after each transaction. 
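(A sketch of the bookkeeping with hypothetical names, not the SDK's internals:\n *\n *   let candidates = null;\n *   const onTransactionStarted = () => { candidates = new Set(); };\n *   const markPotentiallyOrphaned = key => candidates.add(key.toString());\n *   const addReference = key => candidates.delete(key.toString());\n *   // At commit, every remaining candidate that is no longer referenced by a\n *   // query target or a pending mutation is evicted from the document cache.\n * )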
*/\n this.jr = null;\n }\n static Hr(e) {\n return new __PRIVATE_MemoryEagerDelegate(e);\n }\n get Jr() {\n if (this.jr) return this.jr;\n throw fail();\n }\n addReference(e, t, n) {\n return this.zr.addReference(n, t), this.Jr.delete(n.toString()), PersistencePromise.resolve();\n }\n removeReference(e, t, n) {\n return this.zr.removeReference(n, t), this.Jr.add(n.toString()), PersistencePromise.resolve();\n }\n markPotentiallyOrphaned(e, t) {\n return this.Jr.add(t.toString()), PersistencePromise.resolve();\n }\n removeTarget(e, t) {\n this.zr.Vr(t.targetId).forEach(e => this.Jr.add(e.toString()));\n const n = this.persistence.getTargetCache();\n return n.getMatchingKeysForTargetId(e, t.targetId).next(e => {\n e.forEach(e => this.Jr.add(e.toString()));\n }).next(() => n.removeTargetData(e, t));\n }\n Ur() {\n this.jr = new Set();\n }\n Wr(e) {\n // Remove newly orphaned documents.\n const t = this.persistence.getRemoteDocumentCache().newChangeBuffer();\n return PersistencePromise.forEach(this.Jr, n => {\n const r = DocumentKey.fromPath(n);\n return this.Yr(e, r).next(e => {\n e || t.removeEntry(r, SnapshotVersion.min());\n });\n }).next(() => (this.jr = null, t.apply(e)));\n }\n updateLimboDocument(e, t) {\n return this.Yr(e, t).next(e => {\n e ? this.Jr.delete(t.toString()) : this.Jr.add(t.toString());\n });\n }\n Kr(e) {\n // For eager GC, we don't care about the document size, there are no size thresholds.\n return 0;\n }\n Yr(e, t) {\n return PersistencePromise.or([() => PersistencePromise.resolve(this.zr.containsKey(t)), () => this.persistence.getTargetCache().containsKey(e, t), () => this.persistence.Gr(e, t)]);\n }\n}\nclass __PRIVATE_MemoryLruDelegate {\n constructor(e, t) {\n this.persistence = e, this.Zr = new ObjectMap(e => __PRIVATE_encodeResourcePath(e.path), (e, t) => e.isEqual(t)), this.garbageCollector = __PRIVATE_newLruGarbageCollector(this, t);\n }\n static Hr(e, t) {\n return new __PRIVATE_MemoryLruDelegate(e, t);\n }\n // No-ops, present so memory persistence doesn't have to care which delegate\n // it has.\n Ur() {}\n Wr(e) {\n return PersistencePromise.resolve();\n }\n forEachTarget(e, t) {\n return this.persistence.getTargetCache().forEachTarget(e, t);\n }\n Hn(e) {\n const t = this.Zn(e);\n return this.persistence.getTargetCache().getTargetCount(e).next(e => t.next(t => e + t));\n }\n Zn(e) {\n let t = 0;\n return this.Jn(e, e => {\n t++;\n }).next(() => t);\n }\n Jn(e, t) {\n return PersistencePromise.forEach(this.Zr, (n, r) => this.er(e, n, r).next(e => e ? 
PersistencePromise.resolve() : t(r)));\n }\n removeTargets(e, t, n) {\n return this.persistence.getTargetCache().removeTargets(e, t, n);\n }\n removeOrphanedDocuments(e, t) {\n let n = 0;\n const r = this.persistence.getRemoteDocumentCache(),\n i = r.newChangeBuffer();\n return r.Fr(e, r => this.er(e, r, t).next(e => {\n e || (n++, i.removeEntry(r, SnapshotVersion.min()));\n })).next(() => i.apply(e)).next(() => n);\n }\n markPotentiallyOrphaned(e, t) {\n return this.Zr.set(t, e.currentSequenceNumber), PersistencePromise.resolve();\n }\n removeTarget(e, t) {\n const n = t.withSequenceNumber(e.currentSequenceNumber);\n return this.persistence.getTargetCache().updateTargetData(e, n);\n }\n addReference(e, t, n) {\n return this.Zr.set(n, e.currentSequenceNumber), PersistencePromise.resolve();\n }\n removeReference(e, t, n) {\n return this.Zr.set(n, e.currentSequenceNumber), PersistencePromise.resolve();\n }\n updateLimboDocument(e, t) {\n return this.Zr.set(t, e.currentSequenceNumber), PersistencePromise.resolve();\n }\n Kr(e) {\n let t = e.key.toString().length;\n return e.isFoundDocument() && (t += __PRIVATE_estimateByteSize(e.data.value)), t;\n }\n er(e, t, n) {\n return PersistencePromise.or([() => this.persistence.Gr(e, t), () => this.persistence.getTargetCache().containsKey(e, t), () => {\n const e = this.Zr.get(t);\n return PersistencePromise.resolve(void 0 !== e && e > n);\n }]);\n }\n getCacheSize(e) {\n return this.persistence.getRemoteDocumentCache().getSize(e);\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/** Performs database creation and schema upgrades. */\nclass __PRIVATE_SchemaConverter {\n constructor(e) {\n this.serializer = e;\n }\n /**\n * Performs database creation and schema upgrades.\n *\n * Note that in production, this method is only ever used to upgrade the schema\n * to SCHEMA_VERSION. Different values of toVersion are only used for testing\n * and local feature development.\n */\n O(e, t, n, r) {\n const i = new __PRIVATE_SimpleDbTransaction(\"createOrUpgrade\", t);\n n < 1 && r >= 1 && (!function __PRIVATE_createPrimaryClientStore(e) {\n e.createObjectStore(\"owner\");\n }(e), function __PRIVATE_createMutationQueue(e) {\n e.createObjectStore(\"mutationQueues\", {\n keyPath: \"userId\"\n });\n e.createObjectStore(\"mutations\", {\n keyPath: \"batchId\",\n autoIncrement: !0\n }).createIndex(\"userMutationsIndex\", x, {\n unique: !0\n }), e.createObjectStore(\"documentMutations\");\n }\n /**\n * Upgrade function to migrate the 'mutations' store from V1 to V3. 
Loads\n * and rewrites all data.\n */(e), __PRIVATE_createQueryCache(e), function __PRIVATE_createLegacyRemoteDocumentCache(e) {\n e.createObjectStore(\"remoteDocuments\");\n }(e));\n // Migration 2 to populate the targetGlobal object no longer needed since\n // migration 3 unconditionally clears it.\n let s = PersistencePromise.resolve();\n return n < 3 && r >= 3 && (\n // Brand new clients don't need to drop and recreate--only clients that\n // potentially have corrupt data.\n 0 !== n && (!function __PRIVATE_dropQueryCache(e) {\n e.deleteObjectStore(\"targetDocuments\"), e.deleteObjectStore(\"targets\"), e.deleteObjectStore(\"targetGlobal\");\n }(e), __PRIVATE_createQueryCache(e)), s = s.next(() =>\n /**\n * Creates the target global singleton row.\n *\n * @param txn - The version upgrade transaction for indexeddb\n */\n function __PRIVATE_writeEmptyTargetGlobalEntry(e) {\n const t = e.store(\"targetGlobal\"),\n n = {\n highestTargetId: 0,\n highestListenSequenceNumber: 0,\n lastRemoteSnapshotVersion: SnapshotVersion.min().toTimestamp(),\n targetCount: 0\n };\n return t.put(\"targetGlobalKey\", n);\n }(i))), n < 4 && r >= 4 && (0 !== n && (\n // Schema version 3 uses auto-generated keys to generate globally unique\n // mutation batch IDs (this was previously ensured internally by the\n // client). To migrate to the new schema, we have to read all mutations\n // and write them back out. We preserve the existing batch IDs to guarantee\n // consistency with other object stores. Any further mutation batch IDs will\n // be auto-generated.\n s = s.next(() => function __PRIVATE_upgradeMutationBatchSchemaAndMigrateData(e, t) {\n return t.store(\"mutations\").U().next(n => {\n e.deleteObjectStore(\"mutations\");\n e.createObjectStore(\"mutations\", {\n keyPath: \"batchId\",\n autoIncrement: !0\n }).createIndex(\"userMutationsIndex\", x, {\n unique: !0\n });\n const r = t.store(\"mutations\"),\n i = n.map(e => r.put(e));\n return PersistencePromise.waitFor(i);\n });\n }(e, i))), s = s.next(() => {\n !function __PRIVATE_createClientMetadataStore(e) {\n e.createObjectStore(\"clientMetadata\", {\n keyPath: \"clientId\"\n });\n }(e);\n })), n < 5 && r >= 5 && (s = s.next(() => this.Xr(i))), n < 6 && r >= 6 && (s = s.next(() => (function __PRIVATE_createDocumentGlobalStore(e) {\n e.createObjectStore(\"remoteDocumentGlobal\");\n }(e), this.ei(i)))), n < 7 && r >= 7 && (s = s.next(() => this.ti(i))), n < 8 && r >= 8 && (s = s.next(() => this.ni(e, i))), n < 9 && r >= 9 && (s = s.next(() => {\n // Multi-Tab used to manage its own changelog, but this has been moved\n // to the DbRemoteDocument object store itself. Since the previous change\n // log only contained transient data, we can drop its object store.\n !function __PRIVATE_dropRemoteDocumentChangesStore(e) {\n e.objectStoreNames.contains(\"remoteDocumentChanges\") && e.deleteObjectStore(\"remoteDocumentChanges\");\n }(e);\n // Note: Schema version 9 used to create a read time index for the\n // RemoteDocumentCache. 
This is now done with schema version 13.\n })), n < 10 && r >= 10 && (s = s.next(() => this.ri(i))), n < 11 && r >= 11 && (s = s.next(() => {\n !function __PRIVATE_createBundlesStore(e) {\n e.createObjectStore(\"bundles\", {\n keyPath: \"bundleId\"\n });\n }(e), function __PRIVATE_createNamedQueriesStore(e) {\n e.createObjectStore(\"namedQueries\", {\n keyPath: \"name\"\n });\n }(e);\n })), n < 12 && r >= 12 && (s = s.next(() => {\n !function __PRIVATE_createDocumentOverlayStore(e) {\n const t = e.createObjectStore(\"documentOverlays\", {\n keyPath: z\n });\n t.createIndex(\"collectionPathOverlayIndex\", j, {\n unique: !1\n }), t.createIndex(\"collectionGroupOverlayIndex\", H, {\n unique: !1\n });\n }(e);\n })), n < 13 && r >= 13 && (s = s.next(() => function __PRIVATE_createRemoteDocumentCache(e) {\n const t = e.createObjectStore(\"remoteDocumentsV14\", {\n keyPath: N\n });\n t.createIndex(\"documentKeyIndex\", L), t.createIndex(\"collectionGroupIndex\", B);\n }(e)).next(() => this.ii(e, i)).next(() => e.deleteObjectStore(\"remoteDocuments\"))), n < 14 && r >= 14 && (s = s.next(() => this.si(e, i))), n < 15 && r >= 15 && (s = s.next(() => function __PRIVATE_createFieldIndex(e) {\n e.createObjectStore(\"indexConfiguration\", {\n keyPath: \"indexId\",\n autoIncrement: !0\n }).createIndex(\"collectionGroupIndex\", \"collectionGroup\", {\n unique: !1\n });\n e.createObjectStore(\"indexState\", {\n keyPath: $\n }).createIndex(\"sequenceNumberIndex\", U, {\n unique: !1\n });\n e.createObjectStore(\"indexEntries\", {\n keyPath: W\n }).createIndex(\"documentKeyIndex\", G, {\n unique: !1\n });\n }(e))), n < 16 && r >= 16 && (\n // Clear the object stores to remove possibly corrupted index entries\n s = s.next(() => {\n t.objectStore(\"indexState\").clear();\n }).next(() => {\n t.objectStore(\"indexEntries\").clear();\n })), s;\n }\n ei(e) {\n let t = 0;\n return e.store(\"remoteDocuments\").J((e, n) => {\n t += __PRIVATE_dbDocumentSize(n);\n }).next(() => {\n const n = {\n byteSize: t\n };\n return e.store(\"remoteDocumentGlobal\").put(\"remoteDocumentGlobalKey\", n);\n });\n }\n Xr(e) {\n const t = e.store(\"mutationQueues\"),\n n = e.store(\"mutations\");\n return t.U().next(t => PersistencePromise.forEach(t, t => {\n const r = IDBKeyRange.bound([t.userId, -1], [t.userId, t.lastAcknowledgedBatchId]);\n return n.U(\"userMutationsIndex\", r).next(n => PersistencePromise.forEach(n, n => {\n __PRIVATE_hardAssert(n.userId === t.userId);\n const r = __PRIVATE_fromDbMutationBatch(this.serializer, n);\n return removeMutationBatch(e, t.userId, r).next(() => {});\n }));\n }));\n }\n /**\n * Ensures that every document in the remote document cache has a corresponding sentinel row\n * with a sequence number. Missing rows are given the most recently used sequence number.\n */\n ti(e) {\n const t = e.store(\"targetDocuments\"),\n n = e.store(\"remoteDocuments\");\n return e.store(\"targetGlobal\").get(\"targetGlobalKey\").next(e => {\n const r = [];\n return n.J((n, i) => {\n const s = new ResourcePath(n),\n o = function __PRIVATE_sentinelKey(e) {\n return [0, __PRIVATE_encodeResourcePath(e)];\n }(s);\n r.push(t.get(o).next(n => n ? 
PersistencePromise.resolve() : (n => t.put({\n targetId: 0,\n path: __PRIVATE_encodeResourcePath(n),\n sequenceNumber: e.highestListenSequenceNumber\n }))(s)));\n }).next(() => PersistencePromise.waitFor(r));\n });\n }\n ni(e, t) {\n // Create the index.\n e.createObjectStore(\"collectionParents\", {\n keyPath: K\n });\n const n = t.store(\"collectionParents\"),\n r = new __PRIVATE_MemoryCollectionParentIndex(),\n addEntry = e => {\n if (r.add(e)) {\n const t = e.lastSegment(),\n r = e.popLast();\n return n.put({\n collectionId: t,\n parent: __PRIVATE_encodeResourcePath(r)\n });\n }\n };\n // Helper to add an index entry iff we haven't already written it.\n // Index existing remote documents.\n return t.store(\"remoteDocuments\").J({\n H: !0\n }, (e, t) => {\n const n = new ResourcePath(e);\n return addEntry(n.popLast());\n }).next(() => t.store(\"documentMutations\").J({\n H: !0\n }, ([e, t, n], r) => {\n const i = __PRIVATE_decodeResourcePath(t);\n return addEntry(i.popLast());\n }));\n }\n ri(e) {\n const t = e.store(\"targets\");\n return t.J((e, n) => {\n const r = __PRIVATE_fromDbTarget(n),\n i = __PRIVATE_toDbTarget(this.serializer, r);\n return t.put(i);\n });\n }\n ii(e, t) {\n const n = t.store(\"remoteDocuments\"),\n r = [];\n return n.J((e, n) => {\n const i = t.store(\"remoteDocumentsV14\"),\n s = function __PRIVATE_extractKey(e) {\n return e.document ? new DocumentKey(ResourcePath.fromString(e.document.name).popFirst(5)) : e.noDocument ? DocumentKey.fromSegments(e.noDocument.path) : e.unknownDocument ? DocumentKey.fromSegments(e.unknownDocument.path) : fail();\n }\n /**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */(n).path.toArray(),\n o = {\n prefixPath: s.slice(0, s.length - 2),\n collectionGroup: s[s.length - 2],\n documentId: s[s.length - 1],\n readTime: n.readTime || [0, 0],\n unknownDocument: n.unknownDocument,\n noDocument: n.noDocument,\n document: n.document,\n hasCommittedMutations: !!n.hasCommittedMutations\n };\n r.push(i.put(o));\n }).next(() => PersistencePromise.waitFor(r));\n }\n si(e, t) {\n const n = t.store(\"mutations\"),\n r = __PRIVATE_newIndexedDbRemoteDocumentCache(this.serializer),\n i = new __PRIVATE_MemoryPersistence(__PRIVATE_MemoryEagerDelegate.Hr, this.serializer.ct);\n return n.U().next(e => {\n const n = new Map();\n return e.forEach(e => {\n var t;\n let r = null !== (t = n.get(e.userId)) && void 0 !== t ? 
t : __PRIVATE_documentKeySet();\n __PRIVATE_fromDbMutationBatch(this.serializer, e).keys().forEach(e => r = r.add(e)), n.set(e.userId, r);\n }), PersistencePromise.forEach(n, (e, n) => {\n const s = new User(n),\n o = __PRIVATE_IndexedDbDocumentOverlayCache.lt(this.serializer, s),\n _ = i.getIndexManager(s),\n a = __PRIVATE_IndexedDbMutationQueue.lt(s, this.serializer, _, i.referenceDelegate);\n return new LocalDocumentsView(r, a, o, _).recalculateAndSaveOverlaysForDocumentKeys(new __PRIVATE_IndexedDbTransaction(t, __PRIVATE_ListenSequence.oe), e).next();\n });\n });\n }\n}\nfunction __PRIVATE_createQueryCache(e) {\n e.createObjectStore(\"targetDocuments\", {\n keyPath: q\n }).createIndex(\"documentTargetsIndex\", Q, {\n unique: !0\n });\n // NOTE: This is unique only because the TargetId is the suffix.\n e.createObjectStore(\"targets\", {\n keyPath: \"targetId\"\n }).createIndex(\"queryTargetsIndex\", k, {\n unique: !0\n }), e.createObjectStore(\"targetGlobal\");\n}\nconst Re = \"Failed to obtain exclusive access to the persistence layer. To allow shared access, multi-tab synchronization has to be enabled in all tabs. If you are using `experimentalForceOwningTab:true`, make sure that only one tab has persistence enabled at any given time.\";\n\n/**\n * Oldest acceptable age in milliseconds for client metadata before the client\n * is considered inactive and its associated data is garbage collected.\n */\n/**\n * An IndexedDB-backed instance of Persistence. Data is stored persistently\n * across sessions.\n *\n * On Web only, the Firestore SDKs support shared access to its persistence\n * layer. This allows multiple browser tabs to read and write to IndexedDb and\n * to synchronize state even without network connectivity. Shared access is\n * currently optional and not enabled unless all clients invoke\n * `enablePersistence()` with `{synchronizeTabs:true}`.\n *\n * In multi-tab mode, if multiple clients are active at the same time, the SDK\n * will designate one client as the “primary client”. An effort is made to pick\n * a visible, network-connected and active client, and this client is\n * responsible for letting other clients know about its presence. The primary\n * client writes a unique client-generated identifier (the client ID) to\n * IndexedDb’s “owner” store every 4 seconds. If the primary client fails to\n * update this entry, another client can acquire the lease and take over as\n * primary.\n *\n * Some persistence operations in the SDK are designated as primary-client only\n * operations. This includes the acknowledgment of mutations and all updates of\n * remote documents. The effects of these operations are written to persistence\n * and then broadcast to other tabs via LocalStorage (see\n * `WebStorageSharedClientState`), which then refresh their state from\n * persistence.\n *\n * Similarly, the primary client listens to notifications sent by secondary\n * clients to discover persistence changes written by secondary clients, such as\n * the addition of new mutations and query targets.\n *\n * If multi-tab is not enabled and another tab already obtained the primary\n * lease, IndexedDbPersistence enters a failed state and all subsequent\n * operations will automatically fail.\n *\n * Additionally, there is an optimization so that when a tab is closed, the\n * primary lease is released immediately (this is especially important to make\n * sure that a refreshed tab is able to immediately re-acquire the primary\n * lease). 
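(For illustration of the freshness rule only; the constant and helper below\n * are hypothetical stand-ins, not the SDK's API:\n *\n *   const LEASE_MAX_AGE_MS = 5000;\n *   const leaseIsFresh = (lease, now = Date.now()) =>\n *     lease !== null && now - lease.leaseTimestampMs <= LEASE_MAX_AGE_MS;\n *\n * A client only defers to an existing leaseholder while that holder's lease\n * entry is fresh and the holder has not marked itself as zombied.)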
Unfortunately, IndexedDB cannot be reliably used in window.unload\n * since it is an asynchronous API. So in addition to attempting to give up the\n * lease, the leaseholder writes its client ID to a \"zombiedClient\" entry in\n * LocalStorage which acts as an indicator that another tab should go ahead and\n * take the primary lease immediately regardless of the current lease timestamp.\n *\n * TODO(b/114226234): Remove `synchronizeTabs` section when multi-tab is no\n * longer optional.\n */\nclass __PRIVATE_IndexedDbPersistence {\n constructor(\n /**\n * Whether to synchronize the in-memory state of multiple tabs and share\n * access to local persistence.\n */\n e, t, n, r, i, s, o, _, a,\n /**\n * If set to true, forcefully obtains database access. Existing tabs will\n * no longer be able to access IndexedDB.\n */\n u, c = 16) {\n if (this.allowTabSynchronization = e, this.persistenceKey = t, this.clientId = n, this.oi = i, this.window = s, this.document = o, this._i = a, this.ai = u, this.ui = c, this.kr = null, this.qr = !1, this.isPrimary = !1, this.networkEnabled = !0, /** Our window.unload handler, if registered. */\n this.ci = null, this.inForeground = !1, /** Our 'visibilitychange' listener if registered. */\n this.li = null, /** The client metadata refresh task. */\n this.hi = null, /** The last time we garbage collected the client metadata object store. */\n this.Pi = Number.NEGATIVE_INFINITY, /** A listener to notify on primary state changes. */\n this.Ii = e => Promise.resolve(), !__PRIVATE_IndexedDbPersistence.D()) throw new FirestoreError(C.UNIMPLEMENTED, \"This platform is either missing IndexedDB or is known to have an incomplete implementation. Offline persistence has been disabled.\");\n this.referenceDelegate = new __PRIVATE_IndexedDbLruDelegateImpl(this, r), this.Ti = t + \"main\", this.serializer = new __PRIVATE_LocalSerializer(_), this.Ei = new __PRIVATE_SimpleDb(this.Ti, this.ui, new __PRIVATE_SchemaConverter(this.serializer)), this.Qr = new __PRIVATE_IndexedDbTargetCache(this.referenceDelegate, this.serializer), this.remoteDocumentCache = __PRIVATE_newIndexedDbRemoteDocumentCache(this.serializer), this.$r = new __PRIVATE_IndexedDbBundleCache(), this.window && this.window.localStorage ? this.di = this.window.localStorage : (this.di = null, !1 === u && __PRIVATE_logError(\"IndexedDbPersistence\", \"LocalStorage is unavailable. As a result, persistence may not work reliably. In particular enablePersistence() could fail immediately after refreshing the page.\"));\n }\n /**\n * Attempt to start IndexedDb persistence.\n *\n * @returns Whether persistence was enabled.\n */\n start() {\n // NOTE: This is expected to fail sometimes (in the case of another tab\n // already having the persistence lock), so it's the first thing we should\n // do.\n return this.Ai().then(() => {\n if (!this.isPrimary && !this.allowTabSynchronization)\n // Fail `start()` if `synchronizeTabs` is disabled and we cannot\n // obtain the primary lease.\n throw new FirestoreError(C.FAILED_PRECONDITION, Re);\n return this.Ri(), this.Vi(), this.mi(), this.runTransaction(\"getHighestListenSequenceNumber\", \"readonly\", e => this.Qr.getHighestSequenceNumber(e));\n }).then(e => {\n this.kr = new __PRIVATE_ListenSequence(e, this._i);\n }).then(() => {\n this.qr = !0;\n }).catch(e => (this.Ei && this.Ei.close(), Promise.reject(e)));\n }\n /**\n * Registers a listener that gets called when the primary state of the\n * instance changes. 
Upon registering, this listener is invoked immediately\n * with the current primary state.\n *\n * PORTING NOTE: This is only used for Web multi-tab.\n */\n fi(e) {\n return this.Ii = async t => {\n if (this.started) return e(t);\n }, e(this.isPrimary);\n }\n /**\n * Registers a listener that gets called when the database receives a\n * version change event indicating that it has deleted.\n *\n * PORTING NOTE: This is only used for Web multi-tab.\n */\n setDatabaseDeletedListener(e) {\n this.Ei.L(async t => {\n // Check if an attempt is made to delete IndexedDB.\n null === t.newVersion && (await e());\n });\n }\n /**\n * Adjusts the current network state in the client's metadata, potentially\n * affecting the primary lease.\n *\n * PORTING NOTE: This is only used for Web multi-tab.\n */\n setNetworkEnabled(e) {\n this.networkEnabled !== e && (this.networkEnabled = e,\n // Schedule a primary lease refresh for immediate execution. The eventual\n // lease update will be propagated via `primaryStateListener`.\n this.oi.enqueueAndForget(async () => {\n this.started && (await this.Ai());\n }));\n }\n /**\n * Updates the client metadata in IndexedDb and attempts to either obtain or\n * extend the primary lease for the local client. Asynchronously notifies the\n * primary state listener if the client either newly obtained or released its\n * primary lease.\n */\n Ai() {\n return this.runTransaction(\"updateClientMetadataAndTryBecomePrimary\", \"readwrite\", e => __PRIVATE_clientMetadataStore(e).put({\n clientId: this.clientId,\n updateTimeMs: Date.now(),\n networkEnabled: this.networkEnabled,\n inForeground: this.inForeground\n }).next(() => {\n if (this.isPrimary) return this.gi(e).next(e => {\n e || (this.isPrimary = !1, this.oi.enqueueRetryable(() => this.Ii(!1)));\n });\n }).next(() => this.pi(e)).next(t => this.isPrimary && !t ? this.yi(e).next(() => !1) : !!t && this.wi(e).next(() => !0))).catch(e => {\n if (__PRIVATE_isIndexedDbTransactionError(e))\n // Proceed with the existing state. 
Any subsequent access to\n // IndexedDB will verify the lease.\n return __PRIVATE_logDebug(\"IndexedDbPersistence\", \"Failed to extend owner lease: \", e), this.isPrimary;\n if (!this.allowTabSynchronization) throw e;\n return __PRIVATE_logDebug(\"IndexedDbPersistence\", \"Releasing owner lease after error during lease refresh\", e), /* isPrimary= */!1;\n }).then(e => {\n this.isPrimary !== e && this.oi.enqueueRetryable(() => this.Ii(e)), this.isPrimary = e;\n });\n }\n gi(e) {\n return __PRIVATE_primaryClientStore(e).get(\"owner\").next(e => PersistencePromise.resolve(this.Si(e)));\n }\n bi(e) {\n return __PRIVATE_clientMetadataStore(e).delete(this.clientId);\n }\n /**\n * If the garbage collection threshold has passed, prunes the\n * RemoteDocumentChanges and the ClientMetadata store based on the last update\n * time of all clients.\n */\n async Di() {\n if (this.isPrimary && !this.Ci(this.Pi, 18e5)) {\n this.Pi = Date.now();\n const e = await this.runTransaction(\"maybeGarbageCollectMultiClientState\", \"readwrite-primary\", e => {\n const t = __PRIVATE_getStore(e, \"clientMetadata\");\n return t.U().next(e => {\n const n = this.vi(e, 18e5),\n r = e.filter(e => -1 === n.indexOf(e));\n // Delete metadata for clients that are no longer considered active.\n return PersistencePromise.forEach(r, e => t.delete(e.clientId)).next(() => r);\n });\n }).catch(() => []);\n // Delete potential leftover entries that may continue to mark the\n // inactive clients as zombied in LocalStorage.\n // Ideally we'd delete the IndexedDb and LocalStorage zombie entries for\n // the client atomically, but we can't. So we opt to delete the IndexedDb\n // entries first to avoid potentially reviving a zombied client.\n if (this.di) for (const t of e) this.di.removeItem(this.Fi(t.clientId));\n }\n }\n /**\n * Schedules a recurring timer to update the client metadata and to either\n * extend or acquire the primary lease if the client is eligible.\n */\n mi() {\n this.hi = this.oi.enqueueAfterDelay(\"client_metadata_refresh\" /* TimerId.ClientMetadataRefresh */, 4e3, () => this.Ai().then(() => this.Di()).then(() => this.mi()));\n }\n /** Checks whether `client` is the local client. */\n Si(e) {\n return !!e && e.ownerId === this.clientId;\n }\n /**\n * Evaluate the state of all active clients and determine whether the local\n * client is or can act as the holder of the primary lease. Returns whether\n * the client is eligible for the lease, but does not actually acquire it.\n * May return 'false' even if there is no active leaseholder and another\n * (foreground) client should become leaseholder instead.\n */\n pi(e) {\n if (this.ai) return PersistencePromise.resolve(!0);\n return __PRIVATE_primaryClientStore(e).get(\"owner\").next(t => {\n // A client is eligible for the primary lease if:\n // - its network is enabled and the client's tab is in the foreground.\n // - its network is enabled and no other client's tab is in the\n // foreground.\n // - every clients network is disabled and the client's tab is in the\n // foreground.\n // - every clients network is disabled and no other client's tab is in\n // the foreground.\n // - the `forceOwningTab` setting was passed in.\n if (null !== t && this.Ci(t.leaseTimestampMs, 5e3) && !this.Mi(t.ownerId)) {\n if (this.Si(t) && this.networkEnabled) return !0;\n if (!this.Si(t)) {\n if (!t.allowTabSynchronization)\n // Fail the `canActAsPrimary` check if the current leaseholder has\n // not opted into multi-tab synchronization. 
If this happens at\n // client startup, we reject the Promise returned by\n // `enablePersistence()` and the user can continue to use Firestore\n // with in-memory persistence.\n // If this fails during a lease refresh, we will instead block the\n // AsyncQueue from executing further operations. Note that this is\n // acceptable since mixing & matching different `synchronizeTabs`\n // settings is not supported.\n // TODO(b/114226234): Remove this check when `synchronizeTabs` can\n // no longer be turned off.\n throw new FirestoreError(C.FAILED_PRECONDITION, Re);\n return !1;\n }\n }\n return !(!this.networkEnabled || !this.inForeground) || __PRIVATE_clientMetadataStore(e).U().next(e => void 0 === this.vi(e, 5e3).find(e => {\n if (this.clientId !== e.clientId) {\n const t = !this.networkEnabled && e.networkEnabled,\n n = !this.inForeground && e.inForeground,\n r = this.networkEnabled === e.networkEnabled;\n if (t || n && r) return !0;\n }\n return !1;\n }));\n }).next(e => (this.isPrimary !== e && __PRIVATE_logDebug(\"IndexedDbPersistence\", `Client ${e ? \"is\" : \"is not\"} eligible for a primary lease.`), e));\n }\n async shutdown() {\n // The shutdown() operations are idempotent and can be called even when\n // start() aborted (e.g. because it couldn't acquire the persistence lease).\n this.qr = !1, this.xi(), this.hi && (this.hi.cancel(), this.hi = null), this.Oi(), this.Ni(),\n // Use `SimpleDb.runTransaction` directly to avoid failing if another tab\n // has obtained the primary lease.\n await this.Ei.runTransaction(\"shutdown\", \"readwrite\", [\"owner\", \"clientMetadata\"], e => {\n const t = new __PRIVATE_IndexedDbTransaction(e, __PRIVATE_ListenSequence.oe);\n return this.yi(t).next(() => this.bi(t));\n }), this.Ei.close(),\n // Remove the entry marking the client as zombied from LocalStorage since\n // we successfully deleted its metadata from IndexedDb.\n this.Li();\n }\n /**\n * Returns clients that are not zombied and have an updateTime within the\n * provided threshold.\n */\n vi(e, t) {\n return e.filter(e => this.Ci(e.updateTimeMs, t) && !this.Mi(e.clientId));\n }\n /**\n * Returns the IDs of the clients that are currently active. If multi-tab\n * is not supported, returns an array that only contains the local client's\n * ID.\n *\n * PORTING NOTE: This is only used for Web multi-tab.\n */\n Bi() {\n return this.runTransaction(\"getActiveClients\", \"readonly\", e => __PRIVATE_clientMetadataStore(e).U().next(e => this.vi(e, 18e5).map(e => e.clientId)));\n }\n get started() {\n return this.qr;\n }\n getMutationQueue(e, t) {\n return __PRIVATE_IndexedDbMutationQueue.lt(e, this.serializer, t, this.referenceDelegate);\n }\n getTargetCache() {\n return this.Qr;\n }\n getRemoteDocumentCache() {\n return this.remoteDocumentCache;\n }\n getIndexManager(e) {\n return new __PRIVATE_IndexedDbIndexManager(e, this.serializer.ct.databaseId);\n }\n getDocumentOverlayCache(e) {\n return __PRIVATE_IndexedDbDocumentOverlayCache.lt(this.serializer, e);\n }\n getBundleCache() {\n return this.$r;\n }\n runTransaction(e, t, n) {\n __PRIVATE_logDebug(\"IndexedDbPersistence\", \"Starting transaction:\", e);\n const r = \"readonly\" === t ? \"readonly\" : \"readwrite\",\n i = /** Returns the object stores for the provided schema. */\n function __PRIVATE_getObjectStores(e) {\n return 16 === e ? te : 15 === e ? ee : 14 === e ? X : 13 === e ? Z : 12 === e ? Y : 11 === e ? 
J : void fail();\n }(this.ui);\n let s;\n // Do all transactions as readwrite against all object stores, since we\n // are the only reader/writer.\n return this.Ei.runTransaction(e, r, i, r => (s = new __PRIVATE_IndexedDbTransaction(r, this.kr ? this.kr.next() : __PRIVATE_ListenSequence.oe), \"readwrite-primary\" === t ? this.gi(s).next(e => !!e || this.pi(s)).next(t => {\n if (!t) throw __PRIVATE_logError(`Failed to obtain primary lease for action '${e}'.`), this.isPrimary = !1, this.oi.enqueueRetryable(() => this.Ii(!1)), new FirestoreError(C.FAILED_PRECONDITION, F);\n return n(s);\n }).next(e => this.wi(s).next(() => e)) : this.ki(s).next(() => n(s)))).then(e => (s.raiseOnCommittedEvent(), e));\n }\n /**\n * Verifies that the current tab is the primary leaseholder or alternatively\n * that the leaseholder has opted into multi-tab synchronization.\n */\n // TODO(b/114226234): Remove this check when `synchronizeTabs` can no longer\n // be turned off.\n ki(e) {\n return __PRIVATE_primaryClientStore(e).get(\"owner\").next(e => {\n if (null !== e && this.Ci(e.leaseTimestampMs, 5e3) && !this.Mi(e.ownerId) && !this.Si(e) && !(this.ai || this.allowTabSynchronization && e.allowTabSynchronization)) throw new FirestoreError(C.FAILED_PRECONDITION, Re);\n });\n }\n /**\n * Obtains or extends the new primary lease for the local client. This\n * method does not verify that the client is eligible for this lease.\n */\n wi(e) {\n const t = {\n ownerId: this.clientId,\n allowTabSynchronization: this.allowTabSynchronization,\n leaseTimestampMs: Date.now()\n };\n return __PRIVATE_primaryClientStore(e).put(\"owner\", t);\n }\n static D() {\n return __PRIVATE_SimpleDb.D();\n }\n /** Checks the primary lease and removes it if we are the current primary. */\n yi(e) {\n const t = __PRIVATE_primaryClientStore(e);\n return t.get(\"owner\").next(e => this.Si(e) ? (__PRIVATE_logDebug(\"IndexedDbPersistence\", \"Releasing primary lease.\"), t.delete(\"owner\")) : PersistencePromise.resolve());\n }\n /** Verifies that `updateTimeMs` is within `maxAgeMs`. */\n Ci(e, t) {\n const n = Date.now();\n return !(e < n - t) && (!(e > n) || (__PRIVATE_logError(`Detected an update time that is in the future: ${e} > ${n}`), !1));\n }\n Ri() {\n null !== this.document && \"function\" == typeof this.document.addEventListener && (this.li = () => {\n this.oi.enqueueAndForget(() => (this.inForeground = \"visible\" === this.document.visibilityState, this.Ai()));\n }, this.document.addEventListener(\"visibilitychange\", this.li), this.inForeground = \"visible\" === this.document.visibilityState);\n }\n Oi() {\n this.li && (this.document.removeEventListener(\"visibilitychange\", this.li), this.li = null);\n }\n /**\n * Attaches a window.unload handler that will synchronously write our\n * clientId to a \"zombie client id\" location in LocalStorage. This can be used\n * by tabs trying to acquire the primary lease to determine that the lease\n * is no longer valid even if the timestamp is recent. This is particularly\n * important for the refresh case (so the tab correctly re-acquires the\n * primary lease). LocalStorage is used for this rather than IndexedDb because\n * it is a synchronous API and so can be used reliably from an unload\n * handler.\n */\n Vi() {\n var e;\n \"function\" == typeof (null === (e = this.window) || void 0 === e ? void 0 : e.addEventListener) && (this.ci = () => {\n // Note: In theory, this should be scheduled on the AsyncQueue since it\n // accesses internal state. 
We execute this code directly during shutdown\n // to make sure it gets a chance to run.\n this.xi();\n const e = /(?:Version|Mobile)\\/1[456]/;\n isSafari() && (navigator.appVersion.match(e) || navigator.userAgent.match(e)) &&\n // On Safari 14, 15, and 16, we do not run any cleanup actions as it might\n // trigger a bug that prevents Safari from re-opening IndexedDB during\n // the next page load.\n // See https://bugs.webkit.org/show_bug.cgi?id=226547\n this.oi.enterRestrictedMode(/* purgeExistingTasks= */!0), this.oi.enqueueAndForget(() => this.shutdown());\n }, this.window.addEventListener(\"pagehide\", this.ci));\n }\n Ni() {\n this.ci && (this.window.removeEventListener(\"pagehide\", this.ci), this.ci = null);\n }\n /**\n * Returns whether a client is \"zombied\" based on its LocalStorage entry.\n * Clients become zombied when their tab closes without running all of the\n * cleanup logic in `shutdown()`.\n */\n Mi(e) {\n var t;\n try {\n const n = null !== (null === (t = this.di) || void 0 === t ? void 0 : t.getItem(this.Fi(e)));\n return __PRIVATE_logDebug(\"IndexedDbPersistence\", `Client '${e}' ${n ? \"is\" : \"is not\"} zombied in LocalStorage`), n;\n } catch (e) {\n // Gracefully handle if LocalStorage isn't working.\n return __PRIVATE_logError(\"IndexedDbPersistence\", \"Failed to get zombied client id.\", e), !1;\n }\n }\n /**\n * Record client as zombied (a client that had its tab closed). Zombied\n * clients are ignored during primary tab selection.\n */\n xi() {\n if (this.di) try {\n this.di.setItem(this.Fi(this.clientId), String(Date.now()));\n } catch (e) {\n // Gracefully handle if LocalStorage isn't available / working.\n __PRIVATE_logError(\"Failed to set zombie client id.\", e);\n }\n }\n /** Removes the zombied client entry if it exists. */\n Li() {\n if (this.di) try {\n this.di.removeItem(this.Fi(this.clientId));\n } catch (e) {\n // Ignore\n }\n }\n Fi(e) {\n return `firestore_zombie_${this.persistenceKey}_${e}`;\n }\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the primary client object store.\n */\nfunction __PRIVATE_primaryClientStore(e) {\n return __PRIVATE_getStore(e, \"owner\");\n}\n\n/**\n * Helper to get a typed SimpleDbStore for the client metadata object store.\n */\nfunction __PRIVATE_clientMetadataStore(e) {\n return __PRIVATE_getStore(e, \"clientMetadata\");\n}\n\n/**\n * Generates a string used as a prefix when storing data in IndexedDB and\n * LocalStorage.\n */\nfunction __PRIVATE_indexedDbStoragePrefix(e, t) {\n // Use two different prefix formats:\n // * firestore / persistenceKey / projectID . 
databaseID / ...\n // * firestore / persistenceKey / projectID / ...\n // projectIDs are DNS-compatible names and cannot contain dots\n // so there's no danger of collisions.\n let n = e.projectId;\n return e.isDefaultDatabase || (n += \".\" + e.database), \"firestore/\" + t + \"/\" + n + \"/\";\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A set of changes to what documents are currently in view and out of view for\n * a given query. These changes are sent to the LocalStore by the View (via\n * the SyncEngine) and are used to pin / unpin documents as appropriate.\n */\nclass __PRIVATE_LocalViewChanges {\n constructor(e, t, n, r) {\n this.targetId = e, this.fromCache = t, this.qi = n, this.Qi = r;\n }\n static Ki(e, t) {\n let n = __PRIVATE_documentKeySet(),\n r = __PRIVATE_documentKeySet();\n for (const e of t.docChanges) switch (e.type) {\n case 0 /* ChangeType.Added */:\n n = n.add(e.doc.key);\n break;\n case 1 /* ChangeType.Removed */:\n r = r.add(e.doc.key);\n // do nothing\n }\n return new __PRIVATE_LocalViewChanges(e, t.fromCache, n, r);\n }\n}\n\n/**\n * @license\n * Copyright 2023 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A tracker to keep a record of important details during database local query\n * execution.\n */\nclass QueryContext {\n constructor() {\n /**\n * Counts the number of documents passed through during local query execution.\n */\n this._documentReadCount = 0;\n }\n get documentReadCount() {\n return this._documentReadCount;\n }\n incrementDocumentReadCount(e) {\n this._documentReadCount += e;\n }\n}\n\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * The Firestore query engine.\n *\n * Firestore queries can be executed in three modes. The Query Engine determines\n * what mode to use based on what data is persisted. 
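(A hypothetical summary of that decision, for orientation only and not the\n * SDK's actual control flow:\n *\n *   function chooseExecutionMode({ hasUsableIndex, hasPreviousResults }) {\n *     if (hasUsableIndex) return 'index';       // client-side index lookup\n *     if (hasPreviousResults) return 'delta';   // re-use last synced results\n *     return 'full-scan';                       // scan the whole collection\n *   }\n * )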
The mode only determines\n * the runtime complexity of the query - the result set is equivalent across all\n * implementations.\n *\n * The Query engine will use index-based execution if a user has configured\n * any index that can be used to execute the query (via `setIndexConfiguration()`).\n * Otherwise, the engine will try to optimize the query by re-using a previously\n * persisted query result. If that is not possible, the query will be executed\n * via a full collection scan.\n *\n * Index-based execution is the default when available. The query engine\n * supports partial indexed execution and merges the result from the index\n * lookup with documents that have not yet been indexed. The index evaluation\n * matches the backend's format and as such, the SDK can use indexing for all\n * queries that the backend supports.\n *\n * If no index exists, the query engine tries to take advantage of the target\n * document mapping in the TargetCache. These mappings exist for all queries\n * that have been synced with the backend at least once and allow the query\n * engine to only read documents that previously matched a query plus any\n * documents that were edited after the query was last listened to.\n *\n * There are some cases when this optimization is not guaranteed to produce\n * the same results as full collection scans. In these cases, query\n * processing falls back to full scans. These cases are:\n *\n * - Limit queries where a document that matched the query previously no longer\n * matches the query.\n *\n * - Limit queries where a document edit may cause the document to sort below\n * another document that is in the local cache.\n *\n * - Queries that have never been CURRENT or free of limbo documents.\n */\nclass __PRIVATE_QueryEngine {\n constructor() {\n this.$i = !1, this.Ui = !1,\n /**\n * SDK only decides whether it should create an index when the collection size is\n * larger than this.\n */\n this.Wi = 100, this.Gi =\n /**\n * This cost represents the evaluation result of\n * (([index, docKey] + [docKey, docContent]) per document in the result set)\n * / ([docKey, docContent] per document in full collection scan) coming from\n * experiment [enter PR experiment URL here].\n */\n function __PRIVATE_getDefaultRelativeIndexReadCostPerDocument() {\n // These values were derived from an experiment where several members of the\n // Firestore SDK team ran a performance test in various environments.\n // Googlers can see b/299284287 for details.\n return isSafari() ? 8 : __PRIVATE_getAndroidVersion(getUA()) > 0 ? 6 : 4;\n }();\n }\n /** Sets the document view to query against. */\n initialize(e, t) {\n this.zi = e, this.indexManager = t, this.$i = !0;\n }\n /** Returns all local documents matching the specified query. */\n getDocumentsMatchingQuery(e, t, n, r) {\n // Stores the result from executing the query; using this object is more\n // convenient than passing the result between steps of the persistence\n // transaction and improves readability comparatively.\n const i = {\n result: null\n };\n return this.ji(e, t).next(e => {\n i.result = e;\n }).next(() => {\n if (!i.result) return this.Hi(e, t, r, n).next(e => {\n i.result = e;\n });\n }).next(() => {\n if (i.result) return;\n const n = new QueryContext();\n return this.Ji(e, t, n).next(r => {\n if (i.result = r, this.Ui) return this.Yi(e, t, n, r.size);\n });\n }).next(() => i.result);\n }\n Yi(e, t, n, r) {\n return n.documentReadCount < this.Wi ? 
(__PRIVATE_getLogLevel() <= LogLevel.DEBUG && __PRIVATE_logDebug(\"QueryEngine\", \"SDK will not create cache indexes for query:\", __PRIVATE_stringifyQuery(t), \"since it only creates cache indexes for collection contains\", \"more than or equal to\", this.Wi, \"documents\"), PersistencePromise.resolve()) : (__PRIVATE_getLogLevel() <= LogLevel.DEBUG && __PRIVATE_logDebug(\"QueryEngine\", \"Query:\", __PRIVATE_stringifyQuery(t), \"scans\", n.documentReadCount, \"local documents and returns\", r, \"documents as results.\"), n.documentReadCount > this.Gi * r ? (__PRIVATE_getLogLevel() <= LogLevel.DEBUG && __PRIVATE_logDebug(\"QueryEngine\", \"The SDK decides to create cache indexes for query:\", __PRIVATE_stringifyQuery(t), \"as using cache indexes may help improve performance.\"), this.indexManager.createTargetIndexes(e, __PRIVATE_queryToTarget(t))) : PersistencePromise.resolve());\n }\n /**\n * Performs an indexed query that evaluates the query based on a collection's\n * persisted index values. Returns `null` if an index is not available.\n */\n ji(e, t) {\n if (__PRIVATE_queryMatchesAllDocuments(t))\n // Queries that match all documents don't benefit from using\n // key-based lookups. It is more efficient to scan all documents in a\n // collection, rather than to perform individual lookups.\n return PersistencePromise.resolve(null);\n let n = __PRIVATE_queryToTarget(t);\n return this.indexManager.getIndexType(e, n).next(r => 0 /* IndexType.NONE */ === r ? null : (null !== t.limit && 1 /* IndexType.PARTIAL */ === r && (\n // We cannot apply a limit for targets that are served using a partial\n // index. If a partial index will be used to serve the target, the\n // query may return a superset of documents that match the target\n // (e.g. if the index doesn't include all the target's filters), or\n // may return the correct set of documents in the wrong order (e.g. if\n // the index doesn't include a segment for one of the orderBys).\n // Therefore, a limit should not be applied in such cases.\n t = __PRIVATE_queryWithLimit(t, null, \"F\" /* LimitType.First */), n = __PRIVATE_queryToTarget(t)), this.indexManager.getDocumentsMatchingTarget(e, n).next(r => {\n const i = __PRIVATE_documentKeySet(...r);\n return this.zi.getDocuments(e, i).next(r => this.indexManager.getMinOffset(e, n).next(n => {\n const s = this.Zi(t, r);\n return this.Xi(t, s, i, n.readTime) ? this.ji(e, __PRIVATE_queryWithLimit(t, null, \"F\" /* LimitType.First */)) : this.es(e, s, t, n);\n }));\n })));\n }\n /**\n * Performs a query based on the target's persisted query mapping. Returns\n * `null` if the mapping is not available or cannot be used.\n */\n Hi(e, t, n, r) {\n return __PRIVATE_queryMatchesAllDocuments(t) || r.isEqual(SnapshotVersion.min()) ? PersistencePromise.resolve(null) : this.zi.getDocuments(e, n).next(i => {\n const s = this.Zi(t, i);\n return this.Xi(t, s, n, r) ? PersistencePromise.resolve(null) : (__PRIVATE_getLogLevel() <= LogLevel.DEBUG && __PRIVATE_logDebug(\"QueryEngine\", \"Re-using previous result from %s to execute query: %s\", r.toString(), __PRIVATE_stringifyQuery(t)), this.es(e, s, t, __PRIVATE_newIndexOffsetSuccessorFromReadTime(r, -1)).next(e => e));\n });\n // Queries that have never seen a snapshot without limbo free documents\n // should also be run as a full collection scan.\n }\n /** Applies the query filter and sorting to the provided documents. 
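(Conceptually, and only as an illustration where `cmp`, `matches` and `docs`\n * stand in for the SDK's internal helpers:\n *\n *   let sorted = new SortedSet(cmp);\n *   docs.forEach((key, doc) => {\n *     if (matches(query, doc)) sorted = sorted.add(doc);\n *   });\n *\n * which mirrors what the method below does.)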
*/\n Zi(e, t) {\n // Sort the documents and re-apply the query filter since previously\n // matching documents do not necessarily still match the query.\n let n = new SortedSet(__PRIVATE_newQueryComparator(e));\n return t.forEach((t, r) => {\n __PRIVATE_queryMatches(e, r) && (n = n.add(r));\n }), n;\n }\n /**\n * Determines if a limit query needs to be refilled from cache, making it\n * ineligible for index-free execution.\n *\n * @param query - The query.\n * @param sortedPreviousResults - The documents that matched the query when it\n * was last synchronized, sorted by the query's comparator.\n * @param remoteKeys - The document keys that matched the query at the last\n * snapshot.\n * @param limboFreeSnapshotVersion - The version of the snapshot when the\n * query was last synchronized.\n */\n Xi(e, t, n, r) {\n if (null === e.limit)\n // Queries without limits do not need to be refilled.\n return !1;\n if (n.size !== t.size)\n // The query needs to be refilled if a previously matching document no\n // longer matches.\n return !0;\n // Limit queries are not eligible for index-free query execution if there is\n // a potential that an older document from cache now sorts before a document\n // that was previously part of the limit. This, however, can only happen if\n // the document at the edge of the limit goes out of limit.\n // If a document that is not the limit boundary sorts differently,\n // the boundary of the limit itself did not change and documents from cache\n // will continue to be \"rejected\" by this boundary. Therefore, we can ignore\n // any modifications that don't affect the last document.\n const i = \"F\" /* LimitType.First */ === e.limitType ? t.last() : t.first();\n return !!i && (i.hasPendingWrites || i.version.compareTo(r) > 0);\n }\n Ji(e, t, n) {\n return __PRIVATE_getLogLevel() <= LogLevel.DEBUG && __PRIVATE_logDebug(\"QueryEngine\", \"Using full collection scan to execute query:\", __PRIVATE_stringifyQuery(t)), this.zi.getDocumentsMatchingQuery(e, t, IndexOffset.min(), n);\n }\n /**\n * Combines the results from an indexed execution with the remaining documents\n * that have not yet been indexed.\n */\n es(e, t, n, r) {\n // Retrieve all results for documents that were updated since the offset.\n return this.zi.getDocumentsMatchingQuery(e, n, r).next(e => (\n // Merge with existing results\n t.forEach(t => {\n e = e.insert(t.key, t);\n }), e));\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Implements `LocalStore` interface.\n *\n * Note: some field defined in this class might have public access level, but\n * the class is not exported so they are only accessible from this module.\n * This is useful to implement optional features (like bundles) in free\n * functions, such that they are tree-shakeable.\n */\nclass __PRIVATE_LocalStoreImpl {\n constructor(/** Manages our in-memory or durable persistence. 
*/\n e, t, n, r) {\n this.persistence = e, this.ts = t, this.serializer = r,\n /**\n * Maps a targetID to data about its target.\n *\n * PORTING NOTE: We are using an immutable data structure on Web to make re-runs\n * of `applyRemoteEvent()` idempotent.\n */\n this.ns = new SortedMap(__PRIVATE_primitiveComparator), /** Maps a target to its targetID. */\n // TODO(wuandy): Evaluate if TargetId can be part of Target.\n this.rs = new ObjectMap(e => __PRIVATE_canonifyTarget(e), __PRIVATE_targetEquals),\n /**\n * A per collection group index of the last read time processed by\n * `getNewDocumentChanges()`.\n *\n * PORTING NOTE: This is only used for multi-tab synchronization.\n */\n this.ss = new Map(), this.os = e.getRemoteDocumentCache(), this.Qr = e.getTargetCache(), this.$r = e.getBundleCache(), this._s(n);\n }\n _s(e) {\n // TODO(indexing): Add spec tests that test these components change after a\n // user change\n this.documentOverlayCache = this.persistence.getDocumentOverlayCache(e), this.indexManager = this.persistence.getIndexManager(e), this.mutationQueue = this.persistence.getMutationQueue(e, this.indexManager), this.localDocuments = new LocalDocumentsView(this.os, this.mutationQueue, this.documentOverlayCache, this.indexManager), this.os.setIndexManager(this.indexManager), this.ts.initialize(this.localDocuments, this.indexManager);\n }\n collectGarbage(e) {\n return this.persistence.runTransaction(\"Collect garbage\", \"readwrite-primary\", t => e.collect(t, this.ns));\n }\n}\nfunction __PRIVATE_newLocalStore(/** Manages our in-memory or durable persistence. */\ne, t, n, r) {\n return new __PRIVATE_LocalStoreImpl(e, t, n, r);\n}\n\n/**\n * Tells the LocalStore that the currently authenticated user has changed.\n *\n * In response the local store switches the mutation queue to the new user and\n * returns any resulting document changes.\n */\n// PORTING NOTE: Android and iOS only return the documents affected by the\n// change.\nasync function __PRIVATE_localStoreHandleUserChange(e, t) {\n const n = __PRIVATE_debugCast(e);\n return await n.persistence.runTransaction(\"Handle user change\", \"readonly\", e => {\n // Swap out the mutation queue, grabbing the pending mutation batches\n // before and after.\n let r;\n return n.mutationQueue.getAllMutationBatches(e).next(i => (r = i, n._s(t), n.mutationQueue.getAllMutationBatches(e))).next(t => {\n const i = [],\n s = [];\n // Union the old/new changed keys.\n let o = __PRIVATE_documentKeySet();\n for (const e of r) {\n i.push(e.batchId);\n for (const t of e.mutations) o = o.add(t.key);\n }\n for (const e of t) {\n s.push(e.batchId);\n for (const t of e.mutations) o = o.add(t.key);\n }\n // Return the set of all (potentially) changed documents and the list\n // of mutation batch IDs that were affected by change.\n return n.localDocuments.getDocuments(e, o).next(e => ({\n us: e,\n removedBatchIds: i,\n addedBatchIds: s\n }));\n });\n });\n}\n\n/* Accepts locally generated Mutations and commit them to storage. 
*/\n/**\n * Acknowledges the given batch.\n *\n * On the happy path when a batch is acknowledged, the local store will\n *\n * + remove the batch from the mutation queue;\n * + apply the changes to the remote document cache;\n * + recalculate the latency compensated view implied by those changes (there\n * may be mutations in the queue that affect the documents but haven't been\n * acknowledged yet); and\n * + give the changed documents back the sync engine\n *\n * @returns The resulting (modified) documents.\n */\nfunction __PRIVATE_localStoreAcknowledgeBatch(e, t) {\n const n = __PRIVATE_debugCast(e);\n return n.persistence.runTransaction(\"Acknowledge batch\", \"readwrite-primary\", e => {\n const r = t.batch.keys(),\n i = n.os.newChangeBuffer({\n trackRemovals: !0\n });\n return function __PRIVATE_applyWriteToRemoteDocuments(e, t, n, r) {\n const i = n.batch,\n s = i.keys();\n let o = PersistencePromise.resolve();\n return s.forEach(e => {\n o = o.next(() => r.getEntry(t, e)).next(t => {\n const s = n.docVersions.get(e);\n __PRIVATE_hardAssert(null !== s), t.version.compareTo(s) < 0 && (i.applyToRemoteDocument(t, n), t.isValidDocument() && (\n // We use the commitVersion as the readTime rather than the\n // document's updateTime since the updateTime is not advanced\n // for updates that do not modify the underlying document.\n t.setReadTime(n.commitVersion), r.addEntry(t)));\n });\n }), o.next(() => e.mutationQueue.removeMutationBatch(t, i));\n }\n /** Returns the local view of the documents affected by a mutation batch. */\n // PORTING NOTE: Multi-Tab only.\n (n, e, t, i).next(() => i.apply(e)).next(() => n.mutationQueue.performConsistencyCheck(e)).next(() => n.documentOverlayCache.removeOverlaysForBatchId(e, r, t.batch.batchId)).next(() => n.localDocuments.recalculateAndSaveOverlaysForDocumentKeys(e, function __PRIVATE_getKeysWithTransformResults(e) {\n let t = __PRIVATE_documentKeySet();\n for (let n = 0; n < e.mutationResults.length; ++n) {\n e.mutationResults[n].transformResults.length > 0 && (t = t.add(e.batch.mutations[n].key));\n }\n return t;\n }\n /**\n * Removes mutations from the MutationQueue for the specified batch;\n * LocalDocuments will be recalculated.\n *\n * @returns The resulting modified documents.\n */(t))).next(() => n.localDocuments.getDocuments(e, r));\n });\n}\n\n/**\n * Returns the last consistent snapshot processed (used by the RemoteStore to\n * determine whether to buffer incoming snapshots from the backend).\n */\nfunction __PRIVATE_localStoreGetLastRemoteSnapshotVersion(e) {\n const t = __PRIVATE_debugCast(e);\n return t.persistence.runTransaction(\"Get last remote snapshot version\", \"readonly\", e => t.Qr.getLastRemoteSnapshotVersion(e));\n}\n\n/**\n * Updates the \"ground-state\" (remote) documents. We assume that the remote\n * event reflects any write batches that have been acknowledged or rejected\n * (i.e. 
we do not re-apply local mutations to updates from this event).\n *\n * LocalDocuments are re-calculated if there are remaining mutations in the\n * queue.\n */\nfunction __PRIVATE_localStoreApplyRemoteEventToLocalCache(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = t.snapshotVersion;\n let i = n.ns;\n return n.persistence.runTransaction(\"Apply remote event\", \"readwrite-primary\", e => {\n const s = n.os.newChangeBuffer({\n trackRemovals: !0\n });\n // Reset newTargetDataByTargetMap in case this transaction gets re-run.\n i = n.ns;\n const o = [];\n t.targetChanges.forEach((s, _) => {\n const a = i.get(_);\n if (!a) return;\n // Only update the remote keys if the target is still active. This\n // ensures that we can persist the updated target data along with\n // the updated assignment.\n o.push(n.Qr.removeMatchingKeys(e, s.removedDocuments, _).next(() => n.Qr.addMatchingKeys(e, s.addedDocuments, _)));\n let u = a.withSequenceNumber(e.currentSequenceNumber);\n null !== t.targetMismatches.get(_) ? u = u.withResumeToken(ByteString.EMPTY_BYTE_STRING, SnapshotVersion.min()).withLastLimboFreeSnapshotVersion(SnapshotVersion.min()) : s.resumeToken.approximateByteSize() > 0 && (u = u.withResumeToken(s.resumeToken, r)), i = i.insert(_, u),\n // Update the target data if there are target changes (or if\n // sufficient time has passed since the last update).\n /**\n * Returns true if the newTargetData should be persisted during an update of\n * an active target. TargetData should always be persisted when a target is\n * being released and should not call this function.\n *\n * While the target is active, TargetData updates can be omitted when nothing\n * about the target has changed except metadata like the resume token or\n * snapshot version. Occasionally it's worth the extra write to prevent these\n * values from getting too stale after a crash, but this doesn't have to be\n * too frequent.\n */\n function __PRIVATE_shouldPersistTargetData(e, t, n) {\n // Always persist target data if we don't already have a resume token.\n if (0 === e.resumeToken.approximateByteSize()) return !0;\n // Don't allow resume token changes to be buffered indefinitely. This\n // allows us to be reasonably up-to-date after a crash and avoids needing\n // to loop over all active queries on shutdown. Especially in the browser\n // we may not get time to do anything interesting while the current tab is\n // closing.\n if (t.snapshotVersion.toMicroseconds() - e.snapshotVersion.toMicroseconds() >= 3e8) return !0;\n // Otherwise if the only thing that has changed about a target is its resume\n // token it's not worth persisting. 
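(For reference, the 3e8 threshold above is in microseconds, i.e. roughly five minutes.) 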
Note that the RemoteStore keeps an\n // in-memory view of the currently active targets which includes the current\n // resume token, so stream failure or user changes will still use an\n // up-to-date resume token regardless of what we do here.\n return n.addedDocuments.size + n.modifiedDocuments.size + n.removedDocuments.size > 0;\n }\n /**\n * Notifies local store of the changed views to locally pin documents.\n */(a, u, s) && o.push(n.Qr.updateTargetData(e, u));\n });\n let _ = __PRIVATE_mutableDocumentMap(),\n a = __PRIVATE_documentKeySet();\n // HACK: The only reason we allow a null snapshot version is so that we\n // can synthesize remote events when we get permission denied errors while\n // trying to resolve the state of a locally cached document that is in\n // limbo.\n if (t.documentUpdates.forEach(r => {\n t.resolvedLimboDocuments.has(r) && o.push(n.persistence.referenceDelegate.updateLimboDocument(e, r));\n }),\n // Each loop iteration only affects its \"own\" doc, so it's safe to get all\n // the remote documents in advance in a single call.\n o.push(__PRIVATE_populateDocumentChangeBuffer(e, s, t.documentUpdates).next(e => {\n _ = e.cs, a = e.ls;\n })), !r.isEqual(SnapshotVersion.min())) {\n const t = n.Qr.getLastRemoteSnapshotVersion(e).next(t => n.Qr.setTargetsMetadata(e, e.currentSequenceNumber, r));\n o.push(t);\n }\n return PersistencePromise.waitFor(o).next(() => s.apply(e)).next(() => n.localDocuments.getLocalViewOfDocuments(e, _, a)).next(() => _);\n }).then(e => (n.ns = i, e));\n}\n\n/**\n * Populates document change buffer with documents from backend or a bundle.\n * Returns the document changes resulting from applying those documents, and\n * also a set of documents whose existence state are changed as a result.\n *\n * @param txn - Transaction to use to read existing documents from storage.\n * @param documentBuffer - Document buffer to collect the resulted changes to be\n * applied to storage.\n * @param documents - Documents to be applied.\n */\nfunction __PRIVATE_populateDocumentChangeBuffer(e, t, n) {\n let r = __PRIVATE_documentKeySet(),\n i = __PRIVATE_documentKeySet();\n return n.forEach(e => r = r.add(e)), t.getEntries(e, r).next(e => {\n let r = __PRIVATE_mutableDocumentMap();\n return n.forEach((n, s) => {\n const o = e.get(n);\n // Check if see if there is a existence state change for this document.\n s.isFoundDocument() !== o.isFoundDocument() && (i = i.add(n)),\n // Note: The order of the steps below is important, since we want\n // to ensure that rejected limbo resolutions (which fabricate\n // NoDocuments with SnapshotVersion.min()) never add documents to\n // cache.\n s.isNoDocument() && s.version.isEqual(SnapshotVersion.min()) ? (\n // NoDocuments with SnapshotVersion.min() are used in manufactured\n // events. We remove these documents from cache since we lost\n // access.\n t.removeEntry(n, s.readTime), r = r.insert(n, s)) : !o.isValidDocument() || s.version.compareTo(o.version) > 0 || 0 === s.version.compareTo(o.version) && o.hasPendingWrites ? (t.addEntry(s), r = r.insert(n, s)) : __PRIVATE_logDebug(\"LocalStore\", \"Ignoring outdated watch update for \", n, \". 
Current version:\", o.version, \" Watch version:\", s.version);\n }), {\n cs: r,\n ls: i\n };\n });\n}\n\n/**\n * Gets the mutation batch after the passed in batchId in the mutation queue\n * or null if empty.\n * @param afterBatchId - If provided, the batch to search after.\n * @returns The next mutation or null if there wasn't one.\n */\nfunction __PRIVATE_localStoreGetNextMutationBatch(e, t) {\n const n = __PRIVATE_debugCast(e);\n return n.persistence.runTransaction(\"Get next mutation batch\", \"readonly\", e => (void 0 === t && (t = -1), n.mutationQueue.getNextMutationBatchAfterBatchId(e, t)));\n}\n\n/**\n * Reads the current value of a Document with a given key or null if not\n * found - used for testing.\n */\n/**\n * Assigns the given target an internal ID so that its results can be pinned so\n * they don't get GC'd. A target must be allocated in the local store before\n * the store can be used to manage its view.\n *\n * Allocating an already allocated `Target` will return the existing `TargetData`\n * for that `Target`.\n */\nfunction __PRIVATE_localStoreAllocateTarget(e, t) {\n const n = __PRIVATE_debugCast(e);\n return n.persistence.runTransaction(\"Allocate target\", \"readwrite\", e => {\n let r;\n return n.Qr.getTargetData(e, t).next(i => i ? (\n // This target has been listened to previously, so reuse the\n // previous targetID.\n // TODO(mcg): freshen last accessed date?\n r = i, PersistencePromise.resolve(r)) : n.Qr.allocateTargetId(e).next(i => (r = new TargetData(t, i, \"TargetPurposeListen\" /* TargetPurpose.Listen */, e.currentSequenceNumber), n.Qr.addTargetData(e, r).next(() => r))));\n }).then(e => {\n // If Multi-Tab is enabled, the existing target data may be newer than\n // the in-memory data\n const r = n.ns.get(e.targetId);\n return (null === r || e.snapshotVersion.compareTo(r.snapshotVersion) > 0) && (n.ns = n.ns.insert(e.targetId, e), n.rs.set(t, e.targetId)), e;\n });\n}\n\n/**\n * Returns the TargetData as seen by the LocalStore, including updates that may\n * have not yet been persisted to the TargetCache.\n */\n// Visible for testing.\n/**\n * Unpins all the documents associated with the given target. If\n * `keepPersistedTargetData` is set to false and Eager GC enabled, the method\n * directly removes the associated target data from the target cache.\n *\n * Releasing a non-existing `Target` is a no-op.\n */\n// PORTING NOTE: `keepPersistedTargetData` is multi-tab only.\nasync function __PRIVATE_localStoreReleaseTarget(e, t, n) {\n const r = __PRIVATE_debugCast(e),\n i = r.ns.get(t),\n s = n ? \"readwrite\" : \"readwrite-primary\";\n try {\n n || (await r.persistence.runTransaction(\"Release target\", s, e => r.persistence.referenceDelegate.removeTarget(e, i)));\n } catch (e) {\n if (!__PRIVATE_isIndexedDbTransactionError(e)) throw e;\n // All `releaseTarget` does is record the final metadata state for the\n // target, but we've been recording this periodically during target\n // activity. 
If we lose this write this could cause a very slight\n // difference in the order of target deletion during GC, but we\n // don't define exact LRU semantics so this is acceptable.\n __PRIVATE_logDebug(\"LocalStore\", `Failed to update sequence numbers for target ${t}: ${e}`);\n }\n r.ns = r.ns.remove(t), r.rs.delete(i.target);\n}\n\n/**\n * Runs the specified query against the local store and returns the results,\n * potentially taking advantage of query data from previous executions (such\n * as the set of remote keys).\n *\n * @param usePreviousResults - Whether results from previous executions can\n * be used to optimize this query execution.\n */\nfunction __PRIVATE_localStoreExecuteQuery(e, t, n) {\n const r = __PRIVATE_debugCast(e);\n let i = SnapshotVersion.min(),\n s = __PRIVATE_documentKeySet();\n return r.persistence.runTransaction(\"Execute query\", \"readwrite\",\n // Use readwrite instead of readonly so indexes can be created\n // Use readwrite instead of readonly so indexes can be created\n e => function __PRIVATE_localStoreGetTargetData(e, t, n) {\n const r = __PRIVATE_debugCast(e),\n i = r.rs.get(n);\n return void 0 !== i ? PersistencePromise.resolve(r.ns.get(i)) : r.Qr.getTargetData(t, n);\n }(r, e, __PRIVATE_queryToTarget(t)).next(t => {\n if (t) return i = t.lastLimboFreeSnapshotVersion, r.Qr.getMatchingKeysForTargetId(e, t.targetId).next(e => {\n s = e;\n });\n }).next(() => r.ts.getDocumentsMatchingQuery(e, t, n ? i : SnapshotVersion.min(), n ? s : __PRIVATE_documentKeySet())).next(e => (__PRIVATE_setMaxReadTime(r, __PRIVATE_queryCollectionGroup(t), e), {\n documents: e,\n hs: s\n })));\n}\n\n// PORTING NOTE: Multi-Tab only.\nfunction __PRIVATE_localStoreGetCachedTarget(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = __PRIVATE_debugCast(n.Qr),\n i = n.ns.get(t);\n return i ? Promise.resolve(i.target) : n.persistence.runTransaction(\"Get target data\", \"readonly\", e => r.ot(e, t).next(e => e ? e.target : null));\n}\n\n/**\n * Returns the set of documents that have been updated since the last call.\n * If this is the first call, returns the set of changes since client\n * initialization. Further invocations will return document that have changed\n * since the prior call.\n */\n// PORTING NOTE: Multi-Tab only.\nfunction __PRIVATE_localStoreGetNewDocumentChanges(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = n.ss.get(t) || SnapshotVersion.min();\n // Get the current maximum read time for the collection. This should always\n // exist, but to reduce the chance for regressions we default to\n // SnapshotVersion.Min()\n // TODO(indexing): Consider removing the default value.\n return n.persistence.runTransaction(\"Get new document changes\", \"readonly\", e => n.os.getAllFromCollectionGroup(e, t, __PRIVATE_newIndexOffsetSuccessorFromReadTime(r, -1), /* limit= */Number.MAX_SAFE_INTEGER)).then(e => (__PRIVATE_setMaxReadTime(n, t, e), e));\n}\n\n/** Sets the collection group's maximum read time from the given documents. 
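\n *\n * (Descriptive note: this is the counterpart of the lookup in getNewDocumentChanges above; it raises the per-collection-group entry in the 'ss' map to the highest readTime seen among the given documents.)\n 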
*/\n// PORTING NOTE: Multi-Tab only.\nfunction __PRIVATE_setMaxReadTime(e, t, n) {\n let r = e.ss.get(t) || SnapshotVersion.min();\n n.forEach((e, t) => {\n t.readTime.compareTo(r) > 0 && (r = t.readTime);\n }), e.ss.set(t, r);\n}\n\n/**\n * Creates a new target using the given bundle name, which will be used to\n * hold the keys of all documents from the bundle in query-document mappings.\n * This ensures that the loaded documents do not get garbage collected\n * right away.\n */\n/**\n * Applies the documents from a bundle to the \"ground-state\" (remote)\n * documents.\n *\n * LocalDocuments are re-calculated if there are remaining mutations in the\n * queue.\n */\nasync function __PRIVATE_localStoreApplyBundledDocuments(e, t, n, r) {\n const i = __PRIVATE_debugCast(e);\n let s = __PRIVATE_documentKeySet(),\n o = __PRIVATE_mutableDocumentMap();\n for (const e of n) {\n const n = t.Ps(e.metadata.name);\n e.document && (s = s.add(n));\n const r = t.Is(e);\n r.setReadTime(t.Ts(e.metadata.readTime)), o = o.insert(n, r);\n }\n const _ = i.os.newChangeBuffer({\n trackRemovals: !0\n }),\n a = await __PRIVATE_localStoreAllocateTarget(i, function __PRIVATE_umbrellaTarget(e) {\n // It is OK that the path used for the query is not valid, because this will\n // not be read and queried.\n return __PRIVATE_queryToTarget(__PRIVATE_newQueryForPath(ResourcePath.fromString(`__bundle__/docs/${e}`)));\n }(r));\n // Allocates a target to hold all document keys from the bundle, such that\n // they will not get garbage collected right away.\n return i.persistence.runTransaction(\"Apply bundle documents\", \"readwrite\", e => __PRIVATE_populateDocumentChangeBuffer(e, _, o).next(t => (_.apply(e), t)).next(t => i.Qr.removeMatchingKeysForTargetId(e, a.targetId).next(() => i.Qr.addMatchingKeys(e, s, a.targetId)).next(() => i.localDocuments.getLocalViewOfDocuments(e, t.cs, t.ls)).next(() => t.cs)));\n}\n\n/**\n * Returns a promise of a boolean to indicate if the given bundle has already\n * been loaded and the create time is newer than the current loading bundle.\n */\n/**\n * Saves the given `NamedQuery` to local persistence.\n */\nasync function __PRIVATE_localStoreSaveNamedQuery(e, t, n = __PRIVATE_documentKeySet()) {\n // Allocate a target for the named query such that it can be resumed\n // from associated read time if users use it to listen.\n // NOTE: this also means if no corresponding target exists, the new target\n // will remain active and will not get collected, unless users happen to\n // unlisten the query somehow.\n const r = await __PRIVATE_localStoreAllocateTarget(e, __PRIVATE_queryToTarget(__PRIVATE_fromBundledQuery(t.bundledQuery))),\n i = __PRIVATE_debugCast(e);\n return i.persistence.runTransaction(\"Save named query\", \"readwrite\", e => {\n const s = __PRIVATE_fromVersion(t.readTime);\n // Simply save the query itself if it is older than what the SDK already\n // has.\n if (r.snapshotVersion.compareTo(s) >= 0) return i.$r.saveNamedQuery(e, t);\n // Update existing target data because the query from the bundle is newer.\n const o = r.withResumeToken(ByteString.EMPTY_BYTE_STRING, s);\n return i.ns = i.ns.insert(o.targetId, o), i.Qr.updateTargetData(e, o).next(() => i.Qr.removeMatchingKeysForTargetId(e, r.targetId)).next(() => i.Qr.addMatchingKeys(e, n, r.targetId)).next(() => i.$r.saveNamedQuery(e, t));\n });\n}\n\n/** Assembles the key for a client state in WebStorage */\nfunction createWebStorageClientStateKey(e, t) {\n return `firestore_clients_${e}_${t}`;\n}\n\n// The format of the 
WebStorage key that stores the mutation state is:\n// firestore_mutations__\n// (for unauthenticated users)\n// or: firestore_mutations___\n\n// 'user_uid' is last to avoid needing to escape '_' characters that it might\n// contain.\n/** Assembles the key for a mutation batch in WebStorage */\nfunction createWebStorageMutationBatchKey(e, t, n) {\n let r = `firestore_mutations_${e}_${n}`;\n return t.isAuthenticated() && (r += `_${t.uid}`), r;\n}\n\n// The format of the WebStorage key that stores a query target's metadata is:\n// firestore_targets__\n/** Assembles the key for a query state in WebStorage */\nfunction createWebStorageQueryTargetMetadataKey(e, t) {\n return `firestore_targets_${e}_${t}`;\n}\n\n// The WebStorage prefix that stores the primary tab's online state. The\n// format of the key is:\n// firestore_online_state_\n/**\n * Holds the state of a mutation batch, including its user ID, batch ID and\n * whether the batch is 'pending', 'acknowledged' or 'rejected'.\n */\n// Visible for testing\nclass __PRIVATE_MutationMetadata {\n constructor(e, t, n, r) {\n this.user = e, this.batchId = t, this.state = n, this.error = r;\n }\n /**\n * Parses a MutationMetadata from its JSON representation in WebStorage.\n * Logs a warning and returns null if the format of the data is not valid.\n */\n static Es(e, t, n) {\n const r = JSON.parse(n);\n let i,\n s = \"object\" == typeof r && -1 !== [\"pending\", \"acknowledged\", \"rejected\"].indexOf(r.state) && (void 0 === r.error || \"object\" == typeof r.error);\n return s && r.error && (s = \"string\" == typeof r.error.message && \"string\" == typeof r.error.code, s && (i = new FirestoreError(r.error.code, r.error.message))), s ? new __PRIVATE_MutationMetadata(e, t, r.state, i) : (__PRIVATE_logError(\"SharedClientState\", `Failed to parse mutation state for ID '${t}': ${n}`), null);\n }\n ds() {\n const e = {\n state: this.state,\n updateTimeMs: Date.now()\n };\n return this.error && (e.error = {\n code: this.error.code,\n message: this.error.message\n }), JSON.stringify(e);\n }\n}\n\n/**\n * Holds the state of a query target, including its target ID and whether the\n * target is 'not-current', 'current' or 'rejected'.\n */\n// Visible for testing\nclass __PRIVATE_QueryTargetMetadata {\n constructor(e, t, n) {\n this.targetId = e, this.state = t, this.error = n;\n }\n /**\n * Parses a QueryTargetMetadata from its JSON representation in WebStorage.\n * Logs a warning and returns null if the format of the data is not valid.\n */\n static Es(e, t) {\n const n = JSON.parse(t);\n let r,\n i = \"object\" == typeof n && -1 !== [\"not-current\", \"current\", \"rejected\"].indexOf(n.state) && (void 0 === n.error || \"object\" == typeof n.error);\n return i && n.error && (i = \"string\" == typeof n.error.message && \"string\" == typeof n.error.code, i && (r = new FirestoreError(n.error.code, n.error.message))), i ? 
new __PRIVATE_QueryTargetMetadata(e, n.state, r) : (__PRIVATE_logError(\"SharedClientState\", `Failed to parse target state for ID '${e}': ${t}`), null);\n }\n ds() {\n const e = {\n state: this.state,\n updateTimeMs: Date.now()\n };\n return this.error && (e.error = {\n code: this.error.code,\n message: this.error.message\n }), JSON.stringify(e);\n }\n}\n\n/**\n * This class represents the immutable ClientState for a client read from\n * WebStorage, containing the list of active query targets.\n */\nclass __PRIVATE_RemoteClientState {\n constructor(e, t) {\n this.clientId = e, this.activeTargetIds = t;\n }\n /**\n * Parses a RemoteClientState from the JSON representation in WebStorage.\n * Logs a warning and returns null if the format of the data is not valid.\n */\n static Es(e, t) {\n const n = JSON.parse(t);\n let r = \"object\" == typeof n && n.activeTargetIds instanceof Array,\n i = __PRIVATE_targetIdSet();\n for (let e = 0; r && e < n.activeTargetIds.length; ++e) r = isSafeInteger(n.activeTargetIds[e]), i = i.add(n.activeTargetIds[e]);\n return r ? new __PRIVATE_RemoteClientState(e, i) : (__PRIVATE_logError(\"SharedClientState\", `Failed to parse client data for instance '${e}': ${t}`), null);\n }\n}\n\n/**\n * This class represents the online state for all clients participating in\n * multi-tab. The online state is only written to by the primary client, and\n * used in secondary clients to update their query views.\n */\nclass __PRIVATE_SharedOnlineState {\n constructor(e, t) {\n this.clientId = e, this.onlineState = t;\n }\n /**\n * Parses a SharedOnlineState from its JSON representation in WebStorage.\n * Logs a warning and returns null if the format of the data is not valid.\n */\n static Es(e) {\n const t = JSON.parse(e);\n return \"object\" == typeof t && -1 !== [\"Unknown\", \"Online\", \"Offline\"].indexOf(t.onlineState) && \"string\" == typeof t.clientId ? new __PRIVATE_SharedOnlineState(t.clientId, t.onlineState) : (__PRIVATE_logError(\"SharedClientState\", `Failed to parse online state: ${e}`), null);\n }\n}\n\n/**\n * Metadata state of the local client. Unlike `RemoteClientState`, this class is\n * mutable and keeps track of all pending mutations, which allows us to\n * update the range of pending mutation batch IDs as new mutations are added or\n * removed.\n *\n * The data in `LocalClientState` is not read from WebStorage and instead\n * updated via its instance methods. The updated state can be serialized via\n * `toWebStorageJSON()`.\n */\n// Visible for testing.\nclass __PRIVATE_LocalClientState {\n constructor() {\n this.activeTargetIds = __PRIVATE_targetIdSet();\n }\n As(e) {\n this.activeTargetIds = this.activeTargetIds.add(e);\n }\n Rs(e) {\n this.activeTargetIds = this.activeTargetIds.delete(e);\n }\n /**\n * Converts this entry into a JSON-encoded format we can use for WebStorage.\n * Does not encode `clientId` as it is part of the key in WebStorage.\n */\n ds() {\n const e = {\n activeTargetIds: this.activeTargetIds.toArray(),\n updateTimeMs: Date.now()\n };\n return JSON.stringify(e);\n }\n}\n\n/**\n * `WebStorageSharedClientState` uses WebStorage (window.localStorage) as the\n * backing store for the SharedClientState. 
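For illustration only (values are made up), with persistence key 'p', client ID 'c1', batch ID 5 and target ID 2 the entries it manages look like:\n *\n *   firestore_clients_p_c1 (client state)\n *   firestore_mutations_p_5, plus an _<uid> suffix for authenticated users (mutation batch state)\n *   firestore_targets_p_2 (query target state)\n *   firestore_online_state_p, firestore_sequence_number_p, firestore_bundle_loaded_v2_p\n *\n * 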
It keeps track of all active\n * clients and supports modifications of the local client's data.\n */\nclass __PRIVATE_WebStorageSharedClientState {\n constructor(e, t, n, r, i) {\n this.window = e, this.oi = t, this.persistenceKey = n, this.Vs = r, this.syncEngine = null, this.onlineStateHandler = null, this.sequenceNumberHandler = null, this.fs = this.gs.bind(this), this.ps = new SortedMap(__PRIVATE_primitiveComparator), this.started = !1,\n /**\n * Captures WebStorage events that occur before `start()` is called. These\n * events are replayed once `WebStorageSharedClientState` is started.\n */\n this.ys = [];\n // Escape the special characters mentioned here:\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions\n const s = n.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\");\n this.storage = this.window.localStorage, this.currentUser = i, this.ws = createWebStorageClientStateKey(this.persistenceKey, this.Vs), this.Ss = /** Assembles the key for the current sequence number. */\n function createWebStorageSequenceNumberKey(e) {\n return `firestore_sequence_number_${e}`;\n }\n /**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */(this.persistenceKey), this.ps = this.ps.insert(this.Vs, new __PRIVATE_LocalClientState()), this.bs = new RegExp(`^firestore_clients_${s}_([^_]*)$`), this.Ds = new RegExp(`^firestore_mutations_${s}_(\\\\d+)(?:_(.*))?$`), this.Cs = new RegExp(`^firestore_targets_${s}_(\\\\d+)$`), this.vs = /** Assembles the key for the online state of the primary tab. */\n function createWebStorageOnlineStateKey(e) {\n return `firestore_online_state_${e}`;\n }\n // The WebStorage prefix that plays as a event to indicate the remote documents\n // might have changed due to some secondary tabs loading a bundle.\n // format of the key is:\n // firestore_bundle_loaded_v2_\n // The version ending with \"v2\" stores the list of modified collection groups.\n (this.persistenceKey), this.Fs = function createBundleLoadedKey(e) {\n return `firestore_bundle_loaded_v2_${e}`;\n }\n // The WebStorage key prefix for the key that stores the last sequence number allocated. The key\n // looks like 'firestore_sequence_number_'.\n (this.persistenceKey),\n // Rather than adding the storage observer during start(), we add the\n // storage observer during initialization. This ensures that we collect\n // events before other components populate their initial state (during their\n // respective start() calls). Otherwise, we might for example miss a\n // mutation that is added after LocalStore's start() processed the existing\n // mutations but before we observe WebStorage events.\n this.window.addEventListener(\"storage\", this.fs);\n }\n /** Returns 'true' if WebStorage is available in the current environment. 
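(It returns false when the given object is null/undefined or has no localStorage, e.g. in the WebWorker and React Native environments mentioned at the getWindow() helper further below.) 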
*/\n static D(e) {\n return !(!e || !e.localStorage);\n }\n async start() {\n // Retrieve the list of existing clients to backfill the data in\n // SharedClientState.\n const e = await this.syncEngine.Bi();\n for (const t of e) {\n if (t === this.Vs) continue;\n const e = this.getItem(createWebStorageClientStateKey(this.persistenceKey, t));\n if (e) {\n const n = __PRIVATE_RemoteClientState.Es(t, e);\n n && (this.ps = this.ps.insert(n.clientId, n));\n }\n }\n this.Ms();\n // Check if there is an existing online state and call the callback handler\n // if applicable.\n const t = this.storage.getItem(this.vs);\n if (t) {\n const e = this.xs(t);\n e && this.Os(e);\n }\n for (const e of this.ys) this.gs(e);\n this.ys = [],\n // Register a window unload hook to remove the client metadata entry from\n // WebStorage even if `shutdown()` was not called.\n this.window.addEventListener(\"pagehide\", () => this.shutdown()), this.started = !0;\n }\n writeSequenceNumber(e) {\n this.setItem(this.Ss, JSON.stringify(e));\n }\n getAllActiveQueryTargets() {\n return this.Ns(this.ps);\n }\n isActiveQueryTarget(e) {\n let t = !1;\n return this.ps.forEach((n, r) => {\n r.activeTargetIds.has(e) && (t = !0);\n }), t;\n }\n addPendingMutation(e) {\n this.Ls(e, \"pending\");\n }\n updateMutationState(e, t, n) {\n this.Ls(e, t, n),\n // Once a final mutation result is observed by other clients, they no longer\n // access the mutation's metadata entry. Since WebStorage replays events\n // in order, it is safe to delete the entry right after updating it.\n this.Bs(e);\n }\n addLocalQueryTarget(e) {\n let t = \"not-current\";\n // Lookup an existing query state if the target ID was already registered\n // by another tab\n if (this.isActiveQueryTarget(e)) {\n const n = this.storage.getItem(createWebStorageQueryTargetMetadataKey(this.persistenceKey, e));\n if (n) {\n const r = __PRIVATE_QueryTargetMetadata.Es(e, n);\n r && (t = r.state);\n }\n }\n return this.ks.As(e), this.Ms(), t;\n }\n removeLocalQueryTarget(e) {\n this.ks.Rs(e), this.Ms();\n }\n isLocalQueryTarget(e) {\n return this.ks.activeTargetIds.has(e);\n }\n clearQueryState(e) {\n this.removeItem(createWebStorageQueryTargetMetadataKey(this.persistenceKey, e));\n }\n updateQueryState(e, t, n) {\n this.qs(e, t, n);\n }\n handleUserChange(e, t, n) {\n t.forEach(e => {\n this.Bs(e);\n }), this.currentUser = e, n.forEach(e => {\n this.addPendingMutation(e);\n });\n }\n setOnlineState(e) {\n this.Qs(e);\n }\n notifyBundleLoaded(e) {\n this.Ks(e);\n }\n shutdown() {\n this.started && (this.window.removeEventListener(\"storage\", this.fs), this.removeItem(this.ws), this.started = !1);\n }\n getItem(e) {\n const t = this.storage.getItem(e);\n return __PRIVATE_logDebug(\"SharedClientState\", \"READ\", e, t), t;\n }\n setItem(e, t) {\n __PRIVATE_logDebug(\"SharedClientState\", \"SET\", e, t), this.storage.setItem(e, t);\n }\n removeItem(e) {\n __PRIVATE_logDebug(\"SharedClientState\", \"REMOVE\", e), this.storage.removeItem(e);\n }\n gs(e) {\n // Note: The function is typed to take Event to be interface-compatible with\n // `Window.addEventListener`.\n const t = e;\n if (t.storageArea === this.storage) {\n if (__PRIVATE_logDebug(\"SharedClientState\", \"EVENT\", t.key, t.newValue), t.key === this.ws) return void __PRIVATE_logError(\"Received WebStorage notification for local change. 
Another client might have garbage-collected our state\");\n this.oi.enqueueRetryable(async () => {\n if (this.started) {\n if (null !== t.key) if (this.bs.test(t.key)) {\n if (null == t.newValue) {\n const e = this.$s(t.key);\n return this.Us(e, null);\n }\n {\n const e = this.Ws(t.key, t.newValue);\n if (e) return this.Us(e.clientId, e);\n }\n } else if (this.Ds.test(t.key)) {\n if (null !== t.newValue) {\n const e = this.Gs(t.key, t.newValue);\n if (e) return this.zs(e);\n }\n } else if (this.Cs.test(t.key)) {\n if (null !== t.newValue) {\n const e = this.js(t.key, t.newValue);\n if (e) return this.Hs(e);\n }\n } else if (t.key === this.vs) {\n if (null !== t.newValue) {\n const e = this.xs(t.newValue);\n if (e) return this.Os(e);\n }\n } else if (t.key === this.Ss) {\n const e = function __PRIVATE_fromWebStorageSequenceNumber(e) {\n let t = __PRIVATE_ListenSequence.oe;\n if (null != e) try {\n const n = JSON.parse(e);\n __PRIVATE_hardAssert(\"number\" == typeof n), t = n;\n } catch (e) {\n __PRIVATE_logError(\"SharedClientState\", \"Failed to read sequence number from WebStorage\", e);\n }\n return t;\n }\n /**\n * `MemorySharedClientState` is a simple implementation of SharedClientState for\n * clients using memory persistence. The state in this class remains fully\n * isolated and no synchronization is performed.\n */(t.newValue);\n e !== __PRIVATE_ListenSequence.oe && this.sequenceNumberHandler(e);\n } else if (t.key === this.Fs) {\n const e = this.Js(t.newValue);\n await Promise.all(e.map(e => this.syncEngine.Ys(e)));\n }\n } else this.ys.push(t);\n });\n }\n }\n get ks() {\n return this.ps.get(this.Vs);\n }\n Ms() {\n this.setItem(this.ws, this.ks.ds());\n }\n Ls(e, t, n) {\n const r = new __PRIVATE_MutationMetadata(this.currentUser, e, t, n),\n i = createWebStorageMutationBatchKey(this.persistenceKey, this.currentUser, e);\n this.setItem(i, r.ds());\n }\n Bs(e) {\n const t = createWebStorageMutationBatchKey(this.persistenceKey, this.currentUser, e);\n this.removeItem(t);\n }\n Qs(e) {\n const t = {\n clientId: this.Vs,\n onlineState: e\n };\n this.storage.setItem(this.vs, JSON.stringify(t));\n }\n qs(e, t, n) {\n const r = createWebStorageQueryTargetMetadataKey(this.persistenceKey, e),\n i = new __PRIVATE_QueryTargetMetadata(e, t, n);\n this.setItem(r, i.ds());\n }\n Ks(e) {\n const t = JSON.stringify(Array.from(e));\n this.setItem(this.Fs, t);\n }\n /**\n * Parses a client state key in WebStorage. Returns null if the key does not\n * match the expected key format.\n */\n $s(e) {\n const t = this.bs.exec(e);\n return t ? t[1] : null;\n }\n /**\n * Parses a client state in WebStorage. Returns 'null' if the value could not\n * be parsed.\n */\n Ws(e, t) {\n const n = this.$s(e);\n return __PRIVATE_RemoteClientState.Es(n, t);\n }\n /**\n * Parses a mutation batch state in WebStorage. Returns 'null' if the value\n * could not be parsed.\n */\n Gs(e, t) {\n const n = this.Ds.exec(e),\n r = Number(n[1]),\n i = void 0 !== n[2] ? n[2] : null;\n return __PRIVATE_MutationMetadata.Es(new User(i), r, t);\n }\n /**\n * Parses a query target state from WebStorage. Returns 'null' if the value\n * could not be parsed.\n */\n js(e, t) {\n const n = this.Cs.exec(e),\n r = Number(n[1]);\n return __PRIVATE_QueryTargetMetadata.Es(r, t);\n }\n /**\n * Parses an online state from WebStorage. 
Returns 'null' if the value\n * could not be parsed.\n */\n xs(e) {\n return __PRIVATE_SharedOnlineState.Es(e);\n }\n Js(e) {\n return JSON.parse(e);\n }\n async zs(e) {\n if (e.user.uid === this.currentUser.uid) return this.syncEngine.Zs(e.batchId, e.state, e.error);\n __PRIVATE_logDebug(\"SharedClientState\", `Ignoring mutation for non-active user ${e.user.uid}`);\n }\n Hs(e) {\n return this.syncEngine.Xs(e.targetId, e.state, e.error);\n }\n Us(e, t) {\n const n = t ? this.ps.insert(e, t) : this.ps.remove(e),\n r = this.Ns(this.ps),\n i = this.Ns(n),\n s = [],\n o = [];\n return i.forEach(e => {\n r.has(e) || s.push(e);\n }), r.forEach(e => {\n i.has(e) || o.push(e);\n }), this.syncEngine.eo(s, o).then(() => {\n this.ps = n;\n });\n }\n Os(e) {\n // We check whether the client that wrote this online state is still active\n // by comparing its client ID to the list of clients kept active in\n // IndexedDb. If a client does not update their IndexedDb client state\n // within 5 seconds, it is considered inactive and we don't emit an online\n // state event.\n this.ps.get(e.clientId) && this.onlineStateHandler(e.onlineState);\n }\n Ns(e) {\n let t = __PRIVATE_targetIdSet();\n return e.forEach((e, n) => {\n t = t.unionWith(n.activeTargetIds);\n }), t;\n }\n}\nclass __PRIVATE_MemorySharedClientState {\n constructor() {\n this.no = new __PRIVATE_LocalClientState(), this.ro = {}, this.onlineStateHandler = null, this.sequenceNumberHandler = null;\n }\n addPendingMutation(e) {\n // No op.\n }\n updateMutationState(e, t, n) {\n // No op.\n }\n addLocalQueryTarget(e) {\n return this.no.As(e), this.ro[e] || \"not-current\";\n }\n updateQueryState(e, t, n) {\n this.ro[e] = t;\n }\n removeLocalQueryTarget(e) {\n this.no.Rs(e);\n }\n isLocalQueryTarget(e) {\n return this.no.activeTargetIds.has(e);\n }\n clearQueryState(e) {\n delete this.ro[e];\n }\n getAllActiveQueryTargets() {\n return this.no.activeTargetIds;\n }\n isActiveQueryTarget(e) {\n return this.no.activeTargetIds.has(e);\n }\n start() {\n return this.no = new __PRIVATE_LocalClientState(), Promise.resolve();\n }\n handleUserChange(e, t, n) {\n // No op.\n }\n setOnlineState(e) {\n // No op.\n }\n shutdown() {}\n writeSequenceNumber(e) {}\n notifyBundleLoaded(e) {\n // No op.\n }\n}\n\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_NoopConnectivityMonitor {\n io(e) {\n // No-op.\n }\n shutdown() {\n // No-op.\n }\n}\n\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific 
language governing permissions and\n * limitations under the License.\n */\n// References to `window` are guarded by BrowserConnectivityMonitor.isAvailable()\n/* eslint-disable no-restricted-globals */\n/**\n * Browser implementation of ConnectivityMonitor.\n */\nclass __PRIVATE_BrowserConnectivityMonitor {\n constructor() {\n this.so = () => this.oo(), this._o = () => this.ao(), this.uo = [], this.co();\n }\n io(e) {\n this.uo.push(e);\n }\n shutdown() {\n window.removeEventListener(\"online\", this.so), window.removeEventListener(\"offline\", this._o);\n }\n co() {\n window.addEventListener(\"online\", this.so), window.addEventListener(\"offline\", this._o);\n }\n oo() {\n __PRIVATE_logDebug(\"ConnectivityMonitor\", \"Network connectivity changed: AVAILABLE\");\n for (const e of this.uo) e(0 /* NetworkStatus.AVAILABLE */);\n }\n ao() {\n __PRIVATE_logDebug(\"ConnectivityMonitor\", \"Network connectivity changed: UNAVAILABLE\");\n for (const e of this.uo) e(1 /* NetworkStatus.UNAVAILABLE */);\n }\n // TODO(chenbrian): Consider passing in window either into this component or\n // here for testing via FakeWindow.\n /** Checks that all used attributes of window are available. */\n static D() {\n return \"undefined\" != typeof window && void 0 !== window.addEventListener && void 0 !== window.removeEventListener;\n }\n}\n\n/**\n * @license\n * Copyright 2023 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * The value returned from the most recent invocation of\n * `generateUniqueDebugId()`, or null if it has never been invoked.\n */\nlet Ve = null;\n\n/**\n * Generates and returns an initial value for `lastUniqueDebugId`.\n *\n * The returned value is randomly selected from a range of integers that are\n * represented as 8 hexadecimal digits. This means that (within reason) any\n * numbers generated by incrementing the returned number by 1 will also be\n * represented by 8 hexadecimal digits. This leads to all \"IDs\" having the same\n * length when converted to a hexadecimal string, making reading logs containing\n * these IDs easier to follow. And since the return value is randomly selected\n * it will help to differentiate between logs from different executions.\n */\n/**\n * Generates and returns a unique ID as a hexadecimal string.\n *\n * The returned ID is intended to be used in debug logging messages to help\n * correlate log messages that may be spatially separated in the logs, but\n * logically related. For example, a network connection could include the same\n * \"debug ID\" string in all of its log messages to help trace a specific\n * connection over time.\n *\n * @return the 10-character generated ID (e.g. \"0xa1b2c3d4\").\n */\nfunction __PRIVATE_generateUniqueDebugId() {\n return null === Ve ? 
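/* 268435456 is 0x10000000, so the initial value below already has 8 hex digits and, as the comment above notes, simple increments keep that width. */ 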
Ve = function __PRIVATE_generateInitialUniqueDebugId() {\n return 268435456 + Math.round(2147483648 * Math.random());\n }() : Ve++, \"0x\" + Ve.toString(16);\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst me = {\n BatchGetDocuments: \"batchGet\",\n Commit: \"commit\",\n RunQuery: \"runQuery\",\n RunAggregationQuery: \"runAggregationQuery\"\n};\n\n/**\n * Maps RPC names to the corresponding REST endpoint name.\n *\n * We use array notation to avoid mangling.\n */\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Provides a simple helper class that implements the Stream interface to\n * bridge to other implementations that are streams but do not implement the\n * interface. The stream callbacks are invoked with the callOn... methods.\n */\nclass __PRIVATE_StreamBridge {\n constructor(e) {\n this.lo = e.lo, this.ho = e.ho;\n }\n Po(e) {\n this.Io = e;\n }\n To(e) {\n this.Eo = e;\n }\n Ao(e) {\n this.Ro = e;\n }\n onMessage(e) {\n this.Vo = e;\n }\n close() {\n this.ho();\n }\n send(e) {\n this.lo(e);\n }\n mo() {\n this.Io();\n }\n fo() {\n this.Eo();\n }\n po(e) {\n this.Ro(e);\n }\n yo(e) {\n this.Vo(e);\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst fe = \"WebChannelConnection\";\nclass __PRIVATE_WebChannelConnection extends\n/**\n * Base class for all Rest-based connections to the backend (WebChannel and\n * HTTP).\n */\nclass __PRIVATE_RestConnection {\n constructor(e) {\n this.databaseInfo = e, this.databaseId = e.databaseId;\n const t = e.ssl ? \"https\" : \"http\",\n n = encodeURIComponent(this.databaseId.projectId),\n r = encodeURIComponent(this.databaseId.database);\n this.wo = t + \"://\" + e.host, this.So = `projects/${n}/databases/${r}`, this.bo = \"(default)\" === this.databaseId.database ? 
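/* 'bo' becomes the x-goog-request-params header value used below; for illustration, 'project_id=my-project' for the default database or 'project_id=my-project&database_id=my-db' otherwise (example values only). */ 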
`project_id=${n}` : `project_id=${n}&database_id=${r}`;\n }\n get Do() {\n // Both `invokeRPC()` and `invokeStreamingRPC()` use their `path` arguments to determine\n // where to run the query, and expect the `request` to NOT specify the \"path\".\n return !1;\n }\n Co(e, t, n, r, i) {\n const s = __PRIVATE_generateUniqueDebugId(),\n o = this.vo(e, t.toUriEncodedString());\n __PRIVATE_logDebug(\"RestConnection\", `Sending RPC '${e}' ${s}:`, o, n);\n const _ = {\n \"google-cloud-resource-prefix\": this.So,\n \"x-goog-request-params\": this.bo\n };\n return this.Fo(_, r, i), this.Mo(e, o, _, n).then(t => (__PRIVATE_logDebug(\"RestConnection\", `Received RPC '${e}' ${s}: `, t), t), t => {\n throw __PRIVATE_logWarn(\"RestConnection\", `RPC '${e}' ${s} failed with error: `, t, \"url: \", o, \"request:\", n), t;\n });\n }\n xo(e, t, n, r, i, s) {\n // The REST API automatically aggregates all of the streamed results, so we\n // can just use the normal invoke() method.\n return this.Co(e, t, n, r, i);\n }\n /**\n * Modifies the headers for a request, adding any authorization token if\n * present and any additional headers for the request.\n */\n Fo(e, t, n) {\n e[\"X-Goog-Api-Client\"] =\n // SDK_VERSION is updated to different value at runtime depending on the entry point,\n // so we need to get its value when we need it in a function.\n function __PRIVATE_getGoogApiClientValue() {\n return \"gl-js/ fire/\" + b;\n }(),\n // Content-Type: text/plain will avoid preflight requests which might\n // mess with CORS and redirects by proxies. If we add custom headers\n // we will need to change this code to potentially use the $httpOverwrite\n // parameter supported by ESF to avoid triggering preflight requests.\n e[\"Content-Type\"] = \"text/plain\", this.databaseInfo.appId && (e[\"X-Firebase-GMPID\"] = this.databaseInfo.appId), t && t.headers.forEach((t, n) => e[n] = t), n && n.headers.forEach((t, n) => e[n] = t);\n }\n vo(e, t) {\n const n = me[e];\n return `${this.wo}/v1/${t}:${n}`;\n }\n /**\n * Closes and cleans up any resources associated with the connection. This\n * implementation is a no-op because there are no resources associated\n * with the RestConnection that need to be cleaned up.\n */\n terminate() {\n // No-op\n }\n} {\n constructor(e) {\n super(e), this.forceLongPolling = e.forceLongPolling, this.autoDetectLongPolling = e.autoDetectLongPolling, this.useFetchStreams = e.useFetchStreams, this.longPollingOptions = e.longPollingOptions;\n }\n Mo(e, t, n, r) {\n const i = __PRIVATE_generateUniqueDebugId();\n return new Promise((s, o) => {\n const _ = new XhrIo();\n _.setWithCredentials(!0), _.listenOnce(EventType.COMPLETE, () => {\n try {\n switch (_.getLastErrorCode()) {\n case ErrorCode.NO_ERROR:\n const t = _.getResponseJson();\n __PRIVATE_logDebug(fe, `XHR for RPC '${e}' ${i} received:`, JSON.stringify(t)), s(t);\n break;\n case ErrorCode.TIMEOUT:\n __PRIVATE_logDebug(fe, `RPC '${e}' ${i} timed out`), o(new FirestoreError(C.DEADLINE_EXCEEDED, \"Request time out\"));\n break;\n case ErrorCode.HTTP_ERROR:\n const n = _.getStatus();\n if (__PRIVATE_logDebug(fe, `RPC '${e}' ${i} failed with status:`, n, \"response text:\", _.getResponseText()), n > 0) {\n let e = _.getResponseJson();\n Array.isArray(e) && (e = e[0]);\n const t = null == e ? void 0 : e.error;\n if (t && t.status && t.message) {\n const e = function __PRIVATE_mapCodeFromHttpResponseErrorStatus(e) {\n const t = e.toLowerCase().replace(/_/g, \"-\");\n return Object.values(C).indexOf(t) >= 0 ? 
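/* e.g. 'NOT_FOUND' has already been lower-cased to 'not-found' above; anything that does not match a known code falls back to UNKNOWN. */ 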
t : C.UNKNOWN;\n }(t.status);\n o(new FirestoreError(e, t.message));\n } else o(new FirestoreError(C.UNKNOWN, \"Server responded with status \" + _.getStatus()));\n } else\n // If we received an HTTP_ERROR but there's no status code,\n // it's most probably a connection issue\n o(new FirestoreError(C.UNAVAILABLE, \"Connection failed.\"));\n break;\n default:\n fail();\n }\n } finally {\n __PRIVATE_logDebug(fe, `RPC '${e}' ${i} completed.`);\n }\n });\n const a = JSON.stringify(r);\n __PRIVATE_logDebug(fe, `RPC '${e}' ${i} sending request:`, r), _.send(t, \"POST\", a, n, 15);\n });\n }\n Oo(e, t, n) {\n const r = __PRIVATE_generateUniqueDebugId(),\n i = [this.wo, \"/\", \"google.firestore.v1.Firestore\", \"/\", e, \"/channel\"],\n s = createWebChannelTransport(),\n o = getStatEventTarget(),\n _ = {\n // Required for backend stickiness, routing behavior is based on this\n // parameter.\n httpSessionIdParam: \"gsessionid\",\n initMessageHeaders: {},\n messageUrlParams: {\n // This param is used to improve routing and project isolation by the\n // backend and must be included in every request.\n database: `projects/${this.databaseId.projectId}/databases/${this.databaseId.database}`\n },\n sendRawJson: !0,\n supportsCrossDomainXhr: !0,\n internalChannelParams: {\n // Override the default timeout (randomized between 10-20 seconds) since\n // a large write batch on a slow internet connection may take a long\n // time to send to the backend. Rather than have WebChannel impose a\n // tight timeout which could lead to infinite timeouts and retries, we\n // set it very large (5-10 minutes) and rely on the browser's builtin\n // timeouts to kick in if the request isn't working.\n forwardChannelRequestTimeoutMs: 6e5\n },\n forceLongPolling: this.forceLongPolling,\n detectBufferingProxy: this.autoDetectLongPolling\n },\n a = this.longPollingOptions.timeoutSeconds;\n void 0 !== a && (_.longPollingTimeout = Math.round(1e3 * a)), this.useFetchStreams && (_.xmlHttpFactory = new FetchXmlHttpFactory({})), this.Fo(_.initMessageHeaders, t, n),\n // Sending the custom headers we just added to request.initMessageHeaders\n // (Authorization, etc.) will trigger the browser to make a CORS preflight\n // request because the XHR will no longer meet the criteria for a \"simple\"\n // CORS request:\n // https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#Simple_requests\n // Therefore to avoid the CORS preflight request (an extra network\n // roundtrip), we use the encodeInitMessageHeaders option to specify that\n // the headers should instead be encoded in the request's POST payload,\n // which is recognized by the webchannel backend.\n _.encodeInitMessageHeaders = !0;\n const u = i.join(\"\");\n __PRIVATE_logDebug(fe, `Creating RPC '${e}' stream ${r}: ${u}`, _);\n const c = s.createWebChannel(u, _);\n // WebChannel supports sending the first message with the handshake - saving\n // a network round trip. However, it will have to call send in the same\n // JS event loop as open. In order to enforce this, we delay actually\n // opening the WebChannel until send is called. Whether we have called\n // open is tracked with this variable.\n let l = !1,\n h = !1;\n // A flag to determine whether the stream was closed (by us or through an\n // error/close event) to avoid delivering multiple close events or sending\n // on a closed stream\n const P = new __PRIVATE_StreamBridge({\n lo: t => {\n h ? 
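/* 'h' tracks whether the stream was closed and 'l' whether open() has been called, as described where they are declared above. */ 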
__PRIVATE_logDebug(fe, `Not sending because RPC '${e}' stream ${r} is closed:`, t) : (l || (__PRIVATE_logDebug(fe, `Opening RPC '${e}' stream ${r} transport.`), c.open(), l = !0), __PRIVATE_logDebug(fe, `RPC '${e}' stream ${r} sending:`, t), c.send(t));\n },\n ho: () => c.close()\n }),\n __PRIVATE_unguardedEventListen = (e, t, n) => {\n // TODO(dimond): closure typing seems broken because WebChannel does\n // not implement goog.events.Listenable\n e.listen(t, e => {\n try {\n n(e);\n } catch (e) {\n setTimeout(() => {\n throw e;\n }, 0);\n }\n });\n };\n // Closure events are guarded and exceptions are swallowed, so catch any\n // exception and rethrow using a setTimeout so they become visible again.\n // Note that eventually this function could go away if we are confident\n // enough the code is exception free.\n return __PRIVATE_unguardedEventListen(c, WebChannel.EventType.OPEN, () => {\n h || (__PRIVATE_logDebug(fe, `RPC '${e}' stream ${r} transport opened.`), P.mo());\n }), __PRIVATE_unguardedEventListen(c, WebChannel.EventType.CLOSE, () => {\n h || (h = !0, __PRIVATE_logDebug(fe, `RPC '${e}' stream ${r} transport closed`), P.po());\n }), __PRIVATE_unguardedEventListen(c, WebChannel.EventType.ERROR, t => {\n h || (h = !0, __PRIVATE_logWarn(fe, `RPC '${e}' stream ${r} transport errored:`, t), P.po(new FirestoreError(C.UNAVAILABLE, \"The operation could not be completed\")));\n }), __PRIVATE_unguardedEventListen(c, WebChannel.EventType.MESSAGE, t => {\n var n;\n if (!h) {\n const i = t.data[0];\n __PRIVATE_hardAssert(!!i);\n // TODO(b/35143891): There is a bug in One Platform that caused errors\n // (and only errors) to be wrapped in an extra array. To be forward\n // compatible with the bug we need to check either condition. The latter\n // can be removed once the fix has been rolled out.\n // Use any because msgData.error is not typed.\n const s = i,\n o = s.error || (null === (n = s[0]) || void 0 === n ? void 0 : n.error);\n if (o) {\n __PRIVATE_logDebug(fe, `RPC '${e}' stream ${r} received error:`, o);\n // error.status will be a string like 'OK' or 'NOT_FOUND'.\n const t = o.status;\n let n =\n /**\n * Maps an error Code from a GRPC status identifier like 'NOT_FOUND'.\n *\n * @returns The Code equivalent to the given status string or undefined if\n * there is no match.\n */\n function __PRIVATE_mapCodeFromRpcStatus(e) {\n // lookup by string\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const t = ce[e];\n if (void 0 !== t) return __PRIVATE_mapCodeFromRpcCode(t);\n }(t),\n i = o.message;\n void 0 === n && (n = C.INTERNAL, i = \"Unknown error status: \" + t + \" with message \" + o.message),\n // Mark closed so no further events are propagated\n h = !0, P.po(new FirestoreError(n, i)), c.close();\n } else __PRIVATE_logDebug(fe, `RPC '${e}' stream ${r} received:`, i), P.yo(i);\n }\n }), __PRIVATE_unguardedEventListen(o, Event.STAT_EVENT, t => {\n t.stat === Stat.PROXY ? 
__PRIVATE_logDebug(fe, `RPC '${e}' stream ${r} detected buffering proxy`) : t.stat === Stat.NOPROXY && __PRIVATE_logDebug(fe, `RPC '${e}' stream ${r} detected no buffering proxy`);\n }), setTimeout(() => {\n // Technically we could/should wait for the WebChannel opened event,\n // but because we want to send the first message with the WebChannel\n // handshake we pretend the channel opened here (asynchronously), and\n // then delay the actual open until the first message is sent.\n P.fo();\n }, 0), P;\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/** Initializes the WebChannelConnection for the browser. */\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/** The Platform's 'window' implementation or null if not available. */\nfunction __PRIVATE_getWindow() {\n // `window` is not always available, e.g. in ReactNative and WebWorkers.\n // eslint-disable-next-line no-restricted-globals\n return \"undefined\" != typeof window ? window : null;\n}\n\n/** The Platform's 'document' implementation or null if not available. */\nfunction getDocument() {\n // `document` is not always available, e.g. in ReactNative and WebWorkers.\n // eslint-disable-next-line no-restricted-globals\n return \"undefined\" != typeof document ? 
document : null;\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nfunction __PRIVATE_newSerializer(e) {\n return new JsonProtoSerializer(e, /* useProto3Json= */!0);\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A helper for running delayed tasks following an exponential backoff curve\n * between attempts.\n *\n * Each delay is made up of a \"base\" delay which follows the exponential\n * backoff curve, and a +/- 50% \"jitter\" that is calculated and added to the\n * base delay. This prevents clients from accidentally synchronizing their\n * delays causing spikes of load to the backend.\n */\nclass __PRIVATE_ExponentialBackoff {\n constructor(\n /**\n * The AsyncQueue to run backoff operations on.\n */\n e,\n /**\n * The ID to use when scheduling backoff operations on the AsyncQueue.\n */\n t,\n /**\n * The initial delay (used as the base delay on the first retry attempt).\n * Note that jitter will still be applied, so the actual delay could be as\n * little as 0.5*initialDelayMs.\n */\n n = 1e3\n /**\n * The multiplier to use to determine the extended base delay after each\n * attempt.\n */, r = 1.5\n /**\n * The maximum base delay after which no further backoff is performed.\n * Note that jitter will still be applied, so the actual delay could be as\n * much as 1.5*maxDelayMs.\n */, i = 6e4) {\n this.oi = e, this.timerId = t, this.No = n, this.Lo = r, this.Bo = i, this.ko = 0, this.qo = null, /** The last backoff attempt, as epoch milliseconds. */\n this.Qo = Date.now(), this.reset();\n }\n /**\n * Resets the backoff delay.\n *\n * The very next backoffAndWait() will have no delay. If it is called again\n * (i.e. due to an error), initialDelayMs (plus jitter) will be used, and\n * subsequent ones will increase according to the backoffFactor.\n */\n reset() {\n this.ko = 0;\n }\n /**\n * Resets the backoff delay to the maximum delay (e.g. for use after a\n * RESOURCE_EXHAUSTED error).\n */\n Ko() {\n this.ko = this.Bo;\n }\n /**\n * Returns a promise that resolves after currentDelayMs, and increases the\n * delay for any subsequent attempts. 
If there was a pending backoff operation\n * already, it will be canceled.\n */\n $o(e) {\n // Cancel any pending backoff operation.\n this.cancel();\n // First schedule using the current base (which may be 0 and should be\n // honored as such).\n const t = Math.floor(this.ko + this.Uo()),\n n = Math.max(0, Date.now() - this.Qo),\n r = Math.max(0, t - n);\n // Guard against lastAttemptTime being in the future due to a clock change.\n r > 0 && __PRIVATE_logDebug(\"ExponentialBackoff\", `Backing off for ${r} ms (base delay: ${this.ko} ms, delay with jitter: ${t} ms, last attempt: ${n} ms ago)`), this.qo = this.oi.enqueueAfterDelay(this.timerId, r, () => (this.Qo = Date.now(), e())),\n // Apply backoff factor to determine next delay and ensure it is within\n // bounds.\n this.ko *= this.Lo, this.ko < this.No && (this.ko = this.No), this.ko > this.Bo && (this.ko = this.Bo);\n }\n Wo() {\n null !== this.qo && (this.qo.skipDelay(), this.qo = null);\n }\n cancel() {\n null !== this.qo && (this.qo.cancel(), this.qo = null);\n }\n /** Returns a random value in the range [-currentBaseMs/2, currentBaseMs/2] */\n Uo() {\n return (Math.random() - .5) * this.ko;\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A PersistentStream is an abstract base class that represents a streaming RPC\n * to the Firestore backend. It's built on top of the connections own support\n * for streaming RPCs, and adds several critical features for our clients:\n *\n * - Exponential backoff on failure\n * - Authentication via CredentialsProvider\n * - Dispatching all callbacks into the shared worker queue\n * - Closing idle streams after 60 seconds of inactivity\n *\n * Subclasses of PersistentStream implement serialization of models to and\n * from the JSON representation of the protocol buffers for a specific\n * streaming RPC.\n *\n * ## Starting and Stopping\n *\n * Streaming RPCs are stateful and need to be start()ed before messages can\n * be sent and received. 
The PersistentStream will call the onOpen() function\n * of the listener once the stream is ready to accept requests.\n *\n * Should a start() fail, PersistentStream will call the registered onClose()\n * listener with a FirestoreError indicating what went wrong.\n *\n * A PersistentStream can be started and stopped repeatedly.\n *\n * Generic types:\n * SendType: The type of the outgoing message of the underlying\n * connection stream\n * ReceiveType: The type of the incoming message of the underlying\n * connection stream\n * ListenerType: The type of the listener that will be used for callbacks\n */\nclass __PRIVATE_PersistentStream {\n constructor(e, t, n, r, i, s, o, _) {\n this.oi = e, this.Go = n, this.zo = r, this.connection = i, this.authCredentialsProvider = s, this.appCheckCredentialsProvider = o, this.listener = _, this.state = 0 /* PersistentStreamState.Initial */,\n /**\n * A close count that's incremented every time the stream is closed; used by\n * getCloseGuardedDispatcher() to invalidate callbacks that happen after\n * close.\n */\n this.jo = 0, this.Ho = null, this.Jo = null, this.stream = null, this.Yo = new __PRIVATE_ExponentialBackoff(e, t);\n }\n /**\n * Returns true if start() has been called and no error has occurred. True\n * indicates the stream is open or in the process of opening (which\n * encompasses respecting backoff, getting auth tokens, and starting the\n * actual RPC). Use isOpen() to determine if the stream is open and ready for\n * outbound requests.\n */\n Zo() {\n return 1 /* PersistentStreamState.Starting */ === this.state || 5 /* PersistentStreamState.Backoff */ === this.state || this.Xo();\n }\n /**\n * Returns true if the underlying RPC is open (the onOpen() listener has been\n * called) and the stream is ready for outbound requests.\n */\n Xo() {\n return 2 /* PersistentStreamState.Open */ === this.state || 3 /* PersistentStreamState.Healthy */ === this.state;\n }\n /**\n * Starts the RPC. Only allowed if isStarted() returns false. The stream is\n * not immediately ready for use: onOpen() will be invoked when the RPC is\n * ready for outbound requests, at which point isOpen() will return true.\n *\n * When start returns, isStarted() will return true.\n */\n start() {\n 4 /* PersistentStreamState.Error */ !== this.state ? this.auth() : this.e_();\n }\n /**\n * Stops the RPC. This call is idempotent and allowed regardless of the\n * current isStarted() state.\n *\n * When stop returns, isStarted() and isOpen() will both return false.\n */\n async stop() {\n this.Zo() && (await this.close(0 /* PersistentStreamState.Initial */));\n }\n /**\n * After an error the stream will usually back off on the next attempt to\n * start it. If the error warrants an immediate restart of the stream, the\n * sender can use this to indicate that the receiver should not back off.\n *\n * Each error will call the onClose() listener. That function can decide to\n * inhibit backoff if required.\n */\n t_() {\n this.state = 0 /* PersistentStreamState.Initial */, this.Yo.reset();\n }\n /**\n * Marks this stream as idle. If no further actions are performed on the\n * stream for one minute, the stream will automatically close itself and\n * notify the stream's onClose() handler with Status.OK. 
The stream will then\n * be in a !isStarted() state, requiring the caller to start the stream again\n * before further use.\n *\n * Only streams that are in state 'Open' can be marked idle, as all other\n * states imply pending network operations.\n */\n n_() {\n // Starts the idle time if we are in state 'Open' and are not yet already\n // running a timer (in which case the previous idle timeout still applies).\n this.Xo() && null === this.Ho && (this.Ho = this.oi.enqueueAfterDelay(this.Go, 6e4, () => this.r_()));\n }\n /** Sends a message to the underlying stream. */\n i_(e) {\n this.s_(), this.stream.send(e);\n }\n /** Called by the idle timer when the stream should close due to inactivity. */\n async r_() {\n if (this.Xo())\n // When timing out an idle stream there's no reason to force the stream into backoff when\n // it restarts so set the stream state to Initial instead of Error.\n return this.close(0 /* PersistentStreamState.Initial */);\n }\n /** Marks the stream as active again. */\n s_() {\n this.Ho && (this.Ho.cancel(), this.Ho = null);\n }\n /** Cancels the health check delayed operation. */\n o_() {\n this.Jo && (this.Jo.cancel(), this.Jo = null);\n }\n /**\n * Closes the stream and cleans up as necessary:\n *\n * * closes the underlying GRPC stream;\n * * calls the onClose handler with the given 'error';\n * * sets internal stream state to 'finalState';\n * * adjusts the backoff timer based on the error\n *\n * A new stream can be opened by calling start().\n *\n * @param finalState - the intended state of the stream after closing.\n * @param error - the error the connection was closed with.\n */\n async close(e, t) {\n // Cancel any outstanding timers (they're guaranteed not to execute).\n this.s_(), this.o_(), this.Yo.cancel(),\n // Invalidates any stream-related callbacks (e.g. from auth or the\n // underlying stream), guaranteeing they won't execute.\n this.jo++, 4 /* PersistentStreamState.Error */ !== e ?\n // If this is an intentional close ensure we don't delay our next connection attempt.\n this.Yo.reset() : t && t.code === C.RESOURCE_EXHAUSTED ? (\n // Log the error. (Probably either 'quota exceeded' or 'max queue length reached'.)\n __PRIVATE_logError(t.toString()), __PRIVATE_logError(\"Using maximum backoff delay to prevent overloading the backend.\"), this.Yo.Ko()) : t && t.code === C.UNAUTHENTICATED && 3 /* PersistentStreamState.Healthy */ !== this.state && (\n // \"unauthenticated\" error means the token was rejected. This should rarely\n // happen since both Auth and AppCheck ensure a sufficient TTL when we\n // request a token. If a user manually resets their system clock this can\n // fail, however. 
In this case, we should get a Code.UNAUTHENTICATED error\n // before we received the first message and we need to invalidate the token\n // to ensure that we fetch a new token.\n this.authCredentialsProvider.invalidateToken(), this.appCheckCredentialsProvider.invalidateToken()),\n // Clean up the underlying stream because we are no longer interested in events.\n null !== this.stream && (this.__(), this.stream.close(), this.stream = null),\n // This state must be assigned before calling onClose() to allow the callback to\n // inhibit backoff or otherwise manipulate the state in its non-started state.\n this.state = e,\n // Notify the listener that the stream closed.\n await this.listener.Ao(t);\n }\n /**\n * Can be overridden to perform additional cleanup before the stream is closed.\n * Calling super.tearDown() is not required.\n */\n __() {}\n auth() {\n this.state = 1 /* PersistentStreamState.Starting */;\n const e = this.a_(this.jo),\n t = this.jo;\n // TODO(mikelehen): Just use dispatchIfNotClosed, but see TODO below.\n Promise.all([this.authCredentialsProvider.getToken(), this.appCheckCredentialsProvider.getToken()]).then(([e, n]) => {\n // Stream can be stopped while waiting for authentication.\n // TODO(mikelehen): We really should just use dispatchIfNotClosed\n // and let this dispatch onto the queue, but that opened a spec test can\n // of worms that I don't want to deal with in this PR.\n this.jo === t &&\n // Normally we'd have to schedule the callback on the AsyncQueue.\n // However, the following calls are safe to be called outside the\n // AsyncQueue since they don't chain asynchronous calls\n this.u_(e, n);\n }, t => {\n e(() => {\n const e = new FirestoreError(C.UNKNOWN, \"Fetching auth token failed: \" + t.message);\n return this.c_(e);\n });\n });\n }\n u_(e, t) {\n const n = this.a_(this.jo);\n this.stream = this.l_(e, t), this.stream.Po(() => {\n n(() => this.listener.Po());\n }), this.stream.To(() => {\n n(() => (this.state = 2 /* PersistentStreamState.Open */, this.Jo = this.oi.enqueueAfterDelay(this.zo, 1e4, () => (this.Xo() && (this.state = 3 /* PersistentStreamState.Healthy */), Promise.resolve())), this.listener.To()));\n }), this.stream.Ao(e => {\n n(() => this.c_(e));\n }), this.stream.onMessage(e => {\n n(() => this.onMessage(e));\n });\n }\n e_() {\n this.state = 5 /* PersistentStreamState.Backoff */, this.Yo.$o(async () => {\n this.state = 0 /* PersistentStreamState.Initial */, this.start();\n });\n }\n // Visible for tests\n c_(e) {\n // In theory the stream could close cleanly, however, in our current model\n // we never expect this to happen because if we stop a stream ourselves,\n // this callback will never be called. To prevent cases where we retry\n // without a backoff accidentally, we set the stream to error in all cases.\n return __PRIVATE_logDebug(\"PersistentStream\", `close with error: ${e}`), this.stream = null, this.close(4 /* PersistentStreamState.Error */, e);\n }\n /**\n * Returns a \"dispatcher\" function that dispatches operations onto the\n * AsyncQueue but only runs them if closeCount remains unchanged. This allows\n * us to turn auth / stream callbacks into no-ops if the stream is closed /\n * re-opened, etc.\n */\n a_(e) {\n return t => {\n this.oi.enqueueAndForget(() => this.jo === e ? 
t() : (__PRIVATE_logDebug(\"PersistentStream\", \"stream callback skipped by getCloseGuardedDispatcher.\"), Promise.resolve()));\n };\n }\n}\n\n/**\n * A PersistentStream that implements the Listen RPC.\n *\n * Once the Listen stream has called the onOpen() listener, any number of\n * listen() and unlisten() calls can be made to control what changes will be\n * sent from the server for ListenResponses.\n */\nclass __PRIVATE_PersistentListenStream extends __PRIVATE_PersistentStream {\n constructor(e, t, n, r, i, s) {\n super(e, \"listen_stream_connection_backoff\" /* TimerId.ListenStreamConnectionBackoff */, \"listen_stream_idle\" /* TimerId.ListenStreamIdle */, \"health_check_timeout\" /* TimerId.HealthCheckTimeout */, t, n, r, s), this.serializer = i;\n }\n l_(e, t) {\n return this.connection.Oo(\"Listen\", e, t);\n }\n onMessage(e) {\n // A successful response means the stream is healthy\n this.Yo.reset();\n const t = __PRIVATE_fromWatchChange(this.serializer, e),\n n = function __PRIVATE_versionFromListenResponse(e) {\n // We have only reached a consistent snapshot for the entire stream if there\n // is a read_time set and it applies to all targets (i.e. the list of\n // targets is empty). The backend is guaranteed to send such responses.\n if (!(\"targetChange\" in e)) return SnapshotVersion.min();\n const t = e.targetChange;\n return t.targetIds && t.targetIds.length ? SnapshotVersion.min() : t.readTime ? __PRIVATE_fromVersion(t.readTime) : SnapshotVersion.min();\n }(e);\n return this.listener.h_(t, n);\n }\n /**\n * Registers interest in the results of the given target. If the target\n * includes a resumeToken it will be included in the request. Results that\n * affect the target will be streamed back as WatchChange messages that\n * reference the targetId.\n */\n P_(e) {\n const t = {};\n t.database = __PRIVATE_getEncodedDatabaseId(this.serializer), t.addTarget = function __PRIVATE_toTarget(e, t) {\n let n;\n const r = t.target;\n if (n = __PRIVATE_targetIsDocumentTarget(r) ? {\n documents: __PRIVATE_toDocumentsTarget(e, r)\n } : {\n query: __PRIVATE_toQueryTarget(e, r)._t\n }, n.targetId = t.targetId, t.resumeToken.approximateByteSize() > 0) {\n n.resumeToken = __PRIVATE_toBytes(e, t.resumeToken);\n const r = __PRIVATE_toInt32Proto(e, t.expectedCount);\n null !== r && (n.expectedCount = r);\n } else if (t.snapshotVersion.compareTo(SnapshotVersion.min()) > 0) {\n // TODO(wuandy): Consider removing above check because it is most likely true.\n // Right now, many tests depend on this behaviour though (leaving min() out\n // of serialization).\n n.readTime = toTimestamp(e, t.snapshotVersion.toTimestamp());\n const r = __PRIVATE_toInt32Proto(e, t.expectedCount);\n null !== r && (n.expectedCount = r);\n }\n return n;\n }(this.serializer, e);\n const n = __PRIVATE_toListenRequestLabels(this.serializer, e);\n n && (t.labels = n), this.i_(t);\n }\n /**\n * Unregisters interest in the results of the target associated with the\n * given targetId.\n */\n I_(e) {\n const t = {};\n t.database = __PRIVATE_getEncodedDatabaseId(this.serializer), t.removeTarget = e, this.i_(t);\n }\n}\n\n/**\n * A Stream that implements the Write RPC.\n *\n * The Write RPC requires the caller to maintain special streamToken\n * state in between calls, to help the server understand which responses the\n * client has processed by the time the next request is made. 
Every response\n * will contain a streamToken; this value must be passed to the next\n * request.\n *\n * After calling start() on this stream, the next request must be a handshake,\n * containing whatever streamToken is on hand. Once a response to this\n * request is received, all pending mutations may be submitted. When\n * submitting multiple batches of mutations at the same time, it's\n * okay to use the same streamToken for the calls to writeMutations.\n *\n * TODO(b/33271235): Use proto types\n */\nclass __PRIVATE_PersistentWriteStream extends __PRIVATE_PersistentStream {\n constructor(e, t, n, r, i, s) {\n super(e, \"write_stream_connection_backoff\" /* TimerId.WriteStreamConnectionBackoff */, \"write_stream_idle\" /* TimerId.WriteStreamIdle */, \"health_check_timeout\" /* TimerId.HealthCheckTimeout */, t, n, r, s), this.serializer = i, this.T_ = !1;\n }\n /**\n * Tracks whether or not a handshake has been successfully exchanged and\n * the stream is ready to accept mutations.\n */\n get E_() {\n return this.T_;\n }\n // Override of PersistentStream.start\n start() {\n this.T_ = !1, this.lastStreamToken = void 0, super.start();\n }\n __() {\n this.T_ && this.d_([]);\n }\n l_(e, t) {\n return this.connection.Oo(\"Write\", e, t);\n }\n onMessage(e) {\n if (\n // Always capture the last stream token.\n __PRIVATE_hardAssert(!!e.streamToken), this.lastStreamToken = e.streamToken, this.T_) {\n // A successful first write response means the stream is healthy,\n // Note, that we could consider a successful handshake healthy, however,\n // the write itself might be causing an error we want to back off from.\n this.Yo.reset();\n const t = __PRIVATE_fromWriteResults(e.writeResults, e.commitTime),\n n = __PRIVATE_fromVersion(e.commitTime);\n return this.listener.A_(n, t);\n }\n // The first response is always the handshake response\n return __PRIVATE_hardAssert(!e.writeResults || 0 === e.writeResults.length), this.T_ = !0, this.listener.R_();\n }\n /**\n * Sends an initial streamToken to the server, performing the handshake\n * required to make the StreamingWrite RPC work. Subsequent\n * calls should wait until onHandshakeComplete was called.\n */\n V_() {\n // TODO(dimond): Support stream resumption. We intentionally do not set the\n // stream token on the handshake, ignoring any stream token we might have.\n const e = {};\n e.database = __PRIVATE_getEncodedDatabaseId(this.serializer), this.i_(e);\n }\n /** Sends a group of mutations to the Firestore backend to apply. 
*/\n d_(e) {\n const t = {\n streamToken: this.lastStreamToken,\n writes: e.map(e => toMutation(this.serializer, e))\n };\n this.i_(t);\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Datastore and its related methods are a wrapper around the external Google\n * Cloud Datastore grpc API, which provides an interface that is more convenient\n * for the rest of the client SDK architecture to consume.\n */\n/**\n * An implementation of Datastore that exposes additional state for internal\n * consumption.\n */\nclass __PRIVATE_DatastoreImpl extends class Datastore {} {\n constructor(e, t, n, r) {\n super(), this.authCredentials = e, this.appCheckCredentials = t, this.connection = n, this.serializer = r, this.m_ = !1;\n }\n f_() {\n if (this.m_) throw new FirestoreError(C.FAILED_PRECONDITION, \"The client has already been terminated.\");\n }\n /** Invokes the provided RPC with auth and AppCheck tokens. */\n Co(e, t, n, r) {\n return this.f_(), Promise.all([this.authCredentials.getToken(), this.appCheckCredentials.getToken()]).then(([i, s]) => this.connection.Co(e, __PRIVATE_toResourcePath(t, n), r, i, s)).catch(e => {\n throw \"FirebaseError\" === e.name ? (e.code === C.UNAUTHENTICATED && (this.authCredentials.invalidateToken(), this.appCheckCredentials.invalidateToken()), e) : new FirestoreError(C.UNKNOWN, e.toString());\n });\n }\n /** Invokes the provided RPC with streamed results with auth and AppCheck tokens. */\n xo(e, t, n, r, i) {\n return this.f_(), Promise.all([this.authCredentials.getToken(), this.appCheckCredentials.getToken()]).then(([s, o]) => this.connection.xo(e, __PRIVATE_toResourcePath(t, n), r, s, o, i)).catch(e => {\n throw \"FirebaseError\" === e.name ? (e.code === C.UNAUTHENTICATED && (this.authCredentials.invalidateToken(), this.appCheckCredentials.invalidateToken()), e) : new FirestoreError(C.UNKNOWN, e.toString());\n });\n }\n terminate() {\n this.m_ = !0, this.connection.terminate();\n }\n}\n\n// TODO(firestorexp): Make sure there is only one Datastore instance per\n// firestore-exp client.\n/**\n * A component used by the RemoteStore to track the OnlineState (that is,\n * whether or not the client as a whole should be considered to be online or\n * offline), implementing the appropriate heuristics.\n *\n * In particular, when the client is trying to connect to the backend, we\n * allow up to MAX_WATCH_STREAM_FAILURES within ONLINE_STATE_TIMEOUT_MS for\n * a connection to succeed. If we have too many failures or the timeout elapses,\n * then we set the OnlineState to Offline, and the client will behave as if\n * it is offline (get()s will return cached data, etc.).\n */\nclass __PRIVATE_OnlineStateTracker {\n constructor(e, t) {\n this.asyncQueue = e, this.onlineStateHandler = t, /** The current OnlineState. */\n this.state = \"Unknown\" /* OnlineState.Unknown */,\n /**\n * A count of consecutive failures to open the stream. 
If it reaches the\n * maximum defined by MAX_WATCH_STREAM_FAILURES, we'll set the OnlineState to\n * Offline.\n */\n this.g_ = 0,\n /**\n * A timer that elapses after ONLINE_STATE_TIMEOUT_MS, at which point we\n * transition from OnlineState.Unknown to OnlineState.Offline without waiting\n * for the stream to actually fail (MAX_WATCH_STREAM_FAILURES times).\n */\n this.p_ = null,\n /**\n * Whether the client should log a warning message if it fails to connect to\n * the backend (initially true, cleared after a successful stream, or if we've\n * logged the message already).\n */\n this.y_ = !0;\n }\n /**\n * Called by RemoteStore when a watch stream is started (including on each\n * backoff attempt).\n *\n * If this is the first attempt, it sets the OnlineState to Unknown and starts\n * the onlineStateTimer.\n */\n w_() {\n 0 === this.g_ && (this.S_(\"Unknown\" /* OnlineState.Unknown */), this.p_ = this.asyncQueue.enqueueAfterDelay(\"online_state_timeout\" /* TimerId.OnlineStateTimeout */, 1e4, () => (this.p_ = null, this.b_(\"Backend didn't respond within 10 seconds.\"), this.S_(\"Offline\" /* OnlineState.Offline */), Promise.resolve())));\n }\n /**\n * Updates our OnlineState as appropriate after the watch stream reports a\n * failure. The first failure moves us to the 'Unknown' state. We then may\n * allow multiple failures (based on MAX_WATCH_STREAM_FAILURES) before we\n * actually transition to the 'Offline' state.\n */\n D_(e) {\n \"Online\" /* OnlineState.Online */ === this.state ? this.S_(\"Unknown\" /* OnlineState.Unknown */) : (this.g_++, this.g_ >= 1 && (this.C_(), this.b_(`Connection failed 1 times. Most recent error: ${e.toString()}`), this.S_(\"Offline\" /* OnlineState.Offline */)));\n }\n /**\n * Explicitly sets the OnlineState to the specified state.\n *\n * Note that this resets our timers / failure counters, etc. used by our\n * Offline heuristics, so must not be used in place of\n * handleWatchStreamStart() and handleWatchStreamFailure().\n */\n set(e) {\n this.C_(), this.g_ = 0, \"Online\" /* OnlineState.Online */ === e && (\n // We've connected to watch at least once. Don't warn the developer\n // about being offline going forward.\n this.y_ = !1), this.S_(e);\n }\n S_(e) {\n e !== this.state && (this.state = e, this.onlineStateHandler(e));\n }\n b_(e) {\n const t = `Could not reach Cloud Firestore backend. ${e}\\nThis typically indicates that your device does not have a healthy Internet connection at the moment. The client will operate in offline mode until it is able to successfully connect to the backend.`;\n this.y_ ? 
(__PRIVATE_logError(t), this.y_ = !1) : __PRIVATE_logDebug(\"OnlineStateTracker\", t);\n }\n C_() {\n null !== this.p_ && (this.p_.cancel(), this.p_ = null);\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_RemoteStoreImpl {\n constructor(\n /**\n * The local store, used to fill the write pipeline with outbound mutations.\n */\n e, /** The client-side proxy for interacting with the backend. */\n t, n, r, i) {\n this.localStore = e, this.datastore = t, this.asyncQueue = n, this.remoteSyncer = {},\n /**\n * A list of up to MAX_PENDING_WRITES writes that we have fetched from the\n * LocalStore via fillWritePipeline() and have or will send to the write\n * stream.\n *\n * Whenever writePipeline.length > 0 the RemoteStore will attempt to start or\n * restart the write stream. When the stream is established the writes in the\n * pipeline will be sent in order.\n *\n * Writes remain in writePipeline until they are acknowledged by the backend\n * and thus will automatically be re-sent if the stream is interrupted /\n * restarted before they're acknowledged.\n *\n * Write responses from the backend are linked to their originating request\n * purely based on order, and so we can just shift() writes from the front of\n * the writePipeline as we receive responses.\n */\n this.v_ = [],\n /**\n * A mapping of watched targets that the client cares about tracking and the\n * user has explicitly called a 'listen' for this target.\n *\n * These targets may or may not have been sent to or acknowledged by the\n * server. On re-establishing the listen stream, these targets should be sent\n * to the server. The targets removed with unlistens are removed eagerly\n * without waiting for confirmation from the listen stream.\n */\n this.F_ = new Map(),\n /**\n * A set of reasons for why the RemoteStore may be offline. If empty, the\n * RemoteStore may start its network connections.\n */\n this.M_ = new Set(),\n /**\n * Event handlers that get called when the network is disabled or enabled.\n *\n * PORTING NOTE: These functions are used on the Web client to create the\n * underlying streams (to support tree-shakeable streams). 
On Android and iOS,\n * the streams are created during construction of RemoteStore.\n */\n this.x_ = [], this.O_ = i, this.O_.io(e => {\n n.enqueueAndForget(async () => {\n // Porting Note: Unlike iOS, `restartNetwork()` is called even when the\n // network becomes unreachable as we don't have any other way to tear\n // down our streams.\n __PRIVATE_canUseNetwork(this) && (__PRIVATE_logDebug(\"RemoteStore\", \"Restarting streams for network reachability change.\"), await async function __PRIVATE_restartNetwork(e) {\n const t = __PRIVATE_debugCast(e);\n t.M_.add(4 /* OfflineCause.ConnectivityChange */), await __PRIVATE_disableNetworkInternal(t), t.N_.set(\"Unknown\" /* OnlineState.Unknown */), t.M_.delete(4 /* OfflineCause.ConnectivityChange */), await __PRIVATE_enableNetworkInternal(t);\n }(this));\n });\n }), this.N_ = new __PRIVATE_OnlineStateTracker(n, r);\n }\n}\nasync function __PRIVATE_enableNetworkInternal(e) {\n if (__PRIVATE_canUseNetwork(e)) for (const t of e.x_) await t(/* enabled= */!0);\n}\n\n/**\n * Temporarily disables the network. The network can be re-enabled using\n * enableNetwork().\n */\nasync function __PRIVATE_disableNetworkInternal(e) {\n for (const t of e.x_) await t(/* enabled= */!1);\n}\n\n/**\n * Starts new listen for the given target. Uses resume token if provided. It\n * is a no-op if the target of given `TargetData` is already being listened to.\n */\nfunction __PRIVATE_remoteStoreListen(e, t) {\n const n = __PRIVATE_debugCast(e);\n n.F_.has(t.targetId) || (\n // Mark this as something the client is currently listening for.\n n.F_.set(t.targetId, t), __PRIVATE_shouldStartWatchStream(n) ?\n // The listen will be sent in onWatchStreamOpen\n __PRIVATE_startWatchStream(n) : __PRIVATE_ensureWatchStream(n).Xo() && __PRIVATE_sendWatchRequest(n, t));\n}\n\n/**\n * Removes the listen from server. It is a no-op if the given target id is\n * not being listened to.\n */\nfunction __PRIVATE_remoteStoreUnlisten(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = __PRIVATE_ensureWatchStream(n);\n n.F_.delete(t), r.Xo() && __PRIVATE_sendUnwatchRequest(n, t), 0 === n.F_.size && (r.Xo() ? 
r.n_() : __PRIVATE_canUseNetwork(n) &&\n // Revert to OnlineState.Unknown if the watch stream is not open and we\n // have no listeners, since without any listens to send we cannot\n // confirm if the stream is healthy and upgrade to OnlineState.Online.\n n.N_.set(\"Unknown\" /* OnlineState.Unknown */));\n}\n\n/**\n * We need to increment the expected number of pending responses we're due\n * from watch so we wait for the ack to process any messages from this target.\n */\nfunction __PRIVATE_sendWatchRequest(e, t) {\n if (e.L_.xe(t.targetId), t.resumeToken.approximateByteSize() > 0 || t.snapshotVersion.compareTo(SnapshotVersion.min()) > 0) {\n const n = e.remoteSyncer.getRemoteKeysForTarget(t.targetId).size;\n t = t.withExpectedCount(n);\n }\n __PRIVATE_ensureWatchStream(e).P_(t);\n}\n\n/**\n * We need to increment the expected number of pending responses we're due\n * from watch so we wait for the removal on the server before we process any\n * messages from this target.\n */\nfunction __PRIVATE_sendUnwatchRequest(e, t) {\n e.L_.xe(t), __PRIVATE_ensureWatchStream(e).I_(t);\n}\nfunction __PRIVATE_startWatchStream(e) {\n e.L_ = new __PRIVATE_WatchChangeAggregator({\n getRemoteKeysForTarget: t => e.remoteSyncer.getRemoteKeysForTarget(t),\n ot: t => e.F_.get(t) || null,\n tt: () => e.datastore.serializer.databaseId\n }), __PRIVATE_ensureWatchStream(e).start(), e.N_.w_();\n}\n\n/**\n * Returns whether the watch stream should be started because it's necessary\n * and has not yet been started.\n */\nfunction __PRIVATE_shouldStartWatchStream(e) {\n return __PRIVATE_canUseNetwork(e) && !__PRIVATE_ensureWatchStream(e).Zo() && e.F_.size > 0;\n}\nfunction __PRIVATE_canUseNetwork(e) {\n return 0 === __PRIVATE_debugCast(e).M_.size;\n}\nfunction __PRIVATE_cleanUpWatchStreamState(e) {\n e.L_ = void 0;\n}\nasync function __PRIVATE_onWatchStreamConnected(e) {\n // Mark the client as online since we got a \"connected\" notification.\n e.N_.set(\"Online\" /* OnlineState.Online */);\n}\nasync function __PRIVATE_onWatchStreamOpen(e) {\n e.F_.forEach((t, n) => {\n __PRIVATE_sendWatchRequest(e, t);\n });\n}\nasync function __PRIVATE_onWatchStreamClose(e, t) {\n __PRIVATE_cleanUpWatchStreamState(e),\n // If we still need the watch stream, retry the connection.\n __PRIVATE_shouldStartWatchStream(e) ? 
(e.N_.D_(t), __PRIVATE_startWatchStream(e)) :\n // No need to restart watch stream because there are no active targets.\n // The online state is set to unknown because there is no active attempt\n // at establishing a connection\n e.N_.set(\"Unknown\" /* OnlineState.Unknown */);\n}\nasync function __PRIVATE_onWatchStreamChange(e, t, n) {\n if (\n // Mark the client as online since we got a message from the server\n e.N_.set(\"Online\" /* OnlineState.Online */), t instanceof __PRIVATE_WatchTargetChange && 2 /* WatchTargetChangeState.Removed */ === t.state && t.cause)\n // There was an error on a target, don't wait for a consistent snapshot\n // to raise events\n try {\n await /** Handles an error on a target */async function __PRIVATE_handleTargetError(e, t) {\n const n = t.cause;\n for (const r of t.targetIds)\n // A watched target might have been removed already.\n e.F_.has(r) && (await e.remoteSyncer.rejectListen(r, n), e.F_.delete(r), e.L_.removeTarget(r));\n }\n /**\n * Attempts to fill our write pipeline with writes from the LocalStore.\n *\n * Called internally to bootstrap or refill the write pipeline and by\n * SyncEngine whenever there are new mutations to process.\n *\n * Starts the write stream if necessary.\n */(e, t);\n } catch (n) {\n __PRIVATE_logDebug(\"RemoteStore\", \"Failed to remove targets %s: %s \", t.targetIds.join(\",\"), n), await __PRIVATE_disableNetworkUntilRecovery(e, n);\n } else if (t instanceof __PRIVATE_DocumentWatchChange ? e.L_.Ke(t) : t instanceof __PRIVATE_ExistenceFilterChange ? e.L_.He(t) : e.L_.We(t), !n.isEqual(SnapshotVersion.min())) try {\n const t = await __PRIVATE_localStoreGetLastRemoteSnapshotVersion(e.localStore);\n n.compareTo(t) >= 0 && (\n // We have received a target change with a global snapshot if the snapshot\n // version is not equal to SnapshotVersion.min().\n await (\n /**\n * Takes a batch of changes from the Datastore, repackages them as a\n * RemoteEvent, and passes that on to the listener, which is typically the\n * SyncEngine.\n */\n function __PRIVATE_raiseWatchSnapshot(e, t) {\n const n = e.L_.rt(t);\n // Update in-memory resume tokens. LocalStore will update the\n // persistent view of these when applying the completed RemoteEvent.\n return n.targetChanges.forEach((n, r) => {\n if (n.resumeToken.approximateByteSize() > 0) {\n const i = e.F_.get(r);\n // A watched target might have been removed already.\n i && e.F_.set(r, i.withResumeToken(n.resumeToken, t));\n }\n }),\n // Re-establish listens for the targets that have been invalidated by\n // existence filter mismatches.\n n.targetMismatches.forEach((t, n) => {\n const r = e.F_.get(t);\n if (!r)\n // A watched target might have been removed already.\n return;\n // Clear the resume token for the target, since we're in a known mismatch\n // state.\n e.F_.set(t, r.withResumeToken(ByteString.EMPTY_BYTE_STRING, r.snapshotVersion)),\n // Cause a hard reset by unwatching and rewatching immediately, but\n // deliberately don't send a resume token so that we get a full update.\n __PRIVATE_sendUnwatchRequest(e, t);\n // Mark the target we send as being on behalf of an existence filter\n // mismatch, but don't actually retain that in listenTargets. This ensures\n // that we flag the first re-listen this way without impacting future\n // listens of this target (that might happen e.g. 
on reconnect).\n const i = new TargetData(r.target, t, n, r.sequenceNumber);\n __PRIVATE_sendWatchRequest(e, i);\n }), e.remoteSyncer.applyRemoteEvent(n);\n }(e, n)\n ));\n } catch (t) {\n __PRIVATE_logDebug(\"RemoteStore\", \"Failed to raise snapshot:\", t), await __PRIVATE_disableNetworkUntilRecovery(e, t);\n }\n}\n\n/**\n * Recovery logic for IndexedDB errors that takes the network offline until\n * `op` succeeds. Retries are scheduled with backoff using\n * `enqueueRetryable()`. If `op()` is not provided, IndexedDB access is\n * validated via a generic operation.\n *\n * The returned Promise is resolved once the network is disabled and before\n * any retry attempt.\n */\nasync function __PRIVATE_disableNetworkUntilRecovery(e, t, n) {\n if (!__PRIVATE_isIndexedDbTransactionError(t)) throw t;\n e.M_.add(1 /* OfflineCause.IndexedDbFailed */),\n // Disable network and raise offline snapshots\n await __PRIVATE_disableNetworkInternal(e), e.N_.set(\"Offline\" /* OnlineState.Offline */), n || (\n // Use a simple read operation to determine if IndexedDB recovered.\n // Ideally, we would expose a health check directly on SimpleDb, but\n // RemoteStore only has access to persistence through LocalStore.\n n = () => __PRIVATE_localStoreGetLastRemoteSnapshotVersion(e.localStore)),\n // Probe IndexedDB periodically and re-enable network\n e.asyncQueue.enqueueRetryable(async () => {\n __PRIVATE_logDebug(\"RemoteStore\", \"Retrying IndexedDB access\"), await n(), e.M_.delete(1 /* OfflineCause.IndexedDbFailed */), await __PRIVATE_enableNetworkInternal(e);\n });\n}\n\n/**\n * Executes `op`. If `op` fails, takes the network offline until `op`\n * succeeds. Returns after the first attempt.\n */\nfunction __PRIVATE_executeWithRecovery(e, t) {\n return t().catch(n => __PRIVATE_disableNetworkUntilRecovery(e, n, t));\n}\nasync function __PRIVATE_fillWritePipeline(e) {\n const t = __PRIVATE_debugCast(e),\n n = __PRIVATE_ensureWriteStream(t);\n let r = t.v_.length > 0 ? t.v_[t.v_.length - 1].batchId : -1;\n for (; __PRIVATE_canAddToWritePipeline(t);) try {\n const e = await __PRIVATE_localStoreGetNextMutationBatch(t.localStore, r);\n if (null === e) {\n 0 === t.v_.length && n.n_();\n break;\n }\n r = e.batchId, __PRIVATE_addToWritePipeline(t, e);\n } catch (e) {\n await __PRIVATE_disableNetworkUntilRecovery(t, e);\n }\n __PRIVATE_shouldStartWriteStream(t) && __PRIVATE_startWriteStream(t);\n}\n\n/**\n * Returns true if we can add to the write pipeline (i.e. 
the network is\n * enabled and the write pipeline is not full).\n */\nfunction __PRIVATE_canAddToWritePipeline(e) {\n return __PRIVATE_canUseNetwork(e) && e.v_.length < 10;\n}\n\n/**\n * Queues additional writes to be sent to the write stream, sending them\n * immediately if the write stream is established.\n */\nfunction __PRIVATE_addToWritePipeline(e, t) {\n e.v_.push(t);\n const n = __PRIVATE_ensureWriteStream(e);\n n.Xo() && n.E_ && n.d_(t.mutations);\n}\nfunction __PRIVATE_shouldStartWriteStream(e) {\n return __PRIVATE_canUseNetwork(e) && !__PRIVATE_ensureWriteStream(e).Zo() && e.v_.length > 0;\n}\nfunction __PRIVATE_startWriteStream(e) {\n __PRIVATE_ensureWriteStream(e).start();\n}\nasync function __PRIVATE_onWriteStreamOpen(e) {\n __PRIVATE_ensureWriteStream(e).V_();\n}\nasync function __PRIVATE_onWriteHandshakeComplete(e) {\n const t = __PRIVATE_ensureWriteStream(e);\n // Send the write pipeline now that the stream is established.\n for (const n of e.v_) t.d_(n.mutations);\n}\nasync function __PRIVATE_onMutationResult(e, t, n) {\n const r = e.v_.shift(),\n i = MutationBatchResult.from(r, t, n);\n await __PRIVATE_executeWithRecovery(e, () => e.remoteSyncer.applySuccessfulWrite(i)),\n // It's possible that with the completion of this mutation another\n // slot has freed up.\n await __PRIVATE_fillWritePipeline(e);\n}\nasync function __PRIVATE_onWriteStreamClose(e, t) {\n // If the write stream closed after the write handshake completes, a write\n // operation failed and we fail the pending operation.\n t && __PRIVATE_ensureWriteStream(e).E_ && (\n // This error affects the actual write.\n await async function __PRIVATE_handleWriteError(e, t) {\n // Only handle permanent errors here. If it's transient, just let the retry\n // logic kick in.\n if (function __PRIVATE_isPermanentWriteError(e) {\n return __PRIVATE_isPermanentError(e) && e !== C.ABORTED;\n }(t.code)) {\n // This was a permanent error, the request itself was the problem\n // so it's not going to succeed if we resend it.\n const n = e.v_.shift();\n // In this case it's also unlikely that the server itself is melting\n // down -- this was just a bad request so inhibit backoff on the next\n // restart.\n __PRIVATE_ensureWriteStream(e).t_(), await __PRIVATE_executeWithRecovery(e, () => e.remoteSyncer.rejectFailedWrite(n.batchId, t)),\n // It's possible that with the completion of this mutation\n // another slot has freed up.\n await __PRIVATE_fillWritePipeline(e);\n }\n }(e, t)),\n // The write stream might have been started by refilling the write\n // pipeline for failed writes\n __PRIVATE_shouldStartWriteStream(e) && __PRIVATE_startWriteStream(e);\n}\nasync function __PRIVATE_remoteStoreHandleCredentialChange(e, t) {\n const n = __PRIVATE_debugCast(e);\n n.asyncQueue.verifyOperationInProgress(), __PRIVATE_logDebug(\"RemoteStore\", \"RemoteStore received new credentials\");\n const r = __PRIVATE_canUseNetwork(n);\n // Tear down and re-create our network streams. 
This will ensure we get a\n // fresh auth token for the new user and re-fill the write pipeline with\n // new mutations from the LocalStore (since mutations are per-user).\n n.M_.add(3 /* OfflineCause.CredentialChange */), await __PRIVATE_disableNetworkInternal(n), r &&\n // Don't set the network status to Unknown if we are offline.\n n.N_.set(\"Unknown\" /* OnlineState.Unknown */), await n.remoteSyncer.handleCredentialChange(t), n.M_.delete(3 /* OfflineCause.CredentialChange */), await __PRIVATE_enableNetworkInternal(n);\n}\n\n/**\n * Toggles the network state when the client gains or loses its primary lease.\n */\nasync function __PRIVATE_remoteStoreApplyPrimaryState(e, t) {\n const n = __PRIVATE_debugCast(e);\n t ? (n.M_.delete(2 /* OfflineCause.IsSecondary */), await __PRIVATE_enableNetworkInternal(n)) : t || (n.M_.add(2 /* OfflineCause.IsSecondary */), await __PRIVATE_disableNetworkInternal(n), n.N_.set(\"Unknown\" /* OnlineState.Unknown */));\n}\n\n/**\n * If not yet initialized, registers the WatchStream and its network state\n * callback with `remoteStoreImpl`. Returns the existing stream if one is\n * already available.\n *\n * PORTING NOTE: On iOS and Android, the WatchStream gets registered on startup.\n * This is not done on Web to allow it to be tree-shaken.\n */\nfunction __PRIVATE_ensureWatchStream(e) {\n return e.B_ || (\n // Create stream (but note that it is not started yet).\n e.B_ = function __PRIVATE_newPersistentWatchStream(e, t, n) {\n const r = __PRIVATE_debugCast(e);\n return r.f_(), new __PRIVATE_PersistentListenStream(t, r.connection, r.authCredentials, r.appCheckCredentials, r.serializer, n);\n }\n /**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */(e.datastore, e.asyncQueue, {\n Po: __PRIVATE_onWatchStreamConnected.bind(null, e),\n To: __PRIVATE_onWatchStreamOpen.bind(null, e),\n Ao: __PRIVATE_onWatchStreamClose.bind(null, e),\n h_: __PRIVATE_onWatchStreamChange.bind(null, e)\n }), e.x_.push(async t => {\n t ? (e.B_.t_(), __PRIVATE_shouldStartWatchStream(e) ? __PRIVATE_startWatchStream(e) : e.N_.set(\"Unknown\" /* OnlineState.Unknown */)) : (await e.B_.stop(), __PRIVATE_cleanUpWatchStreamState(e));\n })), e.B_;\n}\n\n/**\n * If not yet initialized, registers the WriteStream and its network state\n * callback with `remoteStoreImpl`. 
Returns the existing stream if one is\n * already available.\n *\n * PORTING NOTE: On iOS and Android, the WriteStream gets registered on startup.\n * This is not done on Web to allow it to be tree-shaken.\n */\nfunction __PRIVATE_ensureWriteStream(e) {\n return e.k_ || (\n // Create stream (but note that it is not started yet).\n e.k_ = function __PRIVATE_newPersistentWriteStream(e, t, n) {\n const r = __PRIVATE_debugCast(e);\n return r.f_(), new __PRIVATE_PersistentWriteStream(t, r.connection, r.authCredentials, r.appCheckCredentials, r.serializer, n);\n }(e.datastore, e.asyncQueue, {\n Po: () => Promise.resolve(),\n To: __PRIVATE_onWriteStreamOpen.bind(null, e),\n Ao: __PRIVATE_onWriteStreamClose.bind(null, e),\n R_: __PRIVATE_onWriteHandshakeComplete.bind(null, e),\n A_: __PRIVATE_onMutationResult.bind(null, e)\n }), e.x_.push(async t => {\n t ? (e.k_.t_(),\n // This will start the write stream if necessary.\n await __PRIVATE_fillWritePipeline(e)) : (await e.k_.stop(), e.v_.length > 0 && (__PRIVATE_logDebug(\"RemoteStore\", `Stopping write stream with ${e.v_.length} pending writes`), e.v_ = []));\n })), e.k_;\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Represents an operation scheduled to be run in the future on an AsyncQueue.\n *\n * It is created via DelayedOperation.createAndSchedule().\n *\n * Supports cancellation (via cancel()) and early execution (via skipDelay()).\n *\n * Note: We implement `PromiseLike` instead of `Promise`, as the `Promise` type\n * in newer versions of TypeScript defines `finally`, which is not available in\n * IE.\n */\nclass DelayedOperation {\n constructor(e, t, n, r, i) {\n this.asyncQueue = e, this.timerId = t, this.targetTimeMs = n, this.op = r, this.removalCallback = i, this.deferred = new __PRIVATE_Deferred(), this.then = this.deferred.promise.then.bind(this.deferred.promise),\n // It's normal for the deferred promise to be canceled (due to cancellation)\n // and so we attach a dummy catch callback to avoid\n // 'UnhandledPromiseRejectionWarning' log spam.\n this.deferred.promise.catch(e => {});\n }\n get promise() {\n return this.deferred.promise;\n }\n /**\n * Creates and returns a DelayedOperation that has been scheduled to be\n * executed on the provided asyncQueue after the provided delayMs.\n *\n * @param asyncQueue - The queue to schedule the operation on.\n * @param id - A Timer ID identifying the type of operation this is.\n * @param delayMs - The delay (ms) before the operation should be scheduled.\n * @param op - The operation to run.\n * @param removalCallback - A callback to be called synchronously once the\n * operation is executed or canceled, notifying the AsyncQueue to remove it\n * from its delayedOperations list.\n * PORTING NOTE: This exists to prevent making removeDelayedOperation() and\n * the DelayedOperation class public.\n */\n static createAndSchedule(e, t, n, r, i) {\n const s = Date.now() + n,\n o = new DelayedOperation(e, t, s, r, i);\n 
return o.start(n), o;\n }\n /**\n * Starts the timer. This is called immediately after construction by\n * createAndSchedule().\n */\n start(e) {\n this.timerHandle = setTimeout(() => this.handleDelayElapsed(), e);\n }\n /**\n * Queues the operation to run immediately (if it hasn't already been run or\n * canceled).\n */\n skipDelay() {\n return this.handleDelayElapsed();\n }\n /**\n * Cancels the operation if it hasn't already been executed or canceled. The\n * promise will be rejected.\n *\n * As long as the operation has not yet been run, calling cancel() provides a\n * guarantee that the operation will not be run.\n */\n cancel(e) {\n null !== this.timerHandle && (this.clearTimeout(), this.deferred.reject(new FirestoreError(C.CANCELLED, \"Operation cancelled\" + (e ? \": \" + e : \"\"))));\n }\n handleDelayElapsed() {\n this.asyncQueue.enqueueAndForget(() => null !== this.timerHandle ? (this.clearTimeout(), this.op().then(e => this.deferred.resolve(e))) : Promise.resolve());\n }\n clearTimeout() {\n null !== this.timerHandle && (this.removalCallback(this), clearTimeout(this.timerHandle), this.timerHandle = null);\n }\n}\n\n/**\n * Returns a FirestoreError that can be surfaced to the user if the provided\n * error is an IndexedDbTransactionError. Re-throws the error otherwise.\n */\nfunction __PRIVATE_wrapInUserErrorIfRecoverable(e, t) {\n if (__PRIVATE_logError(\"AsyncQueue\", `${t}: ${e}`), __PRIVATE_isIndexedDbTransactionError(e)) return new FirestoreError(C.UNAVAILABLE, `${t}: ${e}`);\n throw e;\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * DocumentSet is an immutable (copy-on-write) collection that holds documents\n * in order specified by the provided comparator. We always add a document key\n * comparator on top of what is provided to guarantee document equality based on\n * the key.\n */\nclass DocumentSet {\n /** The default ordering is by key if the comparator is omitted */\n constructor(e) {\n // We are adding document key comparator to the end as it's the only\n // guaranteed unique property of a document.\n this.comparator = e ? (t, n) => e(t, n) || DocumentKey.comparator(t.key, n.key) : (e, t) => DocumentKey.comparator(e.key, t.key), this.keyedMap = documentMap(), this.sortedSet = new SortedMap(this.comparator);\n }\n /**\n * Returns an empty copy of the existing DocumentSet, using the same\n * comparator.\n */\n static emptySet(e) {\n return new DocumentSet(e.comparator);\n }\n has(e) {\n return null != this.keyedMap.get(e);\n }\n get(e) {\n return this.keyedMap.get(e);\n }\n first() {\n return this.sortedSet.minKey();\n }\n last() {\n return this.sortedSet.maxKey();\n }\n isEmpty() {\n return this.sortedSet.isEmpty();\n }\n /**\n * Returns the index of the provided key in the document set, or -1 if the\n * document key is not present in the set;\n */\n indexOf(e) {\n const t = this.keyedMap.get(e);\n return t ? 
this.sortedSet.indexOf(t) : -1;\n }\n get size() {\n return this.sortedSet.size;\n }\n /** Iterates documents in order defined by \"comparator\" */\n forEach(e) {\n this.sortedSet.inorderTraversal((t, n) => (e(t), !1));\n }\n /** Inserts or updates a document with the same key */\n add(e) {\n // First remove the element if we have it.\n const t = this.delete(e.key);\n return t.copy(t.keyedMap.insert(e.key, e), t.sortedSet.insert(e, null));\n }\n /** Deletes a document with a given key */\n delete(e) {\n const t = this.get(e);\n return t ? this.copy(this.keyedMap.remove(e), this.sortedSet.remove(t)) : this;\n }\n isEqual(e) {\n if (!(e instanceof DocumentSet)) return !1;\n if (this.size !== e.size) return !1;\n const t = this.sortedSet.getIterator(),\n n = e.sortedSet.getIterator();\n for (; t.hasNext();) {\n const e = t.getNext().key,\n r = n.getNext().key;\n if (!e.isEqual(r)) return !1;\n }\n return !0;\n }\n toString() {\n const e = [];\n return this.forEach(t => {\n e.push(t.toString());\n }), 0 === e.length ? \"DocumentSet ()\" : \"DocumentSet (\\n \" + e.join(\" \\n\") + \"\\n)\";\n }\n copy(e, t) {\n const n = new DocumentSet();\n return n.comparator = this.comparator, n.keyedMap = e, n.sortedSet = t, n;\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * DocumentChangeSet keeps track of a set of changes to docs in a query, merging\n * duplicate events for the same doc.\n */\nclass __PRIVATE_DocumentChangeSet {\n constructor() {\n this.q_ = new SortedMap(DocumentKey.comparator);\n }\n track(e) {\n const t = e.doc.key,\n n = this.q_.get(t);\n n ?\n // Merge the new change with the existing change.\n 0 /* ChangeType.Added */ !== e.type && 3 /* ChangeType.Metadata */ === n.type ? this.q_ = this.q_.insert(t, e) : 3 /* ChangeType.Metadata */ === e.type && 1 /* ChangeType.Removed */ !== n.type ? this.q_ = this.q_.insert(t, {\n type: n.type,\n doc: e.doc\n }) : 2 /* ChangeType.Modified */ === e.type && 2 /* ChangeType.Modified */ === n.type ? this.q_ = this.q_.insert(t, {\n type: 2 /* ChangeType.Modified */,\n doc: e.doc\n }) : 2 /* ChangeType.Modified */ === e.type && 0 /* ChangeType.Added */ === n.type ? this.q_ = this.q_.insert(t, {\n type: 0 /* ChangeType.Added */,\n doc: e.doc\n }) : 1 /* ChangeType.Removed */ === e.type && 0 /* ChangeType.Added */ === n.type ? this.q_ = this.q_.remove(t) : 1 /* ChangeType.Removed */ === e.type && 2 /* ChangeType.Modified */ === n.type ? this.q_ = this.q_.insert(t, {\n type: 1 /* ChangeType.Removed */,\n doc: n.doc\n }) : 0 /* ChangeType.Added */ === e.type && 1 /* ChangeType.Removed */ === n.type ? 
this.q_ = this.q_.insert(t, {\n type: 2 /* ChangeType.Modified */,\n doc: e.doc\n }) :\n // This includes these cases, which don't make sense:\n // Added->Added\n // Removed->Removed\n // Modified->Added\n // Removed->Modified\n // Metadata->Added\n // Removed->Metadata\n fail() : this.q_ = this.q_.insert(t, e);\n }\n Q_() {\n const e = [];\n return this.q_.inorderTraversal((t, n) => {\n e.push(n);\n }), e;\n }\n}\nclass ViewSnapshot {\n constructor(e, t, n, r, i, s, o, _, a) {\n this.query = e, this.docs = t, this.oldDocs = n, this.docChanges = r, this.mutatedKeys = i, this.fromCache = s, this.syncStateChanged = o, this.excludesMetadataChanges = _, this.hasCachedResults = a;\n }\n /** Returns a view snapshot as if all documents in the snapshot were added. */\n static fromInitialDocuments(e, t, n, r, i) {\n const s = [];\n return t.forEach(e => {\n s.push({\n type: 0 /* ChangeType.Added */,\n doc: e\n });\n }), new ViewSnapshot(e, t, DocumentSet.emptySet(t), s, n, r, /* syncStateChanged= */!0, /* excludesMetadataChanges= */!1, i);\n }\n get hasPendingWrites() {\n return !this.mutatedKeys.isEmpty();\n }\n isEqual(e) {\n if (!(this.fromCache === e.fromCache && this.hasCachedResults === e.hasCachedResults && this.syncStateChanged === e.syncStateChanged && this.mutatedKeys.isEqual(e.mutatedKeys) && __PRIVATE_queryEquals(this.query, e.query) && this.docs.isEqual(e.docs) && this.oldDocs.isEqual(e.oldDocs))) return !1;\n const t = this.docChanges,\n n = e.docChanges;\n if (t.length !== n.length) return !1;\n for (let e = 0; e < t.length; e++) if (t[e].type !== n[e].type || !t[e].doc.isEqual(n[e].doc)) return !1;\n return !0;\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Holds the listeners and the last received ViewSnapshot for a query being\n * tracked by EventManager.\n */\nclass __PRIVATE_QueryListenersInfo {\n constructor() {\n this.K_ = void 0, this.U_ = [];\n }\n // Helper methods that checks if the query has listeners that listening to remote store\n W_() {\n return this.U_.some(e => e.G_());\n }\n}\nclass __PRIVATE_EventManagerImpl {\n constructor() {\n this.queries = new ObjectMap(e => __PRIVATE_canonifyQuery(e), __PRIVATE_queryEquals), this.onlineState = \"Unknown\" /* OnlineState.Unknown */, this.z_ = new Set();\n }\n}\nasync function __PRIVATE_eventManagerListen(e, t) {\n const n = __PRIVATE_debugCast(e);\n let r = 3 /* ListenerSetupAction.NoActionRequired */;\n const i = t.query;\n let s = n.queries.get(i);\n s ? !s.W_() && t.G_() && (\n // Query has been listening to local cache, and tries to add a new listener sourced from watch.\n r = 2 /* ListenerSetupAction.RequireWatchConnectionOnly */) : (s = new __PRIVATE_QueryListenersInfo(), r = t.G_() ? 
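/* First listener for this query: if it reads from the remote source (G_()),\n initialize the local listen and require a watch connection; a cache-only\n listener only needs the local listen. */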
0 /* ListenerSetupAction.InitializeLocalListenAndRequireWatchConnection */ : 1 /* ListenerSetupAction.InitializeLocalListenOnly */);\n try {\n switch (r) {\n case 0 /* ListenerSetupAction.InitializeLocalListenAndRequireWatchConnection */:\n s.K_ = await n.onListen(i, /** enableRemoteListen= */!0);\n break;\n case 1 /* ListenerSetupAction.InitializeLocalListenOnly */:\n s.K_ = await n.onListen(i, /** enableRemoteListen= */!1);\n break;\n case 2 /* ListenerSetupAction.RequireWatchConnectionOnly */:\n await n.onFirstRemoteStoreListen(i);\n }\n } catch (e) {\n const n = __PRIVATE_wrapInUserErrorIfRecoverable(e, `Initialization of query '${__PRIVATE_stringifyQuery(t.query)}' failed`);\n return void t.onError(n);\n }\n if (n.queries.set(i, s), s.U_.push(t),\n // Run global snapshot listeners if a consistent snapshot has been emitted.\n t.j_(n.onlineState), s.K_) {\n t.H_(s.K_) && __PRIVATE_raiseSnapshotsInSyncEvent(n);\n }\n}\nasync function __PRIVATE_eventManagerUnlisten(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = t.query;\n let i = 3 /* ListenerRemovalAction.NoActionRequired */;\n const s = n.queries.get(r);\n if (s) {\n const e = s.U_.indexOf(t);\n e >= 0 && (s.U_.splice(e, 1), 0 === s.U_.length ? i = t.G_() ? 0 /* ListenerRemovalAction.TerminateLocalListenAndRequireWatchDisconnection */ : 1 /* ListenerRemovalAction.TerminateLocalListenOnly */ : !s.W_() && t.G_() && (\n // The removed listener is the last one that sourced from watch.\n i = 2 /* ListenerRemovalAction.RequireWatchDisconnectionOnly */));\n }\n switch (i) {\n case 0 /* ListenerRemovalAction.TerminateLocalListenAndRequireWatchDisconnection */:\n return n.queries.delete(r), n.onUnlisten(r, /** disableRemoteListen= */!0);\n case 1 /* ListenerRemovalAction.TerminateLocalListenOnly */:\n return n.queries.delete(r), n.onUnlisten(r, /** disableRemoteListen= */!1);\n case 2 /* ListenerRemovalAction.RequireWatchDisconnectionOnly */:\n return n.onLastRemoteStoreUnlisten(r);\n default:\n return;\n }\n}\nfunction __PRIVATE_eventManagerOnWatchChange(e, t) {\n const n = __PRIVATE_debugCast(e);\n let r = !1;\n for (const e of t) {\n const t = e.query,\n i = n.queries.get(t);\n if (i) {\n for (const t of i.U_) t.H_(e) && (r = !0);\n i.K_ = e;\n }\n }\n r && __PRIVATE_raiseSnapshotsInSyncEvent(n);\n}\nfunction __PRIVATE_eventManagerOnWatchError(e, t, n) {\n const r = __PRIVATE_debugCast(e),\n i = r.queries.get(t);\n if (i) for (const e of i.U_) e.onError(n);\n // Remove all listeners. NOTE: We don't need to call syncEngine.unlisten()\n // after an error.\n r.queries.delete(t);\n}\n\n// Call all global snapshot listeners that have been set.\nfunction __PRIVATE_raiseSnapshotsInSyncEvent(e) {\n e.z_.forEach(e => {\n e.next();\n });\n}\nvar ge, pe;\n\n/** Listen to both cache and server changes */\n(pe = ge || (ge = {})).J_ = \"default\", /** Listen to changes in cache only */\npe.Cache = \"cache\";\n\n/**\n * QueryListener takes a series of internal view snapshots and determines\n * when to raise the event.\n *\n * It uses an Observer to dispatch events.\n */\nclass __PRIVATE_QueryListener {\n constructor(e, t, n) {\n this.query = e, this.Y_ = t,\n /**\n * Initial snapshots (e.g. from cache) may not be propagated to the wrapped\n * observer. 
This flag is set to true once we've actually raised an event.\n */\n this.Z_ = !1, this.X_ = null, this.onlineState = \"Unknown\" /* OnlineState.Unknown */, this.options = n || {};\n }\n /**\n * Applies the new ViewSnapshot to this listener, raising a user-facing event\n * if applicable (depending on what changed, whether the user has opted into\n * metadata-only changes, etc.). Returns true if a user-facing event was\n * indeed raised.\n */\n H_(e) {\n if (!this.options.includeMetadataChanges) {\n // Remove the metadata only changes.\n const t = [];\n for (const n of e.docChanges) 3 /* ChangeType.Metadata */ !== n.type && t.push(n);\n e = new ViewSnapshot(e.query, e.docs, e.oldDocs, t, e.mutatedKeys, e.fromCache, e.syncStateChanged, /* excludesMetadataChanges= */!0, e.hasCachedResults);\n }\n let t = !1;\n return this.Z_ ? this.ea(e) && (this.Y_.next(e), t = !0) : this.ta(e, this.onlineState) && (this.na(e), t = !0), this.X_ = e, t;\n }\n onError(e) {\n this.Y_.error(e);\n }\n /** Returns whether a snapshot was raised. */\n j_(e) {\n this.onlineState = e;\n let t = !1;\n return this.X_ && !this.Z_ && this.ta(this.X_, e) && (this.na(this.X_), t = !0), t;\n }\n ta(e, t) {\n // Always raise the first event when we're synced\n if (!e.fromCache) return !0;\n // Always raise event if listening to cache\n if (!this.G_()) return !0;\n // NOTE: We consider OnlineState.Unknown as online (it should become Offline\n // or Online if we wait long enough).\n const n = \"Offline\" /* OnlineState.Offline */ !== t;\n // Don't raise the event if we're online, aren't synced yet (checked\n // above) and are waiting for a sync.\n return (!this.options.ra || !n) && (!e.docs.isEmpty() || e.hasCachedResults || \"Offline\" /* OnlineState.Offline */ === t);\n // Raise data from cache if we have any documents, have cached results before,\n // or we are offline.\n }\n ea(e) {\n // We don't need to handle includeDocumentMetadataChanges here because\n // the Metadata only changes have already been stripped out if needed.\n // At this point the only changes we will see are the ones we should\n // propagate.\n if (e.docChanges.length > 0) return !0;\n const t = this.X_ && this.X_.hasPendingWrites !== e.hasPendingWrites;\n return !(!e.syncStateChanged && !t) && !0 === this.options.includeMetadataChanges;\n // Generally we should have hit one of the cases above, but it's possible\n // to get here if there were only metadata docChanges and they got\n // stripped out.\n }\n na(e) {\n e = ViewSnapshot.fromInitialDocuments(e.query, e.docs, e.mutatedKeys, e.fromCache, e.hasCachedResults), this.Z_ = !0, this.Y_.next(e);\n }\n G_() {\n return this.options.source !== ge.Cache;\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A complete element in the bundle stream, together with the byte length it\n * occupies in the stream.\n */\nclass __PRIVATE_SizedBundleElement {\n constructor(e,\n // How many bytes this element takes to store in the bundle.\n t) 
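/* `e` is the decoded bundle element itself: one of metadata, namedQuery,\n documentMetadata or document (see sa() and BundleLoader._a below). */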
{\n this.ia = e, this.byteLength = t;\n }\n sa() {\n return \"metadata\" in this.ia;\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Helper to convert objects from bundles to model objects in the SDK.\n */\nclass __PRIVATE_BundleConverterImpl {\n constructor(e) {\n this.serializer = e;\n }\n Ps(e) {\n return fromName(this.serializer, e);\n }\n /**\n * Converts a BundleDocument to a MutableDocument.\n */\n Is(e) {\n return e.metadata.exists ? __PRIVATE_fromDocument(this.serializer, e.document, !1) : MutableDocument.newNoDocument(this.Ps(e.metadata.name), this.Ts(e.metadata.readTime));\n }\n Ts(e) {\n return __PRIVATE_fromVersion(e);\n }\n}\n\n/**\n * A class to process the elements from a bundle, load them into local\n * storage and provide progress update while loading.\n */\nclass __PRIVATE_BundleLoader {\n constructor(e, t, n) {\n this.oa = e, this.localStore = t, this.serializer = n, /** Batched queries to be saved into storage */\n this.queries = [], /** Batched documents to be saved into storage */\n this.documents = [], /** The collection groups affected by this bundle. */\n this.collectionGroups = new Set(), this.progress = __PRIVATE_bundleInitialProgress(e);\n }\n /**\n * Adds an element from the bundle to the loader.\n *\n * Returns a new progress if adding the element leads to a new progress,\n * otherwise returns null.\n */\n _a(e) {\n this.progress.bytesLoaded += e.byteLength;\n let t = this.progress.documentsLoaded;\n if (e.ia.namedQuery) this.queries.push(e.ia.namedQuery);else if (e.ia.documentMetadata) {\n this.documents.push({\n metadata: e.ia.documentMetadata\n }), e.ia.documentMetadata.exists || ++t;\n const n = ResourcePath.fromString(e.ia.documentMetadata.name);\n this.collectionGroups.add(n.get(n.length - 2));\n } else e.ia.document && (this.documents[this.documents.length - 1].document = e.ia.document, ++t);\n return t !== this.progress.documentsLoaded ? 
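/* Only report progress when documentsLoaded actually advanced; a shallow copy\n of the progress object is handed out so the caller gets a stable snapshot. */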
(this.progress.documentsLoaded = t, Object.assign({}, this.progress)) : null;\n }\n aa(e) {\n const t = new Map(),\n n = new __PRIVATE_BundleConverterImpl(this.serializer);\n for (const r of e) if (r.metadata.queries) {\n const e = n.Ps(r.metadata.name);\n for (const n of r.metadata.queries) {\n const r = (t.get(n) || __PRIVATE_documentKeySet()).add(e);\n t.set(n, r);\n }\n }\n return t;\n }\n /**\n * Update the progress to 'Success' and return the updated progress.\n */\n async complete() {\n const e = await __PRIVATE_localStoreApplyBundledDocuments(this.localStore, new __PRIVATE_BundleConverterImpl(this.serializer), this.documents, this.oa.id),\n t = this.aa(this.documents);\n for (const e of this.queries) await __PRIVATE_localStoreSaveNamedQuery(this.localStore, e, t.get(e.name));\n return this.progress.taskState = \"Success\", {\n progress: this.progress,\n ua: this.collectionGroups,\n ca: e\n };\n }\n}\n\n/**\n * Returns a `LoadBundleTaskProgress` representing the initial progress of\n * loading a bundle.\n */\nfunction __PRIVATE_bundleInitialProgress(e) {\n return {\n taskState: \"Running\",\n documentsLoaded: 0,\n bytesLoaded: 0,\n totalDocuments: e.totalDocuments,\n totalBytes: e.totalBytes\n };\n}\n\n/**\n * Returns a `LoadBundleTaskProgress` representing the progress that the loading\n * has succeeded.\n */\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_AddedLimboDocument {\n constructor(e) {\n this.key = e;\n }\n}\nclass __PRIVATE_RemovedLimboDocument {\n constructor(e) {\n this.key = e;\n }\n}\n\n/**\n * View is responsible for computing the final merged truth of what docs are in\n * a query. It gets notified of local and remote changes to docs, and applies\n * the query filters and limits to determine the most correct possible results.\n */\nclass __PRIVATE_View {\n constructor(e, /** Documents included in the remote target */\n t) {\n this.query = e, this.la = t, this.ha = null, this.hasCachedResults = !1,\n /**\n * A flag whether the view is current with the backend. A view is considered\n * current after it has seen the current flag from the backend and did not\n * lose consistency within the watch stream (e.g. because of an existence\n * filter mismatch).\n */\n this.current = !1, /** Documents in the view but not in the remote target */\n this.Pa = __PRIVATE_documentKeySet(), /** Document Keys that have local changes */\n this.mutatedKeys = __PRIVATE_documentKeySet(), this.Ia = __PRIVATE_newQueryComparator(e), this.Ta = new DocumentSet(this.Ia);\n }\n /**\n * The set of remote documents that the server has told us belongs to the target associated with\n * this view.\n */\n get Ea() {\n return this.la;\n }\n /**\n * Iterates over a set of doc changes, applies the query limit, and computes\n * what the new results should be, what the changes were, and whether we may\n * need to go back to the local cache for more results. 
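(When a full limit query loses a document to a delete, or to an update that\n * pushes it past the limit, the returned refill flag (Xi) is set so the caller\n * can re-run the query against the local cache.) 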
Does not make any\n * changes to the view.\n * @param docChanges - The doc changes to apply to this view.\n * @param previousChanges - If this is being called with a refill, then start\n * with this set of docs and changes instead of the current view.\n * @returns a new set of docs, changes, and refill flag.\n */\n da(e, t) {\n const n = t ? t.Aa : new __PRIVATE_DocumentChangeSet(),\n r = t ? t.Ta : this.Ta;\n let i = t ? t.mutatedKeys : this.mutatedKeys,\n s = r,\n o = !1;\n // Track the last doc in a (full) limit. This is necessary, because some\n // update (a delete, or an update moving a doc past the old limit) might\n // mean there is some other document in the local cache that either should\n // come (1) between the old last limit doc and the new last document, in the\n // case of updates, or (2) after the new last document, in the case of\n // deletes. So we keep this doc at the old limit to compare the updates to.\n // Note that this should never get used in a refill (when previousChanges is\n // set), because there will only be adds -- no deletes or updates.\n const _ = \"F\" /* LimitType.First */ === this.query.limitType && r.size === this.query.limit ? r.last() : null,\n a = \"L\" /* LimitType.Last */ === this.query.limitType && r.size === this.query.limit ? r.first() : null;\n // Drop documents out to meet limit/limitToLast requirement.\n if (e.inorderTraversal((e, t) => {\n const u = r.get(e),\n c = __PRIVATE_queryMatches(this.query, t) ? t : null,\n l = !!u && this.mutatedKeys.has(u.key),\n h = !!c && (c.hasLocalMutations ||\n // We only consider committed mutations for documents that were\n // mutated during the lifetime of the view.\n this.mutatedKeys.has(c.key) && c.hasCommittedMutations);\n let P = !1;\n // Calculate change\n if (u && c) {\n u.data.isEqual(c.data) ? l !== h && (n.track({\n type: 3 /* ChangeType.Metadata */,\n doc: c\n }), P = !0) : this.Ra(u, c) || (n.track({\n type: 2 /* ChangeType.Modified */,\n doc: c\n }), P = !0, (_ && this.Ia(c, _) > 0 || a && this.Ia(c, a) < 0) && (\n // This doc moved from inside the limit to outside the limit.\n // That means there may be some other doc in the local cache\n // that should be included instead.\n o = !0));\n } else !u && c ? (n.track({\n type: 0 /* ChangeType.Added */,\n doc: c\n }), P = !0) : u && !c && (n.track({\n type: 1 /* ChangeType.Removed */,\n doc: u\n }), P = !0, (_ || a) && (\n // A doc was removed from a full limit query. We'll need to\n // requery from the local cache to see if we know about some other\n // doc that should be in the results.\n o = !0));\n P && (c ? (s = s.add(c), i = h ? i.add(e) : i.delete(e)) : (s = s.delete(e), i = i.delete(e)));\n }), null !== this.query.limit) for (; s.size > this.query.limit;) {\n const e = \"F\" /* LimitType.First */ === this.query.limitType ? s.last() : s.first();\n s = s.delete(e.key), i = i.delete(e.key), n.track({\n type: 1 /* ChangeType.Removed */,\n doc: e\n });\n }\n return {\n Ta: s,\n Aa: n,\n Xi: o,\n mutatedKeys: i\n };\n }\n Ra(e, t) {\n // We suppress the initial change event for documents that were modified as\n // part of a write acknowledgment (e.g. 
when the value of a server transform\n // is applied) as Watch will send us the same document again.\n // By suppressing the event, we only raise two user visible events (one with\n // `hasPendingWrites` and the final state of the document) instead of three\n // (one with `hasPendingWrites`, the modified document with\n // `hasPendingWrites` and the final state of the document).\n return e.hasLocalMutations && t.hasCommittedMutations && !t.hasLocalMutations;\n }\n /**\n * Updates the view with the given ViewDocumentChanges and optionally updates\n * limbo docs and sync state from the provided target change.\n * @param docChanges - The set of changes to make to the view's docs.\n * @param limboResolutionEnabled - Whether to update limbo documents based on\n * this change.\n * @param targetChange - A target change to apply for computing limbo docs and\n * sync state.\n * @param targetIsPendingReset - Whether the target is pending to reset due to\n * existence filter mismatch. If not explicitly specified, it is treated\n * equivalently to `false`.\n * @returns A new ViewChange with the given docs, changes, and sync state.\n */\n // PORTING NOTE: The iOS/Android clients always compute limbo document changes.\n applyChanges(e, t, n, r) {\n const i = this.Ta;\n this.Ta = e.Ta, this.mutatedKeys = e.mutatedKeys;\n // Sort changes based on type and query comparator\n const s = e.Aa.Q_();\n s.sort((e, t) => function __PRIVATE_compareChangeType(e, t) {\n const order = e => {\n switch (e) {\n case 0 /* ChangeType.Added */:\n return 1;\n case 2 /* ChangeType.Modified */:\n case 3 /* ChangeType.Metadata */:\n // A metadata change is converted to a modified change at the public\n // api layer. Since we sort by document key and then change type,\n // metadata and modified changes must be sorted equivalently.\n return 2;\n case 1 /* ChangeType.Removed */:\n return 0;\n default:\n return fail();\n }\n };\n return order(e) - order(t);\n }\n /**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */(e.type, t.type) || this.Ia(e.doc, t.doc)), this.Va(n), r = null != r && r;\n const o = t && !r ? this.ma() : [],\n _ = 0 === this.Pa.size && this.current && !r ? 1 /* SyncState.Synced */ : 0 /* SyncState.Local */,\n a = _ !== this.ha;\n // We are at synced state if there is no limbo docs are waiting to be resolved, view is current\n // with the backend, and the query is not pending to reset due to existence filter mismatch.\n if (this.ha = _, 0 !== s.length || a) {\n return {\n snapshot: new ViewSnapshot(this.query, e.Ta, i, s, e.mutatedKeys, 0 /* SyncState.Local */ === _, a, /* excludesMetadataChanges= */!1, !!n && n.resumeToken.approximateByteSize() > 0),\n fa: o\n };\n }\n // no changes\n return {\n fa: o\n };\n }\n /**\n * Applies an OnlineState change to the view, potentially generating a\n * ViewChange if the view's syncState changes as a result.\n */\n j_(e) {\n return this.current && \"Offline\" /* OnlineState.Offline */ === e ? 
(\n // If we're offline, set `current` to false and then call applyChanges()\n // to refresh our syncState and generate a ViewChange as appropriate. We\n // are guaranteed to get a new TargetChange that sets `current` back to\n // true once the client is back online.\n this.current = !1, this.applyChanges({\n Ta: this.Ta,\n Aa: new __PRIVATE_DocumentChangeSet(),\n mutatedKeys: this.mutatedKeys,\n Xi: !1\n }, /* limboResolutionEnabled= */!1)) : {\n fa: []\n };\n }\n /**\n * Returns whether the doc for the given key should be in limbo.\n */\n ga(e) {\n // If the remote end says it's part of this query, it's not in limbo.\n return !this.la.has(e) &&\n // The local store doesn't think it's a result, so it shouldn't be in limbo.\n !!this.Ta.has(e) && !this.Ta.get(e).hasLocalMutations;\n }\n /**\n * Updates syncedDocuments, current, and limbo docs based on the given change.\n * Returns the list of changes to which docs are in limbo.\n */\n Va(e) {\n e && (e.addedDocuments.forEach(e => this.la = this.la.add(e)), e.modifiedDocuments.forEach(e => {}), e.removedDocuments.forEach(e => this.la = this.la.delete(e)), this.current = e.current);\n }\n ma() {\n // We can only determine limbo documents when we're in-sync with the server.\n if (!this.current) return [];\n // TODO(klimt): Do this incrementally so that it's not quadratic when\n // updating many documents.\n const e = this.Pa;\n this.Pa = __PRIVATE_documentKeySet(), this.Ta.forEach(e => {\n this.ga(e.key) && (this.Pa = this.Pa.add(e.key));\n });\n // Diff the new limbo docs with the old limbo docs.\n const t = [];\n return e.forEach(e => {\n this.Pa.has(e) || t.push(new __PRIVATE_RemovedLimboDocument(e));\n }), this.Pa.forEach(n => {\n e.has(n) || t.push(new __PRIVATE_AddedLimboDocument(n));\n }), t;\n }\n /**\n * Update the in-memory state of the current view with the state read from\n * persistence.\n *\n * We update the query view whenever a client's primary status changes:\n * - When a client transitions from primary to secondary, it can miss\n * LocalStorage updates and its query views may temporarily not be\n * synchronized with the state on disk.\n * - For secondary to primary transitions, the client needs to update the list\n * of `syncedDocuments` since secondary clients update their query views\n * based purely on synthesized RemoteEvents.\n *\n * @param queryResult.documents - The documents that match the query according\n * to the LocalStore.\n * @param queryResult.remoteKeys - The keys of the documents that match the\n * query according to the backend.\n *\n * @returns The ViewChange that resulted from this synchronization.\n */\n // PORTING NOTE: Multi-tab only.\n pa(e) {\n this.la = e.hs, this.Pa = __PRIVATE_documentKeySet();\n const t = this.da(e.documents);\n return this.applyChanges(t, /* limboResolutionEnabled= */!0);\n }\n /**\n * Returns a view snapshot as if this query was just listened to. 
Contains\n * a document add for every existing document and the `fromCache` and\n * `hasPendingWrites` status of the already established view.\n */\n // PORTING NOTE: Multi-tab only.\n ya() {\n return ViewSnapshot.fromInitialDocuments(this.query, this.Ta, this.mutatedKeys, 0 /* SyncState.Local */ === this.ha, this.hasCachedResults);\n }\n}\n\n/**\n * QueryView contains all of the data that SyncEngine needs to keep track of for\n * a particular query.\n */\nclass __PRIVATE_QueryView {\n constructor(\n /**\n * The query itself.\n */\n e,\n /**\n * The target number created by the client that is used in the watch\n * stream to identify this query.\n */\n t,\n /**\n * The view is responsible for computing the final merged truth of what\n * docs are in the query. It gets notified of local and remote changes,\n * and applies the query filters and limits to determine the most correct\n * possible results.\n */\n n) {\n this.query = e, this.targetId = t, this.view = n;\n }\n}\n\n/** Tracks a limbo resolution. */\nclass LimboResolution {\n constructor(e) {\n this.key = e,\n /**\n * Set to true once we've received a document. This is used in\n * getRemoteKeysForTarget() and ultimately used by WatchChangeAggregator to\n * decide whether it needs to manufacture a delete event for the target once\n * the target is CURRENT.\n */\n this.wa = !1;\n }\n}\n\n/**\n * An implementation of `SyncEngine` coordinating with other parts of SDK.\n *\n * The parts of SyncEngine that act as a callback to RemoteStore need to be\n * registered individually. This is done in `syncEngineWrite()` and\n * `syncEngineListen()` (as well as `applyPrimaryState()`) as these methods\n * serve as entry points to RemoteStore's functionality.\n *\n * Note: some field defined in this class might have public access level, but\n * the class is not exported so they are only accessible from this module.\n * This is useful to implement optional features (like bundles) in free\n * functions, such that they are tree-shakeable.\n */\nclass __PRIVATE_SyncEngineImpl {\n constructor(e, t, n,\n // PORTING NOTE: Manages state synchronization in multi-tab environments.\n r, i, s) {\n this.localStore = e, this.remoteStore = t, this.eventManager = n, this.sharedClientState = r, this.currentUser = i, this.maxConcurrentLimboResolutions = s, this.Sa = {}, this.ba = new ObjectMap(e => __PRIVATE_canonifyQuery(e), __PRIVATE_queryEquals), this.Da = new Map(),\n /**\n * The keys of documents that are in limbo for which we haven't yet started a\n * limbo resolution query. The strings in this set are the result of calling\n * `key.path.canonicalString()` where `key` is a `DocumentKey` object.\n *\n * The `Set` type was chosen because it provides efficient lookup and removal\n * of arbitrary elements and it also maintains insertion order, providing the\n * desired queue-like FIFO semantics.\n */\n this.Ca = new Set(),\n /**\n * Keeps track of the target ID for each document that is in limbo with an\n * active target.\n */\n this.va = new SortedMap(DocumentKey.comparator),\n /**\n * Keeps track of the information about an active limbo resolution for each\n * active target ID that was started for the purpose of limbo resolution.\n */\n this.Fa = new Map(), this.Ma = new __PRIVATE_ReferenceSet(), /** Stores user completion handlers, indexed by User and BatchId. */\n this.xa = {}, /** Stores user callbacks waiting for all pending writes to be acknowledged. 
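(Keyed by the highest unacknowledged batch id at the time of the call; the\n * deferreds are resolved in __PRIVATE_triggerPendingWritesCallbacks once that\n * batch is acknowledged or rejected.) 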
*/\n this.Oa = new Map(), this.Na = __PRIVATE_TargetIdGenerator.Ln(), this.onlineState = \"Unknown\" /* OnlineState.Unknown */,\n // The primary state is set to `true` or `false` immediately after Firestore\n // startup. In the interim, a client should only be considered primary if\n // `isPrimary` is true.\n this.La = void 0;\n }\n get isPrimaryClient() {\n return !0 === this.La;\n }\n}\n\n/**\n * Initiates the new listen, resolves promise when listen enqueued to the\n * server. All the subsequent view snapshots or errors are sent to the\n * subscribed handlers. Returns the initial snapshot.\n */\nasync function __PRIVATE_syncEngineListen(e, t, n = !0) {\n const r = __PRIVATE_ensureWatchCallbacks(e);\n let i;\n const s = r.ba.get(t);\n return s ? (\n // PORTING NOTE: With Multi-Tab Web, it is possible that a query view\n // already exists when EventManager calls us for the first time. This\n // happens when the primary tab is already listening to this query on\n // behalf of another tab and the user of the primary also starts listening\n // to the query. EventManager will not have an assigned target ID in this\n // case and calls `listen` to obtain this ID.\n r.sharedClientState.addLocalQueryTarget(s.targetId), i = s.view.ya()) : i = await __PRIVATE_allocateTargetAndMaybeListen(r, t, n, /** shouldInitializeView= */!0), i;\n}\n\n/** Query has been listening to the cache, and tries to initiate the remote store listen */\nasync function __PRIVATE_triggerRemoteStoreListen(e, t) {\n const n = __PRIVATE_ensureWatchCallbacks(e);\n await __PRIVATE_allocateTargetAndMaybeListen(n, t, /** shouldListenToRemote= */!0, /** shouldInitializeView= */!1);\n}\nasync function __PRIVATE_allocateTargetAndMaybeListen(e, t, n, r) {\n const i = await __PRIVATE_localStoreAllocateTarget(e.localStore, __PRIVATE_queryToTarget(t)),\n s = i.targetId,\n o = n ? e.sharedClientState.addLocalQueryTarget(s) : \"not-current\";\n let _;\n return r && (_ = await __PRIVATE_initializeViewAndComputeSnapshot(e, t, s, \"current\" === o, i.resumeToken)), e.isPrimaryClient && n && __PRIVATE_remoteStoreListen(e.remoteStore, i), _;\n}\n\n/**\n * Registers a view for a previously unknown query and computes its initial\n * snapshot.\n */\nasync function __PRIVATE_initializeViewAndComputeSnapshot(e, t, n, r, i) {\n // PORTING NOTE: On Web only, we inject the code that registers new Limbo\n // targets based on view changes. 
This allows us to only depend on Limbo\n // changes when user code includes queries.\n e.Ba = (t, n, r) => async function __PRIVATE_applyDocChanges(e, t, n, r) {\n let i = t.view.da(n);\n i.Xi && (\n // The query has a limit and some docs were removed, so we need\n // to re-run the query against the local store to make sure we\n // didn't lose any good docs that had been past the limit.\n i = await __PRIVATE_localStoreExecuteQuery(e.localStore, t.query, /* usePreviousResults= */!1).then(({\n documents: e\n }) => t.view.da(e, i)));\n const s = r && r.targetChanges.get(t.targetId),\n o = r && null != r.targetMismatches.get(t.targetId),\n _ = t.view.applyChanges(i, /* limboResolutionEnabled= */e.isPrimaryClient, s, o);\n return __PRIVATE_updateTrackedLimbos(e, t.targetId, _.fa), _.snapshot;\n }(e, t, n, r);\n const s = await __PRIVATE_localStoreExecuteQuery(e.localStore, t, /* usePreviousResults= */!0),\n o = new __PRIVATE_View(t, s.hs),\n _ = o.da(s.documents),\n a = TargetChange.createSynthesizedTargetChangeForCurrentChange(n, r && \"Offline\" /* OnlineState.Offline */ !== e.onlineState, i),\n u = o.applyChanges(_, /* limboResolutionEnabled= */e.isPrimaryClient, a);\n __PRIVATE_updateTrackedLimbos(e, n, u.fa);\n const c = new __PRIVATE_QueryView(t, n, o);\n return e.ba.set(t, c), e.Da.has(n) ? e.Da.get(n).push(t) : e.Da.set(n, [t]), u.snapshot;\n}\n\n/** Stops listening to the query. */\nasync function __PRIVATE_syncEngineUnlisten(e, t, n) {\n const r = __PRIVATE_debugCast(e),\n i = r.ba.get(t),\n s = r.Da.get(i.targetId);\n if (s.length > 1) return r.Da.set(i.targetId, s.filter(e => !__PRIVATE_queryEquals(e, t))), void r.ba.delete(t);\n // No other queries are mapped to the target, clean up the query and the target.\n if (r.isPrimaryClient) {\n // We need to remove the local query target first to allow us to verify\n // whether any other client is still interested in this target.\n r.sharedClientState.removeLocalQueryTarget(i.targetId);\n r.sharedClientState.isActiveQueryTarget(i.targetId) || (await __PRIVATE_localStoreReleaseTarget(r.localStore, i.targetId, /*keepPersistedTargetData=*/!1).then(() => {\n r.sharedClientState.clearQueryState(i.targetId), n && __PRIVATE_remoteStoreUnlisten(r.remoteStore, i.targetId), __PRIVATE_removeAndCleanupTarget(r, i.targetId);\n }).catch(__PRIVATE_ignoreIfPrimaryLeaseLoss));\n } else __PRIVATE_removeAndCleanupTarget(r, i.targetId), await __PRIVATE_localStoreReleaseTarget(r.localStore, i.targetId, /*keepPersistedTargetData=*/!0);\n}\n\n/** Unlistens to the remote store while still listening to the cache. */\nasync function __PRIVATE_triggerRemoteStoreUnlisten(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = n.ba.get(t),\n i = n.Da.get(r.targetId);\n n.isPrimaryClient && 1 === i.length && (\n // PORTING NOTE: Unregister the target ID with local Firestore client as\n // watch target.\n n.sharedClientState.removeLocalQueryTarget(r.targetId), __PRIVATE_remoteStoreUnlisten(n.remoteStore, r.targetId));\n}\n\n/**\n * Initiates the write of local mutation batch which involves adding the\n * writes to the mutation queue, notifying the remote store about new\n * mutations and raising events for any changes this write caused.\n *\n * The promise returned by this call is resolved when the above steps\n * have completed, *not* when the write was acked by the backend. 
The\n * userCallback is resolved once the write was acked/rejected by the\n * backend (or failed locally for any other reason).\n */\nasync function __PRIVATE_syncEngineWrite(e, t, n) {\n const r = __PRIVATE_syncEngineEnsureWriteCallbacks(e);\n try {\n const e = await function __PRIVATE_localStoreWriteLocally(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = Timestamp.now(),\n i = t.reduce((e, t) => e.add(t.key), __PRIVATE_documentKeySet());\n let s, o;\n return n.persistence.runTransaction(\"Locally write mutations\", \"readwrite\", e => {\n // Figure out which keys do not have a remote version in the cache, this\n // is needed to create the right overlay mutation: if no remote version\n // presents, we do not need to create overlays as patch mutations.\n // TODO(Overlay): Is there a better way to determine this? Using the\n // document version does not work because local mutations set them back\n // to 0.\n let _ = __PRIVATE_mutableDocumentMap(),\n a = __PRIVATE_documentKeySet();\n return n.os.getEntries(e, i).next(e => {\n _ = e, _.forEach((e, t) => {\n t.isValidDocument() || (a = a.add(e));\n });\n }).next(() => n.localDocuments.getOverlayedDocuments(e, _)).next(i => {\n s = i;\n // For non-idempotent mutations (such as `FieldValue.increment()`),\n // we record the base state in a separate patch mutation. This is\n // later used to guarantee consistent values and prevents flicker\n // even if the backend sends us an update that already includes our\n // transform.\n const o = [];\n for (const e of t) {\n const t = __PRIVATE_mutationExtractBaseValue(e, s.get(e.key).overlayedDocument);\n null != t &&\n // NOTE: The base state should only be applied if there's some\n // existing document to override, so use a Precondition of\n // exists=true\n o.push(new __PRIVATE_PatchMutation(e.key, t, __PRIVATE_extractFieldMask(t.value.mapValue), Precondition.exists(!0)));\n }\n return n.mutationQueue.addMutationBatch(e, r, o, t);\n }).next(t => {\n o = t;\n const r = t.applyToLocalDocumentSet(s, a);\n return n.documentOverlayCache.saveOverlays(e, t.batchId, r);\n });\n }).then(() => ({\n batchId: o.batchId,\n changes: __PRIVATE_convertOverlayedDocumentMapToDocumentMap(s)\n }));\n }(r.localStore, t);\n r.sharedClientState.addPendingMutation(e.batchId), function __PRIVATE_addMutationCallback(e, t, n) {\n let r = e.xa[e.currentUser.toKey()];\n r || (r = new SortedMap(__PRIVATE_primitiveComparator));\n r = r.insert(t, n), e.xa[e.currentUser.toKey()] = r;\n }\n /**\n * Resolves or rejects the user callback for the given batch and then discards\n * it.\n */(r, e.batchId, n), await __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(r, e.changes), await __PRIVATE_fillWritePipeline(r.remoteStore);\n } catch (e) {\n // If we can't persist the mutation, we reject the user callback and\n // don't send the mutation. 
The user can then retry the write.\n const t = __PRIVATE_wrapInUserErrorIfRecoverable(e, \"Failed to persist write\");\n n.reject(t);\n }\n}\n\n/**\n * Applies one remote event to the sync engine, notifying any views of the\n * changes, and releasing any pending mutation batches that would become\n * visible because of the snapshot version the remote event contains.\n */\nasync function __PRIVATE_syncEngineApplyRemoteEvent(e, t) {\n const n = __PRIVATE_debugCast(e);\n try {\n const e = await __PRIVATE_localStoreApplyRemoteEventToLocalCache(n.localStore, t);\n // Update `receivedDocument` as appropriate for any limbo targets.\n t.targetChanges.forEach((e, t) => {\n const r = n.Fa.get(t);\n r && (\n // Since this is a limbo resolution lookup, it's for a single document\n // and it could be added, modified, or removed, but not a combination.\n __PRIVATE_hardAssert(e.addedDocuments.size + e.modifiedDocuments.size + e.removedDocuments.size <= 1), e.addedDocuments.size > 0 ? r.wa = !0 : e.modifiedDocuments.size > 0 ? __PRIVATE_hardAssert(r.wa) : e.removedDocuments.size > 0 && (__PRIVATE_hardAssert(r.wa), r.wa = !1));\n }), await __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(n, e, t);\n } catch (e) {\n await __PRIVATE_ignoreIfPrimaryLeaseLoss(e);\n }\n}\n\n/**\n * Applies an OnlineState change to the sync engine and notifies any views of\n * the change.\n */\nfunction __PRIVATE_syncEngineApplyOnlineStateChange(e, t, n) {\n const r = __PRIVATE_debugCast(e);\n // If we are the secondary client, we explicitly ignore the remote store's\n // online state (the local client may go offline, even though the primary\n // tab remains online) and only apply the primary tab's online state from\n // SharedClientState.\n if (r.isPrimaryClient && 0 /* OnlineStateSource.RemoteStore */ === n || !r.isPrimaryClient && 1 /* OnlineStateSource.SharedClientState */ === n) {\n const e = [];\n r.ba.forEach((n, r) => {\n const i = r.view.j_(t);\n i.snapshot && e.push(i.snapshot);\n }), function __PRIVATE_eventManagerOnOnlineStateChange(e, t) {\n const n = __PRIVATE_debugCast(e);\n n.onlineState = t;\n let r = !1;\n n.queries.forEach((e, n) => {\n for (const e of n.U_)\n // Run global snapshot listeners if a consistent snapshot has been emitted.\n e.j_(t) && (r = !0);\n }), r && __PRIVATE_raiseSnapshotsInSyncEvent(n);\n }(r.eventManager, t), e.length && r.Sa.h_(e), r.onlineState = t, r.isPrimaryClient && r.sharedClientState.setOnlineState(t);\n }\n}\n\n/**\n * Rejects the listen for the given targetID. This can be triggered by the\n * backend for any active target.\n *\n * @param syncEngine - The sync engine implementation.\n * @param targetId - The targetID corresponds to one previously initiated by the\n * user as part of TargetData passed to listen() on RemoteStore.\n * @param err - A description of the condition that has forced the rejection.\n * Nearly always this will be an indication that the user is no longer\n * authorized to see the data matching the target.\n */\nasync function __PRIVATE_syncEngineRejectListen(e, t, n) {\n const r = __PRIVATE_debugCast(e);\n // PORTING NOTE: Multi-tab only.\n r.sharedClientState.updateQueryState(t, \"rejected\", n);\n const i = r.Fa.get(t),\n s = i && i.key;\n if (s) {\n // TODO(klimt): We really only should do the following on permission\n // denied errors, but we don't have the cause code here.\n // It's a limbo doc. Create a synthetic event saying it was deleted.\n // This is kind of a hack. Ideally, we would have a method in the local\n // store to purge a document. 
However, it would be tricky to keep all of\n // the local store's invariants with another method.\n let e = new SortedMap(DocumentKey.comparator);\n // TODO(b/217189216): This limbo document should ideally have a read time,\n // so that it is picked up by any read-time based scans. The backend,\n // however, does not send a read time for target removals.\n e = e.insert(s, MutableDocument.newNoDocument(s, SnapshotVersion.min()));\n const n = __PRIVATE_documentKeySet().add(s),\n i = new RemoteEvent(SnapshotVersion.min(), /* targetChanges= */new Map(), /* targetMismatches= */new SortedMap(__PRIVATE_primitiveComparator), e, n);\n await __PRIVATE_syncEngineApplyRemoteEvent(r, i),\n // Since this query failed, we won't want to manually unlisten to it.\n // We only remove it from bookkeeping after we successfully applied the\n // RemoteEvent. If `applyRemoteEvent()` throws, we want to re-listen to\n // this query when the RemoteStore restarts the Watch stream, which should\n // re-trigger the target failure.\n r.va = r.va.remove(s), r.Fa.delete(t), __PRIVATE_pumpEnqueuedLimboResolutions(r);\n } else await __PRIVATE_localStoreReleaseTarget(r.localStore, t, /* keepPersistedTargetData */!1).then(() => __PRIVATE_removeAndCleanupTarget(r, t, n)).catch(__PRIVATE_ignoreIfPrimaryLeaseLoss);\n}\nasync function __PRIVATE_syncEngineApplySuccessfulWrite(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = t.batch.batchId;\n try {\n const e = await __PRIVATE_localStoreAcknowledgeBatch(n.localStore, t);\n // The local store may or may not be able to apply the write result and\n // raise events immediately (depending on whether the watcher is caught\n // up), so we raise user callbacks first so that they consistently happen\n // before listen events.\n __PRIVATE_processUserCallback(n, r, /*error=*/null), __PRIVATE_triggerPendingWritesCallbacks(n, r), n.sharedClientState.updateMutationState(r, \"acknowledged\"), await __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(n, e);\n } catch (e) {\n await __PRIVATE_ignoreIfPrimaryLeaseLoss(e);\n }\n}\nasync function __PRIVATE_syncEngineRejectFailedWrite(e, t, n) {\n const r = __PRIVATE_debugCast(e);\n try {\n const e = await function __PRIVATE_localStoreRejectBatch(e, t) {\n const n = __PRIVATE_debugCast(e);\n return n.persistence.runTransaction(\"Reject batch\", \"readwrite-primary\", e => {\n let r;\n return n.mutationQueue.lookupMutationBatch(e, t).next(t => (__PRIVATE_hardAssert(null !== t), r = t.keys(), n.mutationQueue.removeMutationBatch(e, t))).next(() => n.mutationQueue.performConsistencyCheck(e)).next(() => n.documentOverlayCache.removeOverlaysForBatchId(e, r, t)).next(() => n.localDocuments.recalculateAndSaveOverlaysForDocumentKeys(e, r)).next(() => n.localDocuments.getDocuments(e, r));\n });\n }\n /**\n * Returns the largest (latest) batch id in mutation queue that is pending\n * server response.\n *\n * Returns `BATCHID_UNKNOWN` if the queue is empty.\n */(r.localStore, t);\n // The local store may or may not be able to apply the write result and\n // raise events immediately (depending on whether the watcher is caught up),\n // so we raise user callbacks first so that they consistently happen before\n // listen events.\n __PRIVATE_processUserCallback(r, t, n), __PRIVATE_triggerPendingWritesCallbacks(r, t), r.sharedClientState.updateMutationState(t, \"rejected\", n), await __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(r, e);\n } catch (n) {\n await __PRIVATE_ignoreIfPrimaryLeaseLoss(n);\n }\n}\n\n/**\n * Registers a user callback that resolves when all 
pending mutations at the moment of calling\n * are acknowledged .\n */\nasync function __PRIVATE_syncEngineRegisterPendingWritesCallback(e, t) {\n const n = __PRIVATE_debugCast(e);\n __PRIVATE_canUseNetwork(n.remoteStore) || __PRIVATE_logDebug(\"SyncEngine\", \"The network is disabled. The task returned by 'awaitPendingWrites()' will not complete until the network is enabled.\");\n try {\n const e = await function __PRIVATE_localStoreGetHighestUnacknowledgedBatchId(e) {\n const t = __PRIVATE_debugCast(e);\n return t.persistence.runTransaction(\"Get highest unacknowledged batch id\", \"readonly\", e => t.mutationQueue.getHighestUnacknowledgedBatchId(e));\n }(n.localStore);\n if (-1 === e)\n // Trigger the callback right away if there is no pending writes at the moment.\n return void t.resolve();\n const r = n.Oa.get(e) || [];\n r.push(t), n.Oa.set(e, r);\n } catch (e) {\n const n = __PRIVATE_wrapInUserErrorIfRecoverable(e, \"Initialization of waitForPendingWrites() operation failed\");\n t.reject(n);\n }\n}\n\n/**\n * Triggers the callbacks that are waiting for this batch id to get acknowledged by server,\n * if there are any.\n */\nfunction __PRIVATE_triggerPendingWritesCallbacks(e, t) {\n (e.Oa.get(t) || []).forEach(e => {\n e.resolve();\n }), e.Oa.delete(t);\n}\n\n/** Reject all outstanding callbacks waiting for pending writes to complete. */\nfunction __PRIVATE_processUserCallback(e, t, n) {\n const r = __PRIVATE_debugCast(e);\n let i = r.xa[r.currentUser.toKey()];\n // NOTE: Mutations restored from persistence won't have callbacks, so it's\n // okay for there to be no callback for this ID.\n if (i) {\n const e = i.get(t);\n e && (n ? e.reject(n) : e.resolve(), i = i.remove(t)), r.xa[r.currentUser.toKey()] = i;\n }\n}\nfunction __PRIVATE_removeAndCleanupTarget(e, t, n = null) {\n e.sharedClientState.removeLocalQueryTarget(t);\n for (const r of e.Da.get(t)) e.ba.delete(r), n && e.Sa.ka(r, n);\n if (e.Da.delete(t), e.isPrimaryClient) {\n e.Ma.Vr(t).forEach(t => {\n e.Ma.containsKey(t) ||\n // We removed the last reference for this key\n __PRIVATE_removeLimboTarget(e, t);\n });\n }\n}\nfunction __PRIVATE_removeLimboTarget(e, t) {\n e.Ca.delete(t.path.canonicalString());\n // It's possible that the target already got removed because the query failed. In that case,\n // the key won't exist in `limboTargetsByKey`. 
Only do the cleanup if we still have the target.\n const n = e.va.get(t);\n null !== n && (__PRIVATE_remoteStoreUnlisten(e.remoteStore, n), e.va = e.va.remove(t), e.Fa.delete(n), __PRIVATE_pumpEnqueuedLimboResolutions(e));\n}\nfunction __PRIVATE_updateTrackedLimbos(e, t, n) {\n for (const r of n) if (r instanceof __PRIVATE_AddedLimboDocument) e.Ma.addReference(r.key, t), __PRIVATE_trackLimboChange(e, r);else if (r instanceof __PRIVATE_RemovedLimboDocument) {\n __PRIVATE_logDebug(\"SyncEngine\", \"Document no longer in limbo: \" + r.key), e.Ma.removeReference(r.key, t);\n e.Ma.containsKey(r.key) ||\n // We removed the last reference for this key\n __PRIVATE_removeLimboTarget(e, r.key);\n } else fail();\n}\nfunction __PRIVATE_trackLimboChange(e, t) {\n const n = t.key,\n r = n.path.canonicalString();\n e.va.get(n) || e.Ca.has(r) || (__PRIVATE_logDebug(\"SyncEngine\", \"New document in limbo: \" + n), e.Ca.add(r), __PRIVATE_pumpEnqueuedLimboResolutions(e));\n}\n\n/**\n * Starts listens for documents in limbo that are enqueued for resolution,\n * subject to a maximum number of concurrent resolutions.\n *\n * Without bounding the number of concurrent resolutions, the server can fail\n * with \"resource exhausted\" errors which can lead to pathological client\n * behavior as seen in https://github.com/firebase/firebase-js-sdk/issues/2683.\n */\nfunction __PRIVATE_pumpEnqueuedLimboResolutions(e) {\n for (; e.Ca.size > 0 && e.va.size < e.maxConcurrentLimboResolutions;) {\n const t = e.Ca.values().next().value;\n e.Ca.delete(t);\n const n = new DocumentKey(ResourcePath.fromString(t)),\n r = e.Na.next();\n e.Fa.set(r, new LimboResolution(n)), e.va = e.va.insert(n, r), __PRIVATE_remoteStoreListen(e.remoteStore, new TargetData(__PRIVATE_queryToTarget(__PRIVATE_newQueryForPath(n.path)), r, \"TargetPurposeLimboResolution\" /* TargetPurpose.LimboResolution */, __PRIVATE_ListenSequence.oe));\n }\n}\nasync function __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(e, t, n) {\n const r = __PRIVATE_debugCast(e),\n i = [],\n s = [],\n o = [];\n r.ba.isEmpty() || (r.ba.forEach((e, _) => {\n o.push(r.Ba(_, t, n).then(e => {\n // If there are changes, or we are handling a global snapshot, notify\n // secondary clients to update query state.\n if ((e || n) && r.isPrimaryClient) {\n const t = e && !e.fromCache;\n r.sharedClientState.updateQueryState(_.targetId, t ? 
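/* reported as "current" only when the emitted snapshot is not from cache */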
\"current\" : \"not-current\");\n }\n // Update views if there are actual changes.\n if (e) {\n i.push(e);\n const t = __PRIVATE_LocalViewChanges.Ki(_.targetId, e);\n s.push(t);\n }\n }));\n }), await Promise.all(o), r.Sa.h_(i), await async function __PRIVATE_localStoreNotifyLocalViewChanges(e, t) {\n const n = __PRIVATE_debugCast(e);\n try {\n await n.persistence.runTransaction(\"notifyLocalViewChanges\", \"readwrite\", e => PersistencePromise.forEach(t, t => PersistencePromise.forEach(t.qi, r => n.persistence.referenceDelegate.addReference(e, t.targetId, r)).next(() => PersistencePromise.forEach(t.Qi, r => n.persistence.referenceDelegate.removeReference(e, t.targetId, r)))));\n } catch (e) {\n if (!__PRIVATE_isIndexedDbTransactionError(e)) throw e;\n // If `notifyLocalViewChanges` fails, we did not advance the sequence\n // number for the documents that were included in this transaction.\n // This might trigger them to be deleted earlier than they otherwise\n // would have, but it should not invalidate the integrity of the data.\n __PRIVATE_logDebug(\"LocalStore\", \"Failed to update sequence numbers: \" + e);\n }\n for (const e of t) {\n const t = e.targetId;\n if (!e.fromCache) {\n const e = n.ns.get(t),\n r = e.snapshotVersion,\n i = e.withLastLimboFreeSnapshotVersion(r);\n // Advance the last limbo free snapshot version\n n.ns = n.ns.insert(t, i);\n }\n }\n }(r.localStore, s));\n}\nasync function __PRIVATE_syncEngineHandleCredentialChange(e, t) {\n const n = __PRIVATE_debugCast(e);\n if (!n.currentUser.isEqual(t)) {\n __PRIVATE_logDebug(\"SyncEngine\", \"User change. New user:\", t.toKey());\n const e = await __PRIVATE_localStoreHandleUserChange(n.localStore, t);\n n.currentUser = t,\n // Fails tasks waiting for pending writes requested by previous user.\n function __PRIVATE_rejectOutstandingPendingWritesCallbacks(e, t) {\n e.Oa.forEach(e => {\n e.forEach(e => {\n e.reject(new FirestoreError(C.CANCELLED, t));\n });\n }), e.Oa.clear();\n }(n, \"'waitForPendingWrites' promise is rejected due to a user change.\"),\n // TODO(b/114226417): Consider calling this only in the primary tab.\n n.sharedClientState.handleUserChange(t, e.removedBatchIds, e.addedBatchIds), await __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(n, e.us);\n }\n}\nfunction __PRIVATE_syncEngineGetRemoteKeysForTarget(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = n.Fa.get(t);\n if (r && r.wa) return __PRIVATE_documentKeySet().add(r.key);\n {\n let e = __PRIVATE_documentKeySet();\n const r = n.Da.get(t);\n if (!r) return e;\n for (const t of r) {\n const r = n.ba.get(t);\n e = e.unionWith(r.view.Ea);\n }\n return e;\n }\n}\n\n/**\n * Reconcile the list of synced documents in an existing view with those\n * from persistence.\n */\nasync function __PRIVATE_synchronizeViewAndComputeSnapshot(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = await __PRIVATE_localStoreExecuteQuery(n.localStore, t.query, /* usePreviousResults= */!0),\n i = t.view.pa(r);\n return n.isPrimaryClient && __PRIVATE_updateTrackedLimbos(n, t.targetId, i.fa), i;\n}\n\n/**\n * Retrieves newly changed documents from remote document cache and raises\n * snapshots if needed.\n */\n// PORTING NOTE: Multi-Tab only.\nasync function __PRIVATE_syncEngineSynchronizeWithChangedDocuments(e, t) {\n const n = __PRIVATE_debugCast(e);\n return __PRIVATE_localStoreGetNewDocumentChanges(n.localStore, t).then(e => __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(n, e));\n}\n\n/** Applies a mutation state to an existing batch. 
*/\n// PORTING NOTE: Multi-Tab only.\nasync function __PRIVATE_syncEngineApplyBatchState(e, t, n, r) {\n const i = __PRIVATE_debugCast(e),\n s = await function __PRIVATE_localStoreLookupMutationDocuments(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = __PRIVATE_debugCast(n.mutationQueue);\n return n.persistence.runTransaction(\"Lookup mutation documents\", \"readonly\", e => r.vn(e, t).next(t => t ? n.localDocuments.getDocuments(e, t) : PersistencePromise.resolve(null)));\n }\n // PORTING NOTE: Multi-Tab only.\n (i.localStore, t);\n null !== s ? (\"pending\" === n ?\n // If we are the primary client, we need to send this write to the\n // backend. Secondary clients will ignore these writes since their remote\n // connection is disabled.\n await __PRIVATE_fillWritePipeline(i.remoteStore) : \"acknowledged\" === n || \"rejected\" === n ? (\n // NOTE: Both these methods are no-ops for batches that originated from\n // other clients.\n __PRIVATE_processUserCallback(i, t, r || null), __PRIVATE_triggerPendingWritesCallbacks(i, t), function __PRIVATE_localStoreRemoveCachedMutationBatchMetadata(e, t) {\n __PRIVATE_debugCast(__PRIVATE_debugCast(e).mutationQueue).Mn(t);\n }\n // PORTING NOTE: Multi-Tab only.\n (i.localStore, t)) : fail(), await __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(i, s)) :\n // A throttled tab may not have seen the mutation before it was completed\n // and removed from the mutation queue, in which case we won't have cached\n // the affected documents. In this case we can safely ignore the update\n // since that means we didn't apply the mutation locally at all (if we\n // had, we would have cached the affected documents), and so we will just\n // see any resulting document changes via normal remote document updates\n // as applicable.\n __PRIVATE_logDebug(\"SyncEngine\", \"Cannot apply mutation batch with id: \" + t);\n}\n\n/** Applies a query target change from a different tab. */\n// PORTING NOTE: Multi-Tab only.\nasync function __PRIVATE_syncEngineApplyPrimaryState(e, t) {\n const n = __PRIVATE_debugCast(e);\n if (__PRIVATE_ensureWatchCallbacks(n), __PRIVATE_syncEngineEnsureWriteCallbacks(n), !0 === t && !0 !== n.La) {\n // Secondary tabs only maintain Views for their local listeners and the\n // Views internal state may not be 100% populated (in particular\n // secondary tabs don't track syncedDocuments, the set of documents the\n // server considers to be in the target). So when a secondary becomes\n // primary, we need to need to make sure that all views for all targets\n // match the state on disk.\n const e = n.sharedClientState.getAllActiveQueryTargets(),\n t = await __PRIVATE_synchronizeQueryViewsAndRaiseSnapshots(n, e.toArray());\n n.La = !0, await __PRIVATE_remoteStoreApplyPrimaryState(n.remoteStore, !0);\n for (const e of t) __PRIVATE_remoteStoreListen(n.remoteStore, e);\n } else if (!1 === t && !1 !== n.La) {\n const e = [];\n let t = Promise.resolve();\n n.Da.forEach((r, i) => {\n n.sharedClientState.isLocalQueryTarget(i) ? 
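/* Query targets this tab still listens to locally are kept and re-synchronized\n below; all others are cleaned up and released while keeping their persisted\n target data. The remote watch is dropped for all of them since this tab is no\n longer primary. */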
e.push(i) : t = t.then(() => (__PRIVATE_removeAndCleanupTarget(n, i), __PRIVATE_localStoreReleaseTarget(n.localStore, i, /*keepPersistedTargetData=*/!0))), __PRIVATE_remoteStoreUnlisten(n.remoteStore, i);\n }), await t, await __PRIVATE_synchronizeQueryViewsAndRaiseSnapshots(n, e),\n // PORTING NOTE: Multi-Tab only.\n function __PRIVATE_resetLimboDocuments(e) {\n const t = __PRIVATE_debugCast(e);\n t.Fa.forEach((e, n) => {\n __PRIVATE_remoteStoreUnlisten(t.remoteStore, n);\n }), t.Ma.mr(), t.Fa = new Map(), t.va = new SortedMap(DocumentKey.comparator);\n }\n /**\n * Reconcile the query views of the provided query targets with the state from\n * persistence. Raises snapshots for any changes that affect the local\n * client and returns the updated state of all target's query data.\n *\n * @param syncEngine - The sync engine implementation\n * @param targets - the list of targets with views that need to be recomputed\n * @param transitionToPrimary - `true` iff the tab transitions from a secondary\n * tab to a primary tab\n */\n // PORTING NOTE: Multi-Tab only.\n (n), n.La = !1, await __PRIVATE_remoteStoreApplyPrimaryState(n.remoteStore, !1);\n }\n}\nasync function __PRIVATE_synchronizeQueryViewsAndRaiseSnapshots(e, t, n) {\n const r = __PRIVATE_debugCast(e),\n i = [],\n s = [];\n for (const e of t) {\n let t;\n const n = r.Da.get(e);\n if (n && 0 !== n.length) {\n // For queries that have a local View, we fetch their current state\n // from LocalStore (as the resume token and the snapshot version\n // might have changed) and reconcile their views with the persisted\n // state (the list of syncedDocuments may have gotten out of sync).\n t = await __PRIVATE_localStoreAllocateTarget(r.localStore, __PRIVATE_queryToTarget(n[0]));\n for (const e of n) {\n const t = r.ba.get(e),\n n = await __PRIVATE_synchronizeViewAndComputeSnapshot(r, t);\n n.snapshot && s.push(n.snapshot);\n }\n } else {\n // For queries that never executed on this client, we need to\n // allocate the target in LocalStore and initialize a new View.\n const n = await __PRIVATE_localStoreGetCachedTarget(r.localStore, e);\n t = await __PRIVATE_localStoreAllocateTarget(r.localStore, n), await __PRIVATE_initializeViewAndComputeSnapshot(r, __PRIVATE_synthesizeTargetToQuery(n), e, /*current=*/!1, t.resumeToken);\n }\n i.push(t);\n }\n return r.Sa.h_(s), i;\n}\n\n/**\n * Creates a `Query` object from the specified `Target`. There is no way to\n * obtain the original `Query`, so we synthesize a `Query` from the `Target`\n * object.\n *\n * The synthesized result might be different from the original `Query`, but\n * since the synthesized `Query` should return the same results as the\n * original one (only the presentation of results might differ), the potential\n * difference will not cause issues.\n */\n// PORTING NOTE: Multi-Tab only.\nfunction __PRIVATE_synthesizeTargetToQuery(e) {\n return __PRIVATE_newQuery(e.path, e.collectionGroup, e.orderBy, e.filters, e.limit, \"F\" /* LimitType.First */, e.startAt, e.endAt);\n}\n\n/** Returns the IDs of the clients that are currently active. */\n// PORTING NOTE: Multi-Tab only.\nfunction __PRIVATE_syncEngineGetActiveClients(e) {\n return function __PRIVATE_localStoreGetActiveClients(e) {\n return __PRIVATE_debugCast(__PRIVATE_debugCast(e).persistence).Bi();\n }(__PRIVATE_debugCast(e).localStore);\n}\n\n/** Applies a query target change from a different tab. 
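(The incoming state is one of 'current', 'not-current' or 'rejected'; the\n * notification is ignored if this tab still holds the primary lease.) 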
*/\n// PORTING NOTE: Multi-Tab only.\nasync function __PRIVATE_syncEngineApplyTargetState(e, t, n, r) {\n const i = __PRIVATE_debugCast(e);\n if (i.La)\n // If we receive a target state notification via WebStorage, we are\n // either already secondary or another tab has taken the primary lease.\n return void __PRIVATE_logDebug(\"SyncEngine\", \"Ignoring unexpected query state notification.\");\n const s = i.Da.get(t);\n if (s && s.length > 0) switch (n) {\n case \"current\":\n case \"not-current\":\n {\n const e = await __PRIVATE_localStoreGetNewDocumentChanges(i.localStore, __PRIVATE_queryCollectionGroup(s[0])),\n r = RemoteEvent.createSynthesizedRemoteEventForCurrentChange(t, \"current\" === n, ByteString.EMPTY_BYTE_STRING);\n await __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(i, e, r);\n break;\n }\n case \"rejected\":\n await __PRIVATE_localStoreReleaseTarget(i.localStore, t, /* keepPersistedTargetData */!0), __PRIVATE_removeAndCleanupTarget(i, t, r);\n break;\n default:\n fail();\n }\n}\n\n/** Adds or removes Watch targets for queries from different tabs. */\nasync function __PRIVATE_syncEngineApplyActiveTargetsChange(e, t, n) {\n const r = __PRIVATE_ensureWatchCallbacks(e);\n if (r.La) {\n for (const e of t) {\n if (r.Da.has(e) && r.sharedClientState.isActiveQueryTarget(e)) {\n __PRIVATE_logDebug(\"SyncEngine\", \"Adding an already active target \" + e);\n continue;\n }\n const t = await __PRIVATE_localStoreGetCachedTarget(r.localStore, e),\n n = await __PRIVATE_localStoreAllocateTarget(r.localStore, t);\n await __PRIVATE_initializeViewAndComputeSnapshot(r, __PRIVATE_synthesizeTargetToQuery(t), n.targetId, /*current=*/!1, n.resumeToken), __PRIVATE_remoteStoreListen(r.remoteStore, n);\n }\n for (const e of n)\n // Check that the target is still active since the target might have been\n // removed if it has been rejected by the backend.\n r.Da.has(e) && (\n // Release queries that are still active.\n await __PRIVATE_localStoreReleaseTarget(r.localStore, e, /* keepPersistedTargetData */!1).then(() => {\n __PRIVATE_remoteStoreUnlisten(r.remoteStore, e), __PRIVATE_removeAndCleanupTarget(r, e);\n }).catch(__PRIVATE_ignoreIfPrimaryLeaseLoss));\n }\n}\nfunction __PRIVATE_ensureWatchCallbacks(e) {\n const t = __PRIVATE_debugCast(e);\n return t.remoteStore.remoteSyncer.applyRemoteEvent = __PRIVATE_syncEngineApplyRemoteEvent.bind(null, t), t.remoteStore.remoteSyncer.getRemoteKeysForTarget = __PRIVATE_syncEngineGetRemoteKeysForTarget.bind(null, t), t.remoteStore.remoteSyncer.rejectListen = __PRIVATE_syncEngineRejectListen.bind(null, t), t.Sa.h_ = __PRIVATE_eventManagerOnWatchChange.bind(null, t.eventManager), t.Sa.ka = __PRIVATE_eventManagerOnWatchError.bind(null, t.eventManager), t;\n}\nfunction __PRIVATE_syncEngineEnsureWriteCallbacks(e) {\n const t = __PRIVATE_debugCast(e);\n return t.remoteStore.remoteSyncer.applySuccessfulWrite = __PRIVATE_syncEngineApplySuccessfulWrite.bind(null, t), t.remoteStore.remoteSyncer.rejectFailedWrite = __PRIVATE_syncEngineRejectFailedWrite.bind(null, t), t;\n}\n\n/**\n * Loads a Firestore bundle into the SDK. 
The returned promise resolves when\n * the bundle finished loading.\n *\n * @param syncEngine - SyncEngine to use.\n * @param bundleReader - Bundle to load into the SDK.\n * @param task - LoadBundleTask used to update the loading progress to public API.\n */\nfunction __PRIVATE_syncEngineLoadBundle(e, t, n) {\n const r = __PRIVATE_debugCast(e);\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n (/** Loads a bundle and returns the list of affected collection groups. */\n async function __PRIVATE_loadBundleImpl(e, t, n) {\n try {\n const r = await t.getMetadata();\n if (await function __PRIVATE_localStoreHasNewerBundle(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = __PRIVATE_fromVersion(t.createTime);\n return n.persistence.runTransaction(\"hasNewerBundle\", \"readonly\", e => n.$r.getBundleMetadata(e, t.id)).then(e => !!e && e.createTime.compareTo(r) >= 0);\n }\n /**\n * Saves the given `BundleMetadata` to local persistence.\n */(e.localStore, r)) return await t.close(), n._completeWith(function __PRIVATE_bundleSuccessProgress(e) {\n return {\n taskState: \"Success\",\n documentsLoaded: e.totalDocuments,\n bytesLoaded: e.totalBytes,\n totalDocuments: e.totalDocuments,\n totalBytes: e.totalBytes\n };\n }(r)), Promise.resolve(new Set());\n n._updateProgress(__PRIVATE_bundleInitialProgress(r));\n const i = new __PRIVATE_BundleLoader(r, e.localStore, t.serializer);\n let s = await t.qa();\n for (; s;) {\n const e = await i._a(s);\n e && n._updateProgress(e), s = await t.qa();\n }\n const o = await i.complete();\n return await __PRIVATE_syncEngineEmitNewSnapsAndNotifyLocalStore(e, o.ca, /* remoteEvent */void 0),\n // Save metadata, so loading the same bundle will skip.\n await function __PRIVATE_localStoreSaveBundle(e, t) {\n const n = __PRIVATE_debugCast(e);\n return n.persistence.runTransaction(\"Save bundle\", \"readwrite\", e => n.$r.saveBundleMetadata(e, t));\n }\n /**\n * Returns a promise of a `NamedQuery` associated with given query name. 
Promise\n * resolves to undefined if no persisted data can be found.\n */(e.localStore, r), n._completeWith(o.progress), Promise.resolve(o.ua);\n } catch (e) {\n return __PRIVATE_logWarn(\"SyncEngine\", `Loading bundle failed with ${e}`), n._failWith(e), Promise.resolve(new Set());\n }\n }\n /**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n /**\n * Provides all components needed for Firestore with in-memory persistence.\n * Uses EagerGC garbage collection.\n */)(r, t, n).then(e => {\n r.sharedClientState.notifyBundleLoaded(e);\n });\n}\nclass MemoryOfflineComponentProvider {\n constructor() {\n this.synchronizeTabs = !1;\n }\n async initialize(e) {\n this.serializer = __PRIVATE_newSerializer(e.databaseInfo.databaseId), this.sharedClientState = this.createSharedClientState(e), this.persistence = this.createPersistence(e), await this.persistence.start(), this.localStore = this.createLocalStore(e), this.gcScheduler = this.createGarbageCollectionScheduler(e, this.localStore), this.indexBackfillerScheduler = this.createIndexBackfillerScheduler(e, this.localStore);\n }\n createGarbageCollectionScheduler(e, t) {\n return null;\n }\n createIndexBackfillerScheduler(e, t) {\n return null;\n }\n createLocalStore(e) {\n return __PRIVATE_newLocalStore(this.persistence, new __PRIVATE_QueryEngine(), e.initialUser, this.serializer);\n }\n createPersistence(e) {\n return new __PRIVATE_MemoryPersistence(__PRIVATE_MemoryEagerDelegate.Hr, this.serializer);\n }\n createSharedClientState(e) {\n return new __PRIVATE_MemorySharedClientState();\n }\n async terminate() {\n var e, t;\n null === (e = this.gcScheduler) || void 0 === e || e.stop(), null === (t = this.indexBackfillerScheduler) || void 0 === t || t.stop(), this.sharedClientState.shutdown(), await this.persistence.shutdown();\n }\n}\nclass __PRIVATE_LruGcMemoryOfflineComponentProvider extends MemoryOfflineComponentProvider {\n constructor(e) {\n super(), this.cacheSizeBytes = e;\n }\n createGarbageCollectionScheduler(e, t) {\n __PRIVATE_hardAssert(this.persistence.referenceDelegate instanceof __PRIVATE_MemoryLruDelegate);\n const n = this.persistence.referenceDelegate.garbageCollector;\n return new __PRIVATE_LruScheduler(n, e.asyncQueue, t);\n }\n createPersistence(e) {\n const t = void 0 !== this.cacheSizeBytes ? 
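/*
 * Illustrative usage sketch (not part of the bundle): the bundle-loading path
 * above (__PRIVATE_syncEngineLoadBundle / __PRIVATE_loadBundleImpl) is what
 * backs the public bundle API. This assumes the standard modular exports
 * (`loadBundle`, `namedQuery`, `getDocsFromCache`) shipped alongside this SDK;
 * the bundle source and the query name "latest-stories" are placeholders.
 */
import { getFirestore, loadBundle, namedQuery, getDocsFromCache } from "firebase/firestore";

async function loadNewsBundle(bundleData /* string | ArrayBuffer | ReadableStream */) {
  const db = getFirestore();
  // loadBundle returns a LoadBundleTask; it reports progress and resolves once
  // every bundled document has been applied to the local cache.
  const task = loadBundle(db, bundleData);
  task.onProgress(p => console.log(`${p.documentsLoaded}/${p.totalDocuments} documents loaded`));
  await task;
  // Named queries saved in the bundle can then be resolved and served straight
  // from the cache that the bundle populated.
  const q = await namedQuery(db, "latest-stories"); // placeholder query name
  if (q) {
    const snap = await getDocsFromCache(q);
    console.log(`bundle query returned ${snap.size} docs`);
  }
}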
LruParams.withCacheSize(this.cacheSizeBytes) : LruParams.DEFAULT;\n return new __PRIVATE_MemoryPersistence(e => __PRIVATE_MemoryLruDelegate.Hr(e, t), this.serializer);\n }\n}\n\n/**\n * Provides all components needed for Firestore with IndexedDB persistence.\n */\nclass __PRIVATE_IndexedDbOfflineComponentProvider extends MemoryOfflineComponentProvider {\n constructor(e, t, n) {\n super(), this.Qa = e, this.cacheSizeBytes = t, this.forceOwnership = n, this.synchronizeTabs = !1;\n }\n async initialize(e) {\n await super.initialize(e), await this.Qa.initialize(this, e),\n // Enqueue writes from a previous session\n await __PRIVATE_syncEngineEnsureWriteCallbacks(this.Qa.syncEngine), await __PRIVATE_fillWritePipeline(this.Qa.remoteStore),\n // NOTE: This will immediately call the listener, so we make sure to\n // set it after localStore / remoteStore are started.\n await this.persistence.fi(() => (this.gcScheduler && !this.gcScheduler.started && this.gcScheduler.start(), this.indexBackfillerScheduler && !this.indexBackfillerScheduler.started && this.indexBackfillerScheduler.start(), Promise.resolve()));\n }\n createLocalStore(e) {\n return __PRIVATE_newLocalStore(this.persistence, new __PRIVATE_QueryEngine(), e.initialUser, this.serializer);\n }\n createGarbageCollectionScheduler(e, t) {\n const n = this.persistence.referenceDelegate.garbageCollector;\n return new __PRIVATE_LruScheduler(n, e.asyncQueue, t);\n }\n createIndexBackfillerScheduler(e, t) {\n const n = new __PRIVATE_IndexBackfiller(t, this.persistence);\n return new __PRIVATE_IndexBackfillerScheduler(e.asyncQueue, n);\n }\n createPersistence(e) {\n const t = __PRIVATE_indexedDbStoragePrefix(e.databaseInfo.databaseId, e.databaseInfo.persistenceKey),\n n = void 0 !== this.cacheSizeBytes ? LruParams.withCacheSize(this.cacheSizeBytes) : LruParams.DEFAULT;\n return new __PRIVATE_IndexedDbPersistence(this.synchronizeTabs, t, e.clientId, n, e.asyncQueue, __PRIVATE_getWindow(), getDocument(), this.serializer, this.sharedClientState, !!this.forceOwnership);\n }\n createSharedClientState(e) {\n return new __PRIVATE_MemorySharedClientState();\n }\n}\n\n/**\n * Provides all components needed for Firestore with multi-tab IndexedDB\n * persistence.\n *\n * In the legacy client, this provider is used to provide both multi-tab and\n * non-multi-tab persistence since we cannot tell at build time whether\n * `synchronizeTabs` will be enabled.\n */\nclass __PRIVATE_MultiTabOfflineComponentProvider extends __PRIVATE_IndexedDbOfflineComponentProvider {\n constructor(e, t) {\n super(e, t, /* forceOwnership= */!1), this.Qa = e, this.cacheSizeBytes = t, this.synchronizeTabs = !0;\n }\n async initialize(e) {\n await super.initialize(e);\n const t = this.Qa.syncEngine;\n this.sharedClientState instanceof __PRIVATE_WebStorageSharedClientState && (this.sharedClientState.syncEngine = {\n Zs: __PRIVATE_syncEngineApplyBatchState.bind(null, t),\n Xs: __PRIVATE_syncEngineApplyTargetState.bind(null, t),\n eo: __PRIVATE_syncEngineApplyActiveTargetsChange.bind(null, t),\n Bi: __PRIVATE_syncEngineGetActiveClients.bind(null, t),\n Ys: __PRIVATE_syncEngineSynchronizeWithChangedDocuments.bind(null, t)\n }, await this.sharedClientState.start()),\n // NOTE: This will immediately call the listener, so we make sure to\n // set it after localStore / remoteStore are started.\n await this.persistence.fi(async e => {\n await __PRIVATE_syncEngineApplyPrimaryState(this.Qa.syncEngine, e), this.gcScheduler && (e && !this.gcScheduler.started ? 
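/*
 * Illustrative configuration sketch (not part of the bundle): the offline
 * component providers above are selected from the public cache settings. This
 * assumes the modular cache-configuration exports available in recent releases
 * (`persistentLocalCache`, `persistentMultipleTabManager`, `memoryLocalCache`);
 * exact option names may differ between SDK versions, and the app config is a
 * placeholder.
 */
import { initializeApp } from "firebase/app";
import {
  initializeFirestore,
  persistentLocalCache,
  persistentMultipleTabManager,
  memoryLocalCache
} from "firebase/firestore";

const exampleApp = initializeApp({ projectId: "demo-project" }); // placeholder config

// IndexedDB-backed cache shared across tabs: this is what ends up selecting the
// multi-tab IndexedDB offline component provider described above.
const multiTabDb = initializeFirestore(exampleApp, {
  localCache: persistentLocalCache({ tabManager: persistentMultipleTabManager() })
});

// Alternatively, a memory-only cache (eager GC) corresponds to
// MemoryOfflineComponentProvider:
// const memoryDb = initializeFirestore(exampleApp, { localCache: memoryLocalCache() });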
this.gcScheduler.start() : e || this.gcScheduler.stop()), this.indexBackfillerScheduler && (e && !this.indexBackfillerScheduler.started ? this.indexBackfillerScheduler.start() : e || this.indexBackfillerScheduler.stop());\n });\n }\n createSharedClientState(e) {\n const t = __PRIVATE_getWindow();\n if (!__PRIVATE_WebStorageSharedClientState.D(t)) throw new FirestoreError(C.UNIMPLEMENTED, \"IndexedDB persistence is only available on platforms that support LocalStorage.\");\n const n = __PRIVATE_indexedDbStoragePrefix(e.databaseInfo.databaseId, e.databaseInfo.persistenceKey);\n return new __PRIVATE_WebStorageSharedClientState(t, e.asyncQueue, n, e.clientId, e.initialUser);\n }\n}\n\n/**\n * Initializes and wires the components that are needed to interface with the\n * network.\n */\nclass OnlineComponentProvider {\n async initialize(e, t) {\n this.localStore || (this.localStore = e.localStore, this.sharedClientState = e.sharedClientState, this.datastore = this.createDatastore(t), this.remoteStore = this.createRemoteStore(t), this.eventManager = this.createEventManager(t), this.syncEngine = this.createSyncEngine(t, /* startAsPrimary=*/!e.synchronizeTabs), this.sharedClientState.onlineStateHandler = e => __PRIVATE_syncEngineApplyOnlineStateChange(this.syncEngine, e, 1 /* OnlineStateSource.SharedClientState */), this.remoteStore.remoteSyncer.handleCredentialChange = __PRIVATE_syncEngineHandleCredentialChange.bind(null, this.syncEngine), await __PRIVATE_remoteStoreApplyPrimaryState(this.remoteStore, this.syncEngine.isPrimaryClient));\n }\n createEventManager(e) {\n return function __PRIVATE_newEventManager() {\n return new __PRIVATE_EventManagerImpl();\n }();\n }\n createDatastore(e) {\n const t = __PRIVATE_newSerializer(e.databaseInfo.databaseId),\n n = function __PRIVATE_newConnection(e) {\n return new __PRIVATE_WebChannelConnection(e);\n }\n /** Return the Platform-specific connectivity monitor. */(e.databaseInfo);\n return function __PRIVATE_newDatastore(e, t, n, r) {\n return new __PRIVATE_DatastoreImpl(e, t, n, r);\n }(e.authCredentials, e.appCheckCredentials, n, t);\n }\n createRemoteStore(e) {\n return function __PRIVATE_newRemoteStore(e, t, n, r, i) {\n return new __PRIVATE_RemoteStoreImpl(e, t, n, r, i);\n }\n /** Re-enables the network. Idempotent. */(this.localStore, this.datastore, e.asyncQueue, e => __PRIVATE_syncEngineApplyOnlineStateChange(this.syncEngine, e, 0 /* OnlineStateSource.RemoteStore */), function __PRIVATE_newConnectivityMonitor() {\n return __PRIVATE_BrowserConnectivityMonitor.D() ? 
new __PRIVATE_BrowserConnectivityMonitor() : new __PRIVATE_NoopConnectivityMonitor();\n }());\n }\n createSyncEngine(e, t) {\n return function __PRIVATE_newSyncEngine(e, t, n,\n // PORTING NOTE: Manages state synchronization in multi-tab environments.\n r, i, s, o) {\n const _ = new __PRIVATE_SyncEngineImpl(e, t, n, r, i, s);\n return o && (_.La = !0), _;\n }(this.localStore, this.remoteStore, this.eventManager, this.sharedClientState, e.initialUser, e.maxConcurrentLimboResolutions, t);\n }\n async terminate() {\n var e;\n await async function __PRIVATE_remoteStoreShutdown(e) {\n const t = __PRIVATE_debugCast(e);\n __PRIVATE_logDebug(\"RemoteStore\", \"RemoteStore shutting down.\"), t.M_.add(5 /* OfflineCause.Shutdown */), await __PRIVATE_disableNetworkInternal(t), t.O_.shutdown(),\n // Set the OnlineState to Unknown (rather than Offline) to avoid potentially\n // triggering spurious listener events with cached data, etc.\n t.N_.set(\"Unknown\" /* OnlineState.Unknown */);\n }(this.remoteStore), null === (e = this.datastore) || void 0 === e || e.terminate();\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * How many bytes to read each time when `ReadableStreamReader.read()` is\n * called. Only applicable for byte streams that we control (e.g. those backed\n * by an UInt8Array).\n */\n/**\n * Builds a `ByteStreamReader` from a UInt8Array.\n * @param source - The data source to use.\n * @param bytesPerRead - How many bytes each `read()` from the returned reader\n * will read.\n */\nfunction __PRIVATE_toByteStreamReaderHelper(e, t = 10240) {\n let n = 0;\n // The TypeScript definition for ReadableStreamReader changed. 
We use\n // `any` here to allow this code to compile with different versions.\n // See https://github.com/microsoft/TypeScript/issues/42970\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n async read() {\n if (n < e.byteLength) {\n const r = {\n value: e.slice(n, n + t),\n done: !1\n };\n return n += t, r;\n }\n return {\n done: !0\n };\n },\n async cancel() {},\n releaseLock() {},\n closed: Promise.resolve()\n };\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * On web, a `ReadableStream` is wrapped around by a `ByteStreamReader`.\n */\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/*\n * A wrapper implementation of Observer that will dispatch events\n * asynchronously. To allow immediate silencing, a mute call is added which\n * causes events scheduled to no longer be raised.\n */\nclass __PRIVATE_AsyncObserver {\n constructor(e) {\n this.observer = e,\n /**\n * When set to true, will not raise future events. Necessary to deal with\n * async detachment of listener.\n */\n this.muted = !1;\n }\n next(e) {\n this.observer.next && this.Ka(this.observer.next, e);\n }\n error(e) {\n this.observer.error ? this.Ka(this.observer.error, e) : __PRIVATE_logError(\"Uncaught Error in snapshot listener:\", e.toString());\n }\n $a() {\n this.muted = !0;\n }\n Ka(e, t) {\n this.muted || setTimeout(() => {\n this.muted || e(t);\n }, 0);\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A class representing a bundle.\n *\n * Takes a bundle stream or buffer, and presents abstractions to read bundled\n * elements out of the underlying content.\n */\nclass __PRIVATE_BundleReaderImpl {\n constructor(/** The reader to read from underlying binary bundle data source. 
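/*
 * Standalone sketch (not the SDK class): __PRIVATE_AsyncObserver above defers
 * every callback with setTimeout and drops callbacks once muted, so a listener
 * that detaches cannot receive late events. This re-implements only that
 * mute-on-detach pattern for illustration.
 */
class MuteableAsyncObserver {
  constructor(observer) {
    this.observer = observer;
    this.muted = false;
  }
  next(value) {
    this.scheduleEvent(this.observer.next, value);
  }
  error(err) {
    this.scheduleEvent(this.observer.error, err);
  }
  mute() {
    // After this, events that were already scheduled are silently dropped.
    this.muted = true;
  }
  scheduleEvent(handler, value) {
    if (this.muted || !handler) return;
    setTimeout(() => {
      // Re-check: mute() may have been called between scheduling and firing.
      if (!this.muted) handler(value);
    }, 0);
  }
}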
*/\n e, t) {\n this.Ua = e, this.serializer = t, /** Cached bundle metadata. */\n this.metadata = new __PRIVATE_Deferred(),\n /**\n * Internal buffer to hold bundle content, accumulating incomplete element\n * content.\n */\n this.buffer = new Uint8Array(), this.Wa = function __PRIVATE_newTextDecoder() {\n return new TextDecoder(\"utf-8\");\n }(),\n // Read the metadata (which is the first element).\n this.Ga().then(e => {\n e && e.sa() ? this.metadata.resolve(e.ia.metadata) : this.metadata.reject(new Error(`The first element of the bundle is not a metadata, it is\\n ${JSON.stringify(null == e ? void 0 : e.ia)}`));\n }, e => this.metadata.reject(e));\n }\n close() {\n return this.Ua.cancel();\n }\n async getMetadata() {\n return this.metadata.promise;\n }\n async qa() {\n // Makes sure metadata is read before proceeding.\n return await this.getMetadata(), this.Ga();\n }\n /**\n * Reads from the head of internal buffer, and pulling more data from\n * underlying stream if a complete element cannot be found, until an\n * element(including the prefixed length and the JSON string) is found.\n *\n * Once a complete element is read, it is dropped from internal buffer.\n *\n * Returns either the bundled element, or null if we have reached the end of\n * the stream.\n */\n async Ga() {\n const e = await this.za();\n if (null === e) return null;\n const t = this.Wa.decode(e),\n n = Number(t);\n isNaN(n) && this.ja(`length string (${t}) is not valid number`);\n const r = await this.Ha(n);\n return new __PRIVATE_SizedBundleElement(JSON.parse(r), e.length + n);\n }\n /** First index of '{' from the underlying buffer. */\n Ja() {\n return this.buffer.findIndex(e => e === \"{\".charCodeAt(0));\n }\n /**\n * Reads from the beginning of the internal buffer, until the first '{', and\n * return the content.\n *\n * If reached end of the stream, returns a null.\n */\n async za() {\n for (; this.Ja() < 0;) {\n if (await this.Ya()) break;\n }\n // Broke out of the loop because underlying stream is closed, and there\n // happens to be no more data to process.\n if (0 === this.buffer.length) return null;\n const e = this.Ja();\n // Broke out of the loop because underlying stream is closed, but still\n // cannot find an open bracket.\n e < 0 && this.ja(\"Reached the end of bundle when a length string is expected.\");\n const t = this.buffer.slice(0, e);\n // Update the internal buffer to drop the read length.\n return this.buffer = this.buffer.slice(e), t;\n }\n /**\n * Reads from a specified position from the internal buffer, for a specified\n * number of bytes, pulling more data from the underlying stream if needed.\n *\n * Returns a string decoded from the read bytes.\n */\n async Ha(e) {\n for (; this.buffer.length < e;) {\n (await this.Ya()) && this.ja(\"Reached the end of bundle when more is expected.\");\n }\n const t = this.Wa.decode(this.buffer.slice(0, e));\n // Update the internal buffer to drop the read json string.\n return this.buffer = this.buffer.slice(e), t;\n }\n ja(e) {\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n throw this.Ua.cancel(), new Error(`Invalid bundle format: ${e}`);\n }\n /**\n * Pulls more data from underlying stream to internal buffer.\n * Returns a boolean indicating whether the stream is finished.\n */\n async Ya() {\n const e = await this.Ua.read();\n if (!e.done) {\n const t = new Uint8Array(this.buffer.length + e.value.length);\n t.set(this.buffer), t.set(e.value, this.buffer.length), this.buffer = t;\n }\n return e.done;\n }\n}\n\n/**\n * 
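/*
 * Standalone sketch (not the SDK's reader): BundleReaderImpl above parses a
 * stream of elements framed as <decimal byte length><JSON object>, where the
 * length prefix runs up to the first '{'. This sketch parses the same framing
 * from a fully buffered Uint8Array, whereas the real reader pulls bytes from
 * the underlying stream incrementally.
 */
function parseBundleElements(bytes /* Uint8Array */) {
  const decoder = new TextDecoder("utf-8");
  const elements = [];
  let pos = 0;
  while (pos < bytes.length) {
    // The length prefix is everything before the first '{' of the element.
    let brace = pos;
    while (brace < bytes.length && bytes[brace] !== "{".charCodeAt(0)) brace++;
    if (brace === pos || brace === bytes.length) break; // no further elements
    const length = Number(decoder.decode(bytes.slice(pos, brace)));
    if (isNaN(length)) throw new Error("Invalid bundle format: bad length prefix");
    // The length counts bytes (UTF-8), so slice the raw bytes before decoding.
    const jsonBytes = bytes.slice(brace, brace + length);
    elements.push(JSON.parse(decoder.decode(jsonBytes)));
    pos = brace + length;
  }
  return elements; // the first element is the bundle metadata
}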
@license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Internal transaction object responsible for accumulating the mutations to\n * perform and the base versions for any documents read.\n */\nclass Transaction$2 {\n constructor(e) {\n this.datastore = e,\n // The version of each document that was read during this transaction.\n this.readVersions = new Map(), this.mutations = [], this.committed = !1,\n /**\n * A deferred usage error that occurred previously in this transaction that\n * will cause the transaction to fail once it actually commits.\n */\n this.lastTransactionError = null,\n /**\n * Set of documents that have been written in the transaction.\n *\n * When there's more than one write to the same key in a transaction, any\n * writes after the first are handled differently.\n */\n this.writtenDocs = new Set();\n }\n async lookup(e) {\n if (this.ensureCommitNotCalled(), this.mutations.length > 0) throw this.lastTransactionError = new FirestoreError(C.INVALID_ARGUMENT, \"Firestore transactions require all reads to be executed before all writes.\"), this.lastTransactionError;\n const t = await async function __PRIVATE_invokeBatchGetDocumentsRpc(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = {\n documents: t.map(e => __PRIVATE_toName(n.serializer, e))\n },\n i = await n.xo(\"BatchGetDocuments\", n.serializer.databaseId, ResourcePath.emptyPath(), r, t.length),\n s = new Map();\n i.forEach(e => {\n const t = __PRIVATE_fromBatchGetDocumentsResponse(n.serializer, e);\n s.set(t.key.toString(), t);\n });\n const o = [];\n return t.forEach(e => {\n const t = s.get(e.toString());\n __PRIVATE_hardAssert(!!t), o.push(t);\n }), o;\n }(this.datastore, e);\n return t.forEach(e => this.recordVersion(e)), t;\n }\n set(e, t) {\n this.write(t.toMutation(e, this.precondition(e))), this.writtenDocs.add(e.toString());\n }\n update(e, t) {\n try {\n this.write(t.toMutation(e, this.preconditionForUpdate(e)));\n } catch (e) {\n this.lastTransactionError = e;\n }\n this.writtenDocs.add(e.toString());\n }\n delete(e) {\n this.write(new __PRIVATE_DeleteMutation(e, this.precondition(e))), this.writtenDocs.add(e.toString());\n }\n async commit() {\n if (this.ensureCommitNotCalled(), this.lastTransactionError) throw this.lastTransactionError;\n const e = this.readVersions;\n // For each mutation, note that the doc was written.\n this.mutations.forEach(t => {\n e.delete(t.key.toString());\n }),\n // For each document that was read but not written to, we want to perform\n // a `verify` operation.\n e.forEach((e, t) => {\n const n = DocumentKey.fromPath(t);\n this.mutations.push(new __PRIVATE_VerifyMutation(n, this.precondition(n)));\n }), await async function __PRIVATE_invokeCommitRpc(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = {\n writes: t.map(e => toMutation(n.serializer, e))\n };\n await n.Co(\"Commit\", n.serializer.databaseId, ResourcePath.emptyPath(), r);\n }(this.datastore, this.mutations), this.committed = !0;\n }\n recordVersion(e) {\n let 
t;\n if (e.isFoundDocument()) t = e.version;else {\n if (!e.isNoDocument()) throw fail();\n // Represent a deleted doc using SnapshotVersion.min().\n t = SnapshotVersion.min();\n }\n const n = this.readVersions.get(e.key.toString());\n if (n) {\n if (!t.isEqual(n))\n // This transaction will fail no matter what.\n throw new FirestoreError(C.ABORTED, \"Document version changed between two reads.\");\n } else this.readVersions.set(e.key.toString(), t);\n }\n /**\n * Returns the version of this document when it was read in this transaction,\n * as a precondition, or no precondition if it was not read.\n */\n precondition(e) {\n const t = this.readVersions.get(e.toString());\n return !this.writtenDocs.has(e.toString()) && t ? t.isEqual(SnapshotVersion.min()) ? Precondition.exists(!1) : Precondition.updateTime(t) : Precondition.none();\n }\n /**\n * Returns the precondition for a document if the operation is an update.\n */\n preconditionForUpdate(e) {\n const t = this.readVersions.get(e.toString());\n // The first time a document is written, we want to take into account the\n // read time and existence\n if (!this.writtenDocs.has(e.toString()) && t) {\n if (t.isEqual(SnapshotVersion.min()))\n // The document doesn't exist, so fail the transaction.\n // This has to be validated locally because you can't send a\n // precondition that a document does not exist without changing the\n // semantics of the backend write to be an insert. This is the reverse\n // of what we want, since we want to assert that the document doesn't\n // exist but then send the update and have it fail. Since we can't\n // express that to the backend, we have to validate locally.\n // Note: this can change once we can send separate verify writes in the\n // transaction.\n throw new FirestoreError(C.INVALID_ARGUMENT, \"Can't update a document that doesn't exist.\");\n // Document exists, base precondition on document update time.\n return Precondition.updateTime(t);\n }\n // Document was not read, so we just use the preconditions for a blind\n // update.\n return Precondition.exists(!0);\n }\n write(e) {\n this.ensureCommitNotCalled(), this.mutations.push(e);\n }\n ensureCommitNotCalled() {}\n}\n\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * TransactionRunner encapsulates the logic needed to run and retry transactions\n * with backoff.\n */\nclass __PRIVATE_TransactionRunner {\n constructor(e, t, n, r, i) {\n this.asyncQueue = e, this.datastore = t, this.options = n, this.updateFunction = r, this.deferred = i, this.Za = n.maxAttempts, this.Yo = new __PRIVATE_ExponentialBackoff(this.asyncQueue, \"transaction_retry\" /* TimerId.TransactionRetry */);\n }\n /** Runs the transaction and sets the result on deferred. 
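/*
 * Illustrative usage sketch (not part of the bundle): the Transaction class
 * above enforces reads-before-writes and records read versions so that the
 * commit can attach preconditions (exists:false for missing documents,
 * updateTime otherwise). From the application side this surfaces through the
 * modular `runTransaction` export; collection and field names below are
 * placeholders.
 */
import { getFirestore, doc, runTransaction } from "firebase/firestore";

async function transferCredits(fromId, toId, amount) {
  const db = getFirestore();
  return runTransaction(db, async txn => {
    // All reads must happen before any write, mirroring the check in lookup().
    const fromRef = doc(db, "accounts", fromId);
    const toRef = doc(db, "accounts", toId);
    const fromSnap = await txn.get(fromRef);
    const toSnap = await txn.get(toRef);
    if (!fromSnap.exists() || !toSnap.exists()) {
      // Errors thrown by the update function fail the transaction and are not retried.
      throw new Error("account missing");
    }
    // Writes are buffered as mutations and committed together; the commit
    // carries update-time preconditions derived from the reads above.
    txn.update(fromRef, { credits: fromSnap.data().credits - amount });
    txn.update(toRef, { credits: toSnap.data().credits + amount });
  });
}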
*/\n Xa() {\n this.Za -= 1, this.eu();\n }\n eu() {\n this.Yo.$o(async () => {\n const e = new Transaction$2(this.datastore),\n t = this.tu(e);\n t && t.then(t => {\n this.asyncQueue.enqueueAndForget(() => e.commit().then(() => {\n this.deferred.resolve(t);\n }).catch(e => {\n this.nu(e);\n }));\n }).catch(e => {\n this.nu(e);\n });\n });\n }\n tu(e) {\n try {\n const t = this.updateFunction(e);\n return !__PRIVATE_isNullOrUndefined(t) && t.catch && t.then ? t : (this.deferred.reject(Error(\"Transaction callback must return a Promise\")), null);\n } catch (e) {\n // Do not retry errors thrown by user provided updateFunction.\n return this.deferred.reject(e), null;\n }\n }\n nu(e) {\n this.Za > 0 && this.ru(e) ? (this.Za -= 1, this.asyncQueue.enqueueAndForget(() => (this.eu(), Promise.resolve()))) : this.deferred.reject(e);\n }\n ru(e) {\n if (\"FirebaseError\" === e.name) {\n // In transactions, the backend will fail outdated reads with FAILED_PRECONDITION and\n // non-matching document versions with ABORTED. These errors should be retried.\n const t = e.code;\n return \"aborted\" === t || \"failed-precondition\" === t || \"already-exists\" === t || !__PRIVATE_isPermanentError(t);\n }\n return !1;\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * FirestoreClient is a top-level class that constructs and owns all of the //\n * pieces of the client SDK architecture. It is responsible for creating the //\n * async queue that is shared by all of the other components in the system. //\n */\nclass FirestoreClient {\n constructor(e, t,\n /**\n * Asynchronous queue responsible for all of our internal processing. When\n * we get incoming work from the user (via public API) or the network\n * (incoming GRPC messages), we should always schedule onto this queue.\n * This ensures all of our work is properly serialized (e.g. 
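/*
 * Standalone sketch (not the SDK class): __PRIVATE_TransactionRunner above
 * reruns the whole update function with exponential backoff when the commit
 * fails with a retryable code (aborted, failed-precondition, already-exists,
 * or any non-permanent error), up to options.maxAttempts. The real runner
 * schedules attempts on the AsyncQueue and uses the ExponentialBackoff helper;
 * this sketch just sleeps with a doubling delay.
 */
async function runWithRetries(attempt, maxAttempts = 5, isRetryable = defaultIsRetryable) {
  let delayMs = 100; // illustrative initial backoff
  for (let i = 0; i < maxAttempts; i++) {
    try {
      return await attempt();
    } catch (err) {
      if (i === maxAttempts - 1 || !isRetryable(err)) throw err;
      await new Promise(resolve => setTimeout(resolve, delayMs));
      delayMs *= 2; // doubling backoff; the SDK's helper also caps and randomizes
    }
  }
}

function defaultIsRetryable(err) {
  // Mirrors the codes treated as retryable in the runner above.
  return ["aborted", "failed-precondition", "already-exists"].includes(err && err.code);
}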
we don't\n * start processing a new operation while the previous one is waiting for\n * an async I/O to complete).\n */\n n, r) {\n this.authCredentials = e, this.appCheckCredentials = t, this.asyncQueue = n, this.databaseInfo = r, this.user = User.UNAUTHENTICATED, this.clientId = __PRIVATE_AutoId.newId(), this.authCredentialListener = () => Promise.resolve(), this.appCheckCredentialListener = () => Promise.resolve(), this.authCredentials.start(n, async e => {\n __PRIVATE_logDebug(\"FirestoreClient\", \"Received user=\", e.uid), await this.authCredentialListener(e), this.user = e;\n }), this.appCheckCredentials.start(n, e => (__PRIVATE_logDebug(\"FirestoreClient\", \"Received new app check token=\", e), this.appCheckCredentialListener(e, this.user)));\n }\n get configuration() {\n return {\n asyncQueue: this.asyncQueue,\n databaseInfo: this.databaseInfo,\n clientId: this.clientId,\n authCredentials: this.authCredentials,\n appCheckCredentials: this.appCheckCredentials,\n initialUser: this.user,\n maxConcurrentLimboResolutions: 100\n };\n }\n setCredentialChangeListener(e) {\n this.authCredentialListener = e;\n }\n setAppCheckTokenChangeListener(e) {\n this.appCheckCredentialListener = e;\n }\n /**\n * Checks that the client has not been terminated. Ensures that other methods on //\n * this class cannot be called after the client is terminated. //\n */\n verifyNotTerminated() {\n if (this.asyncQueue.isShuttingDown) throw new FirestoreError(C.FAILED_PRECONDITION, \"The client has already been terminated.\");\n }\n terminate() {\n this.asyncQueue.enterRestrictedMode();\n const e = new __PRIVATE_Deferred();\n return this.asyncQueue.enqueueAndForgetEvenWhileRestricted(async () => {\n try {\n this._onlineComponents && (await this._onlineComponents.terminate()), this._offlineComponents && (await this._offlineComponents.terminate()),\n // The credentials provider must be terminated after shutting down the\n // RemoteStore as it will prevent the RemoteStore from retrieving auth\n // tokens.\n this.authCredentials.shutdown(), this.appCheckCredentials.shutdown(), e.resolve();\n } catch (t) {\n const n = __PRIVATE_wrapInUserErrorIfRecoverable(t, \"Failed to shutdown persistence\");\n e.reject(n);\n }\n }), e.promise;\n }\n}\nasync function __PRIVATE_setOfflineComponentProvider(e, t) {\n e.asyncQueue.verifyOperationInProgress(), __PRIVATE_logDebug(\"FirestoreClient\", \"Initializing OfflineComponentProvider\");\n const n = e.configuration;\n await t.initialize(n);\n let r = n.initialUser;\n e.setCredentialChangeListener(async e => {\n r.isEqual(e) || (await __PRIVATE_localStoreHandleUserChange(t.localStore, e), r = e);\n }),\n // When a user calls clearPersistence() in one client, all other clients\n // need to be terminated to allow the delete to succeed.\n t.persistence.setDatabaseDeletedListener(() => e.terminate()), e._offlineComponents = t;\n}\nasync function __PRIVATE_setOnlineComponentProvider(e, t) {\n e.asyncQueue.verifyOperationInProgress();\n const n = await __PRIVATE_ensureOfflineComponents(e);\n __PRIVATE_logDebug(\"FirestoreClient\", \"Initializing OnlineComponentProvider\"), await t.initialize(n, e.configuration),\n // The CredentialChangeListener of the online component provider takes\n // precedence over the offline component provider.\n e.setCredentialChangeListener(e => __PRIVATE_remoteStoreHandleCredentialChange(t.remoteStore, e)), e.setAppCheckTokenChangeListener((e, n) => __PRIVATE_remoteStoreHandleCredentialChange(t.remoteStore, n)), e._onlineComponents = t;\n}\n\n/**\n * Decides 
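/*
 * Standalone sketch (not the SDK's AsyncQueue): FirestoreClient funnels all
 * internal work through a single asynchronous queue so operations are strictly
 * serialized. This minimal promise-chaining queue illustrates the property the
 * client relies on: each enqueued operation starts only after the previous one
 * has settled.
 */
class SerialQueue {
  constructor() {
    this.tail = Promise.resolve();
  }
  enqueue(op /* () => Promise<T> */) {
    const result = this.tail.then(op, op); // run even if the previous op failed
    // Keep the chain alive regardless of this operation's outcome.
    this.tail = result.then(() => undefined, () => undefined);
    return result;
  }
}

// Usage: operations issued through the same queue never interleave.
// const queue = new SerialQueue();
// queue.enqueue(() => doWrite());
// queue.enqueue(() => doRead());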
whether the provided error allows us to gracefully disable\n * persistence (as opposed to crashing the client).\n */\nfunction __PRIVATE_canFallbackFromIndexedDbError(e) {\n return \"FirebaseError\" === e.name ? e.code === C.FAILED_PRECONDITION || e.code === C.UNIMPLEMENTED : !(\"undefined\" != typeof DOMException && e instanceof DOMException) ||\n // When the browser is out of quota we could get either quota exceeded\n // or an aborted error depending on whether the error happened during\n // schema migration.\n 22 === e.code || 20 === e.code ||\n // Firefox Private Browsing mode disables IndexedDb and returns\n // INVALID_STATE for any usage.\n 11 === e.code;\n}\nasync function __PRIVATE_ensureOfflineComponents(e) {\n if (!e._offlineComponents) if (e._uninitializedComponentsProvider) {\n __PRIVATE_logDebug(\"FirestoreClient\", \"Using user provided OfflineComponentProvider\");\n try {\n await __PRIVATE_setOfflineComponentProvider(e, e._uninitializedComponentsProvider._offline);\n } catch (t) {\n const n = t;\n if (!__PRIVATE_canFallbackFromIndexedDbError(n)) throw n;\n __PRIVATE_logWarn(\"Error using user provided cache. Falling back to memory cache: \" + n), await __PRIVATE_setOfflineComponentProvider(e, new MemoryOfflineComponentProvider());\n }\n } else __PRIVATE_logDebug(\"FirestoreClient\", \"Using default OfflineComponentProvider\"), await __PRIVATE_setOfflineComponentProvider(e, new MemoryOfflineComponentProvider());\n return e._offlineComponents;\n}\nasync function __PRIVATE_ensureOnlineComponents(e) {\n return e._onlineComponents || (e._uninitializedComponentsProvider ? (__PRIVATE_logDebug(\"FirestoreClient\", \"Using user provided OnlineComponentProvider\"), await __PRIVATE_setOnlineComponentProvider(e, e._uninitializedComponentsProvider._online)) : (__PRIVATE_logDebug(\"FirestoreClient\", \"Using default OnlineComponentProvider\"), await __PRIVATE_setOnlineComponentProvider(e, new OnlineComponentProvider()))), e._onlineComponents;\n}\nfunction __PRIVATE_getPersistence(e) {\n return __PRIVATE_ensureOfflineComponents(e).then(e => e.persistence);\n}\nfunction __PRIVATE_getLocalStore(e) {\n return __PRIVATE_ensureOfflineComponents(e).then(e => e.localStore);\n}\nfunction __PRIVATE_getRemoteStore(e) {\n return __PRIVATE_ensureOnlineComponents(e).then(e => e.remoteStore);\n}\nfunction __PRIVATE_getSyncEngine(e) {\n return __PRIVATE_ensureOnlineComponents(e).then(e => e.syncEngine);\n}\nfunction __PRIVATE_getDatastore(e) {\n return __PRIVATE_ensureOnlineComponents(e).then(e => e.datastore);\n}\nasync function __PRIVATE_getEventManager(e) {\n const t = await __PRIVATE_ensureOnlineComponents(e),\n n = t.eventManager;\n return n.onListen = __PRIVATE_syncEngineListen.bind(null, t.syncEngine), n.onUnlisten = __PRIVATE_syncEngineUnlisten.bind(null, t.syncEngine), n.onFirstRemoteStoreListen = __PRIVATE_triggerRemoteStoreListen.bind(null, t.syncEngine), n.onLastRemoteStoreUnlisten = __PRIVATE_triggerRemoteStoreUnlisten.bind(null, t.syncEngine), n;\n}\n\n/** Enables the network connection and re-enqueues all pending operations. */\nfunction __PRIVATE_firestoreClientEnableNetwork(e) {\n return e.asyncQueue.enqueue(async () => {\n const t = await __PRIVATE_getPersistence(e),\n n = await __PRIVATE_getRemoteStore(e);\n return t.setNetworkEnabled(!0), function __PRIVATE_remoteStoreEnableNetwork(e) {\n const t = __PRIVATE_debugCast(e);\n return t.M_.delete(0 /* OfflineCause.UserDisabled */), __PRIVATE_enableNetworkInternal(t);\n }(n);\n });\n}\n\n/** Disables the network connection. 
Pending operations will not complete. */\nfunction __PRIVATE_firestoreClientDisableNetwork(e) {\n return e.asyncQueue.enqueue(async () => {\n const t = await __PRIVATE_getPersistence(e),\n n = await __PRIVATE_getRemoteStore(e);\n return t.setNetworkEnabled(!1), async function __PRIVATE_remoteStoreDisableNetwork(e) {\n const t = __PRIVATE_debugCast(e);\n t.M_.add(0 /* OfflineCause.UserDisabled */), await __PRIVATE_disableNetworkInternal(t),\n // Set the OnlineState to Offline so get()s return from cache, etc.\n t.N_.set(\"Offline\" /* OnlineState.Offline */);\n }(n);\n });\n}\n\n/**\n * Returns a Promise that resolves when all writes that were pending at the time\n * this method was called received server acknowledgement. An acknowledgement\n * can be either acceptance or rejection.\n */\nfunction __PRIVATE_firestoreClientGetDocumentFromLocalCache(e, t) {\n const n = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueueAndForget(async () => async function __PRIVATE_readDocumentFromCache(e, t, n) {\n try {\n const r = await function __PRIVATE_localStoreReadDocument(e, t) {\n const n = __PRIVATE_debugCast(e);\n return n.persistence.runTransaction(\"read document\", \"readonly\", e => n.localDocuments.getDocument(e, t));\n }(e, t);\n r.isFoundDocument() ? n.resolve(r) : r.isNoDocument() ? n.resolve(null) : n.reject(new FirestoreError(C.UNAVAILABLE, \"Failed to get document from cache. (However, this document may exist on the server. Run again without setting 'source' in the GetOptions to attempt to retrieve the document from the server.)\"));\n } catch (e) {\n const r = __PRIVATE_wrapInUserErrorIfRecoverable(e, `Failed to get document '${t} from cache`);\n n.reject(r);\n }\n }\n /**\n * Retrieves a latency-compensated document from the backend via a\n * SnapshotListener.\n */(await __PRIVATE_getLocalStore(e), t, n)), n.promise;\n}\nfunction __PRIVATE_firestoreClientGetDocumentViaSnapshotListener(e, t, n = {}) {\n const r = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueueAndForget(async () => function __PRIVATE_readDocumentViaSnapshotListener(e, t, n, r, i) {\n const s = new __PRIVATE_AsyncObserver({\n next: s => {\n // Remove query first before passing event to user to avoid\n // user actions affecting the now stale query.\n t.enqueueAndForget(() => __PRIVATE_eventManagerUnlisten(e, o));\n const _ = s.docs.has(n);\n !_ && s.fromCache ?\n // TODO(dimond): If we're online and the document doesn't\n // exist then we resolve with a doc.exists set to false. If\n // we're offline however, we reject the Promise in this\n // case. Two options: 1) Cache the negative response from\n // the server so we can deliver that even when you're\n // offline 2) Actually reject the Promise in the online case\n // if the document doesn't exist.\n i.reject(new FirestoreError(C.UNAVAILABLE, \"Failed to get document because the client is offline.\")) : _ && s.fromCache && r && \"server\" === r.source ? i.reject(new FirestoreError(C.UNAVAILABLE, 'Failed to get document from server. (However, this document does exist in the local cache. 
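/*
 * Illustrative usage sketch (not part of the bundle): the enable/disable
 * network and cache-read helpers above back the public network-control and
 * cache-only read APIs. This assumes the standard modular exports
 * (`disableNetwork`, `enableNetwork`, `getDocFromCache`, `waitForPendingWrites`);
 * the "users" collection is a placeholder.
 */
import {
  getFirestore, doc, getDocFromCache,
  disableNetwork, enableNetwork, waitForPendingWrites
} from "firebase/firestore";

async function readWhileOffline(userId) {
  const db = getFirestore();
  await disableNetwork(db); // OnlineState becomes Offline; reads are served from cache
  try {
    const snap = await getDocFromCache(doc(db, "users", userId));
    return snap.exists() ? snap.data() : null;
  } catch (e) {
    // getDocFromCache rejects when the document was never cached locally.
    return null;
  } finally {
    await enableNetwork(db);        // reconnect and re-enqueue pending operations
    await waitForPendingWrites(db); // resolves once queued writes are acknowledged
  }
}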
Run again without setting source to \"server\" to retrieve the cached document.)')) : i.resolve(s);\n },\n error: e => i.reject(e)\n }),\n o = new __PRIVATE_QueryListener(__PRIVATE_newQueryForPath(n.path), s, {\n includeMetadataChanges: !0,\n ra: !0\n });\n return __PRIVATE_eventManagerListen(e, o);\n }(await __PRIVATE_getEventManager(e), e.asyncQueue, t, n, r)), r.promise;\n}\nfunction __PRIVATE_firestoreClientGetDocumentsFromLocalCache(e, t) {\n const n = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueueAndForget(async () => async function __PRIVATE_executeQueryFromCache(e, t, n) {\n try {\n const r = await __PRIVATE_localStoreExecuteQuery(e, t, /* usePreviousResults= */!0),\n i = new __PRIVATE_View(t, r.hs),\n s = i.da(r.documents),\n o = i.applyChanges(s, /* limboResolutionEnabled= */!1);\n n.resolve(o.snapshot);\n } catch (e) {\n const r = __PRIVATE_wrapInUserErrorIfRecoverable(e, `Failed to execute query '${t} against cache`);\n n.reject(r);\n }\n }\n /**\n * Retrieves a latency-compensated query snapshot from the backend via a\n * SnapshotListener.\n */(await __PRIVATE_getLocalStore(e), t, n)), n.promise;\n}\nfunction __PRIVATE_firestoreClientGetDocumentsViaSnapshotListener(e, t, n = {}) {\n const r = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueueAndForget(async () => function __PRIVATE_executeQueryViaSnapshotListener(e, t, n, r, i) {\n const s = new __PRIVATE_AsyncObserver({\n next: n => {\n // Remove query first before passing event to user to avoid\n // user actions affecting the now stale query.\n t.enqueueAndForget(() => __PRIVATE_eventManagerUnlisten(e, o)), n.fromCache && \"server\" === r.source ? i.reject(new FirestoreError(C.UNAVAILABLE, 'Failed to get documents from server. (However, these documents may exist in the local cache. Run again without setting source to \"server\" to retrieve the cached documents.)')) : i.resolve(n);\n },\n error: e => i.reject(e)\n }),\n o = new __PRIVATE_QueryListener(n, s, {\n includeMetadataChanges: !0,\n ra: !0\n });\n return __PRIVATE_eventManagerListen(e, o);\n }(await __PRIVATE_getEventManager(e), e.asyncQueue, t, n, r)), r.promise;\n}\nfunction __PRIVATE_firestoreClientRunAggregateQuery(e, t, n) {\n const r = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueueAndForget(async () => {\n // Implement and call executeAggregateQueryViaSnapshotListener, similar\n // to the implementation in firestoreClientGetDocumentsViaSnapshotListener\n // above\n try {\n // TODO(b/277628384): check `canUseNetwork()` and handle multi-tab.\n const i = await __PRIVATE_getDatastore(e);\n r.resolve(async function __PRIVATE_invokeRunAggregationQueryRpc(e, t, n) {\n var r;\n const i = __PRIVATE_debugCast(e),\n {\n request: s,\n ut: o,\n parent: _\n } = __PRIVATE_toRunAggregationQueryRequest(i.serializer, __PRIVATE_queryToAggregateTarget(t), n);\n i.connection.Do || delete s.parent;\n const a = (await i.xo(\"RunAggregationQuery\", i.serializer.databaseId, _, s, /*expectedResponseCount=*/1)).filter(e => !!e.result);\n // Omit RunAggregationQueryResponse that only contain readTimes.\n __PRIVATE_hardAssert(1 === a.length);\n // Remap the short-form aliases that were sent to the server\n // to the client-side aliases. Users will access the results\n // using the client-side alias.\n const u = null === (r = a[0].result) || void 0 === r ? 
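/*
 * Illustrative usage sketch (not part of the bundle): the aggregate-query path
 * above sends a RunAggregationQuery RPC and remaps the server's short-form
 * aliases back to the client-side aliases. This assumes the modular
 * `getCountFromServer` / `getAggregateFromServer` exports; `sum()` is only
 * available in newer releases, and the collection/field names are placeholders.
 */
import {
  getFirestore, collection, query, where,
  getCountFromServer, getAggregateFromServer, count, sum
} from "firebase/firestore";

async function countActiveUsers() {
  const db = getFirestore();
  const activeUsers = query(collection(db, "users"), where("active", "==", true));

  // Count-only convenience wrapper.
  const countSnap = await getCountFromServer(activeUsers);
  console.log("active users:", countSnap.data().count);

  // General aggregation: the keys of this spec are the client-side aliases that
  // the response is remapped to, as described above.
  const aggSnap = await getAggregateFromServer(activeUsers, {
    total: count(),
    creditsTotal: sum("credits")
  });
  console.log(aggSnap.data().total, aggSnap.data().creditsTotal);
}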
void 0 : r.aggregateFields;\n return Object.keys(u).reduce((e, t) => (e[o[t]] = u[t], e), {});\n }(i, t, n));\n } catch (e) {\n r.reject(e);\n }\n }), r.promise;\n}\nfunction __PRIVATE_firestoreClientAddSnapshotsInSyncListener(e, t) {\n const n = new __PRIVATE_AsyncObserver(t);\n return e.asyncQueue.enqueueAndForget(async () => function __PRIVATE_addSnapshotsInSyncListener(e, t) {\n __PRIVATE_debugCast(e).z_.add(t),\n // Immediately fire an initial event, indicating all existing listeners\n // are in-sync.\n t.next();\n }(await __PRIVATE_getEventManager(e), n)), () => {\n n.$a(), e.asyncQueue.enqueueAndForget(async () => function __PRIVATE_removeSnapshotsInSyncListener(e, t) {\n __PRIVATE_debugCast(e).z_.delete(t);\n }(await __PRIVATE_getEventManager(e), n));\n };\n}\n\n/**\n * Takes an updateFunction in which a set of reads and writes can be performed\n * atomically. In the updateFunction, the client can read and write values\n * using the supplied transaction object. After the updateFunction, all\n * changes will be committed. If a retryable error occurs (ex: some other\n * client has changed any of the data referenced), then the updateFunction\n * will be called again after a backoff. If the updateFunction still fails\n * after all retries, then the transaction will be rejected.\n *\n * The transaction object passed to the updateFunction contains methods for\n * accessing documents and collections. Unlike other datastore access, data\n * accessed with the transaction will not reflect local changes that have not\n * been committed. For this reason, it is required that all reads are\n * performed before any writes. Transactions must be performed while online.\n */\nfunction __PRIVATE_firestoreClientLoadBundle(e, t, n, r) {\n const i = function __PRIVATE_createBundleReader(e, t) {\n let n;\n n = \"string\" == typeof e ? 
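/*
 * Illustrative usage sketch (not part of the bundle): the snapshots-in-sync
 * listener above fires whenever every active listener reflects a consistent
 * snapshot, and the returned closure mutes and detaches the AsyncObserver.
 * This assumes the modular `onSnapshotsInSync` export.
 */
import { getFirestore, onSnapshotsInSync } from "firebase/firestore";

const db = getFirestore();

// The callback fires once immediately (existing listeners are trivially in
// sync) and again each time all snapshot listeners are consistent.
const unsubscribe = onSnapshotsInSync(db, () => {
  console.log("all snapshot listeners are in sync");
});

// Later: mutes the observer and removes it from the EventManager.
// unsubscribe();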
__PRIVATE_newTextEncoder().encode(e) : e;\n return function __PRIVATE_newBundleReader(e, t) {\n return new __PRIVATE_BundleReaderImpl(e, t);\n }(function __PRIVATE_toByteStreamReader(e, t) {\n if (e instanceof Uint8Array) return __PRIVATE_toByteStreamReaderHelper(e, t);\n if (e instanceof ArrayBuffer) return __PRIVATE_toByteStreamReaderHelper(new Uint8Array(e), t);\n if (e instanceof ReadableStream) return e.getReader();\n throw new Error(\"Source of `toByteStreamReader` has to be a ArrayBuffer or ReadableStream\");\n }(n), t);\n }(n, __PRIVATE_newSerializer(t));\n e.asyncQueue.enqueueAndForget(async () => {\n __PRIVATE_syncEngineLoadBundle(await __PRIVATE_getSyncEngine(e), i, r);\n });\n}\nfunction __PRIVATE_firestoreClientGetNamedQuery(e, t) {\n return e.asyncQueue.enqueue(async () => function __PRIVATE_localStoreGetNamedQuery(e, t) {\n const n = __PRIVATE_debugCast(e);\n return n.persistence.runTransaction(\"Get named query\", \"readonly\", e => n.$r.getNamedQuery(e, t));\n }(await __PRIVATE_getLocalStore(e), t));\n}\nfunction __PRIVATE_firestoreClientSetIndexConfiguration(e, t) {\n return e.asyncQueue.enqueue(async () => async function __PRIVATE_localStoreConfigureFieldIndexes(e, t) {\n const n = __PRIVATE_debugCast(e),\n r = n.indexManager,\n i = [];\n return n.persistence.runTransaction(\"Configure indexes\", \"readwrite\", e => r.getFieldIndexes(e).next(n =>\n /**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n /**\n * Compares two array for equality using comparator. The method computes the\n * intersection and invokes `onAdd` for every element that is in `after` but not\n * `before`. 
`onRemove` is invoked for every element in `before` but missing\n * from `after`.\n *\n * The method creates a copy of both `before` and `after` and runs in O(n log\n * n), where n is the size of the two lists.\n *\n * @param before - The elements that exist in the original array.\n * @param after - The elements to diff against the original array.\n * @param comparator - The comparator for the elements in before and after.\n * @param onAdd - A function to invoke for every element that is part of `\n * after` but not `before`.\n * @param onRemove - A function to invoke for every element that is part of\n * `before` but not `after`.\n */\n function __PRIVATE_diffArrays(e, t, n, r, i) {\n e = [...e], t = [...t], e.sort(n), t.sort(n);\n const s = e.length,\n o = t.length;\n let _ = 0,\n a = 0;\n for (; _ < o && a < s;) {\n const s = n(e[a], t[_]);\n s < 0 ?\n // The element was removed if the next element in our ordered\n // walkthrough is only in `before`.\n i(e[a++]) : s > 0 ?\n // The element was added if the next element in our ordered walkthrough\n // is only in `after`.\n r(t[_++]) : (_++, a++);\n }\n for (; _ < o;) r(t[_++]);\n for (; a < s;) i(e[a++]);\n }(n, t, __PRIVATE_fieldIndexSemanticComparator, t => {\n i.push(r.addFieldIndex(e, t));\n }, t => {\n i.push(r.deleteFieldIndex(e, t));\n })).next(() => PersistencePromise.waitFor(i)));\n }(await __PRIVATE_getLocalStore(e), t));\n}\nfunction __PRIVATE_firestoreClientSetPersistentCacheIndexAutoCreationEnabled(e, t) {\n return e.asyncQueue.enqueue(async () => function __PRIVATE_localStoreSetIndexAutoCreationEnabled(e, t) {\n __PRIVATE_debugCast(e).ts.Ui = t;\n }(await __PRIVATE_getLocalStore(e), t));\n}\nfunction __PRIVATE_firestoreClientDeleteAllFieldIndexes(e) {\n return e.asyncQueue.enqueue(async () => function __PRIVATE_localStoreDeleteAllFieldIndexes(e) {\n const t = __PRIVATE_debugCast(e),\n n = t.indexManager;\n return t.persistence.runTransaction(\"Delete All Indexes\", \"readwrite\", e => n.deleteAllFieldIndexes(e));\n }\n /**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n // The format of the LocalStorage key that stores the client state is:\n // firestore_clients__\n (await __PRIVATE_getLocalStore(e)));\n}\n\n/**\n * @license\n * Copyright 2023 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Compares two `ExperimentalLongPollingOptions` objects for equality.\n */\n/**\n * Creates and returns a new 
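/*
 * Standalone sketch (not the SDK's export): __PRIVATE_diffArrays above performs
 * a sorted two-pointer walk over copies of `before` and `after`, calling
 * onRemove for elements present only in `before` and onAdd for elements present
 * only in `after`. The same technique, with a tiny usage, for illustration:
 */
function diffSorted(before, after, comparator, onAdd, onRemove) {
  const a = [...before].sort(comparator);
  const b = [...after].sort(comparator);
  let i = 0, j = 0;
  while (i < a.length && j < b.length) {
    const cmp = comparator(a[i], b[j]);
    if (cmp < 0) onRemove(a[i++]);    // only in `before`
    else if (cmp > 0) onAdd(b[j++]);  // only in `after`
    else { i++; j++; }                // present in both
  }
  while (i < a.length) onRemove(a[i++]);
  while (j < b.length) onAdd(b[j++]);
}

// diffSorted([1, 2, 4], [2, 3, 4, 5], (x, y) => x - y,
//   added => console.log("add", added),        // logs 3, then 5
//   removed => console.log("remove", removed)); // logs 1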
`ExperimentalLongPollingOptions` with the same\n * option values as the given instance.\n */\nfunction __PRIVATE_cloneLongPollingOptions(e) {\n const t = {};\n return void 0 !== e.timeoutSeconds && (t.timeoutSeconds = e.timeoutSeconds), t;\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst ye = new Map();\n\n/**\n * An instance map that ensures only one Datastore exists per Firestore\n * instance.\n */\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nfunction __PRIVATE_validateNonEmptyArgument(e, t, n) {\n if (!n) throw new FirestoreError(C.INVALID_ARGUMENT, `Function ${e}() cannot be called with an empty ${t}.`);\n}\n\n/**\n * Validates that two boolean options are not set at the same time.\n * @internal\n */\nfunction __PRIVATE_validateIsNotUsedTogether(e, t, n, r) {\n if (!0 === t && !0 === r) throw new FirestoreError(C.INVALID_ARGUMENT, `${e} and ${n} cannot be used together.`);\n}\n\n/**\n * Validates that `path` refers to a document (indicated by the fact it contains\n * an even numbers of segments).\n */\nfunction __PRIVATE_validateDocumentPath(e) {\n if (!DocumentKey.isDocumentKey(e)) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid document reference. Document references must have an even number of segments, but ${e} has ${e.length}.`);\n}\n\n/**\n * Validates that `path` refers to a collection (indicated by the fact it\n * contains an odd numbers of segments).\n */\nfunction __PRIVATE_validateCollectionPath(e) {\n if (DocumentKey.isDocumentKey(e)) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid collection reference. Collection references must have an odd number of segments, but ${e} has ${e.length}.`);\n}\n\n/**\n * Returns true if it's a non-null object without a custom prototype\n * (i.e. excludes Array, Date, etc.).\n */\n/** Returns a string describing the type / value of the provided input. */\nfunction __PRIVATE_valueDescription(e) {\n if (void 0 === e) return \"undefined\";\n if (null === e) return \"null\";\n if (\"string\" == typeof e) return e.length > 20 && (e = `${e.substring(0, 20)}...`), JSON.stringify(e);\n if (\"number\" == typeof e || \"boolean\" == typeof e) return \"\" + e;\n if (\"object\" == typeof e) {\n if (e instanceof Array) return \"an array\";\n {\n const t = /** try to get the constructor name for an object. 
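/*
 * Illustrative usage sketch (not part of the bundle): validateDocumentPath and
 * validateCollectionPath above encode the rule that document paths have an even
 * number of segments and collection paths an odd number. The modular `doc` and
 * `collection` helpers enforce the same rule; the paths below are placeholders.
 */
import { getFirestore, doc, collection } from "firebase/firestore";

const db = getFirestore();

// Even number of segments => a document reference.
const profile = doc(db, "users/alice");            // 2 segments: ok
const nested  = doc(db, "users/alice/posts/p1");   // 4 segments: ok

// Odd number of segments => a collection reference.
const users = collection(db, "users");             // 1 segment: ok
const posts = collection(db, "users/alice/posts"); // 3 segments: ok

// These throw the INVALID_ARGUMENT errors produced by the validators above:
// doc(db, "users")              -> document references need an even segment count
// collection(db, "users/alice") -> collection references need an odd segment count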
*/\n function __PRIVATE_tryGetCustomObjectType(e) {\n if (e.constructor) return e.constructor.name;\n return null;\n }\n /**\n * Casts `obj` to `T`, optionally unwrapping Compat types to expose the\n * underlying instance. Throws if `obj` is not an instance of `T`.\n *\n * This cast is used in the Lite and Full SDK to verify instance types for\n * arguments passed to the public API.\n * @internal\n */(e);\n return t ? `a custom ${t} object` : \"an object\";\n }\n }\n return \"function\" == typeof e ? \"a function\" : fail();\n}\nfunction __PRIVATE_cast(e,\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nt) {\n if (\"_delegate\" in e && (\n // Unwrap Compat types\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n e = e._delegate), !(e instanceof t)) {\n if (t.name === e.constructor.name) throw new FirestoreError(C.INVALID_ARGUMENT, \"Type does not match the expected instance. Did you pass a reference from a different Firestore SDK?\");\n {\n const n = __PRIVATE_valueDescription(e);\n throw new FirestoreError(C.INVALID_ARGUMENT, `Expected type '${t.name}', but it was: ${n}`);\n }\n }\n return e;\n}\nfunction __PRIVATE_validatePositiveNumber(e, t) {\n if (t <= 0) throw new FirestoreError(C.INVALID_ARGUMENT, `Function ${e}() requires a positive number, but it was: ${t}.`);\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n// settings() defaults:\n/**\n * A concrete type describing all the values that can be applied via a\n * user-supplied `FirestoreSettings` object. This is a separate type so that\n * defaults can be supplied and the value can be checked for equality.\n */\nclass FirestoreSettingsImpl {\n constructor(e) {\n var t, n;\n if (void 0 === e.host) {\n if (void 0 !== e.ssl) throw new FirestoreError(C.INVALID_ARGUMENT, \"Can't provide ssl option if host option is not set\");\n this.host = \"firestore.googleapis.com\", this.ssl = true;\n } else this.host = e.host, this.ssl = null === (t = e.ssl) || void 0 === t || t;\n if (this.credentials = e.credentials, this.ignoreUndefinedProperties = !!e.ignoreUndefinedProperties, this.localCache = e.localCache, void 0 === e.cacheSizeBytes) this.cacheSizeBytes = 41943040;else {\n if (-1 !== e.cacheSizeBytes && e.cacheSizeBytes < 1048576) throw new FirestoreError(C.INVALID_ARGUMENT, \"cacheSizeBytes must be at least 1048576\");\n this.cacheSizeBytes = e.cacheSizeBytes;\n }\n __PRIVATE_validateIsNotUsedTogether(\"experimentalForceLongPolling\", e.experimentalForceLongPolling, \"experimentalAutoDetectLongPolling\", e.experimentalAutoDetectLongPolling), this.experimentalForceLongPolling = !!e.experimentalForceLongPolling, this.experimentalForceLongPolling ? this.experimentalAutoDetectLongPolling = !1 : void 0 === e.experimentalAutoDetectLongPolling ? 
this.experimentalAutoDetectLongPolling = true :\n // For backwards compatibility, coerce the value to boolean even though\n // the TypeScript compiler has narrowed the type to boolean already.\n // noinspection PointlessBooleanExpressionJS\n this.experimentalAutoDetectLongPolling = !!e.experimentalAutoDetectLongPolling, this.experimentalLongPollingOptions = __PRIVATE_cloneLongPollingOptions(null !== (n = e.experimentalLongPollingOptions) && void 0 !== n ? n : {}), function __PRIVATE_validateLongPollingOptions(e) {\n if (void 0 !== e.timeoutSeconds) {\n if (isNaN(e.timeoutSeconds)) throw new FirestoreError(C.INVALID_ARGUMENT, `invalid long polling timeout: ${e.timeoutSeconds} (must not be NaN)`);\n if (e.timeoutSeconds < 5) throw new FirestoreError(C.INVALID_ARGUMENT, `invalid long polling timeout: ${e.timeoutSeconds} (minimum allowed value is 5)`);\n if (e.timeoutSeconds > 30) throw new FirestoreError(C.INVALID_ARGUMENT, `invalid long polling timeout: ${e.timeoutSeconds} (maximum allowed value is 30)`);\n }\n }\n /**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n /**\n * The Cloud Firestore service interface.\n *\n * Do not call this constructor directly. Instead, use {@link (getFirestore:1)}.\n */(this.experimentalLongPollingOptions), this.useFetchStreams = !!e.useFetchStreams;\n }\n isEqual(e) {\n return this.host === e.host && this.ssl === e.ssl && this.credentials === e.credentials && this.cacheSizeBytes === e.cacheSizeBytes && this.experimentalForceLongPolling === e.experimentalForceLongPolling && this.experimentalAutoDetectLongPolling === e.experimentalAutoDetectLongPolling && function __PRIVATE_longPollingOptionsEqual(e, t) {\n return e.timeoutSeconds === t.timeoutSeconds;\n }(this.experimentalLongPollingOptions, e.experimentalLongPollingOptions) && this.ignoreUndefinedProperties === e.ignoreUndefinedProperties && this.useFetchStreams === e.useFetchStreams;\n }\n}\nclass Firestore$1 {\n /** @hideconstructor */\n constructor(e, t, n, r) {\n this._authCredentials = e, this._appCheckCredentials = t, this._databaseId = n, this._app = r,\n /**\n * Whether it's a Firestore or Firestore Lite instance.\n */\n this.type = \"firestore-lite\", this._persistenceKey = \"(lite)\", this._settings = new FirestoreSettingsImpl({}), this._settingsFrozen = !1;\n }\n /**\n * The {@link @firebase/app#FirebaseApp} associated with this `Firestore` service\n * instance.\n */\n get app() {\n if (!this._app) throw new FirestoreError(C.FAILED_PRECONDITION, \"Firestore was not initialized using the Firebase SDK. 'app' is not available\");\n return this._app;\n }\n get _initialized() {\n return this._settingsFrozen;\n }\n get _terminated() {\n return void 0 !== this._terminateTask;\n }\n _setSettings(e) {\n if (this._settingsFrozen) throw new FirestoreError(C.FAILED_PRECONDITION, \"Firestore has already been started and its settings can no longer be changed. 
You can only modify settings before calling any other methods on a Firestore object.\");\n this._settings = new FirestoreSettingsImpl(e), void 0 !== e.credentials && (this._authCredentials = function __PRIVATE_makeAuthCredentialsProvider(e) {\n if (!e) return new __PRIVATE_EmptyAuthCredentialsProvider();\n switch (e.type) {\n case \"firstParty\":\n return new __PRIVATE_FirstPartyAuthCredentialsProvider(e.sessionIndex || \"0\", e.iamToken || null, e.authTokenFactory || null);\n case \"provider\":\n return e.client;\n default:\n throw new FirestoreError(C.INVALID_ARGUMENT, \"makeAuthCredentialsProvider failed due to invalid credential type\");\n }\n }(e.credentials));\n }\n _getSettings() {\n return this._settings;\n }\n _freezeSettings() {\n return this._settingsFrozen = !0, this._settings;\n }\n _delete() {\n return this._terminateTask || (this._terminateTask = this._terminate()), this._terminateTask;\n }\n /** Returns a JSON-serializable representation of this `Firestore` instance. */\n toJSON() {\n return {\n app: this._app,\n databaseId: this._databaseId,\n settings: this._settings\n };\n }\n /**\n * Terminates all components used by this client. Subclasses can override\n * this method to clean up their own dependencies, but must also call this\n * method.\n *\n * Only ever called once.\n */\n _terminate() {\n /**\n * Removes all components associated with the provided instance. Must be called\n * when the `Firestore` instance is terminated.\n */\n return function __PRIVATE_removeComponents(e) {\n const t = ye.get(e);\n t && (__PRIVATE_logDebug(\"ComponentProvider\", \"Removing Datastore\"), ye.delete(e), t.terminate());\n }(this), Promise.resolve();\n }\n}\n\n/**\n * Modify this instance to communicate with the Cloud Firestore emulator.\n *\n * Note: This must be called before this instance has been used to do any\n * operations.\n *\n * @param firestore - The `Firestore` instance to configure to connect to the\n * emulator.\n * @param host - the emulator host (ex: localhost).\n * @param port - the emulator port (ex: 9000).\n * @param options.mockUserToken - the mock auth token to use for unit testing\n * Security Rules.\n */\nfunction connectFirestoreEmulator(e, t, n, r = {}) {\n var i;\n const s = (e = __PRIVATE_cast(e, Firestore$1))._getSettings(),\n o = `${t}:${n}`;\n if (\"firestore.googleapis.com\" !== s.host && s.host !== o && __PRIVATE_logWarn(\"Host has been set in both settings() and connectFirestoreEmulator(), emulator host will be used.\"), e._setSettings(Object.assign(Object.assign({}, s), {\n host: o,\n ssl: !1\n })), r.mockUserToken) {\n let t, n;\n if (\"string\" == typeof r.mockUserToken) t = r.mockUserToken, n = User.MOCK_USER;else {\n // Let createMockUserToken validate first (catches common mistakes like\n // invalid field \"uid\" and missing field \"sub\" / \"user_id\".)\n t = createMockUserToken(r.mockUserToken, null === (i = e._app) || void 0 === i ? 
void 0 : i.options.projectId);\n const s = r.mockUserToken.sub || r.mockUserToken.user_id;\n if (!s) throw new FirestoreError(C.INVALID_ARGUMENT, \"mockUserToken must contain 'sub' or 'user_id' field!\");\n n = new User(s);\n }\n e._authCredentials = new __PRIVATE_EmulatorAuthCredentialsProvider(new __PRIVATE_OAuthToken(t, n));\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A `Query` refers to a query which you can read or listen to. You can also\n * construct refined `Query` objects by adding filters and ordering.\n */\nclass Query {\n // This is the lite version of the Query class in the main SDK.\n /** @hideconstructor protected */\n constructor(e,\n /**\n * If provided, the `FirestoreDataConverter` associated with this instance.\n */\n t, n) {\n this.converter = t, this._query = n, /** The type of this Firestore reference. */\n this.type = \"query\", this.firestore = e;\n }\n withConverter(e) {\n return new Query(this.firestore, e, this._query);\n }\n}\n\n/**\n * A `DocumentReference` refers to a document location in a Firestore database\n * and can be used to write, read, or listen to the location. The document at\n * the referenced location may or may not exist.\n */\nclass DocumentReference {\n /** @hideconstructor */\n constructor(e,\n /**\n * If provided, the `FirestoreDataConverter` associated with this instance.\n */\n t, n) {\n this.converter = t, this._key = n, /** The type of this Firestore reference. */\n this.type = \"document\", this.firestore = e;\n }\n get _path() {\n return this._key.path;\n }\n /**\n * The document's identifier within its collection.\n */\n get id() {\n return this._key.path.lastSegment();\n }\n /**\n * A string representing the path of the referenced document (relative\n * to the root of the database).\n */\n get path() {\n return this._key.path.canonicalString();\n }\n /**\n * The collection this `DocumentReference` belongs to.\n */\n get parent() {\n return new CollectionReference(this.firestore, this.converter, this._key.path.popLast());\n }\n withConverter(e) {\n return new DocumentReference(this.firestore, e, this._key);\n }\n}\n\n/**\n * A `CollectionReference` object can be used for adding documents, getting\n * document references, and querying for documents (using {@link (query:1)}).\n */\nclass CollectionReference extends Query {\n /** @hideconstructor */\n constructor(e, t, n) {\n super(e, t, __PRIVATE_newQueryForPath(n)), this._path = n, /** The type of this Firestore reference. */\n this.type = \"collection\";\n }\n /** The collection's identifier. */\n get id() {\n return this._query.path.lastSegment();\n }\n /**\n * A string representing the path of the referenced collection (relative\n * to the root of the database).\n */\n get path() {\n return this._query.path.canonicalString();\n }\n /**\n * A reference to the containing `DocumentReference` if this is a\n * subcollection. 
If this isn't a subcollection, the reference is null.\n */\n get parent() {\n const e = this._path.popLast();\n return e.isEmpty() ? null : new DocumentReference(this.firestore, /* converter= */null, new DocumentKey(e));\n }\n withConverter(e) {\n return new CollectionReference(this.firestore, e, this._path);\n }\n}\nfunction collection(e, t, ...n) {\n if (e = getModularInstance(e), __PRIVATE_validateNonEmptyArgument(\"collection\", \"path\", t), e instanceof Firestore$1) {\n const r = ResourcePath.fromString(t, ...n);\n return __PRIVATE_validateCollectionPath(r), new CollectionReference(e, /* converter= */null, r);\n }\n {\n if (!(e instanceof DocumentReference || e instanceof CollectionReference)) throw new FirestoreError(C.INVALID_ARGUMENT, \"Expected first argument to collection() to be a CollectionReference, a DocumentReference or FirebaseFirestore\");\n const r = e._path.child(ResourcePath.fromString(t, ...n));\n return __PRIVATE_validateCollectionPath(r), new CollectionReference(e.firestore, /* converter= */null, r);\n }\n}\n\n// TODO(firestorelite): Consider using ErrorFactory -\n// https://github.com/firebase/firebase-js-sdk/blob/0131e1f/packages/util/src/errors.ts#L106\n/**\n * Creates and returns a new `Query` instance that includes all documents in the\n * database that are contained in a collection or subcollection with the\n * given `collectionId`.\n *\n * @param firestore - A reference to the root `Firestore` instance.\n * @param collectionId - Identifies the collections to query over. Every\n * collection or subcollection with this ID as the last segment of its path\n * will be included. Cannot contain a slash.\n * @returns The created `Query`.\n */\nfunction collectionGroup(e, t) {\n if (e = __PRIVATE_cast(e, Firestore$1), __PRIVATE_validateNonEmptyArgument(\"collectionGroup\", \"collection id\", t), t.indexOf(\"/\") >= 0) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid collection ID '${t}' passed to function collectionGroup(). Collection IDs must not contain '/'.`);\n return new Query(e, /* converter= */null, function __PRIVATE_newQueryForCollectionGroup(e) {\n return new __PRIVATE_QueryImpl(ResourcePath.emptyPath(), e);\n }(t));\n}\nfunction doc(e, t, ...n) {\n if (e = getModularInstance(e),\n // We allow omission of 'pathString' but explicitly prohibit passing in both\n // 'undefined' and 'null'.\n 1 === arguments.length && (t = __PRIVATE_AutoId.newId()), __PRIVATE_validateNonEmptyArgument(\"doc\", \"path\", t), e instanceof Firestore$1) {\n const r = ResourcePath.fromString(t, ...n);\n return __PRIVATE_validateDocumentPath(r), new DocumentReference(e, /* converter= */null, new DocumentKey(r));\n }\n {\n if (!(e instanceof DocumentReference || e instanceof CollectionReference)) throw new FirestoreError(C.INVALID_ARGUMENT, \"Expected first argument to collection() to be a CollectionReference, a DocumentReference or FirebaseFirestore\");\n const r = e._path.child(ResourcePath.fromString(t, ...n));\n return __PRIVATE_validateDocumentPath(r), new DocumentReference(e.firestore, e instanceof CollectionReference ? 
e.converter : null, new DocumentKey(r));\n }\n}\n\n/**\n * Returns true if the provided references are equal.\n *\n * @param left - A reference to compare.\n * @param right - A reference to compare.\n * @returns true if the references point to the same location in the same\n * Firestore database.\n */\nfunction refEqual(e, t) {\n return e = getModularInstance(e), t = getModularInstance(t), (e instanceof DocumentReference || e instanceof CollectionReference) && (t instanceof DocumentReference || t instanceof CollectionReference) && e.firestore === t.firestore && e.path === t.path && e.converter === t.converter;\n}\n\n/**\n * Returns true if the provided queries point to the same collection and apply\n * the same constraints.\n *\n * @param left - A `Query` to compare.\n * @param right - A `Query` to compare.\n * @returns true if the references point to the same location in the same\n * Firestore database.\n */\nfunction queryEqual(e, t) {\n return e = getModularInstance(e), t = getModularInstance(t), e instanceof Query && t instanceof Query && e.firestore === t.firestore && __PRIVATE_queryEquals(e._query, t._query) && e.converter === t.converter;\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nclass __PRIVATE_AsyncQueueImpl {\n constructor() {\n // The last promise in the queue.\n this.iu = Promise.resolve(),\n // A list of retryable operations. Retryable operations are run in order and\n // retried with backoff.\n this.su = [],\n // Is this AsyncQueue being shut down? Once it is set to true, it will not\n // be changed again.\n this.ou = !1,\n // Operations scheduled to be queued in the future. Operations are\n // automatically removed after they are run or canceled.\n this._u = [],\n // visible for testing\n this.au = null,\n // Flag set while there's an outstanding AsyncQueue operation, used for\n // assertion sanity-checks.\n this.uu = !1,\n // Enabled during shutdown on Safari to prevent future access to IndexedDB.\n this.cu = !1,\n // List of TimerIds to fast-forward delays for.\n this.lu = [],\n // Backoff timer used to schedule retries for retryable operations\n this.Yo = new __PRIVATE_ExponentialBackoff(this, \"async_queue_retry\" /* TimerId.AsyncQueueRetry */),\n // Visibility handler that triggers an immediate retry of all retryable\n // operations. 
Meant to speed up recovery when we regain file system access\n // after page comes into foreground.\n this.hu = () => {\n const e = getDocument();\n e && __PRIVATE_logDebug(\"AsyncQueue\", \"Visibility state changed to \" + e.visibilityState), this.Yo.Wo();\n };\n const e = getDocument();\n e && \"function\" == typeof e.addEventListener && e.addEventListener(\"visibilitychange\", this.hu);\n }\n get isShuttingDown() {\n return this.ou;\n }\n /**\n * Adds a new operation to the queue without waiting for it to complete (i.e.\n * we ignore the Promise result).\n */\n enqueueAndForget(e) {\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.enqueue(e);\n }\n enqueueAndForgetEvenWhileRestricted(e) {\n this.Pu(),\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.Iu(e);\n }\n enterRestrictedMode(e) {\n if (!this.ou) {\n this.ou = !0, this.cu = e || !1;\n const t = getDocument();\n t && \"function\" == typeof t.removeEventListener && t.removeEventListener(\"visibilitychange\", this.hu);\n }\n }\n enqueue(e) {\n if (this.Pu(), this.ou)\n // Return a Promise which never resolves.\n return new Promise(() => {});\n // Create a deferred Promise that we can return to the callee. This\n // allows us to return a \"hanging Promise\" only to the callee and still\n // advance the queue even when the operation is not run.\n const t = new __PRIVATE_Deferred();\n return this.Iu(() => this.ou && this.cu ? Promise.resolve() : (e().then(t.resolve, t.reject), t.promise)).then(() => t.promise);\n }\n enqueueRetryable(e) {\n this.enqueueAndForget(() => (this.su.push(e), this.Tu()));\n }\n /**\n * Runs the next operation from the retryable queue. If the operation fails,\n * reschedules with backoff.\n */\n async Tu() {\n if (0 !== this.su.length) {\n try {\n await this.su[0](), this.su.shift(), this.Yo.reset();\n } catch (e) {\n if (!__PRIVATE_isIndexedDbTransactionError(e)) throw e;\n // Failure will be handled by AsyncQueue\n __PRIVATE_logDebug(\"AsyncQueue\", \"Operation failed with retryable error: \" + e);\n }\n this.su.length > 0 &&\n // If there are additional operations, we re-schedule `retryNextOp()`.\n // This is necessary to run retryable operations that failed during\n // their initial attempt since we don't know whether they are already\n // enqueued. If, for example, `op1`, `op2`, `op3` are enqueued and `op1`\n // needs to be re-run, we will run `op1`, `op1`, `op2` using the\n // already enqueued calls to `retryNextOp()`. `op3()` will then run in the\n // call scheduled here.\n // Since `backoffAndRun()` cancels an existing backoff and schedules a\n // new backoff on every call, there is only ever a single additional\n // operation in the queue.\n this.Yo.$o(() => this.Tu());\n }\n }\n Iu(e) {\n const t = this.iu.then(() => (this.uu = !0, e().catch(e => {\n this.au = e, this.uu = !1;\n const t =\n /**\n * Chrome includes Error.message in Error.stack. Other browsers do not.\n * This returns expected output of message + stack when available.\n * @param error - Error or FirestoreError\n */\n function __PRIVATE_getMessageOrStack(e) {\n let t = e.message || \"\";\n e.stack && (t = e.stack.includes(e.message) ? 
e.stack : e.message + \"\\n\" + e.stack);\n return t;\n }\n /**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */(e);\n // Re-throw the error so that this.tail becomes a rejected Promise and\n // all further attempts to chain (via .then) will just short-circuit\n // and return the rejected Promise.\n throw __PRIVATE_logError(\"INTERNAL UNHANDLED ERROR: \", t), e;\n }).then(e => (this.uu = !1, e))));\n return this.iu = t, t;\n }\n enqueueAfterDelay(e, t, n) {\n this.Pu(),\n // Fast-forward delays for timerIds that have been overriden.\n this.lu.indexOf(e) > -1 && (t = 0);\n const r = DelayedOperation.createAndSchedule(this, e, t, n, e => this.Eu(e));\n return this._u.push(r), r;\n }\n Pu() {\n this.au && fail();\n }\n verifyOperationInProgress() {}\n /**\n * Waits until all currently queued tasks are finished executing. Delayed\n * operations are not run.\n */\n async du() {\n // Operations in the queue prior to draining may have enqueued additional\n // operations. Keep draining the queue until the tail is no longer advanced,\n // which indicates that no more new operations were enqueued and that all\n // operations were executed.\n let e;\n do {\n e = this.iu, await e;\n } while (e !== this.iu);\n }\n /**\n * For Tests: Determine if a delayed operation with a particular TimerId\n * exists.\n */\n Au(e) {\n for (const t of this._u) if (t.timerId === e) return !0;\n return !1;\n }\n /**\n * For Tests: Runs some or all delayed operations early.\n *\n * @param lastTimerId - Delayed operations up to and including this TimerId\n * will be drained. Pass TimerId.All to run all delayed operations.\n * @returns a Promise that resolves once all operations have been run.\n */\n Ru(e) {\n // Note that draining may generate more delayed ops, so we do that first.\n return this.du().then(() => {\n // Run ops in the same order they'd run if they ran naturally.\n this._u.sort((e, t) => e.targetTimeMs - t.targetTimeMs);\n for (const t of this._u) if (t.skipDelay(), \"all\" /* TimerId.All */ !== e && t.timerId === e) break;\n return this.du();\n });\n }\n /**\n * For Tests: Skip all subsequent delays for a timer id.\n */\n Vu(e) {\n this.lu.push(e);\n }\n /** Called once a DelayedOperation is run or canceled. 
*/\n Eu(e) {\n // NOTE: indexOf / slice are O(n), but delayedOperations is expected to be small.\n const t = this._u.indexOf(e);\n this._u.splice(t, 1);\n }\n}\nfunction __PRIVATE_isPartialObserver(e) {\n /**\n * Returns true if obj is an object and contains at least one of the specified\n * methods.\n */\n return function __PRIVATE_implementsAnyMethods(e, t) {\n if (\"object\" != typeof e || null === e) return !1;\n const n = e;\n for (const e of t) if (e in n && \"function\" == typeof n[e]) return !0;\n return !1;\n }\n /**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n /**\n * Represents the task of loading a Firestore bundle. It provides progress of bundle\n * loading, as well as task completion and error events.\n *\n * The API is compatible with `Promise`.\n */(e, [\"next\", \"error\", \"complete\"]);\n}\nclass LoadBundleTask {\n constructor() {\n this._progressObserver = {}, this._taskCompletionResolver = new __PRIVATE_Deferred(), this._lastProgress = {\n taskState: \"Running\",\n totalBytes: 0,\n totalDocuments: 0,\n bytesLoaded: 0,\n documentsLoaded: 0\n };\n }\n /**\n * Registers functions to listen to bundle loading progress events.\n * @param next - Called when there is a progress update from bundle loading. Typically `next` calls occur\n * each time a Firestore document is loaded from the bundle.\n * @param error - Called when an error occurs during bundle loading. 
The task aborts after reporting the\n * error, and there should be no more updates after this.\n * @param complete - Called when the loading task is complete.\n */\n onProgress(e, t, n) {\n this._progressObserver = {\n next: e,\n error: t,\n complete: n\n };\n }\n /**\n * Implements the `Promise.catch` interface.\n *\n * @param onRejected - Called when an error occurs during bundle loading.\n */\n catch(e) {\n return this._taskCompletionResolver.promise.catch(e);\n }\n /**\n * Implements the `Promise.then` interface.\n *\n * @param onFulfilled - Called on the completion of the loading task with a final `LoadBundleTaskProgress` update.\n * The update will always have its `taskState` set to `\"Success\"`.\n * @param onRejected - Called when an error occurs during bundle loading.\n */\n then(e, t) {\n return this._taskCompletionResolver.promise.then(e, t);\n }\n /**\n * Notifies all observers that bundle loading has completed, with a provided\n * `LoadBundleTaskProgress` object.\n *\n * @private\n */\n _completeWith(e) {\n this._updateProgress(e), this._progressObserver.complete && this._progressObserver.complete(), this._taskCompletionResolver.resolve(e);\n }\n /**\n * Notifies all observers that bundle loading has failed, with a provided\n * `Error` as the reason.\n *\n * @private\n */\n _failWith(e) {\n this._lastProgress.taskState = \"Error\", this._progressObserver.next && this._progressObserver.next(this._lastProgress), this._progressObserver.error && this._progressObserver.error(e), this._taskCompletionResolver.reject(e);\n }\n /**\n * Notifies a progress update of loading a bundle.\n * @param progress - The new progress.\n *\n * @private\n */\n _updateProgress(e) {\n this._lastProgress = e, this._progressObserver.next && this._progressObserver.next(e);\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Constant used to indicate the LRU garbage collection should be disabled.\n * Set this value as the `cacheSizeBytes` on the settings passed to the\n * {@link Firestore} instance.\n */\nconst we = -1;\n\n/**\n * The Cloud Firestore service interface.\n *\n * Do not call this constructor directly. Instead, use {@link (getFirestore:1)}.\n */\nclass Firestore extends Firestore$1 {\n /** @hideconstructor */\n constructor(e, t, n, r) {\n super(e, t, n, r),\n /**\n * Whether it's a {@link Firestore} or Firestore Lite instance.\n */\n this.type = \"firestore\", this._queue = function __PRIVATE_newAsyncQueue() {\n return new __PRIVATE_AsyncQueueImpl();\n }(), this._persistenceKey = (null == r ? 
void 0 : r.name) || \"[DEFAULT]\";\n }\n _terminate() {\n return this._firestoreClient ||\n // The client must be initialized to ensure that all subsequent API\n // usage throws an exception.\n __PRIVATE_configureFirestore(this), this._firestoreClient.terminate();\n }\n}\n\n/**\n * Initializes a new instance of {@link Firestore} with the provided settings.\n * Can only be called before any other function, including\n * {@link (getFirestore:1)}. If the custom settings are empty, this function is\n * equivalent to calling {@link (getFirestore:1)}.\n *\n * @param app - The {@link @firebase/app#FirebaseApp} with which the {@link Firestore} instance will\n * be associated.\n * @param settings - A settings object to configure the {@link Firestore} instance.\n * @param databaseId - The name of the database.\n * @returns A newly initialized {@link Firestore} instance.\n */\nfunction initializeFirestore(e, t, n) {\n n || (n = \"(default)\");\n const r = _getProvider(e, \"firestore\");\n if (r.isInitialized(n)) {\n const e = r.getImmediate({\n identifier: n\n }),\n i = r.getOptions(n);\n if (deepEqual(i, t)) return e;\n throw new FirestoreError(C.FAILED_PRECONDITION, \"initializeFirestore() has already been called with different options. To avoid this error, call initializeFirestore() with the same options as when it was originally called, or call getFirestore() to return the already initialized instance.\");\n }\n if (void 0 !== t.cacheSizeBytes && void 0 !== t.localCache) throw new FirestoreError(C.INVALID_ARGUMENT, \"cache and cacheSizeBytes cannot be specified at the same time as cacheSizeBytes willbe deprecated. Instead, specify the cache size in the cache object\");\n if (void 0 !== t.cacheSizeBytes && -1 !== t.cacheSizeBytes && t.cacheSizeBytes < 1048576) throw new FirestoreError(C.INVALID_ARGUMENT, \"cacheSizeBytes must be at least 1048576\");\n return r.initialize({\n options: t,\n instanceIdentifier: n\n });\n}\nfunction getFirestore(t, n) {\n const r = \"object\" == typeof t ? t : getApp(),\n i = \"string\" == typeof t ? t : n || \"(default)\",\n s = _getProvider(r, \"firestore\").getImmediate({\n identifier: i\n });\n if (!s._initialized) {\n const e = getDefaultEmulatorHostnameAndPort(\"firestore\");\n e && connectFirestoreEmulator(s, ...e);\n }\n return s;\n}\n\n/**\n * @internal\n */\nfunction ensureFirestoreConfigured(e) {\n return e._firestoreClient || __PRIVATE_configureFirestore(e), e._firestoreClient.verifyNotTerminated(), e._firestoreClient;\n}\nfunction __PRIVATE_configureFirestore(e) {\n var t, n, r;\n const i = e._freezeSettings(),\n s = function __PRIVATE_makeDatabaseInfo(e, t, n, r) {\n return new DatabaseInfo(e, t, n, r.host, r.ssl, r.experimentalForceLongPolling, r.experimentalAutoDetectLongPolling, __PRIVATE_cloneLongPollingOptions(r.experimentalLongPollingOptions), r.useFetchStreams);\n }(e._databaseId, (null === (t = e._app) || void 0 === t ? void 0 : t.options.appId) || \"\", e._persistenceKey, i);\n e._firestoreClient = new FirestoreClient(e._authCredentials, e._appCheckCredentials, e._queue, s), (null === (n = i.localCache) || void 0 === n ? void 0 : n._offlineComponentProvider) && (null === (r = i.localCache) || void 0 === r ? 
void 0 : r._onlineComponentProvider) && (e._firestoreClient._uninitializedComponentsProvider = {\n _offlineKind: i.localCache.kind,\n _offline: i.localCache._offlineComponentProvider,\n _online: i.localCache._onlineComponentProvider\n });\n}\n\n/**\n * Attempts to enable persistent storage, if possible.\n *\n * On failure, `enableIndexedDbPersistence()` will reject the promise or\n * throw an exception. There are several reasons why this can fail, which can be\n * identified by the `code` on the error.\n *\n * * failed-precondition: The app is already open in another browser tab.\n * * unimplemented: The browser is incompatible with the offline persistence\n * implementation.\n *\n * Note that even after a failure, the {@link Firestore} instance will remain\n * usable, however offline persistence will be disabled.\n *\n * Note: `enableIndexedDbPersistence()` must be called before any other functions\n * (other than {@link initializeFirestore}, {@link (getFirestore:1)} or\n * {@link clearIndexedDbPersistence}.\n *\n * Persistence cannot be used in a Node.js environment.\n *\n * @param firestore - The {@link Firestore} instance to enable persistence for.\n * @param persistenceSettings - Optional settings object to configure\n * persistence.\n * @returns A `Promise` that represents successfully enabling persistent storage.\n * @deprecated This function will be removed in a future major release. Instead, set\n * `FirestoreSettings.localCache` to an instance of `PersistentLocalCache` to\n * turn on IndexedDb cache. Calling this function when `FirestoreSettings.localCache`\n * is already specified will throw an exception.\n */\nfunction enableIndexedDbPersistence(e, t) {\n __PRIVATE_verifyNotInitialized(e = __PRIVATE_cast(e, Firestore));\n const n = ensureFirestoreConfigured(e);\n if (n._uninitializedComponentsProvider) throw new FirestoreError(C.FAILED_PRECONDITION, \"SDK cache is already specified.\");\n __PRIVATE_logWarn(\"enableIndexedDbPersistence() will be deprecated in the future, you can use `FirestoreSettings.cache` instead.\");\n const r = e._freezeSettings(),\n i = new OnlineComponentProvider();\n return __PRIVATE_setPersistenceProviders(n, i, new __PRIVATE_IndexedDbOfflineComponentProvider(i, r.cacheSizeBytes, null == t ? void 0 : t.forceOwnership));\n}\n\n/**\n * Attempts to enable multi-tab persistent storage, if possible. If enabled\n * across all tabs, all operations share access to local persistence, including\n * shared execution of queries and latency-compensated local document updates\n * across all connected instances.\n *\n * On failure, `enableMultiTabIndexedDbPersistence()` will reject the promise or\n * throw an exception. There are several reasons why this can fail, which can be\n * identified by the `code` on the error.\n *\n * * failed-precondition: The app is already open in another browser tab and\n * multi-tab is not enabled.\n * * unimplemented: The browser is incompatible with the offline persistence\n * implementation.\n *\n * Note that even after a failure, the {@link Firestore} instance will remain\n * usable, however offline persistence will be disabled.\n *\n * @param firestore - The {@link Firestore} instance to enable persistence for.\n * @returns A `Promise` that represents successfully enabling persistent\n * storage.\n * @deprecated This function will be removed in a future major release. Instead, set\n * `FirestoreSettings.localCache` to an instance of `PersistentLocalCache` to\n * turn on indexeddb cache. 
Calling this function when `FirestoreSettings.localCache`\n * is already specified will throw an exception.\n */\nfunction enableMultiTabIndexedDbPersistence(e) {\n __PRIVATE_verifyNotInitialized(e = __PRIVATE_cast(e, Firestore));\n const t = ensureFirestoreConfigured(e);\n if (t._uninitializedComponentsProvider) throw new FirestoreError(C.FAILED_PRECONDITION, \"SDK cache is already specified.\");\n __PRIVATE_logWarn(\"enableMultiTabIndexedDbPersistence() will be deprecated in the future, you can use `FirestoreSettings.cache` instead.\");\n const n = e._freezeSettings(),\n r = new OnlineComponentProvider();\n return __PRIVATE_setPersistenceProviders(t, r, new __PRIVATE_MultiTabOfflineComponentProvider(r, n.cacheSizeBytes));\n}\n\n/**\n * Registers both the `OfflineComponentProvider` and `OnlineComponentProvider`.\n * If the operation fails with a recoverable error (see\n * `canRecoverFromIndexedDbError()` below), the returned Promise is rejected\n * but the client remains usable.\n */\nfunction __PRIVATE_setPersistenceProviders(e, t, n) {\n const r = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueue(async () => {\n try {\n await __PRIVATE_setOfflineComponentProvider(e, n), await __PRIVATE_setOnlineComponentProvider(e, t), r.resolve();\n } catch (e) {\n const t = e;\n if (!__PRIVATE_canFallbackFromIndexedDbError(t)) throw t;\n __PRIVATE_logWarn(\"Error enabling indexeddb cache. Falling back to memory cache: \" + t), r.reject(t);\n }\n }).then(() => r.promise);\n}\n\n/**\n * Clears the persistent storage. This includes pending writes and cached\n * documents.\n *\n * Must be called while the {@link Firestore} instance is not started (after the app is\n * terminated or when the app is first initialized). On startup, this function\n * must be called before other functions (other than {@link\n * initializeFirestore} or {@link (getFirestore:1)})). If the {@link Firestore}\n * instance is still running, the promise will be rejected with the error code\n * of `failed-precondition`.\n *\n * Note: `clearIndexedDbPersistence()` is primarily intended to help write\n * reliable tests that use Cloud Firestore. It uses an efficient mechanism for\n * dropping existing data but does not attempt to securely overwrite or\n * otherwise make cached data unrecoverable. For applications that are sensitive\n * to the disclosure of cached data in between user sessions, we strongly\n * recommend not enabling persistence at all.\n *\n * @param firestore - The {@link Firestore} instance to clear persistence for.\n * @returns A `Promise` that is resolved when the persistent storage is\n * cleared. 
Otherwise, the promise is rejected with an error.\n */\nfunction clearIndexedDbPersistence(e) {\n if (e._initialized && !e._terminated) throw new FirestoreError(C.FAILED_PRECONDITION, \"Persistence can only be cleared before a Firestore instance is initialized or after it is terminated.\");\n const t = new __PRIVATE_Deferred();\n return e._queue.enqueueAndForgetEvenWhileRestricted(async () => {\n try {\n await async function __PRIVATE_indexedDbClearPersistence(e) {\n if (!__PRIVATE_SimpleDb.D()) return Promise.resolve();\n const t = e + \"main\";\n await __PRIVATE_SimpleDb.delete(t);\n }(__PRIVATE_indexedDbStoragePrefix(e._databaseId, e._persistenceKey)), t.resolve();\n } catch (e) {\n t.reject(e);\n }\n }), t.promise;\n}\n\n/**\n * Waits until all currently pending writes for the active user have been\n * acknowledged by the backend.\n *\n * The returned promise resolves immediately if there are no outstanding writes.\n * Otherwise, the promise waits for all previously issued writes (including\n * those written in a previous app session), but it does not wait for writes\n * that were added after the function is called. If you want to wait for\n * additional writes, call `waitForPendingWrites()` again.\n *\n * Any outstanding `waitForPendingWrites()` promises are rejected during user\n * changes.\n *\n * @returns A `Promise` which resolves when all currently pending writes have been\n * acknowledged by the backend.\n */\nfunction waitForPendingWrites(e) {\n return function __PRIVATE_firestoreClientWaitForPendingWrites(e) {\n const t = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueueAndForget(async () => __PRIVATE_syncEngineRegisterPendingWritesCallback(await __PRIVATE_getSyncEngine(e), t)), t.promise;\n }(ensureFirestoreConfigured(e = __PRIVATE_cast(e, Firestore)));\n}\n\n/**\n * Re-enables use of the network for this {@link Firestore} instance after a prior\n * call to {@link disableNetwork}.\n *\n * @returns A `Promise` that is resolved once the network has been enabled.\n */\nfunction enableNetwork(e) {\n return __PRIVATE_firestoreClientEnableNetwork(ensureFirestoreConfigured(e = __PRIVATE_cast(e, Firestore)));\n}\n\n/**\n * Disables network usage for this instance. It can be re-enabled via {@link\n * enableNetwork}. While the network is disabled, any snapshot listeners,\n * `getDoc()` or `getDocs()` calls will return results from cache, and any write\n * operations will be queued until the network is restored.\n *\n * @returns A `Promise` that is resolved once the network has been disabled.\n */\nfunction disableNetwork(e) {\n return __PRIVATE_firestoreClientDisableNetwork(ensureFirestoreConfigured(e = __PRIVATE_cast(e, Firestore)));\n}\n\n/**\n * Terminates the provided {@link Firestore} instance.\n *\n * After calling `terminate()` only the `clearIndexedDbPersistence()` function\n * may be used. Any other function will throw a `FirestoreError`.\n *\n * To restart after termination, create a new instance of FirebaseFirestore with\n * {@link (getFirestore:1)}.\n *\n * Termination does not cancel any pending writes, and any promises that are\n * awaiting a response from the server will not be resolved. If you have\n * persistence enabled, the next time you start this instance, it will resume\n * sending these writes to the server.\n *\n * Note: Under normal circumstances, calling `terminate()` is not required. 
This\n * function is useful only when you want to force this instance to release all\n * of its resources or in combination with `clearIndexedDbPersistence()` to\n * ensure that all local state is destroyed between test runs.\n *\n * @returns A `Promise` that is resolved when the instance has been successfully\n * terminated.\n */\nfunction terminate(e) {\n return _removeServiceInstance(e.app, \"firestore\", e._databaseId.database), e._delete();\n}\n\n/**\n * Loads a Firestore bundle into the local cache.\n *\n * @param firestore - The {@link Firestore} instance to load bundles for.\n * @param bundleData - An object representing the bundle to be loaded. Valid\n * objects are `ArrayBuffer`, `ReadableStream` or `string`.\n *\n * @returns A `LoadBundleTask` object, which notifies callers with progress\n * updates, and completion or error events. It can be used as a\n * `Promise`.\n */\nfunction loadBundle(e, t) {\n const n = ensureFirestoreConfigured(e = __PRIVATE_cast(e, Firestore)),\n r = new LoadBundleTask();\n return __PRIVATE_firestoreClientLoadBundle(n, e._databaseId, t, r), r;\n}\n\n/**\n * Reads a Firestore {@link Query} from local cache, identified by the given\n * name.\n *\n * The named queries are packaged into bundles on the server side (along\n * with resulting documents), and loaded to local cache using `loadBundle`. Once\n * in local cache, use this method to extract a {@link Query} by name.\n *\n * @param firestore - The {@link Firestore} instance to read the query from.\n * @param name - The name of the query.\n * @returns A `Promise` that is resolved with the Query or `null`.\n */\nfunction namedQuery(e, t) {\n return __PRIVATE_firestoreClientGetNamedQuery(ensureFirestoreConfigured(e = __PRIVATE_cast(e, Firestore)), t).then(t => t ? new Query(e, null, t.query) : null);\n}\nfunction __PRIVATE_verifyNotInitialized(e) {\n if (e._initialized || e._terminated) throw new FirestoreError(C.FAILED_PRECONDITION, \"Firestore has already been started and persistence can no longer be enabled. 
You can only enable persistence before calling any other methods on a Firestore object.\");\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Represents an aggregation that can be performed by Firestore.\n */\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nclass AggregateField {\n /**\n * Create a new AggregateField\n * @param aggregateType Specifies the type of aggregation operation to perform.\n * @param _internalFieldPath Optionally specifies the field that is aggregated.\n * @internal\n */\n constructor(e = \"count\", t) {\n this._internalFieldPath = t, /** A type string to uniquely identify instances of this class. */\n this.type = \"AggregateField\", this.aggregateType = e;\n }\n}\n\n/**\n * The results of executing an aggregation query.\n */\nclass AggregateQuerySnapshot {\n /** @hideconstructor */\n constructor(e, t, n) {\n this._userDataWriter = t, this._data = n, /** A type string to uniquely identify instances of this class. 
*/\n this.type = \"AggregateQuerySnapshot\", this.query = e;\n }\n /**\n * Returns the results of the aggregations performed over the underlying\n * query.\n *\n * The keys of the returned object will be the same as those of the\n * `AggregateSpec` object specified to the aggregation method, and the values\n * will be the corresponding aggregation result.\n *\n * @returns The results of the aggregations performed over the underlying\n * query.\n */\n data() {\n return this._userDataWriter.convertObjectMap(this._data);\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * An immutable object representing an array of bytes.\n */\nclass Bytes {\n /** @hideconstructor */\n constructor(e) {\n this._byteString = e;\n }\n /**\n * Creates a new `Bytes` object from the given Base64 string, converting it to\n * bytes.\n *\n * @param base64 - The Base64 string used to create the `Bytes` object.\n */\n static fromBase64String(e) {\n try {\n return new Bytes(ByteString.fromBase64String(e));\n } catch (e) {\n throw new FirestoreError(C.INVALID_ARGUMENT, \"Failed to construct data from Base64 string: \" + e);\n }\n }\n /**\n * Creates a new `Bytes` object from the given Uint8Array.\n *\n * @param array - The Uint8Array used to create the `Bytes` object.\n */\n static fromUint8Array(e) {\n return new Bytes(ByteString.fromUint8Array(e));\n }\n /**\n * Returns the underlying bytes as a Base64-encoded string.\n *\n * @returns The Base64-encoded string created from the `Bytes` object.\n */\n toBase64() {\n return this._byteString.toBase64();\n }\n /**\n * Returns the underlying bytes in a new `Uint8Array`.\n *\n * @returns The Uint8Array created from the `Bytes` object.\n */\n toUint8Array() {\n return this._byteString.toUint8Array();\n }\n /**\n * Returns a string representation of the `Bytes` object.\n *\n * @returns A string representation of the `Bytes` object.\n */\n toString() {\n return \"Bytes(base64: \" + this.toBase64() + \")\";\n }\n /**\n * Returns true if this `Bytes` object is equal to the provided one.\n *\n * @param other - The `Bytes` object to compare against.\n * @returns true if this `Bytes` object is equal to the provided one.\n */\n isEqual(e) {\n return this._byteString.isEqual(e._byteString);\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A `FieldPath` refers to a field in a document. 
The path may consist of a\n * single field name (referring to a top-level field in the document), or a\n * list of field names (referring to a nested field in the document).\n *\n * Create a `FieldPath` by providing field names. If more than one field\n * name is provided, the path will point to a nested field in a document.\n */\nclass FieldPath {\n /**\n * Creates a `FieldPath` from the provided field names. If more than one field\n * name is provided, the path will point to a nested field in a document.\n *\n * @param fieldNames - A list of field names.\n */\n constructor(...e) {\n for (let t = 0; t < e.length; ++t) if (0 === e[t].length) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid field name at argument ${t + 1}. Field names must not be empty.`);\n this._internalPath = new FieldPath$1(e);\n }\n /**\n * Returns true if this `FieldPath` is equal to the provided one.\n *\n * @param other - The `FieldPath` to compare against.\n * @returns true if this `FieldPath` is equal to the provided one.\n */\n isEqual(e) {\n return this._internalPath.isEqual(e._internalPath);\n }\n}\n\n/**\n * Returns a special sentinel `FieldPath` to refer to the ID of a document.\n * It can be used in queries to sort or filter by the document ID.\n */\nfunction documentId() {\n return new FieldPath(\"__name__\");\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Sentinel values that can be used when writing document fields with `set()`\n * or `update()`.\n */\nclass FieldValue {\n /**\n * @param _methodName - The public API endpoint that returns this class.\n * @hideconstructor\n */\n constructor(e) {\n this._methodName = e;\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * An immutable object representing a geographic location in Firestore. 
The\n * location is represented as latitude/longitude pair.\n *\n * Latitude values are in the range of [-90, 90].\n * Longitude values are in the range of [-180, 180].\n */\nclass GeoPoint {\n /**\n * Creates a new immutable `GeoPoint` object with the provided latitude and\n * longitude values.\n * @param latitude - The latitude as number between -90 and 90.\n * @param longitude - The longitude as number between -180 and 180.\n */\n constructor(e, t) {\n if (!isFinite(e) || e < -90 || e > 90) throw new FirestoreError(C.INVALID_ARGUMENT, \"Latitude must be a number between -90 and 90, but was: \" + e);\n if (!isFinite(t) || t < -180 || t > 180) throw new FirestoreError(C.INVALID_ARGUMENT, \"Longitude must be a number between -180 and 180, but was: \" + t);\n this._lat = e, this._long = t;\n }\n /**\n * The latitude of this `GeoPoint` instance.\n */\n get latitude() {\n return this._lat;\n }\n /**\n * The longitude of this `GeoPoint` instance.\n */\n get longitude() {\n return this._long;\n }\n /**\n * Returns true if this `GeoPoint` is equal to the provided one.\n *\n * @param other - The `GeoPoint` to compare against.\n * @returns true if this `GeoPoint` is equal to the provided one.\n */\n isEqual(e) {\n return this._lat === e._lat && this._long === e._long;\n }\n /** Returns a JSON-serializable representation of this GeoPoint. */\n toJSON() {\n return {\n latitude: this._lat,\n longitude: this._long\n };\n }\n /**\n * Actually private to JS consumers of our API, so this function is prefixed\n * with an underscore.\n */\n _compareTo(e) {\n return __PRIVATE_primitiveComparator(this._lat, e._lat) || __PRIVATE_primitiveComparator(this._long, e._long);\n }\n}\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst Se = /^__.*__$/;\n\n/** The result of parsing document data (e.g. for a setData call). */\nclass ParsedSetData {\n constructor(e, t, n) {\n this.data = e, this.fieldMask = t, this.fieldTransforms = n;\n }\n toMutation(e, t) {\n return null !== this.fieldMask ? new __PRIVATE_PatchMutation(e, this.data, this.fieldMask, t, this.fieldTransforms) : new __PRIVATE_SetMutation(e, this.data, t, this.fieldTransforms);\n }\n}\n\n/** The result of parsing \"update\" data (i.e. for an updateData call). */\nclass ParsedUpdateData {\n constructor(e,\n // The fieldMask does not include document transforms.\n t, n) {\n this.data = e, this.fieldMask = t, this.fieldTransforms = n;\n }\n toMutation(e, t) {\n return new __PRIVATE_PatchMutation(e, this.data, this.fieldMask, t, this.fieldTransforms);\n }\n}\nfunction __PRIVATE_isWrite(e) {\n switch (e) {\n case 0 /* UserDataSource.Set */:\n // fall through\n case 2 /* UserDataSource.MergeSet */:\n // fall through\n case 1 /* UserDataSource.Update */:\n return !0;\n case 3 /* UserDataSource.Argument */:\n case 4 /* UserDataSource.ArrayArgument */:\n return !1;\n default:\n throw fail();\n }\n}\n\n/** A \"context\" object passed around while parsing user data. 
*/\nclass __PRIVATE_ParseContextImpl {\n /**\n * Initializes a ParseContext with the given source and path.\n *\n * @param settings - The settings for the parser.\n * @param databaseId - The database ID of the Firestore instance.\n * @param serializer - The serializer to use to generate the Value proto.\n * @param ignoreUndefinedProperties - Whether to ignore undefined properties\n * rather than throw.\n * @param fieldTransforms - A mutable list of field transforms encountered\n * while parsing the data.\n * @param fieldMask - A mutable list of field paths encountered while parsing\n * the data.\n *\n * TODO(b/34871131): We don't support array paths right now, so path can be\n * null to indicate the context represents any location within an array (in\n * which case certain features will not work and errors will be somewhat\n * compromised).\n */\n constructor(e, t, n, r, i, s) {\n this.settings = e, this.databaseId = t, this.serializer = n, this.ignoreUndefinedProperties = r,\n // Minor hack: If fieldTransforms is undefined, we assume this is an\n // external call and we need to validate the entire path.\n void 0 === i && this.mu(), this.fieldTransforms = i || [], this.fieldMask = s || [];\n }\n get path() {\n return this.settings.path;\n }\n get fu() {\n return this.settings.fu;\n }\n /** Returns a new context with the specified settings overwritten. */\n gu(e) {\n return new __PRIVATE_ParseContextImpl(Object.assign(Object.assign({}, this.settings), e), this.databaseId, this.serializer, this.ignoreUndefinedProperties, this.fieldTransforms, this.fieldMask);\n }\n pu(e) {\n var t;\n const n = null === (t = this.path) || void 0 === t ? void 0 : t.child(e),\n r = this.gu({\n path: n,\n yu: !1\n });\n return r.wu(e), r;\n }\n Su(e) {\n var t;\n const n = null === (t = this.path) || void 0 === t ? void 0 : t.child(e),\n r = this.gu({\n path: n,\n yu: !1\n });\n return r.mu(), r;\n }\n bu(e) {\n // TODO(b/34871131): We don't support array paths right now; so make path\n // undefined.\n return this.gu({\n path: void 0,\n yu: !0\n });\n }\n Du(e) {\n return __PRIVATE_createError(e, this.settings.methodName, this.settings.Cu || !1, this.path, this.settings.vu);\n }\n /** Returns 'true' if 'fieldPath' was traversed when creating this context. */\n contains(e) {\n return void 0 !== this.fieldMask.find(t => e.isPrefixOf(t)) || void 0 !== this.fieldTransforms.find(t => e.isPrefixOf(t.field));\n }\n mu() {\n // TODO(b/34871131): Remove null check once we have proper paths for fields\n // within arrays.\n if (this.path) for (let e = 0; e < this.path.length; e++) this.wu(this.path.get(e));\n }\n wu(e) {\n if (0 === e.length) throw this.Du(\"Document fields must not be empty\");\n if (__PRIVATE_isWrite(this.fu) && Se.test(e)) throw this.Du('Document fields cannot begin and end with \"__\"');\n }\n}\n\n/**\n * Helper for parsing raw user input (provided via the API) into internal model\n * classes.\n */\nclass __PRIVATE_UserDataReader {\n constructor(e, t, n) {\n this.databaseId = e, this.ignoreUndefinedProperties = t, this.serializer = n || __PRIVATE_newSerializer(e);\n }\n /** Creates a new top-level parse context. 
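The arguments are the user-data source (set, merge-set, update or argument), the public method name used in error messages, the target document (if any) referenced in error messages, and whether the data came through a `toFirestore()` converter call.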
*/\n Fu(e, t, n, r = !1) {\n return new __PRIVATE_ParseContextImpl({\n fu: e,\n methodName: t,\n vu: n,\n path: FieldPath$1.emptyPath(),\n yu: !1,\n Cu: r\n }, this.databaseId, this.serializer, this.ignoreUndefinedProperties);\n }\n}\nfunction __PRIVATE_newUserDataReader(e) {\n const t = e._freezeSettings(),\n n = __PRIVATE_newSerializer(e._databaseId);\n return new __PRIVATE_UserDataReader(e._databaseId, !!t.ignoreUndefinedProperties, n);\n}\n\n/** Parse document data from a set() call. */\nfunction __PRIVATE_parseSetData(e, t, n, r, i, s = {}) {\n const o = e.Fu(s.merge || s.mergeFields ? 2 /* UserDataSource.MergeSet */ : 0 /* UserDataSource.Set */, t, n, i);\n __PRIVATE_validatePlainObject(\"Data must be an object, but it was:\", o, r);\n const _ = __PRIVATE_parseObject(r, o);\n let a, u;\n if (s.merge) a = new FieldMask(o.fieldMask), u = o.fieldTransforms;else if (s.mergeFields) {\n const e = [];\n for (const r of s.mergeFields) {\n const i = __PRIVATE_fieldPathFromArgument$1(t, r, n);\n if (!o.contains(i)) throw new FirestoreError(C.INVALID_ARGUMENT, `Field '${i}' is specified in your field mask but missing from your input data.`);\n __PRIVATE_fieldMaskContains(e, i) || e.push(i);\n }\n a = new FieldMask(e), u = o.fieldTransforms.filter(e => a.covers(e.field));\n } else a = null, u = o.fieldTransforms;\n return new ParsedSetData(new ObjectValue(_), a, u);\n}\nclass __PRIVATE_DeleteFieldValueImpl extends FieldValue {\n _toFieldTransform(e) {\n if (2 /* UserDataSource.MergeSet */ !== e.fu) throw 1 /* UserDataSource.Update */ === e.fu ? e.Du(`${this._methodName}() can only appear at the top level of your update data`) : e.Du(`${this._methodName}() cannot be used with set() unless you pass {merge:true}`);\n // No transform to add for a delete, but we need to add it to our\n // fieldMask so it gets deleted.\n return e.fieldMask.push(e.path), null;\n }\n isEqual(e) {\n return e instanceof __PRIVATE_DeleteFieldValueImpl;\n }\n}\n\n/**\n * Creates a child context for parsing SerializableFieldValues.\n *\n * This is different than calling `ParseContext.contextWith` because it keeps\n * the fieldTransforms and fieldMask separate.\n *\n * The created context has its `dataSource` set to `UserDataSource.Argument`.\n * Although these values are used with writes, any elements in these FieldValues\n * are not considered writes since they cannot contain any FieldValue sentinels,\n * etc.\n *\n * @param fieldValue - The sentinel FieldValue for which to create a child\n * context.\n * @param context - The parent context.\n * @param arrayElement - Whether or not the FieldValue has an array.\n */\nfunction __PRIVATE_createSentinelChildContext(e, t, n) {\n return new __PRIVATE_ParseContextImpl({\n fu: 3 /* UserDataSource.Argument */,\n vu: t.settings.vu,\n methodName: e._methodName,\n yu: n\n }, t.databaseId, t.serializer, t.ignoreUndefinedProperties);\n}\nclass __PRIVATE_ServerTimestampFieldValueImpl extends FieldValue {\n _toFieldTransform(e) {\n return new FieldTransform(e.path, new __PRIVATE_ServerTimestampTransform());\n }\n isEqual(e) {\n return e instanceof __PRIVATE_ServerTimestampFieldValueImpl;\n }\n}\nclass __PRIVATE_ArrayUnionFieldValueImpl extends FieldValue {\n constructor(e, t) {\n super(e), this.Mu = t;\n }\n _toFieldTransform(e) {\n const t = __PRIVATE_createSentinelChildContext(this, e, /*array=*/!0),\n n = this.Mu.map(e => __PRIVATE_parseData(e, t)),\n r = new __PRIVATE_ArrayUnionTransformOperation(n);\n return new FieldTransform(e.path, r);\n }\n isEqual(e) {\n return e instanceof 
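/* Illustrative sketch, not part of the SDK build: the sentinel FieldValue classes in this file back the public deleteField(), serverTimestamp(), arrayUnion() and arrayRemove() helpers. Assuming an initialized Firestore instance `db`:\n   await updateDoc(doc(db, 'users', 'alice'), {\n     lastLogin: serverTimestamp(),  // becomes a ServerTimestampTransform\n     tags: arrayUnion('beta'),      // becomes an ArrayUnionTransformOperation\n     legacyField: deleteField()     // only added to the field mask, no transform\n   });\n   Each sentinel is turned into the FieldTransform entries collected here by _toFieldTransform(). */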
__PRIVATE_ArrayUnionFieldValueImpl && deepEqual(this.Mu, e.Mu);\n }\n}\nclass __PRIVATE_ArrayRemoveFieldValueImpl extends FieldValue {\n constructor(e, t) {\n super(e), this.Mu = t;\n }\n _toFieldTransform(e) {\n const t = __PRIVATE_createSentinelChildContext(this, e, /*array=*/!0),\n n = this.Mu.map(e => __PRIVATE_parseData(e, t)),\n r = new __PRIVATE_ArrayRemoveTransformOperation(n);\n return new FieldTransform(e.path, r);\n }\n isEqual(e) {\n return e instanceof __PRIVATE_ArrayRemoveFieldValueImpl && deepEqual(this.Mu, e.Mu);\n }\n}\nclass __PRIVATE_NumericIncrementFieldValueImpl extends FieldValue {\n constructor(e, t) {\n super(e), this.xu = t;\n }\n _toFieldTransform(e) {\n const t = new __PRIVATE_NumericIncrementTransformOperation(e.serializer, toNumber(e.serializer, this.xu));\n return new FieldTransform(e.path, t);\n }\n isEqual(e) {\n return e instanceof __PRIVATE_NumericIncrementFieldValueImpl && this.xu === e.xu;\n }\n}\n\n/** Parse update data from an update() call. */\nfunction __PRIVATE_parseUpdateData(e, t, n, r) {\n const i = e.Fu(1 /* UserDataSource.Update */, t, n);\n __PRIVATE_validatePlainObject(\"Data must be an object, but it was:\", i, r);\n const s = [],\n o = ObjectValue.empty();\n forEach(r, (e, r) => {\n const _ = __PRIVATE_fieldPathFromDotSeparatedString(t, e, n);\n // For Compat types, we have to \"extract\" the underlying types before\n // performing validation.\n r = getModularInstance(r);\n const a = i.Su(_);\n if (r instanceof __PRIVATE_DeleteFieldValueImpl)\n // Add it to the field mask, but don't add anything to updateData.\n s.push(_);else {\n const e = __PRIVATE_parseData(r, a);\n null != e && (s.push(_), o.set(_, e));\n }\n });\n const _ = new FieldMask(s);\n return new ParsedUpdateData(o, _, i.fieldTransforms);\n}\n\n/** Parse update data from a list of field/value arguments. */\nfunction __PRIVATE_parseUpdateVarargs(e, t, n, r, i, s) {\n const o = e.Fu(1 /* UserDataSource.Update */, t, n),\n _ = [__PRIVATE_fieldPathFromArgument$1(t, r, n)],\n a = [i];\n if (s.length % 2 != 0) throw new FirestoreError(C.INVALID_ARGUMENT, `Function ${t}() needs to be called with an even number of arguments that alternate between field names and values.`);\n for (let e = 0; e < s.length; e += 2) _.push(__PRIVATE_fieldPathFromArgument$1(t, s[e])), a.push(s[e + 1]);\n const u = [],\n c = ObjectValue.empty();\n // We iterate in reverse order to pick the last value for a field if the\n // user specified the field multiple times.\n for (let e = _.length - 1; e >= 0; --e) if (!__PRIVATE_fieldMaskContains(u, _[e])) {\n const t = _[e];\n let n = a[e];\n // For Compat types, we have to \"extract\" the underlying types before\n // performing validation.\n n = getModularInstance(n);\n const r = o.Su(t);\n if (n instanceof __PRIVATE_DeleteFieldValueImpl)\n // Add it to the field mask, but don't add anything to updateData.\n u.push(t);else {\n const e = __PRIVATE_parseData(n, r);\n null != e && (u.push(t), c.set(t, e));\n }\n }\n const l = new FieldMask(u);\n return new ParsedUpdateData(c, l, o.fieldTransforms);\n}\n\n/**\n * Parse a \"query value\" (e.g. value in a where filter or a value in a cursor\n * bound).\n *\n * @param allowArrays - Whether the query value is an array that may directly\n * contain additional arrays (e.g. the operand of an `in` query).\n */\nfunction __PRIVATE_parseQueryValue(e, t, n, r = !1) {\n return __PRIVATE_parseData(n, e.Fu(r ? 
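/* When `allowArrays` is true (the operand of an `in` or `not-in` filter), the value is parsed as an ArrayArgument: the operand array may directly contain further arrays (array-typed field values) that would otherwise be rejected as nested arrays. Illustrative: where('region', 'in', ['west', 'east']) routes ['west', 'east'] through this path. */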
4 /* UserDataSource.ArrayArgument */ : 3 /* UserDataSource.Argument */, t));\n}\n\n/**\n * Parses user data to Protobuf Values.\n *\n * @param input - Data to be parsed.\n * @param context - A context object representing the current path being parsed,\n * the source of the data being parsed, etc.\n * @returns The parsed value, or null if the value was a FieldValue sentinel\n * that should not be included in the resulting parsed data.\n */\nfunction __PRIVATE_parseData(e, t) {\n if (__PRIVATE_looksLikeJsonObject(\n // Unwrap the API type from the Compat SDK. This will return the API type\n // from firestore-exp.\n e = getModularInstance(e))) return __PRIVATE_validatePlainObject(\"Unsupported field value:\", t, e), __PRIVATE_parseObject(e, t);\n if (e instanceof FieldValue)\n // FieldValues usually parse into transforms (except deleteField())\n // in which case we do not want to include this field in our parsed data\n // (as doing so will overwrite the field directly prior to the transform\n // trying to transform it). So we don't add this location to\n // context.fieldMask and we return null as our parsing result.\n /**\n * \"Parses\" the provided FieldValueImpl, adding any necessary transforms to\n * context.fieldTransforms.\n */\n return function __PRIVATE_parseSentinelFieldValue(e, t) {\n // Sentinels are only supported with writes, and not within arrays.\n if (!__PRIVATE_isWrite(t.fu)) throw t.Du(`${e._methodName}() can only be used with update() and set()`);\n if (!t.path) throw t.Du(`${e._methodName}() is not currently supported inside arrays`);\n const n = e._toFieldTransform(t);\n n && t.fieldTransforms.push(n);\n }\n /**\n * Helper to parse a scalar value (i.e. not an Object, Array, or FieldValue)\n *\n * @returns The parsed value\n */(e, t), null;\n if (void 0 === e && t.ignoreUndefinedProperties)\n // If the input is undefined it can never participate in the fieldMask, so\n // don't handle this below. 
If `ignoreUndefinedProperties` is false,\n // `parseScalarValue` will reject an undefined value.\n return null;\n if (\n // If context.path is null we are inside an array and we don't support\n // field mask paths more granular than the top-level array.\n t.path && t.fieldMask.push(t.path), e instanceof Array) {\n // TODO(b/34871131): Include the path containing the array in the error\n // message.\n // In the case of IN queries, the parsed data is an array (representing\n // the set of values to be included for the IN query) that may directly\n // contain additional arrays (each representing an individual field\n // value), so we disable this validation.\n if (t.settings.yu && 4 /* UserDataSource.ArrayArgument */ !== t.fu) throw t.Du(\"Nested arrays are not supported\");\n return function __PRIVATE_parseArray(e, t) {\n const n = [];\n let r = 0;\n for (const i of e) {\n let e = __PRIVATE_parseData(i, t.bu(r));\n null == e && (\n // Just include nulls in the array for fields being replaced with a\n // sentinel.\n e = {\n nullValue: \"NULL_VALUE\"\n }), n.push(e), r++;\n }\n return {\n arrayValue: {\n values: n\n }\n };\n }(e, t);\n }\n return function __PRIVATE_parseScalarValue(e, t) {\n if (null === (e = getModularInstance(e))) return {\n nullValue: \"NULL_VALUE\"\n };\n if (\"number\" == typeof e) return toNumber(t.serializer, e);\n if (\"boolean\" == typeof e) return {\n booleanValue: e\n };\n if (\"string\" == typeof e) return {\n stringValue: e\n };\n if (e instanceof Date) {\n const n = Timestamp.fromDate(e);\n return {\n timestampValue: toTimestamp(t.serializer, n)\n };\n }\n if (e instanceof Timestamp) {\n // Firestore backend truncates precision down to microseconds. To ensure\n // offline mode works the same with regards to truncation, perform the\n // truncation immediately without waiting for the backend to do that.\n const n = new Timestamp(e.seconds, 1e3 * Math.floor(e.nanoseconds / 1e3));\n return {\n timestampValue: toTimestamp(t.serializer, n)\n };\n }\n if (e instanceof GeoPoint) return {\n geoPointValue: {\n latitude: e.latitude,\n longitude: e.longitude\n }\n };\n if (e instanceof Bytes) return {\n bytesValue: __PRIVATE_toBytes(t.serializer, e._byteString)\n };\n if (e instanceof DocumentReference) {\n const n = t.databaseId,\n r = e.firestore._databaseId;\n if (!r.isEqual(n)) throw t.Du(`Document reference is for database ${r.projectId}/${r.database} but should be for database ${n.projectId}/${n.database}`);\n return {\n referenceValue: __PRIVATE_toResourceName(e.firestore._databaseId || t.databaseId, e._key.path)\n };\n }\n throw t.Du(`Unsupported field value: ${__PRIVATE_valueDescription(e)}`);\n }\n /**\n * Checks whether an object looks like a JSON object that should be converted\n * into a struct. Normal class/prototype instances are considered to look like\n * JSON objects since they should be converted to a struct value. Arrays, Dates,\n * GeoPoints, etc. 
are not considered to look like JSON objects since they map\n * to specific FieldValue types other than ObjectValue.\n */(e, t);\n}\nfunction __PRIVATE_parseObject(e, t) {\n const n = {};\n return isEmpty(e) ?\n // If we encounter an empty object, we explicitly add it to the update\n // mask to ensure that the server creates a map entry.\n t.path && t.path.length > 0 && t.fieldMask.push(t.path) : forEach(e, (e, r) => {\n const i = __PRIVATE_parseData(r, t.pu(e));\n null != i && (n[e] = i);\n }), {\n mapValue: {\n fields: n\n }\n };\n}\nfunction __PRIVATE_looksLikeJsonObject(e) {\n return !(\"object\" != typeof e || null === e || e instanceof Array || e instanceof Date || e instanceof Timestamp || e instanceof GeoPoint || e instanceof Bytes || e instanceof DocumentReference || e instanceof FieldValue);\n}\nfunction __PRIVATE_validatePlainObject(e, t, n) {\n if (!__PRIVATE_looksLikeJsonObject(n) || !function __PRIVATE_isPlainObject(e) {\n return \"object\" == typeof e && null !== e && (Object.getPrototypeOf(e) === Object.prototype || null === Object.getPrototypeOf(e));\n }(n)) {\n const r = __PRIVATE_valueDescription(n);\n throw \"an object\" === r ? t.Du(e + \" a custom object\") : t.Du(e + \" \" + r);\n }\n}\n\n/**\n * Helper that calls fromDotSeparatedString() but wraps any error thrown.\n */\nfunction __PRIVATE_fieldPathFromArgument$1(e, t, n) {\n if ((\n // If required, replace the FieldPath Compat class with with the firestore-exp\n // FieldPath.\n t = getModularInstance(t)) instanceof FieldPath) return t._internalPath;\n if (\"string\" == typeof t) return __PRIVATE_fieldPathFromDotSeparatedString(e, t);\n throw __PRIVATE_createError(\"Field path arguments must be of type string or \", e, /* hasConverter= */!1, /* path= */void 0, n);\n}\n\n/**\n * Matches any characters in a field path string that are reserved.\n */\nconst be = new RegExp(\"[~\\\\*/\\\\[\\\\]]\");\n\n/**\n * Wraps fromDotSeparatedString with an error message about the method that\n * was thrown.\n * @param methodName - The publicly visible method name\n * @param path - The dot-separated string form of a field path which will be\n * split on dots.\n * @param targetDoc - The document against which the field path will be\n * evaluated.\n */\nfunction __PRIVATE_fieldPathFromDotSeparatedString(e, t, n) {\n if (t.search(be) >= 0) throw __PRIVATE_createError(`Invalid field path (${t}). Paths must not contain '~', '*', '/', '[', or ']'`, e, /* hasConverter= */!1, /* path= */void 0, n);\n try {\n return new FieldPath(...t.split(\".\"))._internalPath;\n } catch (r) {\n throw __PRIVATE_createError(`Invalid field path (${t}). Paths must not be empty, begin with '.', end with '.', or contain '..'`, e, /* hasConverter= */!1, /* path= */void 0, n);\n }\n}\nfunction __PRIVATE_createError(e, t, n, r, i) {\n const s = r && !r.isEmpty(),\n o = void 0 !== i;\n let _ = `Function ${t}() called with invalid data`;\n n && (_ += \" (via `toFirestore()`)\"), _ += \". \";\n let a = \"\";\n return (s || o) && (a += \" (found\", s && (a += ` in field ${r}`), o && (a += ` in document ${i}`), a += \")\"), new FirestoreError(C.INVALID_ARGUMENT, _ + e + a);\n}\n\n/** Checks `haystack` if FieldPath `needle` is present. Runs in O(n). 
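Used above to de-duplicate the field paths collected from a set() call's `mergeFields` option and from update() field/value varargs.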
*/\nfunction __PRIVATE_fieldMaskContains(e, t) {\n return e.some(e => e.isEqual(t));\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A `DocumentSnapshot` contains data read from a document in your Firestore\n * database. The data can be extracted with `.data()` or `.get()` to\n * get a specific field.\n *\n * For a `DocumentSnapshot` that points to a non-existing document, any data\n * access will return 'undefined'. You can use the `exists()` method to\n * explicitly verify a document's existence.\n */\nclass DocumentSnapshot$1 {\n // Note: This class is stripped down version of the DocumentSnapshot in\n // the legacy SDK. The changes are:\n // - No support for SnapshotMetadata.\n // - No support for SnapshotOptions.\n /** @hideconstructor protected */\n constructor(e, t, n, r, i) {\n this._firestore = e, this._userDataWriter = t, this._key = n, this._document = r, this._converter = i;\n }\n /** Property of the `DocumentSnapshot` that provides the document's ID. */\n get id() {\n return this._key.path.lastSegment();\n }\n /**\n * The `DocumentReference` for the document included in the `DocumentSnapshot`.\n */\n get ref() {\n return new DocumentReference(this._firestore, this._converter, this._key);\n }\n /**\n * Signals whether or not the document at the snapshot's location exists.\n *\n * @returns true if the document exists.\n */\n exists() {\n return null !== this._document;\n }\n /**\n * Retrieves all fields in the document as an `Object`. Returns `undefined` if\n * the document doesn't exist.\n *\n * @returns An `Object` containing all fields in the document or `undefined`\n * if the document doesn't exist.\n */\n data() {\n if (this._document) {\n if (this._converter) {\n // We only want to use the converter and create a new DocumentSnapshot\n // if a converter has been provided.\n const e = new QueryDocumentSnapshot$1(this._firestore, this._userDataWriter, this._key, this._document, /* converter= */null);\n return this._converter.fromFirestore(e);\n }\n return this._userDataWriter.convertValue(this._document.data.value);\n }\n }\n /**\n * Retrieves the field specified by `fieldPath`. Returns `undefined` if the\n * document or field doesn't exist.\n *\n * @param fieldPath - The path (for example 'foo' or 'foo.bar') to a specific\n * field.\n * @returns The data at the specified field location or undefined if no such\n * field exists in the document.\n */\n // We are using `any` here to avoid an explicit cast by our users.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n get(e) {\n if (this._document) {\n const t = this._document.data.field(__PRIVATE_fieldPathFromArgument(\"DocumentSnapshot.get\", e));\n if (null !== t) return this._userDataWriter.convertValue(t);\n }\n }\n}\n\n/**\n * A `QueryDocumentSnapshot` contains data read from a document in your\n * Firestore database as part of a query. 
The document is guaranteed to exist\n * and its data can be extracted with `.data()` or `.get()` to get a\n * specific field.\n *\n * A `QueryDocumentSnapshot` offers the same API surface as a\n * `DocumentSnapshot`. Since query results contain only existing documents, the\n * `exists` property will always be true and `data()` will never return\n * 'undefined'.\n */\nclass QueryDocumentSnapshot$1 extends DocumentSnapshot$1 {\n /**\n * Retrieves all fields in the document as an `Object`.\n *\n * @override\n * @returns An `Object` containing all fields in the document.\n */\n data() {\n return super.data();\n }\n}\n\n/**\n * Helper that calls `fromDotSeparatedString()` but wraps any error thrown.\n */\nfunction __PRIVATE_fieldPathFromArgument(e, t) {\n return \"string\" == typeof t ? __PRIVATE_fieldPathFromDotSeparatedString(e, t) : t instanceof FieldPath ? t._internalPath : t._delegate._internalPath;\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nfunction __PRIVATE_validateHasExplicitOrderByForLimitToLast(e) {\n if (\"L\" /* LimitType.Last */ === e.limitType && 0 === e.explicitOrderBy.length) throw new FirestoreError(C.UNIMPLEMENTED, \"limitToLast() queries require specifying at least one orderBy() clause\");\n}\n\n/**\n * An `AppliableConstraint` is an abstraction of a constraint that can be applied\n * to a Firestore query.\n */\nclass AppliableConstraint {}\n\n/**\n * A `QueryConstraint` is used to narrow the set of documents returned by a\n * Firestore query. `QueryConstraint`s are created by invoking {@link where},\n * {@link orderBy}, {@link (startAt:1)}, {@link (startAfter:1)}, {@link\n * (endBefore:1)}, {@link (endAt:1)}, {@link limit}, {@link limitToLast} and\n * can then be passed to {@link (query:1)} to create a new query instance that\n * also contains this `QueryConstraint`.\n */\nclass QueryConstraint extends AppliableConstraint {}\nfunction query(e, t, ...n) {\n let r = [];\n t instanceof AppliableConstraint && r.push(t), r = r.concat(n), function __PRIVATE_validateQueryConstraintArray(e) {\n const t = e.filter(e => e instanceof QueryCompositeFilterConstraint).length,\n n = e.filter(e => e instanceof QueryFieldFilterConstraint).length;\n if (t > 1 || t > 0 && n > 0) throw new FirestoreError(C.INVALID_ARGUMENT, \"InvalidQuery. When using composite filters, you cannot use more than one filter at the top level. Consider nesting the multiple filters within an `and(...)` statement. 
For example: change `query(query, where(...), or(...))` to `query(query, and(where(...), or(...)))`.\");\n }\n /**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n /**\n * Converts Firestore's internal types to the JavaScript types that we expose\n * to the user.\n *\n * @internal\n */(r);\n for (const t of r) e = t._apply(e);\n return e;\n}\n\n/**\n * A `QueryFieldFilterConstraint` is used to narrow the set of documents returned by\n * a Firestore query by filtering on one or more document fields.\n * `QueryFieldFilterConstraint`s are created by invoking {@link where} and can then\n * be passed to {@link (query:1)} to create a new query instance that also contains\n * this `QueryFieldFilterConstraint`.\n */\nclass QueryFieldFilterConstraint extends QueryConstraint {\n /**\n * @internal\n */\n constructor(e, t, n) {\n super(), this._field = e, this._op = t, this._value = n, /** The type of this query constraint */\n this.type = \"where\";\n }\n static _create(e, t, n) {\n return new QueryFieldFilterConstraint(e, t, n);\n }\n _apply(e) {\n const t = this._parse(e);\n return __PRIVATE_validateNewFieldFilter(e._query, t), new Query(e.firestore, e.converter, __PRIVATE_queryWithAddedFilter(e._query, t));\n }\n _parse(e) {\n const t = __PRIVATE_newUserDataReader(e.firestore),\n n = function __PRIVATE_newQueryFilter(e, t, n, r, i, s, o) {\n let _;\n if (i.isKeyField()) {\n if (\"array-contains\" /* Operator.ARRAY_CONTAINS */ === s || \"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */ === s) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid Query. 
You can't perform '${s}' queries on documentId().`);\n if (\"in\" /* Operator.IN */ === s || \"not-in\" /* Operator.NOT_IN */ === s) {\n __PRIVATE_validateDisjunctiveFilterElements(o, s);\n const t = [];\n for (const n of o) t.push(__PRIVATE_parseDocumentIdValue(r, e, n));\n _ = {\n arrayValue: {\n values: t\n }\n };\n } else _ = __PRIVATE_parseDocumentIdValue(r, e, o);\n } else \"in\" /* Operator.IN */ !== s && \"not-in\" /* Operator.NOT_IN */ !== s && \"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */ !== s || __PRIVATE_validateDisjunctiveFilterElements(o, s), _ = __PRIVATE_parseQueryValue(n, t, o, /* allowArrays= */\"in\" /* Operator.IN */ === s || \"not-in\" /* Operator.NOT_IN */ === s);\n return FieldFilter.create(i, s, _);\n }(e._query, \"where\", t, e.firestore._databaseId, this._field, this._op, this._value);\n return n;\n }\n}\n\n/**\n * Creates a {@link QueryFieldFilterConstraint} that enforces that documents\n * must contain the specified field and that the value should satisfy the\n * relation constraint provided.\n *\n * @param fieldPath - The path to compare\n * @param opStr - The operation string (e.g \"<\", \"<=\", \"==\", \"<\",\n * \"<=\", \"!=\").\n * @param value - The value for comparison\n * @returns The created {@link QueryFieldFilterConstraint}.\n */\nfunction where(e, t, n) {\n const r = t,\n i = __PRIVATE_fieldPathFromArgument(\"where\", e);\n return QueryFieldFilterConstraint._create(i, r, n);\n}\n\n/**\n * A `QueryCompositeFilterConstraint` is used to narrow the set of documents\n * returned by a Firestore query by performing the logical OR or AND of multiple\n * {@link QueryFieldFilterConstraint}s or {@link QueryCompositeFilterConstraint}s.\n * `QueryCompositeFilterConstraint`s are created by invoking {@link or} or\n * {@link and} and can then be passed to {@link (query:1)} to create a new query\n * instance that also contains the `QueryCompositeFilterConstraint`.\n */\nclass QueryCompositeFilterConstraint extends AppliableConstraint {\n /**\n * @internal\n */\n constructor(/** The type of this query constraint */\n e, t) {\n super(), this.type = e, this._queryConstraints = t;\n }\n static _create(e, t) {\n return new QueryCompositeFilterConstraint(e, t);\n }\n _parse(e) {\n const t = this._queryConstraints.map(t => t._parse(e)).filter(e => e.getFilters().length > 0);\n return 1 === t.length ? t[0] : CompositeFilter.create(t, this._getOperator());\n }\n _apply(e) {\n const t = this._parse(e);\n return 0 === t.getFilters().length ? e : (function __PRIVATE_validateNewFilter(e, t) {\n let n = e;\n const r = t.getFlattenedFilters();\n for (const e of r) __PRIVATE_validateNewFieldFilter(n, e), n = __PRIVATE_queryWithAddedFilter(n, e);\n }\n // Checks if any of the provided filter operators are included in the given list of filters and\n // returns the first one that is, or null if none are.\n (e._query, t), new Query(e.firestore, e.converter, __PRIVATE_queryWithAddedFilter(e._query, t)));\n }\n _getQueryConstraints() {\n return this._queryConstraints;\n }\n _getOperator() {\n return \"and\" === this.type ? \"and\" /* CompositeOperator.AND */ : \"or\" /* CompositeOperator.OR */;\n }\n}\n\n/**\n * Creates a new {@link QueryCompositeFilterConstraint} that is a disjunction of\n * the given filter constraints. A disjunction filter includes a document if it\n * satisfies any of the given filters.\n *\n * @param queryConstraints - Optional. The list of\n * {@link QueryFilterConstraint}s to perform a disjunction for. 
These must be\n * created with calls to {@link where}, {@link or}, or {@link and}.\n * @returns The newly created {@link QueryCompositeFilterConstraint}.\n */\nfunction or(...e) {\n // Only support QueryFilterConstraints\n return e.forEach(e => __PRIVATE_validateQueryFilterConstraint(\"or\", e)), QueryCompositeFilterConstraint._create(\"or\" /* CompositeOperator.OR */, e);\n}\n\n/**\n * Creates a new {@link QueryCompositeFilterConstraint} that is a conjunction of\n * the given filter constraints. A conjunction filter includes a document if it\n * satisfies all of the given filters.\n *\n * @param queryConstraints - Optional. The list of\n * {@link QueryFilterConstraint}s to perform a conjunction for. These must be\n * created with calls to {@link where}, {@link or}, or {@link and}.\n * @returns The newly created {@link QueryCompositeFilterConstraint}.\n */\nfunction and(...e) {\n // Only support QueryFilterConstraints\n return e.forEach(e => __PRIVATE_validateQueryFilterConstraint(\"and\", e)), QueryCompositeFilterConstraint._create(\"and\" /* CompositeOperator.AND */, e);\n}\n\n/**\n * A `QueryOrderByConstraint` is used to sort the set of documents returned by a\n * Firestore query. `QueryOrderByConstraint`s are created by invoking\n * {@link orderBy} and can then be passed to {@link (query:1)} to create a new query\n * instance that also contains this `QueryOrderByConstraint`.\n *\n * Note: Documents that do not contain the orderBy field will not be present in\n * the query result.\n */\nclass QueryOrderByConstraint extends QueryConstraint {\n /**\n * @internal\n */\n constructor(e, t) {\n super(), this._field = e, this._direction = t, /** The type of this query constraint */\n this.type = \"orderBy\";\n }\n static _create(e, t) {\n return new QueryOrderByConstraint(e, t);\n }\n _apply(e) {\n const t = function __PRIVATE_newQueryOrderBy(e, t, n) {\n if (null !== e.startAt) throw new FirestoreError(C.INVALID_ARGUMENT, \"Invalid query. You must not call startAt() or startAfter() before calling orderBy().\");\n if (null !== e.endAt) throw new FirestoreError(C.INVALID_ARGUMENT, \"Invalid query. You must not call endAt() or endBefore() before calling orderBy().\");\n return new OrderBy(t, n);\n }\n /**\n * Create a `Bound` from a query and a document.\n *\n * Note that the `Bound` will always include the key of the document\n * and so only the provided document will compare equal to the returned\n * position.\n *\n * Will throw if the document does not contain all fields of the order by\n * of the query or if any of the fields in the order by are an uncommitted\n * server timestamp.\n */(e._query, this._field, this._direction);\n return new Query(e.firestore, e.converter, function __PRIVATE_queryWithAddedOrderBy(e, t) {\n // TODO(dimond): validate that orderBy does not list the same key twice.\n const n = e.explicitOrderBy.concat([t]);\n return new __PRIVATE_QueryImpl(e.path, e.collectionGroup, n, e.filters.slice(), e.limit, e.limitType, e.startAt, e.endAt);\n }(e._query, t));\n }\n}\n\n/**\n * Creates a {@link QueryOrderByConstraint} that sorts the query result by the\n * specified field, optionally in descending order instead of ascending.\n *\n * Note: Documents that do not contain the specified field will not be present\n * in the query result.\n *\n * @param fieldPath - The field to sort by.\n * @param directionStr - Optional direction to sort by ('asc' or 'desc'). 
If\n * not specified, order will be ascending.\n * @returns The created {@link QueryOrderByConstraint}.\n */\nfunction orderBy(e, t = \"asc\") {\n const n = t,\n r = __PRIVATE_fieldPathFromArgument(\"orderBy\", e);\n return QueryOrderByConstraint._create(r, n);\n}\n\n/**\n * A `QueryLimitConstraint` is used to limit the number of documents returned by\n * a Firestore query.\n * `QueryLimitConstraint`s are created by invoking {@link limit} or\n * {@link limitToLast} and can then be passed to {@link (query:1)} to create a new\n * query instance that also contains this `QueryLimitConstraint`.\n */\nclass QueryLimitConstraint extends QueryConstraint {\n /**\n * @internal\n */\n constructor(/** The type of this query constraint */\n e, t, n) {\n super(), this.type = e, this._limit = t, this._limitType = n;\n }\n static _create(e, t, n) {\n return new QueryLimitConstraint(e, t, n);\n }\n _apply(e) {\n return new Query(e.firestore, e.converter, __PRIVATE_queryWithLimit(e._query, this._limit, this._limitType));\n }\n}\n\n/**\n * Creates a {@link QueryLimitConstraint} that only returns the first matching\n * documents.\n *\n * @param limit - The maximum number of items to return.\n * @returns The created {@link QueryLimitConstraint}.\n */\nfunction limit(e) {\n return __PRIVATE_validatePositiveNumber(\"limit\", e), QueryLimitConstraint._create(\"limit\", e, \"F\" /* LimitType.First */);\n}\n\n/**\n * Creates a {@link QueryLimitConstraint} that only returns the last matching\n * documents.\n *\n * You must specify at least one `orderBy` clause for `limitToLast` queries,\n * otherwise an exception will be thrown during execution.\n *\n * @param limit - The maximum number of items to return.\n * @returns The created {@link QueryLimitConstraint}.\n */\nfunction limitToLast(e) {\n return __PRIVATE_validatePositiveNumber(\"limitToLast\", e), QueryLimitConstraint._create(\"limitToLast\", e, \"L\" /* LimitType.Last */);\n}\n\n/**\n * A `QueryStartAtConstraint` is used to exclude documents from the start of a\n * result set returned by a Firestore query.\n * `QueryStartAtConstraint`s are created by invoking {@link (startAt:1)} or\n * {@link (startAfter:1)} and can then be passed to {@link (query:1)} to create a\n * new query instance that also contains this `QueryStartAtConstraint`.\n */\nclass QueryStartAtConstraint extends QueryConstraint {\n /**\n * @internal\n */\n constructor(/** The type of this query constraint */\n e, t, n) {\n super(), this.type = e, this._docOrFields = t, this._inclusive = n;\n }\n static _create(e, t, n) {\n return new QueryStartAtConstraint(e, t, n);\n }\n _apply(e) {\n const t = __PRIVATE_newQueryBoundFromDocOrFields(e, this.type, this._docOrFields, this._inclusive);\n return new Query(e.firestore, e.converter, function __PRIVATE_queryWithStartAt(e, t) {\n return new __PRIVATE_QueryImpl(e.path, e.collectionGroup, e.explicitOrderBy.slice(), e.filters.slice(), e.limit, e.limitType, t, e.endAt);\n }(e._query, t));\n }\n}\nfunction startAt(...e) {\n return QueryStartAtConstraint._create(\"startAt\", e, /*inclusive=*/!0);\n}\nfunction startAfter(...e) {\n return QueryStartAtConstraint._create(\"startAfter\", e, /*inclusive=*/!1);\n}\n\n/**\n * A `QueryEndAtConstraint` is used to exclude documents from the end of a\n * result set returned by a Firestore query.\n * `QueryEndAtConstraint`s are created by invoking {@link (endAt:1)} or\n * {@link (endBefore:1)} and can then be passed to {@link (query:1)} to create a new\n * query instance that also contains this `QueryEndAtConstraint`.\n 
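* For example (illustrative, assumes a collection reference `citiesRef` ordered by population):\n * `query(citiesRef, orderBy('population'), endAt(1000000))` includes documents whose\n * population is exactly 1000000, while `endBefore(1000000)` excludes them.\n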
*/\nclass QueryEndAtConstraint extends QueryConstraint {\n /**\n * @internal\n */\n constructor(/** The type of this query constraint */\n e, t, n) {\n super(), this.type = e, this._docOrFields = t, this._inclusive = n;\n }\n static _create(e, t, n) {\n return new QueryEndAtConstraint(e, t, n);\n }\n _apply(e) {\n const t = __PRIVATE_newQueryBoundFromDocOrFields(e, this.type, this._docOrFields, this._inclusive);\n return new Query(e.firestore, e.converter, function __PRIVATE_queryWithEndAt(e, t) {\n return new __PRIVATE_QueryImpl(e.path, e.collectionGroup, e.explicitOrderBy.slice(), e.filters.slice(), e.limit, e.limitType, e.startAt, t);\n }(e._query, t));\n }\n}\nfunction endBefore(...e) {\n return QueryEndAtConstraint._create(\"endBefore\", e, /*inclusive=*/!1);\n}\nfunction endAt(...e) {\n return QueryEndAtConstraint._create(\"endAt\", e, /*inclusive=*/!0);\n}\n\n/** Helper function to create a bound from a document or fields */\nfunction __PRIVATE_newQueryBoundFromDocOrFields(e, t, n, r) {\n if (n[0] = getModularInstance(n[0]), n[0] instanceof DocumentSnapshot$1) return function __PRIVATE_newQueryBoundFromDocument(e, t, n, r, i) {\n if (!r) throw new FirestoreError(C.NOT_FOUND, `Can't use a DocumentSnapshot that doesn't exist for ${n}().`);\n const s = [];\n // Because people expect to continue/end a query at the exact document\n // provided, we need to use the implicit sort order rather than the explicit\n // sort order, because it's guaranteed to contain the document key. That way\n // the position becomes unambiguous and the query continues/ends exactly at\n // the provided document. Without the key (by using the explicit sort\n // orders), multiple documents could match the position, yielding duplicate\n // results.\n for (const n of __PRIVATE_queryNormalizedOrderBy(e)) if (n.field.isKeyField()) s.push(__PRIVATE_refValue(t, r.key));else {\n const e = r.data.field(n.field);\n if (__PRIVATE_isServerTimestamp(e)) throw new FirestoreError(C.INVALID_ARGUMENT, 'Invalid query. You are trying to start or end a query using a document for which the field \"' + n.field + '\" is an uncommitted server timestamp. (Since the value of this field is unknown, you cannot start/end a query with it.)');\n if (null === e) {\n const e = n.field.canonicalString();\n throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. You are trying to start or end a query using a document for which the field '${e}' (used as the orderBy) does not exist.`);\n }\n s.push(e);\n }\n return new Bound(s, i);\n }\n /**\n * Converts a list of field values to a `Bound` for the given query.\n */(e._query, e.firestore._databaseId, t, n[0]._document, r);\n {\n const i = __PRIVATE_newUserDataReader(e.firestore);\n return function __PRIVATE_newQueryBoundFromFields(e, t, n, r, i, s) {\n // Use explicit order by's because it has to match the query the user made\n const o = e.explicitOrderBy;\n if (i.length > o.length) throw new FirestoreError(C.INVALID_ARGUMENT, `Too many arguments provided to ${r}(). The number of arguments must be less than or equal to the number of orderBy() clauses`);\n const _ = [];\n for (let s = 0; s < i.length; s++) {\n const a = i[s];\n if (o[s].field.isKeyField()) {\n if (\"string\" != typeof a) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. Expected a string for document ID in ${r}(), but got a ${typeof a}`);\n if (!__PRIVATE_isCollectionGroupQuery(e) && -1 !== a.indexOf(\"/\")) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. 
When querying a collection and ordering by documentId(), the value passed to ${r}() must be a plain document ID, but '${a}' contains a slash.`);\n const n = e.path.child(ResourcePath.fromString(a));\n if (!DocumentKey.isDocumentKey(n)) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. When querying a collection group and ordering by documentId(), the value passed to ${r}() must result in a valid document path, but '${n}' is not because it contains an odd number of segments.`);\n const i = new DocumentKey(n);\n _.push(__PRIVATE_refValue(t, i));\n } else {\n const e = __PRIVATE_parseQueryValue(n, r, a);\n _.push(e);\n }\n }\n return new Bound(_, s);\n }\n /**\n * Parses the given `documentIdValue` into a `ReferenceValue`, throwing\n * appropriate errors if the value is anything other than a `DocumentReference`\n * or `string`, or if the string is malformed.\n */(e._query, e.firestore._databaseId, i, t, n, r);\n }\n}\nfunction __PRIVATE_parseDocumentIdValue(e, t, n) {\n if (\"string\" == typeof (n = getModularInstance(n))) {\n if (\"\" === n) throw new FirestoreError(C.INVALID_ARGUMENT, \"Invalid query. When querying with documentId(), you must provide a valid document ID, but it was an empty string.\");\n if (!__PRIVATE_isCollectionGroupQuery(t) && -1 !== n.indexOf(\"/\")) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. When querying a collection by documentId(), you must provide a plain document ID, but '${n}' contains a '/' character.`);\n const r = t.path.child(ResourcePath.fromString(n));\n if (!DocumentKey.isDocumentKey(r)) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. When querying a collection group by documentId(), the value provided must result in a valid document path, but '${r}' is not because it has an odd number of segments (${r.length}).`);\n return __PRIVATE_refValue(e, new DocumentKey(r));\n }\n if (n instanceof DocumentReference) return __PRIVATE_refValue(e, n._key);\n throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. When querying with documentId(), you must provide a valid string or a DocumentReference, but it was: ${__PRIVATE_valueDescription(n)}.`);\n}\n\n/**\n * Validates that the value passed into a disjunctive filter satisfies all\n * array requirements.\n */\nfunction __PRIVATE_validateDisjunctiveFilterElements(e, t) {\n if (!Array.isArray(e) || 0 === e.length) throw new FirestoreError(C.INVALID_ARGUMENT, `Invalid Query. A non-empty array is required for '${t.toString()}' filters.`);\n}\n\n/**\n * Given an operator, returns the set of operators that cannot be used with it.\n *\n * This is not a comprehensive check, and this function should be removed in the\n * long term. Validations should occur in the Firestore backend.\n *\n * Operators in a query must adhere to the following set of rules:\n * 1. Only one inequality per query.\n * 2. 
`NOT_IN` cannot be used with array, disjunctive, or `NOT_EQUAL` operators.\n */\nfunction __PRIVATE_validateNewFieldFilter(e, t) {\n const n = function __PRIVATE_findOpInsideFilters(e, t) {\n for (const n of e) for (const e of n.getFlattenedFilters()) if (t.indexOf(e.op) >= 0) return e.op;\n return null;\n }(e.filters, function __PRIVATE_conflictingOps(e) {\n switch (e) {\n case \"!=\" /* Operator.NOT_EQUAL */:\n return [\"!=\" /* Operator.NOT_EQUAL */, \"not-in\" /* Operator.NOT_IN */];\n case \"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */:\n case \"in\" /* Operator.IN */:\n return [\"not-in\" /* Operator.NOT_IN */];\n case \"not-in\" /* Operator.NOT_IN */:\n return [\"array-contains-any\" /* Operator.ARRAY_CONTAINS_ANY */, \"in\" /* Operator.IN */, \"not-in\" /* Operator.NOT_IN */, \"!=\" /* Operator.NOT_EQUAL */];\n default:\n return [];\n }\n }(t.op));\n if (null !== n)\n // Special case when it's a duplicate op to give a slightly clearer error message.\n throw n === t.op ? new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. You cannot use more than one '${t.op.toString()}' filter.`) : new FirestoreError(C.INVALID_ARGUMENT, `Invalid query. You cannot use '${t.op.toString()}' filters with '${n.toString()}' filters.`);\n}\nfunction __PRIVATE_validateQueryFilterConstraint(e, t) {\n if (!(t instanceof QueryFieldFilterConstraint || t instanceof QueryCompositeFilterConstraint)) throw new FirestoreError(C.INVALID_ARGUMENT, `Function ${e}() requires AppliableConstraints created with a call to 'where(...)', 'or(...)', or 'and(...)'.`);\n}\nclass AbstractUserDataWriter {\n convertValue(e, t = \"none\") {\n switch (__PRIVATE_typeOrder(e)) {\n case 0 /* TypeOrder.NullValue */:\n return null;\n case 1 /* TypeOrder.BooleanValue */:\n return e.booleanValue;\n case 2 /* TypeOrder.NumberValue */:\n return __PRIVATE_normalizeNumber(e.integerValue || e.doubleValue);\n case 3 /* TypeOrder.TimestampValue */:\n return this.convertTimestamp(e.timestampValue);\n case 4 /* TypeOrder.ServerTimestampValue */:\n return this.convertServerTimestamp(e, t);\n case 5 /* TypeOrder.StringValue */:\n return e.stringValue;\n case 6 /* TypeOrder.BlobValue */:\n return this.convertBytes(__PRIVATE_normalizeByteString(e.bytesValue));\n case 7 /* TypeOrder.RefValue */:\n return this.convertReference(e.referenceValue);\n case 8 /* TypeOrder.GeoPointValue */:\n return this.convertGeoPoint(e.geoPointValue);\n case 9 /* TypeOrder.ArrayValue */:\n return this.convertArray(e.arrayValue, t);\n case 10 /* TypeOrder.ObjectValue */:\n return this.convertObject(e.mapValue, t);\n default:\n throw fail();\n }\n }\n convertObject(e, t) {\n return this.convertObjectMap(e.fields, t);\n }\n /**\n * @internal\n */\n convertObjectMap(e, t = \"none\") {\n const n = {};\n return forEach(e, (e, r) => {\n n[e] = this.convertValue(r, t);\n }), n;\n }\n convertGeoPoint(e) {\n return new GeoPoint(__PRIVATE_normalizeNumber(e.latitude), __PRIVATE_normalizeNumber(e.longitude));\n }\n convertArray(e, t) {\n return (e.values || []).map(e => this.convertValue(e, t));\n }\n convertServerTimestamp(e, t) {\n switch (t) {\n case \"previous\":\n const n = __PRIVATE_getPreviousValue(e);\n return null == n ? 
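/* 'previous' falls back to the value the field held before the pending write (recursively converted, since that value may itself be a server timestamp), 'estimate' substitutes the local write time, and the default returns null while the server timestamp is outstanding. Illustrative read, assuming a DocumentSnapshot `snap`: snap.data({ serverTimestamps: 'estimate' }). */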
null : this.convertValue(n, t);\n case \"estimate\":\n return this.convertTimestamp(__PRIVATE_getLocalWriteTime(e));\n default:\n return null;\n }\n }\n convertTimestamp(e) {\n const t = __PRIVATE_normalizeTimestamp(e);\n return new Timestamp(t.seconds, t.nanos);\n }\n convertDocumentKey(e, t) {\n const n = ResourcePath.fromString(e);\n __PRIVATE_hardAssert(__PRIVATE_isValidResourceName(n));\n const r = new DatabaseId(n.get(1), n.get(3)),\n i = new DocumentKey(n.popFirst(5));\n return r.isEqual(t) ||\n // TODO(b/64130202): Somehow support foreign references.\n __PRIVATE_logError(`Document ${i} contains a document reference within a different database (${r.projectId}/${r.database}) which is not supported. It will be treated as a reference in the current database (${t.projectId}/${t.database}) instead.`), i;\n }\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Converts custom model object of type T into `DocumentData` by applying the\n * converter if it exists.\n *\n * This function is used when converting user objects to `DocumentData`\n * because we want to provide the user with a more specific error message if\n * their `set()` or fails due to invalid data originating from a `toFirestore()`\n * call.\n */\nfunction __PRIVATE_applyFirestoreDataConverter(e, t, n) {\n let r;\n // Cast to `any` in order to satisfy the union type constraint on\n // toFirestore().\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return r = e ? n && (n.merge || n.mergeFields) ? 
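/* For merge writes the options object is forwarded to toFirestore() as a second argument; otherwise toFirestore() is called with the model object alone, and with no converter the data is passed through unchanged. Illustrative converter, assuming an initialized Firestore instance `db`:\n   const postConverter = {\n     toFirestore: (post, options) => ({ title: post.title }),\n     fromFirestore: (snapshot, options) => snapshot.data(options)\n   };\n   setDoc(doc(db, 'posts', 'p1').withConverter(postConverter), { title: 'hello' }, { merge: true }); */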
e.toFirestore(t, n) : e.toFirestore(t) : t, r;\n}\nclass __PRIVATE_LiteUserDataWriter extends AbstractUserDataWriter {\n constructor(e) {\n super(), this.firestore = e;\n }\n convertBytes(e) {\n return new Bytes(e);\n }\n convertReference(e) {\n const t = this.convertDocumentKey(e, this.firestore._databaseId);\n return new DocumentReference(this.firestore, /* converter= */null, t);\n }\n}\n\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Create an AggregateField object that can be used to compute the sum of\n * a specified field over a range of documents in the result set of a query.\n * @param field Specifies the field to sum across the result set.\n */\nfunction sum(e) {\n return new AggregateField(\"sum\", __PRIVATE_fieldPathFromArgument$1(\"sum\", e));\n}\n\n/**\n * Create an AggregateField object that can be used to compute the average of\n * a specified field over a range of documents in the result set of a query.\n * @param field Specifies the field to average across the result set.\n */\nfunction average(e) {\n return new AggregateField(\"avg\", __PRIVATE_fieldPathFromArgument$1(\"average\", e));\n}\n\n/**\n * Create an AggregateField object that can be used to compute the count of\n * documents in the result set of a query.\n */\nfunction count() {\n return new AggregateField(\"count\");\n}\n\n/**\n * Compares two 'AggregateField` instances for equality.\n *\n * @param left Compare this AggregateField to the `right`.\n * @param right Compare this AggregateField to the `left`.\n */\nfunction aggregateFieldEqual(e, t) {\n var n, r;\n return e instanceof AggregateField && t instanceof AggregateField && e.aggregateType === t.aggregateType && (null === (n = e._internalFieldPath) || void 0 === n ? void 0 : n.canonicalString()) === (null === (r = t._internalFieldPath) || void 0 === r ? 
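/* Two AggregateFields are equal when they use the same aggregation type and, for sum() and average(), the same field path; count() carries no field path, so both canonical strings are undefined. Illustrative: aggregateFieldEqual(sum('pop'), sum('pop')) is true, while aggregateFieldEqual(sum('pop'), average('pop')) is false. */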
void 0 : r.canonicalString());\n}\n\n/**\n * Compares two `AggregateQuerySnapshot` instances for equality.\n *\n * Two `AggregateQuerySnapshot` instances are considered \"equal\" if they have\n * underlying queries that compare equal, and the same data.\n *\n * @param left - The first `AggregateQuerySnapshot` to compare.\n * @param right - The second `AggregateQuerySnapshot` to compare.\n *\n * @returns `true` if the objects are \"equal\", as defined above, or `false`\n * otherwise.\n */\nfunction aggregateQuerySnapshotEqual(e, t) {\n return queryEqual(e.query, t.query) && deepEqual(e.data(), t.data());\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Metadata about a snapshot, describing the state of the snapshot.\n */\nclass SnapshotMetadata {\n /** @hideconstructor */\n constructor(e, t) {\n this.hasPendingWrites = e, this.fromCache = t;\n }\n /**\n * Returns true if this `SnapshotMetadata` is equal to the provided one.\n *\n * @param other - The `SnapshotMetadata` to compare against.\n * @returns true if this `SnapshotMetadata` is equal to the provided one.\n */\n isEqual(e) {\n return this.hasPendingWrites === e.hasPendingWrites && this.fromCache === e.fromCache;\n }\n}\n\n/**\n * A `DocumentSnapshot` contains data read from a document in your Firestore\n * database. The data can be extracted with `.data()` or `.get()` to\n * get a specific field.\n *\n * For a `DocumentSnapshot` that points to a non-existing document, any data\n * access will return 'undefined'. You can use the `exists()` method to\n * explicitly verify a document's existence.\n */\nclass DocumentSnapshot extends DocumentSnapshot$1 {\n /** @hideconstructor protected */\n constructor(e, t, n, r, i, s) {\n super(e, t, n, r, s), this._firestore = e, this._firestoreImpl = e, this.metadata = i;\n }\n /**\n * Returns whether or not the data exists. True if the document exists.\n */\n exists() {\n return super.exists();\n }\n /**\n * Retrieves all fields in the document as an `Object`. Returns `undefined` if\n * the document doesn't exist.\n *\n * By default, `serverTimestamp()` values that have not yet been\n * set to their final value will be returned as `null`. 
You can override\n * this by passing an options object.\n *\n * @param options - An options object to configure how data is retrieved from\n * the snapshot (for example the desired behavior for server timestamps that\n * have not yet been set to their final value).\n * @returns An `Object` containing all fields in the document or `undefined` if\n * the document doesn't exist.\n */\n data(e = {}) {\n if (this._document) {\n if (this._converter) {\n // We only want to use the converter and create a new DocumentSnapshot\n // if a converter has been provided.\n const t = new QueryDocumentSnapshot(this._firestore, this._userDataWriter, this._key, this._document, this.metadata, /* converter= */null);\n return this._converter.fromFirestore(t, e);\n }\n return this._userDataWriter.convertValue(this._document.data.value, e.serverTimestamps);\n }\n }\n /**\n * Retrieves the field specified by `fieldPath`. Returns `undefined` if the\n * document or field doesn't exist.\n *\n * By default, a `serverTimestamp()` that has not yet been set to\n * its final value will be returned as `null`. You can override this by\n * passing an options object.\n *\n * @param fieldPath - The path (for example 'foo' or 'foo.bar') to a specific\n * field.\n * @param options - An options object to configure how the field is retrieved\n * from the snapshot (for example the desired behavior for server timestamps\n * that have not yet been set to their final value).\n * @returns The data at the specified field location or undefined if no such\n * field exists in the document.\n */\n // We are using `any` here to avoid an explicit cast by our users.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n get(e, t = {}) {\n if (this._document) {\n const n = this._document.data.field(__PRIVATE_fieldPathFromArgument(\"DocumentSnapshot.get\", e));\n if (null !== n) return this._userDataWriter.convertValue(n, t.serverTimestamps);\n }\n }\n}\n\n/**\n * A `QueryDocumentSnapshot` contains data read from a document in your\n * Firestore database as part of a query. The document is guaranteed to exist\n * and its data can be extracted with `.data()` or `.get()` to get a\n * specific field.\n *\n * A `QueryDocumentSnapshot` offers the same API surface as a\n * `DocumentSnapshot`. Since query results contain only existing documents, the\n * `exists` property will always be true and `data()` will never return\n * 'undefined'.\n */\nclass QueryDocumentSnapshot extends DocumentSnapshot {\n /**\n * Retrieves all fields in the document as an `Object`.\n *\n * By default, `serverTimestamp()` values that have not yet been\n * set to their final value will be returned as `null`. You can override\n * this by passing an options object.\n *\n * @override\n * @param options - An options object to configure how data is retrieved from\n * the snapshot (for example the desired behavior for server timestamps that\n * have not yet been set to their final value).\n * @returns An `Object` containing all fields in the document.\n */\n data(e = {}) {\n return super.data(e);\n }\n}\n\n/**\n * A `QuerySnapshot` contains zero or more `DocumentSnapshot` objects\n * representing the results of a query. The documents can be accessed as an\n * array via the `docs` property or enumerated using the `forEach` method. 
The\n * number of documents can be determined via the `empty` and `size`\n * properties.\n */\nclass QuerySnapshot {\n /** @hideconstructor */\n constructor(e, t, n, r) {\n this._firestore = e, this._userDataWriter = t, this._snapshot = r, this.metadata = new SnapshotMetadata(r.hasPendingWrites, r.fromCache), this.query = n;\n }\n /** An array of all the documents in the `QuerySnapshot`. */\n get docs() {\n const e = [];\n return this.forEach(t => e.push(t)), e;\n }\n /** The number of documents in the `QuerySnapshot`. */\n get size() {\n return this._snapshot.docs.size;\n }\n /** True if there are no documents in the `QuerySnapshot`. */\n get empty() {\n return 0 === this.size;\n }\n /**\n * Enumerates all of the documents in the `QuerySnapshot`.\n *\n * @param callback - A callback to be called with a `QueryDocumentSnapshot` for\n * each document in the snapshot.\n * @param thisArg - The `this` binding for the callback.\n */\n forEach(e, t) {\n this._snapshot.docs.forEach(n => {\n e.call(t, new QueryDocumentSnapshot(this._firestore, this._userDataWriter, n.key, n, new SnapshotMetadata(this._snapshot.mutatedKeys.has(n.key), this._snapshot.fromCache), this.query.converter));\n });\n }\n /**\n * Returns an array of the documents changes since the last snapshot. If this\n * is the first snapshot, all documents will be in the list as 'added'\n * changes.\n *\n * @param options - `SnapshotListenOptions` that control whether metadata-only\n * changes (i.e. only `DocumentSnapshot.metadata` changed) should trigger\n * snapshot events.\n */\n docChanges(e = {}) {\n const t = !!e.includeMetadataChanges;\n if (t && this._snapshot.excludesMetadataChanges) throw new FirestoreError(C.INVALID_ARGUMENT, \"To include metadata changes with your document changes, you must also pass { includeMetadataChanges:true } to onSnapshot().\");\n return this._cachedChanges && this._cachedChangesIncludeMetadataChanges === t || (this._cachedChanges = /** Calculates the array of `DocumentChange`s for a given `ViewSnapshot`. 
*/\n function __PRIVATE_changesFromSnapshot(e, t) {\n if (e._snapshot.oldDocs.isEmpty()) {\n let t = 0;\n return e._snapshot.docChanges.map(n => {\n const r = new QueryDocumentSnapshot(e._firestore, e._userDataWriter, n.doc.key, n.doc, new SnapshotMetadata(e._snapshot.mutatedKeys.has(n.doc.key), e._snapshot.fromCache), e.query.converter);\n return n.doc, {\n type: \"added\",\n doc: r,\n oldIndex: -1,\n newIndex: t++\n };\n });\n }\n {\n // A `DocumentSet` that is updated incrementally as changes are applied to use\n // to lookup the index of a document.\n let n = e._snapshot.oldDocs;\n return e._snapshot.docChanges.filter(e => t || 3 /* ChangeType.Metadata */ !== e.type).map(t => {\n const r = new QueryDocumentSnapshot(e._firestore, e._userDataWriter, t.doc.key, t.doc, new SnapshotMetadata(e._snapshot.mutatedKeys.has(t.doc.key), e._snapshot.fromCache), e.query.converter);\n let i = -1,\n s = -1;\n return 0 /* ChangeType.Added */ !== t.type && (i = n.indexOf(t.doc.key), n = n.delete(t.doc.key)), 1 /* ChangeType.Removed */ !== t.type && (n = n.add(t.doc), s = n.indexOf(t.doc.key)), {\n type: __PRIVATE_resultChangeType(t.type),\n doc: r,\n oldIndex: i,\n newIndex: s\n };\n });\n }\n }(this, t), this._cachedChangesIncludeMetadataChanges = t), this._cachedChanges;\n }\n}\nfunction __PRIVATE_resultChangeType(e) {\n switch (e) {\n case 0 /* ChangeType.Added */:\n return \"added\";\n case 2 /* ChangeType.Modified */:\n case 3 /* ChangeType.Metadata */:\n return \"modified\";\n case 1 /* ChangeType.Removed */:\n return \"removed\";\n default:\n return fail();\n }\n}\n\n// TODO(firestoreexp): Add tests for snapshotEqual with different snapshot\n// metadata\n/**\n * Returns true if the provided snapshots are equal.\n *\n * @param left - A snapshot to compare.\n * @param right - A snapshot to compare.\n * @returns true if the snapshots are equal.\n */\nfunction snapshotEqual(e, t) {\n return e instanceof DocumentSnapshot && t instanceof DocumentSnapshot ? e._firestore === t._firestore && e._key.isEqual(t._key) && (null === e._document ? null === t._document : e._document.isEqual(t._document)) && e._converter === t._converter : e instanceof QuerySnapshot && t instanceof QuerySnapshot && e._firestore === t._firestore && queryEqual(e.query, t.query) && e.metadata.isEqual(t.metadata) && e._snapshot.isEqual(t._snapshot);\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Reads the document referred to by this `DocumentReference`.\n *\n * Note: `getDoc()` attempts to provide up-to-date data when possible by waiting\n * for data from the server, but it may return cached data or fail if you are\n * offline and the server cannot be reached. 
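/**
 * A minimal sketch of consuming a `QuerySnapshot` via `forEach()` and `docChanges()`,
 * assuming a hypothetical `cities` collection and a `db` instance; the
 * `includeMetadataChanges` flag matches the option described above.
 *
 * @example
 * ```typescript
 * import { collection, onSnapshot, query, where } from 'firebase/firestore';
 *
 * const q = query(collection(db, 'cities'), where('state', '==', 'CA'));
 * const unsubscribe = onSnapshot(q, { includeMetadataChanges: true }, snapshot => {
 *   snapshot.forEach(docSnap => console.log(docSnap.id, docSnap.data()));
 *   for (const change of snapshot.docChanges({ includeMetadataChanges: true })) {
 *     console.log(change.type, change.doc.id, change.oldIndex, change.newIndex);
 *   }
 * });
 * // Later: stop listening.
 * unsubscribe();
 * ```
 */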
To specify this behavior, invoke\n * {@link getDocFromCache} or {@link getDocFromServer}.\n *\n * @param reference - The reference of the document to fetch.\n * @returns A Promise resolved with a `DocumentSnapshot` containing the\n * current document contents.\n */\nfunction getDoc(e) {\n e = __PRIVATE_cast(e, DocumentReference);\n const t = __PRIVATE_cast(e.firestore, Firestore);\n return __PRIVATE_firestoreClientGetDocumentViaSnapshotListener(ensureFirestoreConfigured(t), e._key).then(n => __PRIVATE_convertToDocSnapshot(t, e, n));\n}\nclass __PRIVATE_ExpUserDataWriter extends AbstractUserDataWriter {\n constructor(e) {\n super(), this.firestore = e;\n }\n convertBytes(e) {\n return new Bytes(e);\n }\n convertReference(e) {\n const t = this.convertDocumentKey(e, this.firestore._databaseId);\n return new DocumentReference(this.firestore, /* converter= */null, t);\n }\n}\n\n/**\n * Reads the document referred to by this `DocumentReference` from cache.\n * Returns an error if the document is not currently cached.\n *\n * @returns A `Promise` resolved with a `DocumentSnapshot` containing the\n * current document contents.\n */\nfunction getDocFromCache(e) {\n e = __PRIVATE_cast(e, DocumentReference);\n const t = __PRIVATE_cast(e.firestore, Firestore),\n n = ensureFirestoreConfigured(t),\n r = new __PRIVATE_ExpUserDataWriter(t);\n return __PRIVATE_firestoreClientGetDocumentFromLocalCache(n, e._key).then(n => new DocumentSnapshot(t, r, e._key, n, new SnapshotMetadata(null !== n && n.hasLocalMutations, /* fromCache= */!0), e.converter));\n}\n\n/**\n * Reads the document referred to by this `DocumentReference` from the server.\n * Returns an error if the network is not available.\n *\n * @returns A `Promise` resolved with a `DocumentSnapshot` containing the\n * current document contents.\n */\nfunction getDocFromServer(e) {\n e = __PRIVATE_cast(e, DocumentReference);\n const t = __PRIVATE_cast(e.firestore, Firestore);\n return __PRIVATE_firestoreClientGetDocumentViaSnapshotListener(ensureFirestoreConfigured(t), e._key, {\n source: \"server\"\n }).then(n => __PRIVATE_convertToDocSnapshot(t, e, n));\n}\n\n/**\n * Executes the query and returns the results as a `QuerySnapshot`.\n *\n * Note: `getDocs()` attempts to provide up-to-date data when possible by\n * waiting for data from the server, but it may return cached data or fail if\n * you are offline and the server cannot be reached. 
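/**
 * A minimal sketch contrasting `getDoc()`, `getDocFromCache()`, and
 * `getDocFromServer()`, assuming a hypothetical `cities/SF` document.
 *
 * @example
 * ```typescript
 * import { doc, getDoc, getDocFromCache, getDocFromServer } from 'firebase/firestore';
 *
 * const ref = doc(db, 'cities', 'SF');
 * // Prefers fresh server data but may fall back to the cache when offline.
 * const snap = await getDoc(ref);
 * // Cache only; rejects if the document is not currently cached.
 * const cached = await getDocFromCache(ref).catch(() => null);
 * // Server only; rejects while offline.
 * const fromServer = await getDocFromServer(ref);
 * console.log(snap.metadata.fromCache, cached?.exists(), fromServer.data());
 * ```
 */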
To specify this behavior,\n * invoke {@link getDocsFromCache} or {@link getDocsFromServer}.\n *\n * @returns A `Promise` that will be resolved with the results of the query.\n */\nfunction getDocs(e) {\n e = __PRIVATE_cast(e, Query);\n const t = __PRIVATE_cast(e.firestore, Firestore),\n n = ensureFirestoreConfigured(t),\n r = new __PRIVATE_ExpUserDataWriter(t);\n return __PRIVATE_validateHasExplicitOrderByForLimitToLast(e._query), __PRIVATE_firestoreClientGetDocumentsViaSnapshotListener(n, e._query).then(n => new QuerySnapshot(t, r, e, n));\n}\n\n/**\n * Executes the query and returns the results as a `QuerySnapshot` from cache.\n * Returns an empty result set if no documents matching the query are currently\n * cached.\n *\n * @returns A `Promise` that will be resolved with the results of the query.\n */\nfunction getDocsFromCache(e) {\n e = __PRIVATE_cast(e, Query);\n const t = __PRIVATE_cast(e.firestore, Firestore),\n n = ensureFirestoreConfigured(t),\n r = new __PRIVATE_ExpUserDataWriter(t);\n return __PRIVATE_firestoreClientGetDocumentsFromLocalCache(n, e._query).then(n => new QuerySnapshot(t, r, e, n));\n}\n\n/**\n * Executes the query and returns the results as a `QuerySnapshot` from the\n * server. Returns an error if the network is not available.\n *\n * @returns A `Promise` that will be resolved with the results of the query.\n */\nfunction getDocsFromServer(e) {\n e = __PRIVATE_cast(e, Query);\n const t = __PRIVATE_cast(e.firestore, Firestore),\n n = ensureFirestoreConfigured(t),\n r = new __PRIVATE_ExpUserDataWriter(t);\n return __PRIVATE_firestoreClientGetDocumentsViaSnapshotListener(n, e._query, {\n source: \"server\"\n }).then(n => new QuerySnapshot(t, r, e, n));\n}\nfunction setDoc(e, t, n) {\n e = __PRIVATE_cast(e, DocumentReference);\n const r = __PRIVATE_cast(e.firestore, Firestore),\n i = __PRIVATE_applyFirestoreDataConverter(e.converter, t, n);\n return executeWrite(r, [__PRIVATE_parseSetData(__PRIVATE_newUserDataReader(r), \"setDoc\", e._key, i, null !== e.converter, n).toMutation(e._key, Precondition.none())]);\n}\nfunction updateDoc(e, t, n, ...r) {\n e = __PRIVATE_cast(e, DocumentReference);\n const i = __PRIVATE_cast(e.firestore, Firestore),\n s = __PRIVATE_newUserDataReader(i);\n let o;\n o = \"string\" == typeof (\n // For Compat types, we have to \"extract\" the underlying types before\n // performing validation.\n t = getModularInstance(t)) || t instanceof FieldPath ? 
__PRIVATE_parseUpdateVarargs(s, \"updateDoc\", e._key, t, n, r) : __PRIVATE_parseUpdateData(s, \"updateDoc\", e._key, t);\n return executeWrite(i, [o.toMutation(e._key, Precondition.exists(!0))]);\n}\n\n/**\n * Deletes the document referred to by the specified `DocumentReference`.\n *\n * @param reference - A reference to the document to delete.\n * @returns A Promise resolved once the document has been successfully\n * deleted from the backend (note that it won't resolve while you're offline).\n */\nfunction deleteDoc(e) {\n return executeWrite(__PRIVATE_cast(e.firestore, Firestore), [new __PRIVATE_DeleteMutation(e._key, Precondition.none())]);\n}\n\n/**\n * Add a new document to specified `CollectionReference` with the given data,\n * assigning it a document ID automatically.\n *\n * @param reference - A reference to the collection to add this document to.\n * @param data - An Object containing the data for the new document.\n * @returns A `Promise` resolved with a `DocumentReference` pointing to the\n * newly created document after it has been written to the backend (Note that it\n * won't resolve while you're offline).\n */\nfunction addDoc(e, t) {\n const n = __PRIVATE_cast(e.firestore, Firestore),\n r = doc(e),\n i = __PRIVATE_applyFirestoreDataConverter(e.converter, t);\n return executeWrite(n, [__PRIVATE_parseSetData(__PRIVATE_newUserDataReader(e.firestore), \"addDoc\", r._key, i, null !== e.converter, {}).toMutation(r._key, Precondition.exists(!1))]).then(() => r);\n}\nfunction onSnapshot(e, ...t) {\n var n, r, i;\n e = getModularInstance(e);\n let s = {\n includeMetadataChanges: !1,\n source: \"default\"\n },\n o = 0;\n \"object\" != typeof t[o] || __PRIVATE_isPartialObserver(t[o]) || (s = t[o], o++);\n const _ = {\n includeMetadataChanges: s.includeMetadataChanges,\n source: s.source\n };\n if (__PRIVATE_isPartialObserver(t[o])) {\n const e = t[o];\n t[o] = null === (n = e.next) || void 0 === n ? void 0 : n.bind(e), t[o + 1] = null === (r = e.error) || void 0 === r ? void 0 : r.bind(e), t[o + 2] = null === (i = e.complete) || void 0 === i ? void 0 : i.bind(e);\n }\n let a, u, c;\n if (e instanceof DocumentReference) u = __PRIVATE_cast(e.firestore, Firestore), c = __PRIVATE_newQueryForPath(e._key.path), a = {\n next: n => {\n t[o] && t[o](__PRIVATE_convertToDocSnapshot(u, e, n));\n },\n error: t[o + 1],\n complete: t[o + 2]\n };else {\n const n = __PRIVATE_cast(e, Query);\n u = __PRIVATE_cast(n.firestore, Firestore), c = n._query;\n const r = new __PRIVATE_ExpUserDataWriter(u);\n a = {\n next: e => {\n t[o] && t[o](new QuerySnapshot(u, r, n, e));\n },\n error: t[o + 1],\n complete: t[o + 2]\n }, __PRIVATE_validateHasExplicitOrderByForLimitToLast(e._query);\n }\n return function __PRIVATE_firestoreClientListen(e, t, n, r) {\n const i = new __PRIVATE_AsyncObserver(r),\n s = new __PRIVATE_QueryListener(t, i, n);\n return e.asyncQueue.enqueueAndForget(async () => __PRIVATE_eventManagerListen(await __PRIVATE_getEventManager(e), s)), () => {\n i.$a(), e.asyncQueue.enqueueAndForget(async () => __PRIVATE_eventManagerUnlisten(await __PRIVATE_getEventManager(e), s));\n };\n }(ensureFirestoreConfigured(u), c, _, a);\n}\nfunction onSnapshotsInSync(e, t) {\n return __PRIVATE_firestoreClientAddSnapshotsInSyncListener(ensureFirestoreConfigured(e = __PRIVATE_cast(e, Firestore)), __PRIVATE_isPartialObserver(t) ? 
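/**
 * A minimal write-path sketch for `setDoc()`, `updateDoc()`, `addDoc()`, and
 * `deleteDoc()`, assuming a hypothetical `cities` collection; data values are
 * illustrative only.
 *
 * @example
 * ```typescript
 * import { addDoc, collection, deleteDoc, doc, setDoc, updateDoc } from 'firebase/firestore';
 *
 * const sf = doc(db, 'cities', 'SF');
 * await setDoc(sf, { name: 'San Francisco', population: 800000 });
 * await setDoc(sf, { capital: false }, { merge: true }); // merge keeps existing fields
 * await updateDoc(sf, { population: 815000 });            // fails if the doc is missing
 * await updateDoc(sf, 'stats.updated', Date.now());       // field-path/value varargs form
 * const newRef = await addDoc(collection(db, 'cities'), { name: 'Oakland' });
 * await deleteDoc(newRef);
 * ```
 */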
t : {\n next: t\n });\n}\n\n/**\n * Locally writes `mutations` on the async queue.\n * @internal\n */\nfunction executeWrite(e, t) {\n return function __PRIVATE_firestoreClientWrite(e, t) {\n const n = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueueAndForget(async () => __PRIVATE_syncEngineWrite(await __PRIVATE_getSyncEngine(e), t, n)), n.promise;\n }(ensureFirestoreConfigured(e), t);\n}\n\n/**\n * Converts a {@link ViewSnapshot} that contains the single document specified by `ref`\n * to a {@link DocumentSnapshot}.\n */\nfunction __PRIVATE_convertToDocSnapshot(e, t, n) {\n const r = n.docs.get(t._key),\n i = new __PRIVATE_ExpUserDataWriter(e);\n return new DocumentSnapshot(e, i, t._key, r, new SnapshotMetadata(n.hasPendingWrites, n.fromCache), t.converter);\n}\n\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Calculates the number of documents in the result set of the given query\n * without actually downloading the documents.\n *\n * Using this function to count the documents is efficient because only the\n * final count, not the documents' data, is downloaded. This function can\n * count the documents in cases where the result set is prohibitively large to\n * download entirely (thousands of documents).\n *\n * The result received from the server is presented, unaltered, without\n * considering any local state. That is, documents in the local cache are not\n * taken into consideration, neither are local modifications not yet\n * synchronized with the server. Previously-downloaded results, if any, are not\n * used. Every invocation of this function necessarily involves a round trip to\n * the server.\n *\n * @param query The query whose result set size is calculated.\n * @returns A Promise that will be resolved with the count; the count can be\n * retrieved from `snapshot.data().count`, where `snapshot` is the\n * `AggregateQuerySnapshot` to which the returned Promise resolves.\n */\nfunction getCountFromServer(e) {\n return getAggregateFromServer(e, {\n count: count()\n });\n}\n\n/**\n * Calculates the specified aggregations over the documents in the result\n * set of the given query without actually downloading the documents.\n *\n * Using this function to perform aggregations is efficient because only the\n * final aggregation values, not the documents' data, are downloaded. This\n * function can perform aggregations of the documents in cases where the result\n * set is prohibitively large to download entirely (thousands of documents).\n *\n * The result received from the server is presented, unaltered, without\n * considering any local state. That is, documents in the local cache are not\n * taken into consideration, neither are local modifications not yet\n * synchronized with the server. Previously-downloaded results, if any, are not\n * used. 
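/**
 * A minimal sketch for `getCountFromServer()`, which resolves with only the
 * aggregate count and no document data; assumes a hypothetical `cities`
 * collection and a `db` instance.
 *
 * @example
 * ```typescript
 * import { collection, getCountFromServer, query, where } from 'firebase/firestore';
 *
 * const q = query(collection(db, 'cities'), where('state', '==', 'CA'));
 * const snapshot = await getCountFromServer(q);
 * console.log('matching documents:', snapshot.data().count);
 * ```
 */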
Every invocation of this function necessarily involves a round trip to\n * the server.\n *\n * @param query The query whose result set is aggregated over.\n * @param aggregateSpec An `AggregateSpec` object that specifies the aggregates\n * to perform over the result set. The AggregateSpec specifies aliases for each\n * aggregate, which can be used to retrieve the aggregate result.\n * @example\n * ```typescript\n * const aggregateSnapshot = await getAggregateFromServer(query, {\n * countOfDocs: count(),\n * totalHours: sum('hours'),\n * averageScore: average('score')\n * });\n *\n * const countOfDocs: number = aggregateSnapshot.data().countOfDocs;\n * const totalHours: number = aggregateSnapshot.data().totalHours;\n * const averageScore: number | null = aggregateSnapshot.data().averageScore;\n * ```\n */\nfunction getAggregateFromServer(e, t) {\n const n = __PRIVATE_cast(e.firestore, Firestore),\n r = ensureFirestoreConfigured(n),\n i = __PRIVATE_mapToArray(t, (e, t) => new __PRIVATE_AggregateImpl(t, e.aggregateType, e._internalFieldPath));\n // Run the aggregation and convert the results\n return __PRIVATE_firestoreClientRunAggregateQuery(r, e._query, i).then(t =>\n /**\n * Converts the core aggregration result to an `AggregateQuerySnapshot`\n * that can be returned to the consumer.\n * @param query\n * @param aggregateResult Core aggregation result\n * @internal\n */\n function __PRIVATE_convertToAggregateQuerySnapshot(e, t, n) {\n const r = new __PRIVATE_ExpUserDataWriter(e);\n return new AggregateQuerySnapshot(t, r, n);\n }\n /**\n * @license\n * Copyright 2023 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */(n, e, t));\n}\nclass __PRIVATE_MemoryLocalCacheImpl {\n constructor(e) {\n this.kind = \"memory\", this._onlineComponentProvider = new OnlineComponentProvider(), (null == e ? void 0 : e.garbageCollector) ? this._offlineComponentProvider = e.garbageCollector._offlineComponentProvider : this._offlineComponentProvider = new MemoryOfflineComponentProvider();\n }\n toJSON() {\n return {\n kind: this.kind\n };\n }\n}\nclass __PRIVATE_PersistentLocalCacheImpl {\n constructor(e) {\n let t;\n this.kind = \"persistent\", (null == e ? void 0 : e.tabManager) ? 
(e.tabManager._initialize(e), t = e.tabManager) : (t = persistentSingleTabManager(void 0), t._initialize(e)), this._onlineComponentProvider = t._onlineComponentProvider, this._offlineComponentProvider = t._offlineComponentProvider;\n }\n toJSON() {\n return {\n kind: this.kind\n };\n }\n}\nclass __PRIVATE_MemoryEagerGabageCollectorImpl {\n constructor() {\n this.kind = \"memoryEager\", this._offlineComponentProvider = new MemoryOfflineComponentProvider();\n }\n toJSON() {\n return {\n kind: this.kind\n };\n }\n}\nclass __PRIVATE_MemoryLruGabageCollectorImpl {\n constructor(e) {\n this.kind = \"memoryLru\", this._offlineComponentProvider = new __PRIVATE_LruGcMemoryOfflineComponentProvider(e);\n }\n toJSON() {\n return {\n kind: this.kind\n };\n }\n}\n\n/**\n * Creates an instance of `MemoryEagerGarbageCollector`. This is also the\n * default garbage collector unless it is explicitly specified otherwise.\n */\nfunction memoryEagerGarbageCollector() {\n return new __PRIVATE_MemoryEagerGabageCollectorImpl();\n}\n\n/**\n * Creates an instance of `MemoryLruGarbageCollector`.\n *\n * A target size can be specified as part of the setting parameter. The\n * collector will start deleting documents once the cache size exceeds\n * the given size. The default cache size is 40MB (40 * 1024 * 1024 bytes).\n */\nfunction memoryLruGarbageCollector(e) {\n return new __PRIVATE_MemoryLruGabageCollectorImpl(null == e ? void 0 : e.cacheSizeBytes);\n}\n\n/**\n * Creates an instance of `MemoryLocalCache`. The instance can be set to\n * `FirestoreSettings.cache` to tell the SDK which cache layer to use.\n */\nfunction memoryLocalCache(e) {\n return new __PRIVATE_MemoryLocalCacheImpl(e);\n}\n\n/**\n * Creates an instance of `PersistentLocalCache`. The instance can be set to\n * `FirestoreSettings.cache` to tell the SDK which cache layer to use.\n *\n * Persistent cache cannot be used in a Node.js environment.\n */\nfunction persistentLocalCache(e) {\n return new __PRIVATE_PersistentLocalCacheImpl(e);\n}\nclass __PRIVATE_SingleTabManagerImpl {\n constructor(e) {\n this.forceOwnership = e, this.kind = \"persistentSingleTab\";\n }\n toJSON() {\n return {\n kind: this.kind\n };\n }\n /**\n * @internal\n */\n _initialize(e) {\n this._onlineComponentProvider = new OnlineComponentProvider(), this._offlineComponentProvider = new __PRIVATE_IndexedDbOfflineComponentProvider(this._onlineComponentProvider, null == e ? void 0 : e.cacheSizeBytes, this.forceOwnership);\n }\n}\nclass __PRIVATE_MultiTabManagerImpl {\n constructor() {\n this.kind = \"PersistentMultipleTab\";\n }\n toJSON() {\n return {\n kind: this.kind\n };\n }\n /**\n * @internal\n */\n _initialize(e) {\n this._onlineComponentProvider = new OnlineComponentProvider(), this._offlineComponentProvider = new __PRIVATE_MultiTabOfflineComponentProvider(this._onlineComponentProvider, null == e ? void 0 : e.cacheSizeBytes);\n }\n}\n\n/**\n * Creates an instance of `PersistentSingleTabManager`.\n *\n * @param settings Configures the created tab manager.\n */\nfunction persistentSingleTabManager(e) {\n return new __PRIVATE_SingleTabManagerImpl(null == e ? 
void 0 : e.forceOwnership);\n}\n\n/**\n * Creates an instance of `PersistentMultipleTabManager`.\n */\nfunction persistentMultipleTabManager() {\n return new __PRIVATE_MultiTabManagerImpl();\n}\n\n/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nconst De = {\n maxAttempts: 5\n};\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A write batch, used to perform multiple writes as a single atomic unit.\n *\n * A `WriteBatch` object can be acquired by calling {@link writeBatch}. It\n * provides methods for adding writes to the write batch. None of the writes\n * will be committed (or visible locally) until {@link WriteBatch.commit} is\n * called.\n */\nclass WriteBatch {\n /** @hideconstructor */\n constructor(e, t) {\n this._firestore = e, this._commitHandler = t, this._mutations = [], this._committed = !1, this._dataReader = __PRIVATE_newUserDataReader(e);\n }\n set(e, t, n) {\n this._verifyNotCommitted();\n const r = __PRIVATE_validateReference(e, this._firestore),\n i = __PRIVATE_applyFirestoreDataConverter(r.converter, t, n),\n s = __PRIVATE_parseSetData(this._dataReader, \"WriteBatch.set\", r._key, i, null !== r.converter, n);\n return this._mutations.push(s.toMutation(r._key, Precondition.none())), this;\n }\n update(e, t, n, ...r) {\n this._verifyNotCommitted();\n const i = __PRIVATE_validateReference(e, this._firestore);\n // For Compat types, we have to \"extract\" the underlying types before\n // performing validation.\n let s;\n return s = \"string\" == typeof (t = getModularInstance(t)) || t instanceof FieldPath ? __PRIVATE_parseUpdateVarargs(this._dataReader, \"WriteBatch.update\", i._key, t, n, r) : __PRIVATE_parseUpdateData(this._dataReader, \"WriteBatch.update\", i._key, t), this._mutations.push(s.toMutation(i._key, Precondition.exists(!0))), this;\n }\n /**\n * Deletes the document referred to by the provided {@link DocumentReference}.\n *\n * @param documentRef - A reference to the document to be deleted.\n * @returns This `WriteBatch` instance. 
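/**
 * A minimal configuration sketch for the cache factories above, assuming the
 * public settings key `localCache` accepted by `initializeFirestore()`; the
 * `app`/`anotherApp` instances and the size value are illustrative assumptions.
 *
 * @example
 * ```typescript
 * import {
 *   initializeFirestore,
 *   memoryLocalCache,
 *   memoryLruGarbageCollector,
 *   persistentLocalCache,
 *   persistentMultipleTabManager
 * } from 'firebase/firestore';
 *
 * // Memory cache with LRU garbage collection and a ~100 MB target size.
 * const memDb = initializeFirestore(app, {
 *   localCache: memoryLocalCache({
 *     garbageCollector: memoryLruGarbageCollector({ cacheSizeBytes: 100 * 1024 * 1024 })
 *   })
 * });
 *
 * // IndexedDB-backed cache shared across browser tabs.
 * const persistentDb = initializeFirestore(anotherApp, {
 *   localCache: persistentLocalCache({ tabManager: persistentMultipleTabManager() })
 * });
 * ```
 */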
Used for chaining method calls.\n */\n delete(e) {\n this._verifyNotCommitted();\n const t = __PRIVATE_validateReference(e, this._firestore);\n return this._mutations = this._mutations.concat(new __PRIVATE_DeleteMutation(t._key, Precondition.none())), this;\n }\n /**\n * Commits all of the writes in this write batch as a single atomic unit.\n *\n * The result of these writes will only be reflected in document reads that\n * occur after the returned promise resolves. If the client is offline, the\n * write fails. If you would like to see local modifications or buffer writes\n * until the client is online, use the full Firestore SDK.\n *\n * @returns A `Promise` resolved once all of the writes in the batch have been\n * successfully written to the backend as an atomic unit (note that it won't\n * resolve while you're offline).\n */\n commit() {\n return this._verifyNotCommitted(), this._committed = !0, this._mutations.length > 0 ? this._commitHandler(this._mutations) : Promise.resolve();\n }\n _verifyNotCommitted() {\n if (this._committed) throw new FirestoreError(C.FAILED_PRECONDITION, \"A write batch can no longer be used after commit() has been called.\");\n }\n}\nfunction __PRIVATE_validateReference(e, t) {\n if ((e = getModularInstance(e)).firestore !== t) throw new FirestoreError(C.INVALID_ARGUMENT, \"Provided document reference is from a different Firestore instance.\");\n return e;\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n// TODO(mrschmidt) Consider using `BaseTransaction` as the base class in the\n// legacy SDK.\n/**\n * A reference to a transaction.\n *\n * The `Transaction` object passed to a transaction's `updateFunction` provides\n * the methods to read and write data within the transaction context. See\n * {@link runTransaction}.\n */\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A reference to a transaction.\n *\n * The `Transaction` object passed to a transaction's `updateFunction` provides\n * the methods to read and write data within the transaction context. 
See\n * {@link runTransaction}.\n */\nclass Transaction extends class Transaction$1 {\n /** @hideconstructor */\n constructor(e, t) {\n this._firestore = e, this._transaction = t, this._dataReader = __PRIVATE_newUserDataReader(e);\n }\n /**\n * Reads the document referenced by the provided {@link DocumentReference}.\n *\n * @param documentRef - A reference to the document to be read.\n * @returns A `DocumentSnapshot` with the read data.\n */\n get(e) {\n const t = __PRIVATE_validateReference(e, this._firestore),\n n = new __PRIVATE_LiteUserDataWriter(this._firestore);\n return this._transaction.lookup([t._key]).then(e => {\n if (!e || 1 !== e.length) return fail();\n const r = e[0];\n if (r.isFoundDocument()) return new DocumentSnapshot$1(this._firestore, n, r.key, r, t.converter);\n if (r.isNoDocument()) return new DocumentSnapshot$1(this._firestore, n, t._key, null, t.converter);\n throw fail();\n });\n }\n set(e, t, n) {\n const r = __PRIVATE_validateReference(e, this._firestore),\n i = __PRIVATE_applyFirestoreDataConverter(r.converter, t, n),\n s = __PRIVATE_parseSetData(this._dataReader, \"Transaction.set\", r._key, i, null !== r.converter, n);\n return this._transaction.set(r._key, s), this;\n }\n update(e, t, n, ...r) {\n const i = __PRIVATE_validateReference(e, this._firestore);\n // For Compat types, we have to \"extract\" the underlying types before\n // performing validation.\n let s;\n return s = \"string\" == typeof (t = getModularInstance(t)) || t instanceof FieldPath ? __PRIVATE_parseUpdateVarargs(this._dataReader, \"Transaction.update\", i._key, t, n, r) : __PRIVATE_parseUpdateData(this._dataReader, \"Transaction.update\", i._key, t), this._transaction.update(i._key, s), this;\n }\n /**\n * Deletes the document referred to by the provided {@link DocumentReference}.\n *\n * @param documentRef - A reference to the document to be deleted.\n * @returns This `Transaction` instance. Used for chaining method calls.\n */\n delete(e) {\n const t = __PRIVATE_validateReference(e, this._firestore);\n return this._transaction.delete(t._key), this;\n }\n} {\n // This class implements the same logic as the Transaction API in the Lite SDK\n // but is subclassed in order to return its own DocumentSnapshot types.\n /** @hideconstructor */\n constructor(e, t) {\n super(e, t), this._firestore = e;\n }\n /**\n * Reads the document referenced by the provided {@link DocumentReference}.\n *\n * @param documentRef - A reference to the document to be read.\n * @returns A `DocumentSnapshot` with the read data.\n */\n get(e) {\n const t = __PRIVATE_validateReference(e, this._firestore),\n n = new __PRIVATE_ExpUserDataWriter(this._firestore);\n return super.get(e).then(e => new DocumentSnapshot(this._firestore, n, t._key, e._document, new SnapshotMetadata(/* hasPendingWrites= */!1, /* fromCache= */!1), t.converter));\n }\n}\n\n/**\n * Executes the given `updateFunction` and then attempts to commit the changes\n * applied within the transaction. If any document read within the transaction\n * has changed, Cloud Firestore retries the `updateFunction`. 
If it fails to\n * commit after 5 attempts, the transaction fails.\n *\n * The maximum number of writes allowed in a single transaction is 500.\n *\n * @param firestore - A reference to the Firestore database to run this\n * transaction against.\n * @param updateFunction - The function to execute within the transaction\n * context.\n * @param options - An options object to configure maximum number of attempts to\n * commit.\n * @returns If the transaction completed successfully or was explicitly aborted\n * (the `updateFunction` returned a failed promise), the promise returned by the\n * `updateFunction `is returned here. Otherwise, if the transaction failed, a\n * rejected promise with the corresponding failure error is returned.\n */\nfunction runTransaction(e, t, n) {\n e = __PRIVATE_cast(e, Firestore);\n const r = Object.assign(Object.assign({}, De), n);\n !function __PRIVATE_validateTransactionOptions(e) {\n if (e.maxAttempts < 1) throw new FirestoreError(C.INVALID_ARGUMENT, \"Max attempts must be at least 1\");\n }(r);\n return function __PRIVATE_firestoreClientTransaction(e, t, n) {\n const r = new __PRIVATE_Deferred();\n return e.asyncQueue.enqueueAndForget(async () => {\n const i = await __PRIVATE_getDatastore(e);\n new __PRIVATE_TransactionRunner(e.asyncQueue, i, n, t, r).Xa();\n }), r.promise;\n }(ensureFirestoreConfigured(e), n => t(new Transaction(e, n)), r);\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Returns a sentinel for use with {@link @firebase/firestore/lite#(updateDoc:1)} or\n * {@link @firebase/firestore/lite#(setDoc:1)} with `{merge: true}` to mark a field for deletion.\n */\nfunction deleteField() {\n return new __PRIVATE_DeleteFieldValueImpl(\"deleteField\");\n}\n\n/**\n * Returns a sentinel used with {@link @firebase/firestore/lite#(setDoc:1)} or {@link @firebase/firestore/lite#(updateDoc:1)} to\n * include a server-generated timestamp in the written data.\n */\nfunction serverTimestamp() {\n return new __PRIVATE_ServerTimestampFieldValueImpl(\"serverTimestamp\");\n}\n\n/**\n * Returns a special value that can be used with {@link @firebase/firestore/lite#(setDoc:1)} or {@link\n * @firebase/firestore/lite#(updateDoc:1)} that tells the server to union the given elements with any array\n * value that already exists on the server. Each specified element that doesn't\n * already exist in the array will be added to the end. 
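/**
 * A minimal sketch for `runTransaction()`, assuming a hypothetical `cities/SF`
 * document with a numeric `population` field; the `maxAttempts` value mirrors
 * the options object described above.
 *
 * @example
 * ```typescript
 * import { doc, runTransaction } from 'firebase/firestore';
 *
 * const sf = doc(db, 'cities', 'SF');
 * const newPopulation = await runTransaction(db, async transaction => {
 *   const snap = await transaction.get(sf);
 *   if (!snap.exists()) {
 *     throw new Error('Document does not exist');
 *   }
 *   const next = snap.data().population + 1;
 *   transaction.update(sf, { population: next });
 *   return next;
 * }, { maxAttempts: 5 });
 * ```
 */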
If the field being\n * modified is not already an array it will be overwritten with an array\n * containing exactly the specified elements.\n *\n * @param elements - The elements to union into the array.\n * @returns The `FieldValue` sentinel for use in a call to `setDoc()` or\n * `updateDoc()`.\n */\nfunction arrayUnion(...e) {\n // NOTE: We don't actually parse the data until it's used in set() or\n // update() since we'd need the Firestore instance to do this.\n return new __PRIVATE_ArrayUnionFieldValueImpl(\"arrayUnion\", e);\n}\n\n/**\n * Returns a special value that can be used with {@link (setDoc:1)} or {@link\n * updateDoc:1} that tells the server to remove the given elements from any\n * array value that already exists on the server. All instances of each element\n * specified will be removed from the array. If the field being modified is not\n * already an array it will be overwritten with an empty array.\n *\n * @param elements - The elements to remove from the array.\n * @returns The `FieldValue` sentinel for use in a call to `setDoc()` or\n * `updateDoc()`\n */\nfunction arrayRemove(...e) {\n // NOTE: We don't actually parse the data until it's used in set() or\n // update() since we'd need the Firestore instance to do this.\n return new __PRIVATE_ArrayRemoveFieldValueImpl(\"arrayRemove\", e);\n}\n\n/**\n * Returns a special value that can be used with {@link @firebase/firestore/lite#(setDoc:1)} or {@link\n * @firebase/firestore/lite#(updateDoc:1)} that tells the server to increment the field's current value by\n * the given value.\n *\n * If either the operand or the current field value uses floating point\n * precision, all arithmetic follows IEEE 754 semantics. If both values are\n * integers, values outside of JavaScript's safe number range\n * (`Number.MIN_SAFE_INTEGER` to `Number.MAX_SAFE_INTEGER`) are also subject to\n * precision loss. Furthermore, once processed by the Firestore backend, all\n * integer operations are capped between -2^63 and 2^63-1.\n *\n * If the current field value is not of type `number`, or if the field does not\n * yet exist, the transformation sets the field to the given value.\n *\n * @param n - The value to increment by.\n * @returns The `FieldValue` sentinel for use in a call to `setDoc()` or\n * `updateDoc()`\n */\nfunction increment(e) {\n return new __PRIVATE_NumericIncrementFieldValueImpl(\"increment\", e);\n}\n\n/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Creates a write batch, used for performing multiple writes as a single\n * atomic operation. 
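/**
 * A minimal sketch combining the field-transform sentinels above in a single
 * `updateDoc()` call; the document path and field names are illustrative.
 *
 * @example
 * ```typescript
 * import {
 *   arrayRemove, arrayUnion, deleteField, doc, increment, serverTimestamp, updateDoc
 * } from 'firebase/firestore';
 *
 * await updateDoc(doc(db, 'cities', 'SF'), {
 *   population: increment(1),            // numeric transform applied server-side
 *   regions: arrayUnion('west_coast'),   // appended only if not already present
 *   oldRegions: arrayRemove('norcal'),   // all matching instances removed
 *   lastUpdated: serverTimestamp(),      // resolved to the server's write time
 *   obsoleteField: deleteField()         // removes the field entirely
 * });
 * ```
 */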
The maximum number of writes allowed in a single {@link WriteBatch}\n * is 500.\n *\n * Unlike transactions, write batches are persisted offline and therefore are\n * preferable when you don't need to condition your writes on read data.\n *\n * @returns A {@link WriteBatch} that can be used to atomically execute multiple\n * writes.\n */\nfunction writeBatch(e) {\n return ensureFirestoreConfigured(e = __PRIVATE_cast(e, Firestore)), new WriteBatch(e, t => executeWrite(e, t));\n}\n\n/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nfunction setIndexConfiguration(e, t) {\n var n;\n const r = ensureFirestoreConfigured(e = __PRIVATE_cast(e, Firestore));\n if (!r._uninitializedComponentsProvider || \"memory\" === (null === (n = r._uninitializedComponentsProvider) || void 0 === n ? void 0 : n._offlineKind))\n // PORTING NOTE: We don't return an error if the user has not enabled\n // persistence since `enableIndexeddbPersistence()` can fail on the Web.\n return __PRIVATE_logWarn(\"Cannot enable indexes when persistence is disabled\"), Promise.resolve();\n const i = function __PRIVATE_parseIndexes(e) {\n const t = \"string\" == typeof e ? function __PRIVATE_tryParseJson(e) {\n try {\n return JSON.parse(e);\n } catch (e) {\n throw new FirestoreError(C.INVALID_ARGUMENT, \"Failed to parse JSON: \" + (null == e ? void 0 : e.message));\n }\n }(e) : e,\n n = [];\n if (Array.isArray(t.indexes)) for (const e of t.indexes) {\n const t = __PRIVATE_tryGetString(e, \"collectionGroup\"),\n r = [];\n if (Array.isArray(e.fields)) for (const t of e.fields) {\n const e = __PRIVATE_fieldPathFromDotSeparatedString(\"setIndexConfiguration\", __PRIVATE_tryGetString(t, \"fieldPath\"));\n \"CONTAINS\" === t.arrayConfig ? r.push(new IndexSegment(e, 2 /* IndexKind.CONTAINS */)) : \"ASCENDING\" === t.order ? 
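/**
 * A minimal sketch for `writeBatch()`, queuing several writes and committing
 * them as one atomic unit; paths and data are illustrative.
 *
 * @example
 * ```typescript
 * import { doc, writeBatch } from 'firebase/firestore';
 *
 * const batch = writeBatch(db);
 * batch.set(doc(db, 'cities', 'NYC'), { name: 'New York City' });
 * batch.update(doc(db, 'cities', 'SF'), { population: 815001 });
 * batch.delete(doc(db, 'cities', 'LA'));
 * // None of the writes are visible until commit() resolves.
 * await batch.commit();
 * ```
 */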
r.push(new IndexSegment(e, 0 /* IndexKind.ASCENDING */)) : \"DESCENDING\" === t.order && r.push(new IndexSegment(e, 1 /* IndexKind.DESCENDING */));\n }\n n.push(new FieldIndex(FieldIndex.UNKNOWN_ID, t, r, IndexState.empty()));\n }\n return n;\n }(t);\n return __PRIVATE_firestoreClientSetIndexConfiguration(r, i);\n}\nfunction __PRIVATE_tryGetString(e, t) {\n if (\"string\" != typeof e[t]) throw new FirestoreError(C.INVALID_ARGUMENT, \"Missing string value for: \" + t);\n return e[t];\n}\n\n/**\n * @license\n * Copyright 2023 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * A `PersistentCacheIndexManager` for configuring persistent cache indexes used\n * for local query execution.\n *\n * To use, call `getPersistentCacheIndexManager()` to get an instance.\n */\nclass PersistentCacheIndexManager {\n /** @hideconstructor */\n constructor(e) {\n this._client = e, /** A type string to uniquely identify instances of this class. */\n this.type = \"PersistentCacheIndexManager\";\n }\n}\n\n/**\n * Returns the PersistentCache Index Manager used by the given `Firestore`\n * object.\n *\n * @return The `PersistentCacheIndexManager` instance, or `null` if local\n * persistent storage is not in use.\n */\nfunction getPersistentCacheIndexManager(e) {\n var t;\n e = __PRIVATE_cast(e, Firestore);\n const n = Ce.get(e);\n if (n) return n;\n const r = ensureFirestoreConfigured(e);\n if (\"persistent\" !== (null === (t = r._uninitializedComponentsProvider) || void 0 === t ? void 0 : t._offlineKind)) return null;\n const i = new PersistentCacheIndexManager(r);\n return Ce.set(e, i), i;\n}\n\n/**\n * Enables the SDK to create persistent cache indexes automatically for local\n * query execution when the SDK believes cache indexes can help improve\n * performance.\n *\n * This feature is disabled by default.\n */\nfunction enablePersistentCacheIndexAutoCreation(e) {\n __PRIVATE_setPersistentCacheIndexAutoCreationEnabled(e, !0);\n}\n\n/**\n * Stops creating persistent cache indexes automatically for local query\n * execution. 
The indexes which have been created by calling\n * `enablePersistentCacheIndexAutoCreation()` still take effect.\n */\nfunction disablePersistentCacheIndexAutoCreation(e) {\n __PRIVATE_setPersistentCacheIndexAutoCreationEnabled(e, !1);\n}\n\n/**\n * Removes all persistent cache indexes.\n *\n * Please note this function will also deletes indexes generated by\n * `setIndexConfiguration()`, which is deprecated.\n */\nfunction deleteAllPersistentCacheIndexes(e) {\n e._client.verifyNotTerminated();\n __PRIVATE_firestoreClientDeleteAllFieldIndexes(e._client).then(e => __PRIVATE_logDebug(\"deleting all persistent cache indexes succeeded\")).catch(e => __PRIVATE_logWarn(\"deleting all persistent cache indexes failed\", e));\n}\nfunction __PRIVATE_setPersistentCacheIndexAutoCreationEnabled(e, t) {\n e._client.verifyNotTerminated();\n __PRIVATE_firestoreClientSetPersistentCacheIndexAutoCreationEnabled(e._client, t).then(e => __PRIVATE_logDebug(`setting persistent cache index auto creation isEnabled=${t} succeeded`)).catch(e => __PRIVATE_logWarn(`setting persistent cache index auto creation isEnabled=${t} failed`, e));\n}\n\n/**\n * Maps `Firestore` instances to their corresponding\n * `PersistentCacheIndexManager` instances.\n *\n * Use a `WeakMap` so that the mapping will be automatically dropped when the\n * `Firestore` instance is garbage collected. This emulates a private member\n * as described in https://goo.gle/454yvug.\n */\nconst Ce = new WeakMap();\n\n/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * @internal\n * @private\n *\n * This function is for internal use only.\n *\n * Returns the `QueryTarget` representation of the given query. Returns `null`\n * if the Firestore client associated with the given query has not been\n * initialized or has been terminated.\n *\n * @param query - The Query to convert to proto representation.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction _internalQueryToProtoQueryTarget(e) {\n var t;\n const n = null === (t = ensureFirestoreConfigured(__PRIVATE_cast(e.firestore, Firestore))._onlineComponents) || void 0 === t ? void 0 : t.datastore.serializer;\n return void 0 === n ? null : __PRIVATE_toQueryTarget(n, __PRIVATE_queryToTarget(e._query))._t;\n}\n\n/**\n * @internal\n * @private\n *\n * This function is for internal use only.\n *\n * Returns `RunAggregationQueryRequest` which contains the proto representation\n * of the given aggregation query request. 
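/**
 * A minimal sketch for the persistent cache index manager APIs above, assuming
 * a `db` instance configured with persistent (IndexedDB) cache; with any other
 * cache configuration `getPersistentCacheIndexManager()` returns `null`.
 *
 * @example
 * ```typescript
 * import {
 *   deleteAllPersistentCacheIndexes,
 *   disablePersistentCacheIndexAutoCreation,
 *   enablePersistentCacheIndexAutoCreation,
 *   getPersistentCacheIndexManager
 * } from 'firebase/firestore';
 *
 * const indexManager = getPersistentCacheIndexManager(db);
 * if (indexManager) {
 *   // Let the SDK create local cache indexes when it expects them to help.
 *   enablePersistentCacheIndexAutoCreation(indexManager);
 *   // ...later, if desired:
 *   disablePersistentCacheIndexAutoCreation(indexManager);
 *   deleteAllPersistentCacheIndexes(indexManager);
 * }
 * ```
 */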
Returns null if the Firestore client\n * associated with the given query has not been initialized or has been\n * terminated.\n *\n * @param query - The Query to convert to proto representation.\n * @param aggregateSpec - The set of aggregations and their aliases.\n */\nfunction _internalAggregationQueryToProtoRunAggregationQueryRequest(e, t) {\n var n;\n const r = __PRIVATE_mapToArray(t, (e, t) => new __PRIVATE_AggregateImpl(t, e.aggregateType, e._internalFieldPath)),\n i = null === (n = ensureFirestoreConfigured(__PRIVATE_cast(e.firestore, Firestore))._onlineComponents) || void 0 === n ? void 0 : n.datastore.serializer;\n return void 0 === i ? null : __PRIVATE_toRunAggregationQueryRequest(i, __PRIVATE_queryToAggregateTarget(e._query), r, /* skipAliasing= */!0).request;\n}\n\n/**\n * @license\n * Copyright 2023 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Testing hooks for use by Firestore's integration test suite to reach into the\n * SDK internals to validate logic and behavior that is not visible from the\n * public API surface.\n *\n * @internal\n */\nclass TestingHooks {\n constructor() {\n throw new Error(\"instances of this class should not be created\");\n }\n /**\n * Registers a callback to be notified when an existence filter mismatch\n * occurs in the Watch listen stream.\n *\n * The relative order in which callbacks are notified is unspecified; do not\n * rely on any particular ordering. 
If a given callback is registered multiple\n * times then it will be notified multiple times, once per registration.\n *\n * @param callback the callback to invoke upon existence filter mismatch.\n *\n * @return a function that, when called, unregisters the given callback; only\n * the first invocation of the returned function does anything; all subsequent\n * invocations do nothing.\n */\n static onExistenceFilterMismatch(e) {\n return __PRIVATE_TestingHooksSpiImpl.instance.onExistenceFilterMismatch(e);\n }\n}\n\n/**\n * The implementation of `TestingHooksSpi`.\n */\nclass __PRIVATE_TestingHooksSpiImpl {\n constructor() {\n this.Ou = new Map();\n }\n static get instance() {\n return ve || (ve = new __PRIVATE_TestingHooksSpiImpl(), function __PRIVATE_setTestingHooksSpi(e) {\n if (he) throw new Error(\"a TestingHooksSpi instance is already set\");\n he = e;\n }(ve)), ve;\n }\n et(e) {\n this.Ou.forEach(t => t(e));\n }\n onExistenceFilterMismatch(e) {\n const t = Symbol(),\n n = this.Ou;\n return n.set(t, e), () => n.delete(t);\n }\n}\nlet ve = null;\n\n/**\n * Cloud Firestore\n *\n * @packageDocumentation\n */\n!function __PRIVATE_registerFirestore(e, t = !0) {\n !function __PRIVATE_setSDKVersion(e) {\n b = e;\n }(SDK_VERSION), _registerComponent(new Component(\"firestore\", (e, {\n instanceIdentifier: n,\n options: r\n }) => {\n const i = e.getProvider(\"app\").getImmediate(),\n s = new Firestore(new __PRIVATE_FirebaseAuthCredentialsProvider(e.getProvider(\"auth-internal\")), new __PRIVATE_FirebaseAppCheckTokenProvider(e.getProvider(\"app-check-internal\")), function __PRIVATE_databaseIdFromApp(e, t) {\n if (!Object.prototype.hasOwnProperty.apply(e.options, [\"projectId\"])) throw new FirestoreError(C.INVALID_ARGUMENT, '\"projectId\" not provided in firebase.initializeApp.');\n return new DatabaseId(e.options.projectId, t);\n }(i, n), i);\n return r = Object.assign({\n useFetchStreams: t\n }, r), s._setSettings(r), s;\n }, \"PUBLIC\").setMultipleInstances(!0)), registerVersion(S, \"4.6.3\", e),\n // BUILD_TARGET will be replaced by values like esm5, esm2017, cjs5, etc during the compilation\n registerVersion(S, \"4.6.3\", \"esm2017\");\n}();\nexport { AbstractUserDataWriter, AggregateField, AggregateQuerySnapshot, Bytes, we as CACHE_SIZE_UNLIMITED, CollectionReference, DocumentReference, DocumentSnapshot, FieldPath, FieldValue, Firestore, FirestoreError, GeoPoint, LoadBundleTask, PersistentCacheIndexManager, Query, QueryCompositeFilterConstraint, QueryConstraint, QueryDocumentSnapshot, QueryEndAtConstraint, QueryFieldFilterConstraint, QueryLimitConstraint, QueryOrderByConstraint, QuerySnapshot, QueryStartAtConstraint, SnapshotMetadata, Timestamp, Transaction, WriteBatch, __PRIVATE_AutoId as _AutoId, ByteString as _ByteString, DatabaseId as _DatabaseId, DocumentKey as _DocumentKey, __PRIVATE_EmptyAppCheckTokenProvider as _EmptyAppCheckTokenProvider, __PRIVATE_EmptyAuthCredentialsProvider as _EmptyAuthCredentialsProvider, FieldPath$1 as _FieldPath, TestingHooks as _TestingHooks, __PRIVATE_cast as _cast, __PRIVATE_debugAssert as _debugAssert, _internalAggregationQueryToProtoRunAggregationQueryRequest, _internalQueryToProtoQueryTarget, __PRIVATE_isBase64Available as _isBase64Available, __PRIVATE_logWarn as _logWarn, __PRIVATE_validateIsNotUsedTogether as _validateIsNotUsedTogether, addDoc, aggregateFieldEqual, aggregateQuerySnapshotEqual, and, arrayRemove, arrayUnion, average, clearIndexedDbPersistence, collection, collectionGroup, connectFirestoreEmulator, count, 
deleteAllPersistentCacheIndexes, deleteDoc, deleteField, disableNetwork, disablePersistentCacheIndexAutoCreation, doc, documentId, enableIndexedDbPersistence, enableMultiTabIndexedDbPersistence, enableNetwork, enablePersistentCacheIndexAutoCreation, endAt, endBefore, ensureFirestoreConfigured, executeWrite, getAggregateFromServer, getCountFromServer, getDoc, getDocFromCache, getDocFromServer, getDocs, getDocsFromCache, getDocsFromServer, getFirestore, getPersistentCacheIndexManager, increment, initializeFirestore, limit, limitToLast, loadBundle, memoryEagerGarbageCollector, memoryLocalCache, memoryLruGarbageCollector, namedQuery, onSnapshot, onSnapshotsInSync, or, orderBy, persistentLocalCache, persistentMultipleTabManager, persistentSingleTabManager, query, queryEqual, refEqual, runTransaction, serverTimestamp, setDoc, setIndexConfiguration, setLogLevel, snapshotEqual, startAfter, startAt, sum, terminate, updateDoc, waitForPendingWrites, where, writeBatch };\n","import firebase from '@firebase/app-compat';\nimport { FirestoreError, Bytes, _isBase64Available, enableIndexedDbPersistence, enableMultiTabIndexedDbPersistence, clearIndexedDbPersistence, _DatabaseId, _logWarn, connectFirestoreEmulator, enableNetwork, disableNetwork, _validateIsNotUsedTogether, waitForPendingWrites, onSnapshotsInSync, collection, doc, collectionGroup, runTransaction, ensureFirestoreConfigured, WriteBatch as WriteBatch$1, executeWrite, loadBundle, namedQuery, DocumentSnapshot as DocumentSnapshot$1, DocumentReference as DocumentReference$1, _DocumentKey, refEqual, setDoc, updateDoc, deleteDoc, onSnapshot, getDocFromCache, getDocFromServer, getDoc, snapshotEqual, query, where, orderBy, limit, limitToLast, startAt, startAfter, endBefore, endAt, queryEqual, getDocsFromCache, getDocsFromServer, getDocs, QuerySnapshot as QuerySnapshot$1, addDoc, _cast, AbstractUserDataWriter, setLogLevel as setLogLevel$1, QueryDocumentSnapshot as QueryDocumentSnapshot$1, _debugAssert, FieldPath as FieldPath$1, _FieldPath, serverTimestamp, deleteField, arrayUnion, arrayRemove, increment, GeoPoint, Timestamp, CACHE_SIZE_UNLIMITED } from '@firebase/firestore';\nimport { getModularInstance } from '@firebase/util';\nimport { Component } from '@firebase/component';\nconst name = \"@firebase/firestore-compat\";\nconst version = \"0.3.32\";\n\n/**\r\n * @license\r\n * Copyright 2021 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction validateSetOptions(methodName, options) {\n if (options === undefined) {\n return {\n merge: false\n };\n }\n if (options.mergeFields !== undefined && options.merge !== undefined) {\n throw new FirestoreError('invalid-argument', `Invalid options passed to function ${methodName}(): You cannot ` + 'specify both \"merge\" and \"mergeFields\".');\n }\n return options;\n}\n\n/**\r\n * @license\r\n * Copyright 2017 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with 
the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/** Helper function to assert Uint8Array is available at runtime. */\nfunction assertUint8ArrayAvailable() {\n if (typeof Uint8Array === 'undefined') {\n throw new FirestoreError('unimplemented', 'Uint8Arrays are not available in this environment.');\n }\n}\n/** Helper function to assert Base64 functions are available at runtime. */\nfunction assertBase64Available() {\n if (!_isBase64Available()) {\n throw new FirestoreError('unimplemented', 'Blobs are unavailable in Firestore in this environment.');\n }\n}\n/** Immutable class holding a blob (binary data) */\nclass Blob {\n constructor(_delegate) {\n this._delegate = _delegate;\n }\n static fromBase64String(base64) {\n assertBase64Available();\n return new Blob(Bytes.fromBase64String(base64));\n }\n static fromUint8Array(array) {\n assertUint8ArrayAvailable();\n return new Blob(Bytes.fromUint8Array(array));\n }\n toBase64() {\n assertBase64Available();\n return this._delegate.toBase64();\n }\n toUint8Array() {\n assertUint8ArrayAvailable();\n return this._delegate.toUint8Array();\n }\n isEqual(other) {\n return this._delegate.isEqual(other._delegate);\n }\n toString() {\n return 'Blob(base64: ' + this.toBase64() + ')';\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2017 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction isPartialObserver(obj) {\n return implementsAnyMethods(obj, ['next', 'error', 'complete']);\n}\n/**\r\n * Returns true if obj is an object and contains at least one of the specified\r\n * methods.\r\n */\nfunction implementsAnyMethods(obj, methods) {\n if (typeof obj !== 'object' || obj === null) {\n return false;\n }\n const object = obj;\n for (const method of methods) {\n if (method in object && typeof object[method] === 'function') {\n return true;\n }\n }\n return false;\n}\n\n/**\r\n * @license\r\n * Copyright 2017 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * The persistence provider included with the full Firestore SDK.\r\n */\nclass 
IndexedDbPersistenceProvider {\n enableIndexedDbPersistence(firestore, forceOwnership) {\n return enableIndexedDbPersistence(firestore._delegate, {\n forceOwnership\n });\n }\n enableMultiTabIndexedDbPersistence(firestore) {\n return enableMultiTabIndexedDbPersistence(firestore._delegate);\n }\n clearIndexedDbPersistence(firestore) {\n return clearIndexedDbPersistence(firestore._delegate);\n }\n}\n/**\r\n * Compat class for Firestore. Exposes Firestore Legacy API, but delegates\r\n * to the functional API of firestore-exp.\r\n */\nclass Firestore {\n constructor(databaseIdOrApp, _delegate, _persistenceProvider) {\n this._delegate = _delegate;\n this._persistenceProvider = _persistenceProvider;\n this.INTERNAL = {\n delete: () => this.terminate()\n };\n if (!(databaseIdOrApp instanceof _DatabaseId)) {\n this._appCompat = databaseIdOrApp;\n }\n }\n get _databaseId() {\n return this._delegate._databaseId;\n }\n settings(settingsLiteral) {\n const currentSettings = this._delegate._getSettings();\n if (!settingsLiteral.merge && currentSettings.host !== settingsLiteral.host) {\n _logWarn('You are overriding the original host. If you did not intend ' + 'to override your settings, use {merge: true}.');\n }\n if (settingsLiteral.merge) {\n settingsLiteral = Object.assign(Object.assign({}, currentSettings), settingsLiteral);\n // Remove the property from the settings once the merge is completed\n delete settingsLiteral.merge;\n }\n this._delegate._setSettings(settingsLiteral);\n }\n useEmulator(host, port, options = {}) {\n connectFirestoreEmulator(this._delegate, host, port, options);\n }\n enableNetwork() {\n return enableNetwork(this._delegate);\n }\n disableNetwork() {\n return disableNetwork(this._delegate);\n }\n enablePersistence(settings) {\n let synchronizeTabs = false;\n let experimentalForceOwningTab = false;\n if (settings) {\n synchronizeTabs = !!settings.synchronizeTabs;\n experimentalForceOwningTab = !!settings.experimentalForceOwningTab;\n _validateIsNotUsedTogether('synchronizeTabs', synchronizeTabs, 'experimentalForceOwningTab', experimentalForceOwningTab);\n }\n return synchronizeTabs ? this._persistenceProvider.enableMultiTabIndexedDbPersistence(this) : this._persistenceProvider.enableIndexedDbPersistence(this, experimentalForceOwningTab);\n }\n clearPersistence() {\n return this._persistenceProvider.clearIndexedDbPersistence(this);\n }\n terminate() {\n if (this._appCompat) {\n this._appCompat._removeServiceInstance('firestore-compat');\n this._appCompat._removeServiceInstance('firestore');\n }\n return this._delegate._delete();\n }\n waitForPendingWrites() {\n return waitForPendingWrites(this._delegate);\n }\n onSnapshotsInSync(arg) {\n return onSnapshotsInSync(this._delegate, arg);\n }\n get app() {\n if (!this._appCompat) {\n throw new FirestoreError('failed-precondition', \"Firestore was not initialized using the Firebase SDK. 
'app' is \" + 'not available');\n }\n return this._appCompat;\n }\n collection(pathString) {\n try {\n return new CollectionReference(this, collection(this._delegate, pathString));\n } catch (e) {\n throw replaceFunctionName(e, 'collection()', 'Firestore.collection()');\n }\n }\n doc(pathString) {\n try {\n return new DocumentReference(this, doc(this._delegate, pathString));\n } catch (e) {\n throw replaceFunctionName(e, 'doc()', 'Firestore.doc()');\n }\n }\n collectionGroup(collectionId) {\n try {\n return new Query(this, collectionGroup(this._delegate, collectionId));\n } catch (e) {\n throw replaceFunctionName(e, 'collectionGroup()', 'Firestore.collectionGroup()');\n }\n }\n runTransaction(updateFunction) {\n return runTransaction(this._delegate, transaction => updateFunction(new Transaction(this, transaction)));\n }\n batch() {\n ensureFirestoreConfigured(this._delegate);\n return new WriteBatch(new WriteBatch$1(this._delegate, mutations => executeWrite(this._delegate, mutations)));\n }\n loadBundle(bundleData) {\n return loadBundle(this._delegate, bundleData);\n }\n namedQuery(name) {\n return namedQuery(this._delegate, name).then(expQuery => {\n if (!expQuery) {\n return null;\n }\n return new Query(this,\n // We can pass `expQuery` here directly since named queries don't have a UserDataConverter.\n // Otherwise, we would have to create a new ExpQuery and pass the old UserDataConverter.\n expQuery);\n });\n }\n}\nclass UserDataWriter extends AbstractUserDataWriter {\n constructor(firestore) {\n super();\n this.firestore = firestore;\n }\n convertBytes(bytes) {\n return new Blob(new Bytes(bytes));\n }\n convertReference(name) {\n const key = this.convertDocumentKey(name, this.firestore._databaseId);\n return DocumentReference.forKey(key, this.firestore, /* converter= */null);\n }\n}\nfunction setLogLevel(level) {\n setLogLevel$1(level);\n}\n/**\r\n * A reference to a transaction.\r\n */\nclass Transaction {\n constructor(_firestore, _delegate) {\n this._firestore = _firestore;\n this._delegate = _delegate;\n this._userDataWriter = new UserDataWriter(_firestore);\n }\n get(documentRef) {\n const ref = castReference(documentRef);\n return this._delegate.get(ref).then(result => new DocumentSnapshot(this._firestore, new DocumentSnapshot$1(this._firestore._delegate, this._userDataWriter, result._key, result._document, result.metadata, ref.converter)));\n }\n set(documentRef, data, options) {\n const ref = castReference(documentRef);\n if (options) {\n validateSetOptions('Transaction.set', options);\n this._delegate.set(ref, data, options);\n } else {\n this._delegate.set(ref, data);\n }\n return this;\n }\n update(documentRef, dataOrField, value, ...moreFieldsAndValues) {\n const ref = castReference(documentRef);\n if (arguments.length === 2) {\n this._delegate.update(ref, dataOrField);\n } else {\n this._delegate.update(ref, dataOrField, value, ...moreFieldsAndValues);\n }\n return this;\n }\n delete(documentRef) {\n const ref = castReference(documentRef);\n this._delegate.delete(ref);\n return this;\n }\n}\nclass WriteBatch {\n constructor(_delegate) {\n this._delegate = _delegate;\n }\n set(documentRef, data, options) {\n const ref = castReference(documentRef);\n if (options) {\n validateSetOptions('WriteBatch.set', options);\n this._delegate.set(ref, data, options);\n } else {\n this._delegate.set(ref, data);\n }\n return this;\n }\n update(documentRef, dataOrField, value, ...moreFieldsAndValues) {\n const ref = castReference(documentRef);\n if (arguments.length === 2) {\n 
this._delegate.update(ref, dataOrField);\n } else {\n this._delegate.update(ref, dataOrField, value, ...moreFieldsAndValues);\n }\n return this;\n }\n delete(documentRef) {\n const ref = castReference(documentRef);\n this._delegate.delete(ref);\n return this;\n }\n commit() {\n return this._delegate.commit();\n }\n}\n/**\r\n * Wraps a `PublicFirestoreDataConverter` translating the types from the\r\n * experimental SDK into corresponding types from the Classic SDK before passing\r\n * them to the wrapped converter.\r\n */\nclass FirestoreDataConverter {\n constructor(_firestore, _userDataWriter, _delegate) {\n this._firestore = _firestore;\n this._userDataWriter = _userDataWriter;\n this._delegate = _delegate;\n }\n fromFirestore(snapshot, options) {\n const expSnapshot = new QueryDocumentSnapshot$1(this._firestore._delegate, this._userDataWriter, snapshot._key, snapshot._document, snapshot.metadata, /* converter= */null);\n return this._delegate.fromFirestore(new QueryDocumentSnapshot(this._firestore, expSnapshot), options !== null && options !== void 0 ? options : {});\n }\n toFirestore(modelObject, options) {\n if (!options) {\n return this._delegate.toFirestore(modelObject);\n } else {\n return this._delegate.toFirestore(modelObject, options);\n }\n }\n // Use the same instance of `FirestoreDataConverter` for the given instances\n // of `Firestore` and `PublicFirestoreDataConverter` so that isEqual() will\n // compare equal for two objects created with the same converter instance.\n static getInstance(firestore, converter) {\n const converterMapByFirestore = FirestoreDataConverter.INSTANCES;\n let untypedConverterByConverter = converterMapByFirestore.get(firestore);\n if (!untypedConverterByConverter) {\n untypedConverterByConverter = new WeakMap();\n converterMapByFirestore.set(firestore, untypedConverterByConverter);\n }\n let instance = untypedConverterByConverter.get(converter);\n if (!instance) {\n instance = new FirestoreDataConverter(firestore, new UserDataWriter(firestore), converter);\n untypedConverterByConverter.set(converter, instance);\n }\n return instance;\n }\n}\nFirestoreDataConverter.INSTANCES = new WeakMap();\n/**\r\n * A reference to a particular document in a collection in the database.\r\n */\nclass DocumentReference {\n constructor(firestore, _delegate) {\n this.firestore = firestore;\n this._delegate = _delegate;\n this._userDataWriter = new UserDataWriter(firestore);\n }\n static forPath(path, firestore, converter) {\n if (path.length % 2 !== 0) {\n throw new FirestoreError('invalid-argument', 'Invalid document reference. 
Document ' + 'references must have an even number of segments, but ' + `${path.canonicalString()} has ${path.length}`);\n }\n return new DocumentReference(firestore, new DocumentReference$1(firestore._delegate, converter, new _DocumentKey(path)));\n }\n static forKey(key, firestore, converter) {\n return new DocumentReference(firestore, new DocumentReference$1(firestore._delegate, converter, key));\n }\n get id() {\n return this._delegate.id;\n }\n get parent() {\n return new CollectionReference(this.firestore, this._delegate.parent);\n }\n get path() {\n return this._delegate.path;\n }\n collection(pathString) {\n try {\n return new CollectionReference(this.firestore, collection(this._delegate, pathString));\n } catch (e) {\n throw replaceFunctionName(e, 'collection()', 'DocumentReference.collection()');\n }\n }\n isEqual(other) {\n other = getModularInstance(other);\n if (!(other instanceof DocumentReference$1)) {\n return false;\n }\n return refEqual(this._delegate, other);\n }\n set(value, options) {\n options = validateSetOptions('DocumentReference.set', options);\n try {\n if (options) {\n return setDoc(this._delegate, value, options);\n } else {\n return setDoc(this._delegate, value);\n }\n } catch (e) {\n throw replaceFunctionName(e, 'setDoc()', 'DocumentReference.set()');\n }\n }\n update(fieldOrUpdateData, value, ...moreFieldsAndValues) {\n try {\n if (arguments.length === 1) {\n return updateDoc(this._delegate, fieldOrUpdateData);\n } else {\n return updateDoc(this._delegate, fieldOrUpdateData, value, ...moreFieldsAndValues);\n }\n } catch (e) {\n throw replaceFunctionName(e, 'updateDoc()', 'DocumentReference.update()');\n }\n }\n delete() {\n return deleteDoc(this._delegate);\n }\n onSnapshot(...args) {\n const options = extractSnapshotOptions(args);\n const observer = wrapObserver(args, result => new DocumentSnapshot(this.firestore, new DocumentSnapshot$1(this.firestore._delegate, this._userDataWriter, result._key, result._document, result.metadata, this._delegate.converter)));\n return onSnapshot(this._delegate, options, observer);\n }\n get(options) {\n let snap;\n if ((options === null || options === void 0 ? void 0 : options.source) === 'cache') {\n snap = getDocFromCache(this._delegate);\n } else if ((options === null || options === void 0 ? void 0 : options.source) === 'server') {\n snap = getDocFromServer(this._delegate);\n } else {\n snap = getDoc(this._delegate);\n }\n return snap.then(result => new DocumentSnapshot(this.firestore, new DocumentSnapshot$1(this.firestore._delegate, this._userDataWriter, result._key, result._document, result.metadata, this._delegate.converter)));\n }\n withConverter(converter) {\n return new DocumentReference(this.firestore, converter ? this._delegate.withConverter(FirestoreDataConverter.getInstance(this.firestore, converter)) : this._delegate.withConverter(null));\n }\n}\n/**\r\n * Replaces the function name in an error thrown by the firestore-exp API\r\n * with the function names used in the classic API.\r\n */\nfunction replaceFunctionName(e, original, updated) {\n e.message = e.message.replace(original, updated);\n return e;\n}\n/**\r\n * Iterates the list of arguments from an `onSnapshot` call and returns the\r\n * first argument that may be an `SnapshotListenOptions` object. 
Returns an\r\n * empty object if none is found.\r\n */\nfunction extractSnapshotOptions(args) {\n for (const arg of args) {\n if (typeof arg === 'object' && !isPartialObserver(arg)) {\n return arg;\n }\n }\n return {};\n}\n/**\r\n * Creates an observer that can be passed to the firestore-exp SDK. The\r\n * observer converts all observed values into the format expected by the classic\r\n * SDK.\r\n *\r\n * @param args - The list of arguments from an `onSnapshot` call.\r\n * @param wrapper - The function that converts the firestore-exp type into the\r\n * type used by this shim.\r\n */\nfunction wrapObserver(args, wrapper) {\n var _a, _b;\n let userObserver;\n if (isPartialObserver(args[0])) {\n userObserver = args[0];\n } else if (isPartialObserver(args[1])) {\n userObserver = args[1];\n } else if (typeof args[0] === 'function') {\n userObserver = {\n next: args[0],\n error: args[1],\n complete: args[2]\n };\n } else {\n userObserver = {\n next: args[1],\n error: args[2],\n complete: args[3]\n };\n }\n return {\n next: val => {\n if (userObserver.next) {\n userObserver.next(wrapper(val));\n }\n },\n error: (_a = userObserver.error) === null || _a === void 0 ? void 0 : _a.bind(userObserver),\n complete: (_b = userObserver.complete) === null || _b === void 0 ? void 0 : _b.bind(userObserver)\n };\n}\nclass DocumentSnapshot {\n constructor(_firestore, _delegate) {\n this._firestore = _firestore;\n this._delegate = _delegate;\n }\n get ref() {\n return new DocumentReference(this._firestore, this._delegate.ref);\n }\n get id() {\n return this._delegate.id;\n }\n get metadata() {\n return this._delegate.metadata;\n }\n get exists() {\n return this._delegate.exists();\n }\n data(options) {\n return this._delegate.data(options);\n }\n get(fieldPath, options\n // We are using `any` here to avoid an explicit cast by our users.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ) {\n return this._delegate.get(fieldPath, options);\n }\n isEqual(other) {\n return snapshotEqual(this._delegate, other._delegate);\n }\n}\nclass QueryDocumentSnapshot extends DocumentSnapshot {\n data(options) {\n const data = this._delegate.data(options);\n if (this._delegate._converter) {\n // Undefined is a possible valid value from converter.\n return data;\n } else {\n _debugAssert(data !== undefined, 'Document in a QueryDocumentSnapshot should exist');\n return data;\n }\n }\n}\nclass Query {\n constructor(firestore, _delegate) {\n this.firestore = firestore;\n this._delegate = _delegate;\n this._userDataWriter = new UserDataWriter(firestore);\n }\n where(fieldPath, opStr, value) {\n try {\n // The \"as string\" cast is a little bit of a hack. `where` accepts the\n // FieldPath Compat type as input, but is not typed as such in order to\n // not expose this via our public typings file.\n return new Query(this.firestore, query(this._delegate, where(fieldPath, opStr, value)));\n } catch (e) {\n throw replaceFunctionName(e, /(orderBy|where)\\(\\)/, 'Query.$1()');\n }\n }\n orderBy(fieldPath, directionStr) {\n try {\n // The \"as string\" cast is a little bit of a hack. 
`orderBy` accepts the\n // FieldPath Compat type as input, but is not typed as such in order to\n // not expose this via our public typings file.\n return new Query(this.firestore, query(this._delegate, orderBy(fieldPath, directionStr)));\n } catch (e) {\n throw replaceFunctionName(e, /(orderBy|where)\\(\\)/, 'Query.$1()');\n }\n }\n limit(n) {\n try {\n return new Query(this.firestore, query(this._delegate, limit(n)));\n } catch (e) {\n throw replaceFunctionName(e, 'limit()', 'Query.limit()');\n }\n }\n limitToLast(n) {\n try {\n return new Query(this.firestore, query(this._delegate, limitToLast(n)));\n } catch (e) {\n throw replaceFunctionName(e, 'limitToLast()', 'Query.limitToLast()');\n }\n }\n startAt(...args) {\n try {\n return new Query(this.firestore, query(this._delegate, startAt(...args)));\n } catch (e) {\n throw replaceFunctionName(e, 'startAt()', 'Query.startAt()');\n }\n }\n startAfter(...args) {\n try {\n return new Query(this.firestore, query(this._delegate, startAfter(...args)));\n } catch (e) {\n throw replaceFunctionName(e, 'startAfter()', 'Query.startAfter()');\n }\n }\n endBefore(...args) {\n try {\n return new Query(this.firestore, query(this._delegate, endBefore(...args)));\n } catch (e) {\n throw replaceFunctionName(e, 'endBefore()', 'Query.endBefore()');\n }\n }\n endAt(...args) {\n try {\n return new Query(this.firestore, query(this._delegate, endAt(...args)));\n } catch (e) {\n throw replaceFunctionName(e, 'endAt()', 'Query.endAt()');\n }\n }\n isEqual(other) {\n return queryEqual(this._delegate, other._delegate);\n }\n get(options) {\n let query;\n if ((options === null || options === void 0 ? void 0 : options.source) === 'cache') {\n query = getDocsFromCache(this._delegate);\n } else if ((options === null || options === void 0 ? void 0 : options.source) === 'server') {\n query = getDocsFromServer(this._delegate);\n } else {\n query = getDocs(this._delegate);\n }\n return query.then(result => new QuerySnapshot(this.firestore, new QuerySnapshot$1(this.firestore._delegate, this._userDataWriter, this._delegate, result._snapshot)));\n }\n onSnapshot(...args) {\n const options = extractSnapshotOptions(args);\n const observer = wrapObserver(args, snap => new QuerySnapshot(this.firestore, new QuerySnapshot$1(this.firestore._delegate, this._userDataWriter, this._delegate, snap._snapshot)));\n return onSnapshot(this._delegate, options, observer);\n }\n withConverter(converter) {\n return new Query(this.firestore, converter ? 
this._delegate.withConverter(FirestoreDataConverter.getInstance(this.firestore, converter)) : this._delegate.withConverter(null));\n }\n}\nclass DocumentChange {\n constructor(_firestore, _delegate) {\n this._firestore = _firestore;\n this._delegate = _delegate;\n }\n get type() {\n return this._delegate.type;\n }\n get doc() {\n return new QueryDocumentSnapshot(this._firestore, this._delegate.doc);\n }\n get oldIndex() {\n return this._delegate.oldIndex;\n }\n get newIndex() {\n return this._delegate.newIndex;\n }\n}\nclass QuerySnapshot {\n constructor(_firestore, _delegate) {\n this._firestore = _firestore;\n this._delegate = _delegate;\n }\n get query() {\n return new Query(this._firestore, this._delegate.query);\n }\n get metadata() {\n return this._delegate.metadata;\n }\n get size() {\n return this._delegate.size;\n }\n get empty() {\n return this._delegate.empty;\n }\n get docs() {\n return this._delegate.docs.map(doc => new QueryDocumentSnapshot(this._firestore, doc));\n }\n docChanges(options) {\n return this._delegate.docChanges(options).map(docChange => new DocumentChange(this._firestore, docChange));\n }\n forEach(callback, thisArg) {\n this._delegate.forEach(snapshot => {\n callback.call(thisArg, new QueryDocumentSnapshot(this._firestore, snapshot));\n });\n }\n isEqual(other) {\n return snapshotEqual(this._delegate, other._delegate);\n }\n}\nclass CollectionReference extends Query {\n constructor(firestore, _delegate) {\n super(firestore, _delegate);\n this.firestore = firestore;\n this._delegate = _delegate;\n }\n get id() {\n return this._delegate.id;\n }\n get path() {\n return this._delegate.path;\n }\n get parent() {\n const docRef = this._delegate.parent;\n return docRef ? new DocumentReference(this.firestore, docRef) : null;\n }\n doc(documentPath) {\n try {\n if (documentPath === undefined) {\n // Call `doc` without `documentPath` if `documentPath` is `undefined`\n // as `doc` validates the number of arguments to prevent users from\n // accidentally passing `undefined`.\n return new DocumentReference(this.firestore, doc(this._delegate));\n } else {\n return new DocumentReference(this.firestore, doc(this._delegate, documentPath));\n }\n } catch (e) {\n throw replaceFunctionName(e, 'doc()', 'CollectionReference.doc()');\n }\n }\n add(data) {\n return addDoc(this._delegate, data).then(docRef => new DocumentReference(this.firestore, docRef));\n }\n isEqual(other) {\n return refEqual(this._delegate, other._delegate);\n }\n withConverter(converter) {\n return new CollectionReference(this.firestore, converter ? 
this._delegate.withConverter(FirestoreDataConverter.getInstance(this.firestore, converter)) : this._delegate.withConverter(null));\n }\n}\nfunction castReference(documentRef) {\n return _cast(documentRef, DocumentReference$1);\n}\n\n/**\r\n * @license\r\n * Copyright 2017 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n// The objects that are a part of this API are exposed to third-parties as\n// compiled javascript so we want to flag our private members with a leading\n// underscore to discourage their use.\n/**\r\n * A `FieldPath` refers to a field in a document. The path may consist of a\r\n * single field name (referring to a top-level field in the document), or a list\r\n * of field names (referring to a nested field in the document).\r\n */\nclass FieldPath {\n /**\r\n * Creates a FieldPath from the provided field names. If more than one field\r\n * name is provided, the path will point to a nested field in a document.\r\n *\r\n * @param fieldNames - A list of field names.\r\n */\n constructor(...fieldNames) {\n this._delegate = new FieldPath$1(...fieldNames);\n }\n static documentId() {\n /**\r\n * Internal Note: The backend doesn't technically support querying by\r\n * document ID. Instead it queries by the entire document name (full path\r\n * included), but in the cases we currently support documentId(), the net\r\n * effect is the same.\r\n */\n return new FieldPath(_FieldPath.keyField().canonicalString());\n }\n isEqual(other) {\n other = getModularInstance(other);\n if (!(other instanceof FieldPath$1)) {\n return false;\n }\n return this._delegate._internalPath.isEqual(other._internalPath);\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2017 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass FieldValue {\n constructor(_delegate) {\n this._delegate = _delegate;\n }\n static serverTimestamp() {\n const delegate = serverTimestamp();\n delegate._methodName = 'FieldValue.serverTimestamp';\n return new FieldValue(delegate);\n }\n static delete() {\n const delegate = deleteField();\n delegate._methodName = 'FieldValue.delete';\n return new FieldValue(delegate);\n }\n static arrayUnion(...elements) {\n const delegate = arrayUnion(...elements);\n delegate._methodName = 'FieldValue.arrayUnion';\n return new FieldValue(delegate);\n }\n static arrayRemove(...elements) {\n const delegate = arrayRemove(...elements);\n delegate._methodName = 'FieldValue.arrayRemove';\n return new 
FieldValue(delegate);\n }\n static increment(n) {\n const delegate = increment(n);\n delegate._methodName = 'FieldValue.increment';\n return new FieldValue(delegate);\n }\n isEqual(other) {\n return this._delegate.isEqual(other._delegate);\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2021 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst firestoreNamespace = {\n Firestore,\n GeoPoint,\n Timestamp,\n Blob,\n Transaction,\n WriteBatch,\n DocumentReference,\n DocumentSnapshot,\n Query,\n QueryDocumentSnapshot,\n QuerySnapshot,\n CollectionReference,\n FieldPath,\n FieldValue,\n setLogLevel,\n CACHE_SIZE_UNLIMITED\n};\n/**\r\n * Configures Firestore as part of the Firebase SDK by calling registerComponent.\r\n *\r\n * @param firebase - The FirebaseNamespace to register Firestore with\r\n * @param firestoreFactory - A factory function that returns a new Firestore\r\n * instance.\r\n */\nfunction configureForFirebase(firebase, firestoreFactory) {\n firebase.INTERNAL.registerComponent(new Component('firestore-compat', container => {\n const app = container.getProvider('app-compat').getImmediate();\n const firestoreExp = container.getProvider('firestore').getImmediate();\n return firestoreFactory(app, firestoreExp);\n }, 'PUBLIC').setServiceProps(Object.assign({}, firestoreNamespace)));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Registers the main Firestore build with the components framework.\r\n * Persistence can be enabled via `firebase.firestore().enablePersistence()`.\r\n */\nfunction registerFirestore(instance) {\n configureForFirebase(instance, (app, firestoreExp) => new Firestore(app, firestoreExp, new IndexedDbPersistenceProvider()));\n instance.registerVersion(name, version);\n}\nregisterFirestore(firebase);\nexport { registerFirestore };\n","import '@firebase/firestore-compat';\n","import { isPlatformServer } from '@angular/common';\nimport * as i0 from '@angular/core';\nimport { InjectionToken, PLATFORM_ID, Injectable, Inject, Optional, NgModule } from '@angular/core';\nimport * as i1 from '@angular/fire';\nimport { keepUnstableUntilFirst, VERSION } from '@angular/fire';\nimport * as i3 from '@angular/fire/app-check';\nimport { ɵfirebaseAppFactory as _firebaseAppFactory, ɵcacheInstance as _cacheInstance, FIREBASE_OPTIONS, FIREBASE_APP_NAME } from '@angular/fire/compat';\nimport * as i2 from 
'@angular/fire/compat/auth';\nimport { ɵauthFactory as _authFactory, USE_EMULATOR as USE_EMULATOR$1, SETTINGS as SETTINGS$1, TENANT_ID, LANGUAGE_CODE, USE_DEVICE_LANGUAGE, PERSISTENCE } from '@angular/fire/compat/auth';\nimport { Observable, asyncScheduler, from, of } from 'rxjs';\nimport { startWith, pairwise, map, scan, distinctUntilChanged, filter } from 'rxjs/operators';\nimport 'firebase/compat/auth';\nimport 'firebase/compat/firestore';\nimport firebase from 'firebase/compat/app';\nfunction _fromRef(ref, scheduler = asyncScheduler) {\n return new Observable(subscriber => {\n let unsubscribe;\n if (scheduler != null) {\n scheduler.schedule(() => {\n unsubscribe = ref.onSnapshot({\n includeMetadataChanges: true\n }, subscriber);\n });\n } else {\n unsubscribe = ref.onSnapshot({\n includeMetadataChanges: true\n }, subscriber);\n }\n return () => {\n if (unsubscribe != null) {\n unsubscribe();\n }\n };\n });\n}\nfunction fromRef(ref, scheduler) {\n return _fromRef(ref, scheduler);\n}\nfunction fromDocRef(ref, scheduler) {\n return fromRef(ref, scheduler).pipe(startWith(undefined), pairwise(), map(snapshots => {\n const [priorPayload, payload] = snapshots;\n if (!payload.exists) {\n return {\n payload,\n type: 'removed'\n };\n }\n if (!priorPayload?.exists) {\n return {\n payload,\n type: 'added'\n };\n }\n return {\n payload,\n type: 'modified'\n };\n }));\n}\nfunction fromCollectionRef(ref, scheduler) {\n return fromRef(ref, scheduler).pipe(map(payload => ({\n payload,\n type: 'query'\n })));\n}\n\n/**\n * AngularFirestoreDocument service\n *\n * This class creates a reference to a Firestore Document. A reference is provided in\n * in the constructor. The class is generic which gives you type safety for data update\n * methods and data streaming.\n *\n * This class uses Symbol.observable to transform into Observable using Observable.from().\n *\n * This class is rarely used directly and should be created from the AngularFirestore service.\n *\n * Example:\n *\n * const fakeStock = new AngularFirestoreDocument(doc('stocks/FAKE'));\n * await fakeStock.set({ name: 'FAKE', price: 0.01 });\n * fakeStock.valueChanges().map(snap => {\n * if(snap.exists) return snap.data();\n * return null;\n * }).subscribe(value => console.log(value));\n * // OR! 
Transform using Observable.from() and the data is unwrapped for you\n * Observable.from(fakeStock).subscribe(value => console.log(value));\n */\nclass AngularFirestoreDocument {\n ref;\n afs;\n /**\n * The constructor takes in a DocumentReference to provide wrapper methods\n * for data operations, data streaming, and Symbol.observable.\n */\n constructor(ref, afs) {\n this.ref = ref;\n this.afs = afs;\n }\n /**\n * Create or overwrite a single document.\n */\n set(data, options) {\n return this.ref.set(data, options);\n }\n /**\n * Update some fields of a document without overwriting the entire document.\n */\n update(data) {\n return this.ref.update(data);\n }\n /**\n * Delete a document.\n */\n delete() {\n return this.ref.delete();\n }\n /**\n * Create a reference to a sub-collection given a path and an optional query\n * function.\n */\n collection(path, queryFn) {\n const collectionRef = this.ref.collection(path);\n const {\n ref,\n query\n } = associateQuery(collectionRef, queryFn);\n return new AngularFirestoreCollection(ref, query, this.afs);\n }\n /**\n * Listen to snapshot updates from the document.\n */\n snapshotChanges() {\n const scheduledFromDocRef$ = fromDocRef(this.ref, this.afs.schedulers.outsideAngular);\n return scheduledFromDocRef$.pipe(keepUnstableUntilFirst);\n }\n valueChanges(options = {}) {\n return this.snapshotChanges().pipe(map(({\n payload\n }) => options.idField ? {\n ...payload.data(),\n ...{\n [options.idField]: payload.id\n }\n } : payload.data()));\n }\n /**\n * Retrieve the document once.\n */\n get(options) {\n return from(this.ref.get(options)).pipe(keepUnstableUntilFirst);\n }\n}\n\n/**\n * Return a stream of document changes on a query. These results are not in sort order but in\n * order of occurence.\n */\nfunction docChanges(query, scheduler) {\n return fromCollectionRef(query, scheduler).pipe(startWith(undefined), pairwise(), map(actionTuple => {\n const [priorAction, action] = actionTuple;\n const docChanges = action.payload.docChanges();\n const actions = docChanges.map(change => ({\n type: change.type,\n payload: change\n }));\n // the metadata has changed from the prior emission\n if (priorAction && JSON.stringify(priorAction.payload.metadata) !== JSON.stringify(action.payload.metadata)) {\n // go through all the docs in payload and figure out which ones changed\n action.payload.docs.forEach((currentDoc, currentIndex) => {\n const docChange = docChanges.find(d => d.doc.ref.isEqual(currentDoc.ref));\n const priorDoc = priorAction?.payload.docs.find(d => d.ref.isEqual(currentDoc.ref));\n if (docChange && JSON.stringify(docChange.doc.metadata) === JSON.stringify(currentDoc.metadata) || !docChange && priorDoc && JSON.stringify(priorDoc.metadata) === JSON.stringify(currentDoc.metadata)) {\n // document doesn't appear to have changed, don't log another action\n } else {\n // since the actions are processed in order just push onto the array\n actions.push({\n type: 'modified',\n payload: {\n oldIndex: currentIndex,\n newIndex: currentIndex,\n type: 'modified',\n doc: currentDoc\n }\n });\n }\n });\n }\n return actions;\n }));\n}\n/**\n * Return a stream of document changes on a query. 
These results are in sort order.\n */\nfunction sortedChanges(query, events, scheduler) {\n return docChanges(query, scheduler).pipe(scan((current, changes) => combineChanges(current, changes.map(it => it.payload), events), []), distinctUntilChanged(),\n // cut down on unneed change cycles\n map(changes => changes.map(c => ({\n type: c.type,\n payload: c\n }))));\n}\n/**\n * Combines the total result set from the current set of changes from an incoming set\n * of changes.\n */\nfunction combineChanges(current, changes, events) {\n changes.forEach(change => {\n // skip unwanted change types\n if (events.indexOf(change.type) > -1) {\n current = combineChange(current, change);\n }\n });\n return current;\n}\n/**\n * Splice arguments on top of a sliced array, to break top-level ===\n * this is useful for change-detection\n */\nfunction sliceAndSplice(original, start, deleteCount, ...args) {\n const returnArray = original.slice();\n returnArray.splice(start, deleteCount, ...args);\n return returnArray;\n}\n/**\n * Creates a new sorted array from a new change.\n * Build our own because we allow filtering of action types ('added', 'removed', 'modified') before scanning\n * and so we have greater control over change detection (by breaking ===)\n */\nfunction combineChange(combined, change) {\n switch (change.type) {\n case 'added':\n if (combined[change.newIndex] && combined[change.newIndex].doc.ref.isEqual(change.doc.ref)) {\n // Not sure why the duplicates are getting fired\n } else {\n return sliceAndSplice(combined, change.newIndex, 0, change);\n }\n break;\n case 'modified':\n if (combined[change.oldIndex] == null || combined[change.oldIndex].doc.ref.isEqual(change.doc.ref)) {\n // When an item changes position we first remove it\n // and then add it's new position\n if (change.oldIndex !== change.newIndex) {\n const copiedArray = combined.slice();\n copiedArray.splice(change.oldIndex, 1);\n copiedArray.splice(change.newIndex, 0, change);\n return copiedArray;\n } else {\n return sliceAndSplice(combined, change.newIndex, 1, change);\n }\n }\n break;\n case 'removed':\n if (combined[change.oldIndex] && combined[change.oldIndex].doc.ref.isEqual(change.doc.ref)) {\n return sliceAndSplice(combined, change.oldIndex, 1);\n }\n break;\n }\n return combined;\n}\nfunction validateEventsArray(events) {\n if (!events || events.length === 0) {\n events = ['added', 'removed', 'modified'];\n }\n return events;\n}\n/**\n * AngularFirestoreCollection service\n *\n * This class creates a reference to a Firestore Collection. A reference and a query are provided in\n * in the constructor. The query can be the unqueried reference if no query is desired.The class\n * is generic which gives you type safety for data update methods and data streaming.\n *\n * This class uses Symbol.observable to transform into Observable using Observable.from().\n *\n * This class is rarely used directly and should be created from the AngularFirestore service.\n *\n * Example:\n *\n * const collectionRef = firebase.firestore.collection('stocks');\n * const query = collectionRef.where('price', '>', '0.01');\n * const fakeStock = new AngularFirestoreCollection(collectionRef, query);\n *\n * // NOTE!: the updates are performed on the reference not the query\n * await fakeStock.add({ name: 'FAKE', price: 0.01 });\n *\n * // Subscribe to changes as snapshots. 
This provides you data updates as well as delta updates.\n * fakeStock.valueChanges().subscribe(value => console.log(value));\n */\nclass AngularFirestoreCollection {\n ref;\n query;\n afs;\n /**\n * The constructor takes in a CollectionReference and Query to provide wrapper methods\n * for data operations and data streaming.\n *\n * Note: Data operation methods are done on the reference not the query. This means\n * when you update data it is not updating data to the window of your query unless\n * the data fits the criteria of the query. See the AssociatedRefence type for details\n * on this implication.\n */\n constructor(ref, query, afs) {\n this.ref = ref;\n this.query = query;\n this.afs = afs;\n }\n /**\n * Listen to the latest change in the stream. This method returns changes\n * as they occur and they are not sorted by query order. This allows you to construct\n * your own data structure.\n */\n stateChanges(events) {\n let source = docChanges(this.query, this.afs.schedulers.outsideAngular);\n if (events && events.length > 0) {\n source = source.pipe(map(actions => actions.filter(change => events.indexOf(change.type) > -1)));\n }\n return source.pipe(\n // We want to filter out empty arrays, but always emit at first, so the developer knows\n // that the collection has been resolve; even if it's empty\n startWith(undefined), pairwise(), filter(([prior, current]) => current.length > 0 || !prior), map(([, current]) => current), keepUnstableUntilFirst);\n }\n /**\n * Create a stream of changes as they occur it time. This method is similar to stateChanges()\n * but it collects each event in an array over time.\n */\n auditTrail(events) {\n return this.stateChanges(events).pipe(scan((current, action) => [...current, ...action], []));\n }\n /**\n * Create a stream of synchronized changes. This method keeps the local array in sorted\n * query order.\n */\n snapshotChanges(events) {\n const validatedEvents = validateEventsArray(events);\n const scheduledSortedChanges$ = sortedChanges(this.query, validatedEvents, this.afs.schedulers.outsideAngular);\n return scheduledSortedChanges$.pipe(keepUnstableUntilFirst);\n }\n valueChanges(options = {}) {\n return fromCollectionRef(this.query, this.afs.schedulers.outsideAngular).pipe(map(actions => actions.payload.docs.map(a => {\n if (options.idField) {\n return {\n ...a.data(),\n ...{\n [options.idField]: a.id\n }\n };\n } else {\n return a.data();\n }\n })), keepUnstableUntilFirst);\n }\n /**\n * Retrieve the results of the query once.\n */\n get(options) {\n return from(this.query.get(options)).pipe(keepUnstableUntilFirst);\n }\n /**\n * Add data to a collection reference.\n *\n * Note: Data operation methods are done on the reference not the query. 
This means\n * when you update data it is not updating data to the window of your query unless\n * the data fits the criteria of the query.\n */\n add(data) {\n return this.ref.add(data);\n }\n /**\n * Create a reference to a single document in a collection.\n */\n doc(path) {\n // TODO is there a better way to solve this type issue\n return new AngularFirestoreDocument(this.ref.doc(path), this.afs);\n }\n}\n\n/**\n * AngularFirestoreCollectionGroup service\n *\n * This class holds a reference to a Firestore Collection Group Query.\n *\n * This class uses Symbol.observable to transform into Observable using Observable.from().\n *\n * This class is rarely used directly and should be created from the AngularFirestore service.\n *\n * Example:\n *\n * const collectionGroup = firebase.firestore.collectionGroup('stocks');\n * const query = collectionRef.where('price', '>', '0.01');\n * const fakeStock = new AngularFirestoreCollectionGroup(query, afs);\n *\n * // Subscribe to changes as snapshots. This provides you data updates as well as delta updates.\n * fakeStock.valueChanges().subscribe(value => console.log(value));\n */\nclass AngularFirestoreCollectionGroup {\n query;\n afs;\n /**\n * The constructor takes in a CollectionGroupQuery to provide wrapper methods\n * for data operations and data streaming.\n */\n constructor(query, afs) {\n this.query = query;\n this.afs = afs;\n }\n /**\n * Listen to the latest change in the stream. This method returns changes\n * as they occur and they are not sorted by query order. This allows you to construct\n * your own data structure.\n */\n stateChanges(events) {\n if (!events || events.length === 0) {\n return docChanges(this.query, this.afs.schedulers.outsideAngular).pipe(keepUnstableUntilFirst);\n }\n return docChanges(this.query, this.afs.schedulers.outsideAngular).pipe(map(actions => actions.filter(change => events.indexOf(change.type) > -1)), filter(changes => changes.length > 0), keepUnstableUntilFirst);\n }\n /**\n * Create a stream of changes as they occur it time. This method is similar to stateChanges()\n * but it collects each event in an array over time.\n */\n auditTrail(events) {\n return this.stateChanges(events).pipe(scan((current, action) => [...current, ...action], []));\n }\n /**\n * Create a stream of synchronized changes. 
This method keeps the local array in sorted\n * query order.\n */\n snapshotChanges(events) {\n const validatedEvents = validateEventsArray(events);\n const scheduledSortedChanges$ = sortedChanges(this.query, validatedEvents, this.afs.schedulers.outsideAngular);\n return scheduledSortedChanges$.pipe(keepUnstableUntilFirst);\n }\n valueChanges(options = {}) {\n const fromCollectionRefScheduled$ = fromCollectionRef(this.query, this.afs.schedulers.outsideAngular);\n return fromCollectionRefScheduled$.pipe(map(actions => actions.payload.docs.map(a => {\n if (options.idField) {\n return {\n [options.idField]: a.id,\n ...a.data()\n };\n } else {\n return a.data();\n }\n })), keepUnstableUntilFirst);\n }\n /**\n * Retrieve the results of the query once.\n */\n get(options) {\n return from(this.query.get(options)).pipe(keepUnstableUntilFirst);\n }\n}\n\n/**\n * The value of this token determines whether or not the firestore will have persistance enabled\n */\nconst ENABLE_PERSISTENCE = /*#__PURE__*/new InjectionToken('angularfire2.enableFirestorePersistence');\nconst PERSISTENCE_SETTINGS = /*#__PURE__*/new InjectionToken('angularfire2.firestore.persistenceSettings');\nconst SETTINGS = /*#__PURE__*/new InjectionToken('angularfire2.firestore.settings');\nconst USE_EMULATOR = /*#__PURE__*/new InjectionToken('angularfire2.firestore.use-emulator');\n/**\n * A utility methods for associating a collection reference with\n * a query.\n *\n * @param collectionRef - A collection reference to query\n * @param queryFn - The callback to create a query\n *\n * Example:\n * const { query, ref } = associateQuery(docRef.collection('items'), ref => {\n * return ref.where('age', '<', 200);\n * });\n */\nfunction associateQuery(collectionRef, queryFn = ref => ref) {\n const query = queryFn(collectionRef);\n const ref = collectionRef;\n return {\n query,\n ref\n };\n}\n/**\n * AngularFirestore Service\n *\n * This service is the main entry point for this feature module. It provides\n * an API for creating Collection and Reference services. These services can\n * then be used to do data updates and observable streams of the data.\n *\n * Example:\n *\n * import { Component } from '@angular/core';\n * import { AngularFirestore, AngularFirestoreCollection, AngularFirestoreDocument } from '@angular/fire/firestore';\n * import { Observable } from 'rxjs/Observable';\n * import { from } from 'rxjs/observable';\n *\n * @Component({\n * selector: 'app-my-component',\n * template: `\n * Items for {{ (profile | async)?.name }}\n * \n * \n * \n * Add Item \n *
\n * `\n * })\n * export class MyComponent implements OnInit {\n *\n * // services for data operations and data streaming\n * private readonly itemsRef: AngularFirestoreCollection<Item>;\n * private readonly profileRef: AngularFirestoreDocument<Profile>
;\n *\n * // observables for template\n * items: Observable<Item[]>;\n * profile: Observable<Profile>
;\n *\n * // inject main service\n * constructor(private readonly afs: AngularFirestore) {}\n *\n * ngOnInit() {\n * this.itemsRef = afs.collection('items', ref => ref.where('user', '==', 'davideast').limit(10));\n * this.items = this.itemsRef.valueChanges().map(snap => snap.docs.map(data => doc.data()));\n * // this.items = from(this.itemsRef); // you can also do this with no mapping\n *\n * this.profileRef = afs.doc('users/davideast');\n * this.profile = this.profileRef.valueChanges();\n * }\n *\n * addItem(name: string) {\n * const user = 'davideast';\n * this.itemsRef.add({ name, user });\n * }\n * }\n */\nlet AngularFirestore = /*#__PURE__*/(() => {\n class AngularFirestore {\n schedulers;\n firestore;\n persistenceEnabled$;\n /**\n * Each Feature of AngularFire has a FirebaseApp injected. This way we\n * don't rely on the main Firebase App instance and we can create named\n * apps and use multiple apps.\n */\n constructor(options, name, shouldEnablePersistence, settings,\n // eslint-disable-next-line @typescript-eslint/ban-types\n platformId, zone, schedulers, persistenceSettings, _useEmulator, auth, useAuthEmulator, authSettings,\n // can't use firebase.auth.AuthSettings here\n tenantId, languageCode, useDeviceLanguage, persistence, _appCheckInstances) {\n this.schedulers = schedulers;\n const app = _firebaseAppFactory(options, zone, name);\n const useEmulator = _useEmulator;\n if (auth) {\n _authFactory(app, zone, useAuthEmulator, tenantId, languageCode, useDeviceLanguage, authSettings, persistence);\n }\n [this.firestore, this.persistenceEnabled$] = _cacheInstance(`${app.name}.firestore`, 'AngularFirestore', app.name, () => {\n const firestore = zone.runOutsideAngular(() => app.firestore());\n if (settings) {\n firestore.settings(settings);\n }\n if (useEmulator) {\n firestore.useEmulator(...useEmulator);\n }\n if (shouldEnablePersistence && !isPlatformServer(platformId)) {\n // We need to try/catch here because not all enablePersistence() failures are caught\n // https://github.com/firebase/firebase-js-sdk/issues/608\n const enablePersistence = () => {\n try {\n return from(firestore.enablePersistence(persistenceSettings || undefined).then(() => true, () => false));\n } catch (e) {\n if (typeof console !== 'undefined') {\n console.warn(e);\n }\n return of(false);\n }\n };\n return [firestore, zone.runOutsideAngular(enablePersistence)];\n } else {\n return [firestore, of(false)];\n }\n }, [settings, useEmulator, shouldEnablePersistence]);\n }\n collection(pathOrRef, queryFn) {\n let collectionRef;\n if (typeof pathOrRef === 'string') {\n collectionRef = this.firestore.collection(pathOrRef);\n } else {\n collectionRef = pathOrRef;\n }\n const {\n ref,\n query\n } = associateQuery(collectionRef, queryFn);\n const refInZone = this.schedulers.ngZone.run(() => ref);\n return new AngularFirestoreCollection(refInZone, query, this);\n }\n /**\n * Create a reference to a Firestore Collection Group based on a collectionId\n * and an optional query function to narrow the result\n * set.\n */\n collectionGroup(collectionId, queryGroupFn) {\n const queryFn = queryGroupFn || (ref => ref);\n const collectionGroup = this.firestore.collectionGroup(collectionId);\n return new AngularFirestoreCollectionGroup(queryFn(collectionGroup), this);\n }\n doc(pathOrRef) {\n let ref;\n if (typeof pathOrRef === 'string') {\n ref = this.firestore.doc(pathOrRef);\n } else {\n ref = pathOrRef;\n }\n const refInZone = this.schedulers.ngZone.run(() => ref);\n return new AngularFirestoreDocument(refInZone, this);\n 
}\n /**\n * Returns a generated Firestore Document Id.\n */\n createId() {\n return this.firestore.collection('_').doc().id;\n }\n static ɵfac = function AngularFirestore_Factory(__ngFactoryType__) {\n return new (__ngFactoryType__ || AngularFirestore)(i0.ɵɵinject(FIREBASE_OPTIONS), i0.ɵɵinject(FIREBASE_APP_NAME, 8), i0.ɵɵinject(ENABLE_PERSISTENCE, 8), i0.ɵɵinject(SETTINGS, 8), i0.ɵɵinject(PLATFORM_ID), i0.ɵɵinject(i0.NgZone), i0.ɵɵinject(i1.ɵAngularFireSchedulers), i0.ɵɵinject(PERSISTENCE_SETTINGS, 8), i0.ɵɵinject(USE_EMULATOR, 8), i0.ɵɵinject(i2.AngularFireAuth, 8), i0.ɵɵinject(USE_EMULATOR$1, 8), i0.ɵɵinject(SETTINGS$1, 8), i0.ɵɵinject(TENANT_ID, 8), i0.ɵɵinject(LANGUAGE_CODE, 8), i0.ɵɵinject(USE_DEVICE_LANGUAGE, 8), i0.ɵɵinject(PERSISTENCE, 8), i0.ɵɵinject(i3.AppCheckInstances, 8));\n };\n static ɵprov = /* @__PURE__ */i0.ɵɵdefineInjectable({\n token: AngularFirestore,\n factory: AngularFirestore.ɵfac,\n providedIn: 'any'\n });\n }\n return AngularFirestore;\n})();\n/*#__PURE__*/(() => {\n (typeof ngDevMode === \"undefined\" || ngDevMode) && void 0;\n})();\nlet AngularFirestoreModule = /*#__PURE__*/(() => {\n class AngularFirestoreModule {\n constructor() {\n firebase.registerVersion('angularfire', VERSION.full, 'fst-compat');\n }\n /**\n * Attempt to enable persistent storage, if possible\n */\n static enablePersistence(persistenceSettings) {\n return {\n ngModule: AngularFirestoreModule,\n providers: [{\n provide: ENABLE_PERSISTENCE,\n useValue: true\n }, {\n provide: PERSISTENCE_SETTINGS,\n useValue: persistenceSettings\n }]\n };\n }\n static ɵfac = function AngularFirestoreModule_Factory(__ngFactoryType__) {\n return new (__ngFactoryType__ || AngularFirestoreModule)();\n };\n static ɵmod = /* @__PURE__ */i0.ɵɵdefineNgModule({\n type: AngularFirestoreModule\n });\n static ɵinj = /* @__PURE__ */i0.ɵɵdefineInjector({\n providers: [AngularFirestore]\n });\n }\n return AngularFirestoreModule;\n})();\n/*#__PURE__*/(() => {\n (typeof ngDevMode === \"undefined\" || ngDevMode) && void 0;\n})();\n\n/**\n * Generated bundle index. 
Do not edit.\n */\n\nexport { AngularFirestore, AngularFirestoreCollection, AngularFirestoreCollectionGroup, AngularFirestoreDocument, AngularFirestoreModule, ENABLE_PERSISTENCE, PERSISTENCE_SETTINGS, SETTINGS, USE_EMULATOR, associateQuery, combineChange, combineChanges, docChanges, fromCollectionRef, fromDocRef, fromRef, sortedChanges, validateEventsArray };\n","import { SDK_VERSION, _isFirebaseServerApp, _getProvider, _registerComponent, registerVersion, getApp } from '@firebase/app';\nimport { ErrorFactory, isBrowserExtension, isMobileCordova, isReactNative, FirebaseError, querystring, getModularInstance, base64Decode, getUA, isIE, createSubscribe, deepEqual, querystringDecode, extractQuerystring, isEmpty, getExperimentalSetting, getDefaultEmulatorHost } from '@firebase/util';\nimport { Logger, LogLevel } from '@firebase/logger';\nimport { __rest } from 'tslib';\nimport { Component } from '@firebase/component';\n\n/**\r\n * @license\r\n * Copyright 2021 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * An enum of factors that may be used for multifactor authentication.\r\n *\r\n * @public\r\n */\nconst FactorId = {\n /** Phone as second factor */\n PHONE: 'phone',\n TOTP: 'totp'\n};\n/**\r\n * Enumeration of supported providers.\r\n *\r\n * @public\r\n */\nconst ProviderId = {\n /** Facebook provider ID */\n FACEBOOK: 'facebook.com',\n /** GitHub provider ID */\n GITHUB: 'github.com',\n /** Google provider ID */\n GOOGLE: 'google.com',\n /** Password provider */\n PASSWORD: 'password',\n /** Phone provider */\n PHONE: 'phone',\n /** Twitter provider ID */\n TWITTER: 'twitter.com'\n};\n/**\r\n * Enumeration of supported sign-in methods.\r\n *\r\n * @public\r\n */\nconst SignInMethod = {\n /** Email link sign in method */\n EMAIL_LINK: 'emailLink',\n /** Email/password sign in method */\n EMAIL_PASSWORD: 'password',\n /** Facebook sign in method */\n FACEBOOK: 'facebook.com',\n /** GitHub sign in method */\n GITHUB: 'github.com',\n /** Google sign in method */\n GOOGLE: 'google.com',\n /** Phone sign in method */\n PHONE: 'phone',\n /** Twitter sign in method */\n TWITTER: 'twitter.com'\n};\n/**\r\n * Enumeration of supported operation types.\r\n *\r\n * @public\r\n */\nconst OperationType = {\n /** Operation involving linking an additional provider to an already signed-in user. */\n LINK: 'link',\n /** Operation involving using a provider to reauthenticate an already signed-in user. */\n REAUTHENTICATE: 'reauthenticate',\n /** Operation involving signing in a user. */\n SIGN_IN: 'signIn'\n};\n/**\r\n * An enumeration of the possible email action types.\r\n *\r\n * @public\r\n */\nconst ActionCodeOperation = {\n /** The email link sign-in action. */\n EMAIL_SIGNIN: 'EMAIL_SIGNIN',\n /** The password reset action. */\n PASSWORD_RESET: 'PASSWORD_RESET',\n /** The email revocation action. */\n RECOVER_EMAIL: 'RECOVER_EMAIL',\n /** The revert second factor addition email action. 
*/\n REVERT_SECOND_FACTOR_ADDITION: 'REVERT_SECOND_FACTOR_ADDITION',\n /** The revert second factor addition email action. */\n VERIFY_AND_CHANGE_EMAIL: 'VERIFY_AND_CHANGE_EMAIL',\n /** The email verification action. */\n VERIFY_EMAIL: 'VERIFY_EMAIL'\n};\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction _debugErrorMap() {\n return {\n [\"admin-restricted-operation\" /* AuthErrorCode.ADMIN_ONLY_OPERATION */]: 'This operation is restricted to administrators only.',\n [\"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */]: '',\n [\"app-not-authorized\" /* AuthErrorCode.APP_NOT_AUTHORIZED */]: \"This app, identified by the domain where it's hosted, is not \" + 'authorized to use Firebase Authentication with the provided API key. ' + 'Review your key configuration in the Google API console.',\n [\"app-not-installed\" /* AuthErrorCode.APP_NOT_INSTALLED */]: 'The requested mobile application corresponding to the identifier (' + 'Android package name or iOS bundle ID) provided is not installed on ' + 'this device.',\n [\"captcha-check-failed\" /* AuthErrorCode.CAPTCHA_CHECK_FAILED */]: 'The reCAPTCHA response token provided is either invalid, expired, ' + 'already used or the domain associated with it does not match the list ' + 'of whitelisted domains.',\n [\"code-expired\" /* AuthErrorCode.CODE_EXPIRED */]: 'The SMS code has expired. Please re-send the verification code to try ' + 'again.',\n [\"cordova-not-ready\" /* AuthErrorCode.CORDOVA_NOT_READY */]: 'Cordova framework is not ready.',\n [\"cors-unsupported\" /* AuthErrorCode.CORS_UNSUPPORTED */]: 'This browser is not supported.',\n [\"credential-already-in-use\" /* AuthErrorCode.CREDENTIAL_ALREADY_IN_USE */]: 'This credential is already associated with a different user account.',\n [\"custom-token-mismatch\" /* AuthErrorCode.CREDENTIAL_MISMATCH */]: 'The custom token corresponds to a different audience.',\n [\"requires-recent-login\" /* AuthErrorCode.CREDENTIAL_TOO_OLD_LOGIN_AGAIN */]: 'This operation is sensitive and requires recent authentication. Log in ' + 'again before retrying this request.',\n [\"dependent-sdk-initialized-before-auth\" /* AuthErrorCode.DEPENDENT_SDK_INIT_BEFORE_AUTH */]: 'Another Firebase SDK was initialized and is trying to use Auth before Auth is ' + 'initialized. 
Please be sure to call `initializeAuth` or `getAuth` before ' + 'starting any other Firebase SDK.',\n [\"dynamic-link-not-activated\" /* AuthErrorCode.DYNAMIC_LINK_NOT_ACTIVATED */]: 'Please activate Dynamic Links in the Firebase Console and agree to the terms and ' + 'conditions.',\n [\"email-change-needs-verification\" /* AuthErrorCode.EMAIL_CHANGE_NEEDS_VERIFICATION */]: 'Multi-factor users must always have a verified email.',\n [\"email-already-in-use\" /* AuthErrorCode.EMAIL_EXISTS */]: 'The email address is already in use by another account.',\n [\"emulator-config-failed\" /* AuthErrorCode.EMULATOR_CONFIG_FAILED */]: 'Auth instance has already been used to make a network call. Auth can ' + 'no longer be configured to use the emulator. Try calling ' + '\"connectAuthEmulator()\" sooner.',\n [\"expired-action-code\" /* AuthErrorCode.EXPIRED_OOB_CODE */]: 'The action code has expired.',\n [\"cancelled-popup-request\" /* AuthErrorCode.EXPIRED_POPUP_REQUEST */]: 'This operation has been cancelled due to another conflicting popup being opened.',\n [\"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */]: 'An internal AuthError has occurred.',\n [\"invalid-app-credential\" /* AuthErrorCode.INVALID_APP_CREDENTIAL */]: 'The phone verification request contains an invalid application verifier.' + ' The reCAPTCHA token response is either invalid or expired.',\n [\"invalid-app-id\" /* AuthErrorCode.INVALID_APP_ID */]: 'The mobile app identifier is not registed for the current project.',\n [\"invalid-user-token\" /* AuthErrorCode.INVALID_AUTH */]: \"This user's credential isn't valid for this project. This can happen \" + \"if the user's token has been tampered with, or if the user isn't for \" + 'the project associated with this API key.',\n [\"invalid-auth-event\" /* AuthErrorCode.INVALID_AUTH_EVENT */]: 'An internal AuthError has occurred.',\n [\"invalid-verification-code\" /* AuthErrorCode.INVALID_CODE */]: 'The SMS verification code used to create the phone auth credential is ' + 'invalid. Please resend the verification code sms and be sure to use the ' + 'verification code provided by the user.',\n [\"invalid-continue-uri\" /* AuthErrorCode.INVALID_CONTINUE_URI */]: 'The continue URL provided in the request is invalid.',\n [\"invalid-cordova-configuration\" /* AuthErrorCode.INVALID_CORDOVA_CONFIGURATION */]: 'The following Cordova plugins must be installed to enable OAuth sign-in: ' + 'cordova-plugin-buildinfo, cordova-universal-links-plugin, ' + 'cordova-plugin-browsertab, cordova-plugin-inappbrowser and ' + 'cordova-plugin-customurlscheme.',\n [\"invalid-custom-token\" /* AuthErrorCode.INVALID_CUSTOM_TOKEN */]: 'The custom token format is incorrect. 
Please check the documentation.',\n [\"invalid-dynamic-link-domain\" /* AuthErrorCode.INVALID_DYNAMIC_LINK_DOMAIN */]: 'The provided dynamic link domain is not configured or authorized for the current project.',\n [\"invalid-email\" /* AuthErrorCode.INVALID_EMAIL */]: 'The email address is badly formatted.',\n [\"invalid-emulator-scheme\" /* AuthErrorCode.INVALID_EMULATOR_SCHEME */]: 'Emulator URL must start with a valid scheme (http:// or https://).',\n [\"invalid-api-key\" /* AuthErrorCode.INVALID_API_KEY */]: 'Your API key is invalid, please check you have copied it correctly.',\n [\"invalid-cert-hash\" /* AuthErrorCode.INVALID_CERT_HASH */]: 'The SHA-1 certificate hash provided is invalid.',\n [\"invalid-credential\" /* AuthErrorCode.INVALID_CREDENTIAL */]: 'The supplied auth credential is incorrect, malformed or has expired.',\n [\"invalid-message-payload\" /* AuthErrorCode.INVALID_MESSAGE_PAYLOAD */]: 'The email template corresponding to this action contains invalid characters in its message. ' + 'Please fix by going to the Auth email templates section in the Firebase Console.',\n [\"invalid-multi-factor-session\" /* AuthErrorCode.INVALID_MFA_SESSION */]: 'The request does not contain a valid proof of first factor successful sign-in.',\n [\"invalid-oauth-provider\" /* AuthErrorCode.INVALID_OAUTH_PROVIDER */]: 'EmailAuthProvider is not supported for this operation. This operation ' + 'only supports OAuth providers.',\n [\"invalid-oauth-client-id\" /* AuthErrorCode.INVALID_OAUTH_CLIENT_ID */]: 'The OAuth client ID provided is either invalid or does not match the ' + 'specified API key.',\n [\"unauthorized-domain\" /* AuthErrorCode.INVALID_ORIGIN */]: 'This domain is not authorized for OAuth operations for your Firebase ' + 'project. Edit the list of authorized domains from the Firebase console.',\n [\"invalid-action-code\" /* AuthErrorCode.INVALID_OOB_CODE */]: 'The action code is invalid. This can happen if the code is malformed, ' + 'expired, or has already been used.',\n [\"wrong-password\" /* AuthErrorCode.INVALID_PASSWORD */]: 'The password is invalid or the user does not have a password.',\n [\"invalid-persistence-type\" /* AuthErrorCode.INVALID_PERSISTENCE */]: 'The specified persistence type is invalid. It can only be local, session or none.',\n [\"invalid-phone-number\" /* AuthErrorCode.INVALID_PHONE_NUMBER */]: 'The format of the phone number provided is incorrect. Please enter the ' + 'phone number in a format that can be parsed into E.164 format. E.164 ' + 'phone numbers are written in the format [+][country code][subscriber ' + 'number including area code].',\n [\"invalid-provider-id\" /* AuthErrorCode.INVALID_PROVIDER_ID */]: 'The specified provider ID is invalid.',\n [\"invalid-recipient-email\" /* AuthErrorCode.INVALID_RECIPIENT_EMAIL */]: 'The email corresponding to this action failed to send as the provided ' + 'recipient email address is invalid.',\n [\"invalid-sender\" /* AuthErrorCode.INVALID_SENDER */]: 'The email template corresponding to this action contains an invalid sender email or name. 
' + 'Please fix by going to the Auth email templates section in the Firebase Console.',\n [\"invalid-verification-id\" /* AuthErrorCode.INVALID_SESSION_INFO */]: 'The verification ID used to create the phone auth credential is invalid.',\n [\"invalid-tenant-id\" /* AuthErrorCode.INVALID_TENANT_ID */]: \"The Auth instance's tenant ID is invalid.\",\n [\"login-blocked\" /* AuthErrorCode.LOGIN_BLOCKED */]: 'Login blocked by user-provided method: {$originalMessage}',\n [\"missing-android-pkg-name\" /* AuthErrorCode.MISSING_ANDROID_PACKAGE_NAME */]: 'An Android Package Name must be provided if the Android App is required to be installed.',\n [\"auth-domain-config-required\" /* AuthErrorCode.MISSING_AUTH_DOMAIN */]: 'Be sure to include authDomain when calling firebase.initializeApp(), ' + 'by following the instructions in the Firebase console.',\n [\"missing-app-credential\" /* AuthErrorCode.MISSING_APP_CREDENTIAL */]: 'The phone verification request is missing an application verifier ' + 'assertion. A reCAPTCHA response token needs to be provided.',\n [\"missing-verification-code\" /* AuthErrorCode.MISSING_CODE */]: 'The phone auth credential was created with an empty SMS verification code.',\n [\"missing-continue-uri\" /* AuthErrorCode.MISSING_CONTINUE_URI */]: 'A continue URL must be provided in the request.',\n [\"missing-iframe-start\" /* AuthErrorCode.MISSING_IFRAME_START */]: 'An internal AuthError has occurred.',\n [\"missing-ios-bundle-id\" /* AuthErrorCode.MISSING_IOS_BUNDLE_ID */]: 'An iOS Bundle ID must be provided if an App Store ID is provided.',\n [\"missing-or-invalid-nonce\" /* AuthErrorCode.MISSING_OR_INVALID_NONCE */]: 'The request does not contain a valid nonce. This can occur if the ' + 'SHA-256 hash of the provided raw nonce does not match the hashed nonce ' + 'in the ID token payload.',\n [\"missing-password\" /* AuthErrorCode.MISSING_PASSWORD */]: 'A non-empty password must be provided',\n [\"missing-multi-factor-info\" /* AuthErrorCode.MISSING_MFA_INFO */]: 'No second factor identifier is provided.',\n [\"missing-multi-factor-session\" /* AuthErrorCode.MISSING_MFA_SESSION */]: 'The request is missing proof of first factor successful sign-in.',\n [\"missing-phone-number\" /* AuthErrorCode.MISSING_PHONE_NUMBER */]: 'To send verification codes, provide a phone number for the recipient.',\n [\"missing-verification-id\" /* AuthErrorCode.MISSING_SESSION_INFO */]: 'The phone auth credential was created with an empty verification ID.',\n [\"app-deleted\" /* AuthErrorCode.MODULE_DESTROYED */]: 'This instance of FirebaseApp has been deleted.',\n [\"multi-factor-info-not-found\" /* AuthErrorCode.MFA_INFO_NOT_FOUND */]: 'The user does not have a second factor matching the identifier provided.',\n [\"multi-factor-auth-required\" /* AuthErrorCode.MFA_REQUIRED */]: 'Proof of ownership of a second factor is required to complete sign-in.',\n [\"account-exists-with-different-credential\" /* AuthErrorCode.NEED_CONFIRMATION */]: 'An account already exists with the same email address but different ' + 'sign-in credentials. 
Sign in using a provider associated with this ' + 'email address.',\n [\"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */]: 'A network AuthError (such as timeout, interrupted connection or unreachable host) has occurred.',\n [\"no-auth-event\" /* AuthErrorCode.NO_AUTH_EVENT */]: 'An internal AuthError has occurred.',\n [\"no-such-provider\" /* AuthErrorCode.NO_SUCH_PROVIDER */]: 'User was not linked to an account with the given provider.',\n [\"null-user\" /* AuthErrorCode.NULL_USER */]: 'A null user object was provided as the argument for an operation which ' + 'requires a non-null user object.',\n [\"operation-not-allowed\" /* AuthErrorCode.OPERATION_NOT_ALLOWED */]: 'The given sign-in provider is disabled for this Firebase project. ' + 'Enable it in the Firebase console, under the sign-in method tab of the ' + 'Auth section.',\n [\"operation-not-supported-in-this-environment\" /* AuthErrorCode.OPERATION_NOT_SUPPORTED */]: 'This operation is not supported in the environment this application is ' + 'running on. \"location.protocol\" must be http, https or chrome-extension' + ' and web storage must be enabled.',\n [\"popup-blocked\" /* AuthErrorCode.POPUP_BLOCKED */]: 'Unable to establish a connection with the popup. It may have been blocked by the browser.',\n [\"popup-closed-by-user\" /* AuthErrorCode.POPUP_CLOSED_BY_USER */]: 'The popup has been closed by the user before finalizing the operation.',\n [\"provider-already-linked\" /* AuthErrorCode.PROVIDER_ALREADY_LINKED */]: 'User can only be linked to one identity for the given provider.',\n [\"quota-exceeded\" /* AuthErrorCode.QUOTA_EXCEEDED */]: \"The project's quota for this operation has been exceeded.\",\n [\"redirect-cancelled-by-user\" /* AuthErrorCode.REDIRECT_CANCELLED_BY_USER */]: 'The redirect operation has been cancelled by the user before finalizing.',\n [\"redirect-operation-pending\" /* AuthErrorCode.REDIRECT_OPERATION_PENDING */]: 'A redirect sign-in operation is already pending.',\n [\"rejected-credential\" /* AuthErrorCode.REJECTED_CREDENTIAL */]: 'The request contains malformed or mismatching credentials.',\n [\"second-factor-already-in-use\" /* AuthErrorCode.SECOND_FACTOR_ALREADY_ENROLLED */]: 'The second factor is already enrolled on this account.',\n [\"maximum-second-factor-count-exceeded\" /* AuthErrorCode.SECOND_FACTOR_LIMIT_EXCEEDED */]: 'The maximum allowed number of second factors on a user has been exceeded.',\n [\"tenant-id-mismatch\" /* AuthErrorCode.TENANT_ID_MISMATCH */]: \"The provided tenant ID does not match the Auth instance's tenant ID\",\n [\"timeout\" /* AuthErrorCode.TIMEOUT */]: 'The operation has timed out.',\n [\"user-token-expired\" /* AuthErrorCode.TOKEN_EXPIRED */]: \"The user's credential is no longer valid. The user must sign in again.\",\n [\"too-many-requests\" /* AuthErrorCode.TOO_MANY_ATTEMPTS_TRY_LATER */]: 'We have blocked all requests from this device due to unusual activity. ' + 'Try again later.',\n [\"unauthorized-continue-uri\" /* AuthErrorCode.UNAUTHORIZED_DOMAIN */]: 'The domain of the continue URL is not whitelisted. 
Please whitelist ' + 'the domain in the Firebase console.',\n [\"unsupported-first-factor\" /* AuthErrorCode.UNSUPPORTED_FIRST_FACTOR */]: 'Enrolling a second factor or signing in with a multi-factor account requires sign-in with a supported first factor.',\n [\"unsupported-persistence-type\" /* AuthErrorCode.UNSUPPORTED_PERSISTENCE */]: 'The current environment does not support the specified persistence type.',\n [\"unsupported-tenant-operation\" /* AuthErrorCode.UNSUPPORTED_TENANT_OPERATION */]: 'This operation is not supported in a multi-tenant context.',\n [\"unverified-email\" /* AuthErrorCode.UNVERIFIED_EMAIL */]: 'The operation requires a verified email.',\n [\"user-cancelled\" /* AuthErrorCode.USER_CANCELLED */]: 'The user did not grant your application the permissions it requested.',\n [\"user-not-found\" /* AuthErrorCode.USER_DELETED */]: 'There is no user record corresponding to this identifier. The user may ' + 'have been deleted.',\n [\"user-disabled\" /* AuthErrorCode.USER_DISABLED */]: 'The user account has been disabled by an administrator.',\n [\"user-mismatch\" /* AuthErrorCode.USER_MISMATCH */]: 'The supplied credentials do not correspond to the previously signed in user.',\n [\"user-signed-out\" /* AuthErrorCode.USER_SIGNED_OUT */]: '',\n [\"weak-password\" /* AuthErrorCode.WEAK_PASSWORD */]: 'The password must be 6 characters long or more.',\n [\"web-storage-unsupported\" /* AuthErrorCode.WEB_STORAGE_UNSUPPORTED */]: 'This browser is not supported or 3rd party cookies and data may be disabled.',\n [\"already-initialized\" /* AuthErrorCode.ALREADY_INITIALIZED */]: 'initializeAuth() has already been called with ' + 'different options. To avoid this error, call initializeAuth() with the ' + 'same options as when it was originally called, or call getAuth() to return the' + ' already initialized instance.',\n [\"missing-recaptcha-token\" /* AuthErrorCode.MISSING_RECAPTCHA_TOKEN */]: 'The reCAPTCHA token is missing when sending request to the backend.',\n [\"invalid-recaptcha-token\" /* AuthErrorCode.INVALID_RECAPTCHA_TOKEN */]: 'The reCAPTCHA token is invalid when sending request to the backend.',\n [\"invalid-recaptcha-action\" /* AuthErrorCode.INVALID_RECAPTCHA_ACTION */]: 'The reCAPTCHA action is invalid when sending request to the backend.',\n [\"recaptcha-not-enabled\" /* AuthErrorCode.RECAPTCHA_NOT_ENABLED */]: 'reCAPTCHA Enterprise integration is not enabled for this project.',\n [\"missing-client-type\" /* AuthErrorCode.MISSING_CLIENT_TYPE */]: 'The reCAPTCHA client type is missing when sending request to the backend.',\n [\"missing-recaptcha-version\" /* AuthErrorCode.MISSING_RECAPTCHA_VERSION */]: 'The reCAPTCHA version is missing when sending request to the backend.',\n [\"invalid-req-type\" /* AuthErrorCode.INVALID_REQ_TYPE */]: 'Invalid request parameters.',\n [\"invalid-recaptcha-version\" /* AuthErrorCode.INVALID_RECAPTCHA_VERSION */]: 'The reCAPTCHA version is invalid when sending request to the backend.',\n [\"unsupported-password-policy-schema-version\" /* AuthErrorCode.UNSUPPORTED_PASSWORD_POLICY_SCHEMA_VERSION */]: 'The password policy received from the backend uses a schema version that is not supported by this version of the Firebase SDK.',\n [\"password-does-not-meet-requirements\" /* AuthErrorCode.PASSWORD_DOES_NOT_MEET_REQUIREMENTS */]: 'The password does not meet the requirements.'\n };\n}\nfunction _prodErrorMap() {\n // We will include this one message in the prod error map since by the very\n // nature of this error, developers will never be 
able to see the message\n // using the debugErrorMap (which is installed during auth initialization).\n return {\n [\"dependent-sdk-initialized-before-auth\" /* AuthErrorCode.DEPENDENT_SDK_INIT_BEFORE_AUTH */]: 'Another Firebase SDK was initialized and is trying to use Auth before Auth is ' + 'initialized. Please be sure to call `initializeAuth` or `getAuth` before ' + 'starting any other Firebase SDK.'\n };\n}\n/**\r\n * A verbose error map with detailed descriptions for most error codes.\r\n *\r\n * See discussion at {@link AuthErrorMap}\r\n *\r\n * @public\r\n */\nconst debugErrorMap = _debugErrorMap;\n/**\r\n * A minimal error map with all verbose error messages stripped.\r\n *\r\n * See discussion at {@link AuthErrorMap}\r\n *\r\n * @public\r\n */\nconst prodErrorMap = _prodErrorMap;\nconst _DEFAULT_AUTH_ERROR_FACTORY = new ErrorFactory('auth', 'Firebase', _prodErrorMap());\n/**\r\n * A map of potential `Auth` error codes, for easier comparison with errors\r\n * thrown by the SDK.\r\n *\r\n * @remarks\r\n * Note that you can't tree-shake individual keys\r\n * in the map, so by using the map you might substantially increase your\r\n * bundle size.\r\n *\r\n * @public\r\n */\nconst AUTH_ERROR_CODES_MAP_DO_NOT_USE_INTERNALLY = {\n ADMIN_ONLY_OPERATION: 'auth/admin-restricted-operation',\n ARGUMENT_ERROR: 'auth/argument-error',\n APP_NOT_AUTHORIZED: 'auth/app-not-authorized',\n APP_NOT_INSTALLED: 'auth/app-not-installed',\n CAPTCHA_CHECK_FAILED: 'auth/captcha-check-failed',\n CODE_EXPIRED: 'auth/code-expired',\n CORDOVA_NOT_READY: 'auth/cordova-not-ready',\n CORS_UNSUPPORTED: 'auth/cors-unsupported',\n CREDENTIAL_ALREADY_IN_USE: 'auth/credential-already-in-use',\n CREDENTIAL_MISMATCH: 'auth/custom-token-mismatch',\n CREDENTIAL_TOO_OLD_LOGIN_AGAIN: 'auth/requires-recent-login',\n DEPENDENT_SDK_INIT_BEFORE_AUTH: 'auth/dependent-sdk-initialized-before-auth',\n DYNAMIC_LINK_NOT_ACTIVATED: 'auth/dynamic-link-not-activated',\n EMAIL_CHANGE_NEEDS_VERIFICATION: 'auth/email-change-needs-verification',\n EMAIL_EXISTS: 'auth/email-already-in-use',\n EMULATOR_CONFIG_FAILED: 'auth/emulator-config-failed',\n EXPIRED_OOB_CODE: 'auth/expired-action-code',\n EXPIRED_POPUP_REQUEST: 'auth/cancelled-popup-request',\n INTERNAL_ERROR: 'auth/internal-error',\n INVALID_API_KEY: 'auth/invalid-api-key',\n INVALID_APP_CREDENTIAL: 'auth/invalid-app-credential',\n INVALID_APP_ID: 'auth/invalid-app-id',\n INVALID_AUTH: 'auth/invalid-user-token',\n INVALID_AUTH_EVENT: 'auth/invalid-auth-event',\n INVALID_CERT_HASH: 'auth/invalid-cert-hash',\n INVALID_CODE: 'auth/invalid-verification-code',\n INVALID_CONTINUE_URI: 'auth/invalid-continue-uri',\n INVALID_CORDOVA_CONFIGURATION: 'auth/invalid-cordova-configuration',\n INVALID_CUSTOM_TOKEN: 'auth/invalid-custom-token',\n INVALID_DYNAMIC_LINK_DOMAIN: 'auth/invalid-dynamic-link-domain',\n INVALID_EMAIL: 'auth/invalid-email',\n INVALID_EMULATOR_SCHEME: 'auth/invalid-emulator-scheme',\n INVALID_IDP_RESPONSE: 'auth/invalid-credential',\n INVALID_LOGIN_CREDENTIALS: 'auth/invalid-credential',\n INVALID_MESSAGE_PAYLOAD: 'auth/invalid-message-payload',\n INVALID_MFA_SESSION: 'auth/invalid-multi-factor-session',\n INVALID_OAUTH_CLIENT_ID: 'auth/invalid-oauth-client-id',\n INVALID_OAUTH_PROVIDER: 'auth/invalid-oauth-provider',\n INVALID_OOB_CODE: 'auth/invalid-action-code',\n INVALID_ORIGIN: 'auth/unauthorized-domain',\n INVALID_PASSWORD: 'auth/wrong-password',\n INVALID_PERSISTENCE: 'auth/invalid-persistence-type',\n INVALID_PHONE_NUMBER: 'auth/invalid-phone-number',\n 
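/*\n   * A minimal usage sketch for this error-code map, assuming it is the object exposed\n   * publicly as `AuthErrorCodes` from 'firebase/auth' and that `auth` is an initialized\n   * Auth instance supplied by the caller.\n   *\n   *   import { signInWithEmailAndPassword, AuthErrorCodes } from 'firebase/auth';\n   *\n   *   async function signIn(auth, email, password) {\n   *     try {\n   *       await signInWithEmailAndPassword(auth, email, password);\n   *     } catch (error) {\n   *       // error.code carries one of the 'auth/...' strings listed in this map.\n   *       if (error.code === AuthErrorCodes.INVALID_EMAIL) {\n   *         console.warn('The email address is badly formatted.');\n   *       }\n   *     }\n   *   }\n   */\n  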
INVALID_PROVIDER_ID: 'auth/invalid-provider-id',\n INVALID_RECIPIENT_EMAIL: 'auth/invalid-recipient-email',\n INVALID_SENDER: 'auth/invalid-sender',\n INVALID_SESSION_INFO: 'auth/invalid-verification-id',\n INVALID_TENANT_ID: 'auth/invalid-tenant-id',\n MFA_INFO_NOT_FOUND: 'auth/multi-factor-info-not-found',\n MFA_REQUIRED: 'auth/multi-factor-auth-required',\n MISSING_ANDROID_PACKAGE_NAME: 'auth/missing-android-pkg-name',\n MISSING_APP_CREDENTIAL: 'auth/missing-app-credential',\n MISSING_AUTH_DOMAIN: 'auth/auth-domain-config-required',\n MISSING_CODE: 'auth/missing-verification-code',\n MISSING_CONTINUE_URI: 'auth/missing-continue-uri',\n MISSING_IFRAME_START: 'auth/missing-iframe-start',\n MISSING_IOS_BUNDLE_ID: 'auth/missing-ios-bundle-id',\n MISSING_OR_INVALID_NONCE: 'auth/missing-or-invalid-nonce',\n MISSING_MFA_INFO: 'auth/missing-multi-factor-info',\n MISSING_MFA_SESSION: 'auth/missing-multi-factor-session',\n MISSING_PHONE_NUMBER: 'auth/missing-phone-number',\n MISSING_SESSION_INFO: 'auth/missing-verification-id',\n MODULE_DESTROYED: 'auth/app-deleted',\n NEED_CONFIRMATION: 'auth/account-exists-with-different-credential',\n NETWORK_REQUEST_FAILED: 'auth/network-request-failed',\n NULL_USER: 'auth/null-user',\n NO_AUTH_EVENT: 'auth/no-auth-event',\n NO_SUCH_PROVIDER: 'auth/no-such-provider',\n OPERATION_NOT_ALLOWED: 'auth/operation-not-allowed',\n OPERATION_NOT_SUPPORTED: 'auth/operation-not-supported-in-this-environment',\n POPUP_BLOCKED: 'auth/popup-blocked',\n POPUP_CLOSED_BY_USER: 'auth/popup-closed-by-user',\n PROVIDER_ALREADY_LINKED: 'auth/provider-already-linked',\n QUOTA_EXCEEDED: 'auth/quota-exceeded',\n REDIRECT_CANCELLED_BY_USER: 'auth/redirect-cancelled-by-user',\n REDIRECT_OPERATION_PENDING: 'auth/redirect-operation-pending',\n REJECTED_CREDENTIAL: 'auth/rejected-credential',\n SECOND_FACTOR_ALREADY_ENROLLED: 'auth/second-factor-already-in-use',\n SECOND_FACTOR_LIMIT_EXCEEDED: 'auth/maximum-second-factor-count-exceeded',\n TENANT_ID_MISMATCH: 'auth/tenant-id-mismatch',\n TIMEOUT: 'auth/timeout',\n TOKEN_EXPIRED: 'auth/user-token-expired',\n TOO_MANY_ATTEMPTS_TRY_LATER: 'auth/too-many-requests',\n UNAUTHORIZED_DOMAIN: 'auth/unauthorized-continue-uri',\n UNSUPPORTED_FIRST_FACTOR: 'auth/unsupported-first-factor',\n UNSUPPORTED_PERSISTENCE: 'auth/unsupported-persistence-type',\n UNSUPPORTED_TENANT_OPERATION: 'auth/unsupported-tenant-operation',\n UNVERIFIED_EMAIL: 'auth/unverified-email',\n USER_CANCELLED: 'auth/user-cancelled',\n USER_DELETED: 'auth/user-not-found',\n USER_DISABLED: 'auth/user-disabled',\n USER_MISMATCH: 'auth/user-mismatch',\n USER_SIGNED_OUT: 'auth/user-signed-out',\n WEAK_PASSWORD: 'auth/weak-password',\n WEB_STORAGE_UNSUPPORTED: 'auth/web-storage-unsupported',\n ALREADY_INITIALIZED: 'auth/already-initialized',\n RECAPTCHA_NOT_ENABLED: 'auth/recaptcha-not-enabled',\n MISSING_RECAPTCHA_TOKEN: 'auth/missing-recaptcha-token',\n INVALID_RECAPTCHA_TOKEN: 'auth/invalid-recaptcha-token',\n INVALID_RECAPTCHA_ACTION: 'auth/invalid-recaptcha-action',\n MISSING_CLIENT_TYPE: 'auth/missing-client-type',\n MISSING_RECAPTCHA_VERSION: 'auth/missing-recaptcha-version',\n INVALID_RECAPTCHA_VERSION: 'auth/invalid-recaptcha-version',\n INVALID_REQ_TYPE: 'auth/invalid-req-type'\n};\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * 
http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst logClient = new Logger('@firebase/auth');\nfunction _logWarn(msg, ...args) {\n if (logClient.logLevel <= LogLevel.WARN) {\n logClient.warn(`Auth (${SDK_VERSION}): ${msg}`, ...args);\n }\n}\nfunction _logError(msg, ...args) {\n if (logClient.logLevel <= LogLevel.ERROR) {\n logClient.error(`Auth (${SDK_VERSION}): ${msg}`, ...args);\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction _fail(authOrCode, ...rest) {\n throw createErrorInternal(authOrCode, ...rest);\n}\nfunction _createError(authOrCode, ...rest) {\n return createErrorInternal(authOrCode, ...rest);\n}\nfunction _errorWithCustomMessage(auth, code, message) {\n const errorMap = Object.assign(Object.assign({}, prodErrorMap()), {\n [code]: message\n });\n const factory = new ErrorFactory('auth', 'Firebase', errorMap);\n return factory.create(code, {\n appName: auth.name\n });\n}\nfunction _serverAppCurrentUserOperationNotSupportedError(auth) {\n return _errorWithCustomMessage(auth, \"operation-not-supported-in-this-environment\" /* AuthErrorCode.OPERATION_NOT_SUPPORTED */, 'Operations that alter the current user are not supported in conjunction with FirebaseServerApp');\n}\nfunction _assertInstanceOf(auth, object, instance) {\n const constructorInstance = instance;\n if (!(object instanceof constructorInstance)) {\n if (constructorInstance.name !== object.constructor.name) {\n _fail(auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n }\n throw _errorWithCustomMessage(auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */, `Type of ${object.constructor.name} does not match expected instance. ` + `Did you pass a reference from a different Auth SDK?`);\n }\n}\nfunction createErrorInternal(authOrCode, ...rest) {\n if (typeof authOrCode !== 'string') {\n const code = rest[0];\n const fullParams = [...rest.slice(1)];\n if (fullParams[0]) {\n fullParams[0].appName = authOrCode.name;\n }\n return authOrCode._errorFactory.create(code, ...fullParams);\n }\n return _DEFAULT_AUTH_ERROR_FACTORY.create(authOrCode, ...rest);\n}\nfunction _assert(assertion, authOrCode, ...rest) {\n if (!assertion) {\n throw createErrorInternal(authOrCode, ...rest);\n }\n}\n/**\r\n * Unconditionally fails, throwing an internal error with the given message.\r\n *\r\n * @param failure type of failure encountered\r\n * @throws Error\r\n */\nfunction debugFail(failure) {\n // Log the failure in addition to throwing an exception, just in case the\n // exception is swallowed.\n const message = `INTERNAL ASSERTION FAILED: ` + failure;\n _logError(message);\n // 
NOTE: We don't use FirebaseError here because these are internal failures\n // that cannot be handled by the user. (Also it would create a circular\n // dependency between the error and assert modules which doesn't work.)\n throw new Error(message);\n}\n/**\r\n * Fails if the given assertion condition is false, throwing an Error with the\r\n * given message if it did.\r\n *\r\n * @param assertion\r\n * @param message\r\n */\nfunction debugAssert(assertion, message) {\n if (!assertion) {\n debugFail(message);\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction _getCurrentUrl() {\n var _a;\n return typeof self !== 'undefined' && ((_a = self.location) === null || _a === void 0 ? void 0 : _a.href) || '';\n}\nfunction _isHttpOrHttps() {\n return _getCurrentScheme() === 'http:' || _getCurrentScheme() === 'https:';\n}\nfunction _getCurrentScheme() {\n var _a;\n return typeof self !== 'undefined' && ((_a = self.location) === null || _a === void 0 ? void 0 : _a.protocol) || null;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Determine whether the browser is working online\r\n */\nfunction _isOnline() {\n if (typeof navigator !== 'undefined' && navigator && 'onLine' in navigator && typeof navigator.onLine === 'boolean' && (\n // Apply only for traditional web apps and Chrome extensions.\n // This is especially true for Cordova apps which have unreliable\n // navigator.onLine behavior unless cordova-plugin-network-information is\n // installed which overwrites the native navigator.onLine value and\n // defines navigator.connection.\n _isHttpOrHttps() || isBrowserExtension() || 'connection' in navigator)) {\n return navigator.onLine;\n }\n // If we can't determine the state, assume it is online.\n return true;\n}\nfunction _getUserLanguage() {\n if (typeof navigator === 'undefined') {\n return null;\n }\n const navigatorLanguage = navigator;\n return (\n // Most reliable, but only supported in Chrome/Firefox.\n navigatorLanguage.languages && navigatorLanguage.languages[0] ||\n // Supported in most browsers, but returns the language of the browser\n // UI, not the language set in browser settings.\n navigatorLanguage.language ||\n // Couldn't determine language.\n null\n );\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 
2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * A structure to help pick between a range of long and short delay durations\r\n * depending on the current environment. In general, the long delay is used for\r\n * mobile environments whereas short delays are used for desktop environments.\r\n */\nclass Delay {\n constructor(shortDelay, longDelay) {\n this.shortDelay = shortDelay;\n this.longDelay = longDelay;\n // Internal error when improperly initialized.\n debugAssert(longDelay > shortDelay, 'Short delay should be less than long delay!');\n this.isMobile = isMobileCordova() || isReactNative();\n }\n get() {\n if (!_isOnline()) {\n // Pick the shorter timeout.\n return Math.min(5000 /* DelayMin.OFFLINE */, this.shortDelay);\n }\n // If running in a mobile environment, return the long delay, otherwise\n // return the short delay.\n // This could be improved in the future to dynamically change based on other\n // variables instead of just reading the current environment.\n return this.isMobile ? this.longDelay : this.shortDelay;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction _emulatorUrl(config, path) {\n debugAssert(config.emulator, 'Emulator should always be set here');\n const {\n url\n } = config.emulator;\n if (!path) {\n return url;\n }\n return `${url}${path.startsWith('/') ? 
path.slice(1) : path}`;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass FetchProvider {\n static initialize(fetchImpl, headersImpl, responseImpl) {\n this.fetchImpl = fetchImpl;\n if (headersImpl) {\n this.headersImpl = headersImpl;\n }\n if (responseImpl) {\n this.responseImpl = responseImpl;\n }\n }\n static fetch() {\n if (this.fetchImpl) {\n return this.fetchImpl;\n }\n if (typeof self !== 'undefined' && 'fetch' in self) {\n return self.fetch;\n }\n if (typeof globalThis !== 'undefined' && globalThis.fetch) {\n return globalThis.fetch;\n }\n if (typeof fetch !== 'undefined') {\n return fetch;\n }\n debugFail('Could not find fetch implementation, make sure you call FetchProvider.initialize() with an appropriate polyfill');\n }\n static headers() {\n if (this.headersImpl) {\n return this.headersImpl;\n }\n if (typeof self !== 'undefined' && 'Headers' in self) {\n return self.Headers;\n }\n if (typeof globalThis !== 'undefined' && globalThis.Headers) {\n return globalThis.Headers;\n }\n if (typeof Headers !== 'undefined') {\n return Headers;\n }\n debugFail('Could not find Headers implementation, make sure you call FetchProvider.initialize() with an appropriate polyfill');\n }\n static response() {\n if (this.responseImpl) {\n return this.responseImpl;\n }\n if (typeof self !== 'undefined' && 'Response' in self) {\n return self.Response;\n }\n if (typeof globalThis !== 'undefined' && globalThis.Response) {\n return globalThis.Response;\n }\n if (typeof Response !== 'undefined') {\n return Response;\n }\n debugFail('Could not find Response implementation, make sure you call FetchProvider.initialize() with an appropriate polyfill');\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Map from errors returned by the server to errors to developer visible errors\r\n */\nconst SERVER_ERROR_MAP = {\n // Custom token errors.\n [\"CREDENTIAL_MISMATCH\" /* ServerError.CREDENTIAL_MISMATCH */]: \"custom-token-mismatch\" /* AuthErrorCode.CREDENTIAL_MISMATCH */,\n\n // This can only happen if the SDK sends a bad request.\n [\"MISSING_CUSTOM_TOKEN\" /* ServerError.MISSING_CUSTOM_TOKEN */]: \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */,\n\n // Create Auth URI errors.\n [\"INVALID_IDENTIFIER\" /* ServerError.INVALID_IDENTIFIER */]: \"invalid-email\" /* AuthErrorCode.INVALID_EMAIL 
*/,\n\n // This can only happen if the SDK sends a bad request.\n [\"MISSING_CONTINUE_URI\" /* ServerError.MISSING_CONTINUE_URI */]: \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */,\n\n // Sign in with email and password errors (some apply to sign up too).\n [\"INVALID_PASSWORD\" /* ServerError.INVALID_PASSWORD */]: \"wrong-password\" /* AuthErrorCode.INVALID_PASSWORD */,\n\n // This can only happen if the SDK sends a bad request.\n [\"MISSING_PASSWORD\" /* ServerError.MISSING_PASSWORD */]: \"missing-password\" /* AuthErrorCode.MISSING_PASSWORD */,\n\n // Thrown if Email Enumeration Protection is enabled in the project and the email or password is\n // invalid.\n [\"INVALID_LOGIN_CREDENTIALS\" /* ServerError.INVALID_LOGIN_CREDENTIALS */]: \"invalid-credential\" /* AuthErrorCode.INVALID_CREDENTIAL */,\n\n // Sign up with email and password errors.\n [\"EMAIL_EXISTS\" /* ServerError.EMAIL_EXISTS */]: \"email-already-in-use\" /* AuthErrorCode.EMAIL_EXISTS */,\n [\"PASSWORD_LOGIN_DISABLED\" /* ServerError.PASSWORD_LOGIN_DISABLED */]: \"operation-not-allowed\" /* AuthErrorCode.OPERATION_NOT_ALLOWED */,\n\n // Verify assertion for sign in with credential errors:\n [\"INVALID_IDP_RESPONSE\" /* ServerError.INVALID_IDP_RESPONSE */]: \"invalid-credential\" /* AuthErrorCode.INVALID_CREDENTIAL */,\n [\"INVALID_PENDING_TOKEN\" /* ServerError.INVALID_PENDING_TOKEN */]: \"invalid-credential\" /* AuthErrorCode.INVALID_CREDENTIAL */,\n [\"FEDERATED_USER_ID_ALREADY_LINKED\" /* ServerError.FEDERATED_USER_ID_ALREADY_LINKED */]: \"credential-already-in-use\" /* AuthErrorCode.CREDENTIAL_ALREADY_IN_USE */,\n\n // This can only happen if the SDK sends a bad request.\n [\"MISSING_REQ_TYPE\" /* ServerError.MISSING_REQ_TYPE */]: \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */,\n\n // Send Password reset email errors:\n [\"EMAIL_NOT_FOUND\" /* ServerError.EMAIL_NOT_FOUND */]: \"user-not-found\" /* AuthErrorCode.USER_DELETED */,\n [\"RESET_PASSWORD_EXCEED_LIMIT\" /* ServerError.RESET_PASSWORD_EXCEED_LIMIT */]: \"too-many-requests\" /* AuthErrorCode.TOO_MANY_ATTEMPTS_TRY_LATER */,\n [\"EXPIRED_OOB_CODE\" /* ServerError.EXPIRED_OOB_CODE */]: \"expired-action-code\" /* AuthErrorCode.EXPIRED_OOB_CODE */,\n [\"INVALID_OOB_CODE\" /* ServerError.INVALID_OOB_CODE */]: \"invalid-action-code\" /* AuthErrorCode.INVALID_OOB_CODE */,\n\n // This can only happen if the SDK sends a bad request.\n [\"MISSING_OOB_CODE\" /* ServerError.MISSING_OOB_CODE */]: \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */,\n\n // Operations that require ID token in request:\n [\"CREDENTIAL_TOO_OLD_LOGIN_AGAIN\" /* ServerError.CREDENTIAL_TOO_OLD_LOGIN_AGAIN */]: \"requires-recent-login\" /* AuthErrorCode.CREDENTIAL_TOO_OLD_LOGIN_AGAIN */,\n [\"INVALID_ID_TOKEN\" /* ServerError.INVALID_ID_TOKEN */]: \"invalid-user-token\" /* AuthErrorCode.INVALID_AUTH */,\n [\"TOKEN_EXPIRED\" /* ServerError.TOKEN_EXPIRED */]: \"user-token-expired\" /* AuthErrorCode.TOKEN_EXPIRED */,\n [\"USER_NOT_FOUND\" /* ServerError.USER_NOT_FOUND */]: \"user-token-expired\" /* AuthErrorCode.TOKEN_EXPIRED */,\n\n // Other errors.\n [\"TOO_MANY_ATTEMPTS_TRY_LATER\" /* ServerError.TOO_MANY_ATTEMPTS_TRY_LATER */]: \"too-many-requests\" /* AuthErrorCode.TOO_MANY_ATTEMPTS_TRY_LATER */,\n [\"PASSWORD_DOES_NOT_MEET_REQUIREMENTS\" /* ServerError.PASSWORD_DOES_NOT_MEET_REQUIREMENTS */]: \"password-does-not-meet-requirements\" /* AuthErrorCode.PASSWORD_DOES_NOT_MEET_REQUIREMENTS */,\n\n // Phone Auth related errors.\n [\"INVALID_CODE\" /* ServerError.INVALID_CODE */]: 
\"invalid-verification-code\" /* AuthErrorCode.INVALID_CODE */,\n [\"INVALID_SESSION_INFO\" /* ServerError.INVALID_SESSION_INFO */]: \"invalid-verification-id\" /* AuthErrorCode.INVALID_SESSION_INFO */,\n [\"INVALID_TEMPORARY_PROOF\" /* ServerError.INVALID_TEMPORARY_PROOF */]: \"invalid-credential\" /* AuthErrorCode.INVALID_CREDENTIAL */,\n [\"MISSING_SESSION_INFO\" /* ServerError.MISSING_SESSION_INFO */]: \"missing-verification-id\" /* AuthErrorCode.MISSING_SESSION_INFO */,\n [\"SESSION_EXPIRED\" /* ServerError.SESSION_EXPIRED */]: \"code-expired\" /* AuthErrorCode.CODE_EXPIRED */,\n\n // Other action code errors when additional settings passed.\n // MISSING_CONTINUE_URI is getting mapped to INTERNAL_ERROR above.\n // This is OK as this error will be caught by client side validation.\n [\"MISSING_ANDROID_PACKAGE_NAME\" /* ServerError.MISSING_ANDROID_PACKAGE_NAME */]: \"missing-android-pkg-name\" /* AuthErrorCode.MISSING_ANDROID_PACKAGE_NAME */,\n [\"UNAUTHORIZED_DOMAIN\" /* ServerError.UNAUTHORIZED_DOMAIN */]: \"unauthorized-continue-uri\" /* AuthErrorCode.UNAUTHORIZED_DOMAIN */,\n\n // getProjectConfig errors when clientId is passed.\n [\"INVALID_OAUTH_CLIENT_ID\" /* ServerError.INVALID_OAUTH_CLIENT_ID */]: \"invalid-oauth-client-id\" /* AuthErrorCode.INVALID_OAUTH_CLIENT_ID */,\n\n // User actions (sign-up or deletion) disabled errors.\n [\"ADMIN_ONLY_OPERATION\" /* ServerError.ADMIN_ONLY_OPERATION */]: \"admin-restricted-operation\" /* AuthErrorCode.ADMIN_ONLY_OPERATION */,\n\n // Multi factor related errors.\n [\"INVALID_MFA_PENDING_CREDENTIAL\" /* ServerError.INVALID_MFA_PENDING_CREDENTIAL */]: \"invalid-multi-factor-session\" /* AuthErrorCode.INVALID_MFA_SESSION */,\n [\"MFA_ENROLLMENT_NOT_FOUND\" /* ServerError.MFA_ENROLLMENT_NOT_FOUND */]: \"multi-factor-info-not-found\" /* AuthErrorCode.MFA_INFO_NOT_FOUND */,\n [\"MISSING_MFA_ENROLLMENT_ID\" /* ServerError.MISSING_MFA_ENROLLMENT_ID */]: \"missing-multi-factor-info\" /* AuthErrorCode.MISSING_MFA_INFO */,\n [\"MISSING_MFA_PENDING_CREDENTIAL\" /* ServerError.MISSING_MFA_PENDING_CREDENTIAL */]: \"missing-multi-factor-session\" /* AuthErrorCode.MISSING_MFA_SESSION */,\n [\"SECOND_FACTOR_EXISTS\" /* ServerError.SECOND_FACTOR_EXISTS */]: \"second-factor-already-in-use\" /* AuthErrorCode.SECOND_FACTOR_ALREADY_ENROLLED */,\n [\"SECOND_FACTOR_LIMIT_EXCEEDED\" /* ServerError.SECOND_FACTOR_LIMIT_EXCEEDED */]: \"maximum-second-factor-count-exceeded\" /* AuthErrorCode.SECOND_FACTOR_LIMIT_EXCEEDED */,\n\n // Blocking functions related errors.\n [\"BLOCKING_FUNCTION_ERROR_RESPONSE\" /* ServerError.BLOCKING_FUNCTION_ERROR_RESPONSE */]: \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */,\n\n // Recaptcha related errors.\n [\"RECAPTCHA_NOT_ENABLED\" /* ServerError.RECAPTCHA_NOT_ENABLED */]: \"recaptcha-not-enabled\" /* AuthErrorCode.RECAPTCHA_NOT_ENABLED */,\n [\"MISSING_RECAPTCHA_TOKEN\" /* ServerError.MISSING_RECAPTCHA_TOKEN */]: \"missing-recaptcha-token\" /* AuthErrorCode.MISSING_RECAPTCHA_TOKEN */,\n [\"INVALID_RECAPTCHA_TOKEN\" /* ServerError.INVALID_RECAPTCHA_TOKEN */]: \"invalid-recaptcha-token\" /* AuthErrorCode.INVALID_RECAPTCHA_TOKEN */,\n [\"INVALID_RECAPTCHA_ACTION\" /* ServerError.INVALID_RECAPTCHA_ACTION */]: \"invalid-recaptcha-action\" /* AuthErrorCode.INVALID_RECAPTCHA_ACTION */,\n [\"MISSING_CLIENT_TYPE\" /* ServerError.MISSING_CLIENT_TYPE */]: \"missing-client-type\" /* AuthErrorCode.MISSING_CLIENT_TYPE */,\n [\"MISSING_RECAPTCHA_VERSION\" /* ServerError.MISSING_RECAPTCHA_VERSION */]: \"missing-recaptcha-version\" /* 
AuthErrorCode.MISSING_RECAPTCHA_VERSION */,\n [\"INVALID_RECAPTCHA_VERSION\" /* ServerError.INVALID_RECAPTCHA_VERSION */]: \"invalid-recaptcha-version\" /* AuthErrorCode.INVALID_RECAPTCHA_VERSION */,\n [\"INVALID_REQ_TYPE\" /* ServerError.INVALID_REQ_TYPE */]: \"invalid-req-type\" /* AuthErrorCode.INVALID_REQ_TYPE */\n};\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst DEFAULT_API_TIMEOUT_MS = new Delay(30000, 60000);\nfunction _addTidIfNecessary(auth, request) {\n if (auth.tenantId && !request.tenantId) {\n return Object.assign(Object.assign({}, request), {\n tenantId: auth.tenantId\n });\n }\n return request;\n}\nasync function _performApiRequest(auth, method, path, request, customErrorMap = {}) {\n return _performFetchWithErrorHandling(auth, customErrorMap, async () => {\n let body = {};\n let params = {};\n if (request) {\n if (method === \"GET\" /* HttpMethod.GET */) {\n params = request;\n } else {\n body = {\n body: JSON.stringify(request)\n };\n }\n }\n const query = querystring(Object.assign({\n key: auth.config.apiKey\n }, params)).slice(1);\n const headers = await auth._getAdditionalHeaders();\n headers[\"Content-Type\" /* HttpHeader.CONTENT_TYPE */] = 'application/json';\n if (auth.languageCode) {\n headers[\"X-Firebase-Locale\" /* HttpHeader.X_FIREBASE_LOCALE */] = auth.languageCode;\n }\n return FetchProvider.fetch()(_getFinalTarget(auth, auth.config.apiHost, path, query), Object.assign({\n method,\n headers,\n referrerPolicy: 'no-referrer'\n }, body));\n });\n}\nasync function _performFetchWithErrorHandling(auth, customErrorMap, fetchFn) {\n auth._canInitEmulator = false;\n const errorMap = Object.assign(Object.assign({}, SERVER_ERROR_MAP), customErrorMap);\n try {\n const networkTimeout = new NetworkTimeout(auth);\n const response = await Promise.race([fetchFn(), networkTimeout.promise]);\n // If we've reached this point, the fetch succeeded and the networkTimeout\n // didn't throw; clear the network timeout delay so that Node won't hang\n networkTimeout.clearNetworkTimeout();\n const json = await response.json();\n if ('needConfirmation' in json) {\n throw _makeTaggedError(auth, \"account-exists-with-different-credential\" /* AuthErrorCode.NEED_CONFIRMATION */, json);\n }\n if (response.ok && !('errorMessage' in json)) {\n return json;\n } else {\n const errorMessage = response.ok ? 
json.errorMessage : json.error.message;\n const [serverErrorCode, serverErrorMessage] = errorMessage.split(' : ');\n if (serverErrorCode === \"FEDERATED_USER_ID_ALREADY_LINKED\" /* ServerError.FEDERATED_USER_ID_ALREADY_LINKED */) {\n throw _makeTaggedError(auth, \"credential-already-in-use\" /* AuthErrorCode.CREDENTIAL_ALREADY_IN_USE */, json);\n } else if (serverErrorCode === \"EMAIL_EXISTS\" /* ServerError.EMAIL_EXISTS */) {\n throw _makeTaggedError(auth, \"email-already-in-use\" /* AuthErrorCode.EMAIL_EXISTS */, json);\n } else if (serverErrorCode === \"USER_DISABLED\" /* ServerError.USER_DISABLED */) {\n throw _makeTaggedError(auth, \"user-disabled\" /* AuthErrorCode.USER_DISABLED */, json);\n }\n const authError = errorMap[serverErrorCode] || serverErrorCode.toLowerCase().replace(/[_\\s]+/g, '-');\n if (serverErrorMessage) {\n throw _errorWithCustomMessage(auth, authError, serverErrorMessage);\n } else {\n _fail(auth, authError);\n }\n }\n } catch (e) {\n if (e instanceof FirebaseError) {\n throw e;\n }\n // Changing this to a different error code will log user out when there is a network error\n // because we treat any error other than NETWORK_REQUEST_FAILED as token is invalid.\n // https://github.com/firebase/firebase-js-sdk/blob/4fbc73610d70be4e0852e7de63a39cb7897e8546/packages/auth/src/core/auth/auth_impl.ts#L309-L316\n _fail(auth, \"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */, {\n 'message': String(e)\n });\n }\n}\nasync function _performSignInRequest(auth, method, path, request, customErrorMap = {}) {\n const serverResponse = await _performApiRequest(auth, method, path, request, customErrorMap);\n if ('mfaPendingCredential' in serverResponse) {\n _fail(auth, \"multi-factor-auth-required\" /* AuthErrorCode.MFA_REQUIRED */, {\n _serverResponse: serverResponse\n });\n }\n return serverResponse;\n}\nfunction _getFinalTarget(auth, host, path, query) {\n const base = `${host}${path}?${query}`;\n if (!auth.config.emulator) {\n return `${auth.config.apiScheme}://${base}`;\n }\n return _emulatorUrl(auth.config, base);\n}\nfunction _parseEnforcementState(enforcementStateStr) {\n switch (enforcementStateStr) {\n case 'ENFORCE':\n return \"ENFORCE\" /* EnforcementState.ENFORCE */;\n case 'AUDIT':\n return \"AUDIT\" /* EnforcementState.AUDIT */;\n case 'OFF':\n return \"OFF\" /* EnforcementState.OFF */;\n default:\n return \"ENFORCEMENT_STATE_UNSPECIFIED\" /* EnforcementState.ENFORCEMENT_STATE_UNSPECIFIED */;\n }\n}\nclass NetworkTimeout {\n constructor(auth) {\n this.auth = auth;\n // Node timers and browser timers are fundamentally incompatible, but we\n // don't care about the value here\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.timer = null;\n this.promise = new Promise((_, reject) => {\n this.timer = setTimeout(() => {\n return reject(_createError(this.auth, \"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */));\n }, DEFAULT_API_TIMEOUT_MS.get());\n });\n }\n clearNetworkTimeout() {\n clearTimeout(this.timer);\n }\n}\nfunction _makeTaggedError(auth, code, response) {\n const errorParams = {\n appName: auth.name\n };\n if (response.email) {\n errorParams.email = response.email;\n }\n if (response.phoneNumber) {\n errorParams.phoneNumber = response.phoneNumber;\n }\n const error = _createError(auth, code, errorParams);\n // We know customData is defined on error because errorParams is defined\n error.customData._tokenResponse = response;\n return error;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n 
*\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction isV2(grecaptcha) {\n return grecaptcha !== undefined && grecaptcha.getResponse !== undefined;\n}\nfunction isEnterprise(grecaptcha) {\n return grecaptcha !== undefined && grecaptcha.enterprise !== undefined;\n}\nclass RecaptchaConfig {\n constructor(response) {\n /**\r\n * The reCAPTCHA site key.\r\n */\n this.siteKey = '';\n /**\r\n * The list of providers and their enablement status for reCAPTCHA Enterprise.\r\n */\n this.recaptchaEnforcementState = [];\n if (response.recaptchaKey === undefined) {\n throw new Error('recaptchaKey undefined');\n }\n // Example response.recaptchaKey: \"projects/proj123/keys/sitekey123\"\n this.siteKey = response.recaptchaKey.split('/')[3];\n this.recaptchaEnforcementState = response.recaptchaEnforcementState;\n }\n /**\r\n * Returns the reCAPTCHA Enterprise enforcement state for the given provider.\r\n *\r\n * @param providerStr - The provider whose enforcement state is to be returned.\r\n * @returns The reCAPTCHA Enterprise enforcement state for the given provider.\r\n */\n getProviderEnforcementState(providerStr) {\n if (!this.recaptchaEnforcementState || this.recaptchaEnforcementState.length === 0) {\n return null;\n }\n for (const recaptchaEnforcementState of this.recaptchaEnforcementState) {\n if (recaptchaEnforcementState.provider && recaptchaEnforcementState.provider === providerStr) {\n return _parseEnforcementState(recaptchaEnforcementState.enforcementState);\n }\n }\n return null;\n }\n /**\r\n * Returns true if the reCAPTCHA Enterprise enforcement state for the provider is set to ENFORCE or AUDIT.\r\n *\r\n * @param providerStr - The provider whose enablement state is to be returned.\r\n * @returns Whether or not reCAPTCHA Enterprise protection is enabled for the given provider.\r\n */\n isProviderEnabled(providerStr) {\n return this.getProviderEnforcementState(providerStr) === \"ENFORCE\" /* EnforcementState.ENFORCE */ || this.getProviderEnforcementState(providerStr) === \"AUDIT\" /* EnforcementState.AUDIT */;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function getRecaptchaParams(auth) {\n return (await _performApiRequest(auth, \"GET\" /* HttpMethod.GET */, \"/v1/recaptchaParams\" /* Endpoint.GET_RECAPTCHA_PARAM */)).recaptchaSiteKey || '';\n}\nasync function getRecaptchaConfig(auth, request) {\n return _performApiRequest(auth, \"GET\" 
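/*\n * A minimal sketch of interpreting a fetched reCAPTCHA config with the RecaptchaConfig class\n * above, assuming a response shaped like the constructor's example; the 'EMAIL_PASSWORD_PROVIDER'\n * provider string is illustrative.\n *\n *   const config = new RecaptchaConfig({\n *     recaptchaKey: 'projects/proj123/keys/sitekey123',\n *     recaptchaEnforcementState: [\n *       { provider: 'EMAIL_PASSWORD_PROVIDER', enforcementState: 'ENFORCE' }\n *     ]\n *   });\n *   config.siteKey;                                      // 'sitekey123'\n *   config.isProviderEnabled('EMAIL_PASSWORD_PROVIDER'); // true (ENFORCE or AUDIT)\n */\n  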
/* HttpMethod.GET */, \"/v2/recaptchaConfig\" /* Endpoint.GET_RECAPTCHA_CONFIG */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function deleteAccount(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:delete\" /* Endpoint.DELETE_ACCOUNT */, request);\n}\nasync function deleteLinkedAccounts(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:update\" /* Endpoint.SET_ACCOUNT_INFO */, request);\n}\nasync function getAccountInfo(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:lookup\" /* Endpoint.GET_ACCOUNT_INFO */, request);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction utcTimestampToDateString(utcTimestamp) {\n if (!utcTimestamp) {\n return undefined;\n }\n try {\n // Convert to date object.\n const date = new Date(Number(utcTimestamp));\n // Test date is valid.\n if (!isNaN(date.getTime())) {\n // Convert to UTC date string.\n return date.toUTCString();\n }\n } catch (e) {\n // Do nothing. undefined will be returned.\n }\n return undefined;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Returns a JSON Web Token (JWT) used to identify the user to a Firebase service.\r\n *\r\n * @remarks\r\n * Returns the current token if it has not expired or if it will not expire in the next five\r\n * minutes. 
Otherwise, this will refresh the token and return a new one.\r\n *\r\n * @param user - The user.\r\n * @param forceRefresh - Force refresh regardless of token expiration.\r\n *\r\n * @public\r\n */\nfunction getIdToken(user, forceRefresh = false) {\n return getModularInstance(user).getIdToken(forceRefresh);\n}\n/**\r\n * Returns a deserialized JSON Web Token (JWT) used to identify the user to a Firebase service.\r\n *\r\n * @remarks\r\n * Returns the current token if it has not expired or if it will not expire in the next five\r\n * minutes. Otherwise, this will refresh the token and return a new one.\r\n *\r\n * @param user - The user.\r\n * @param forceRefresh - Force refresh regardless of token expiration.\r\n *\r\n * @public\r\n */\nasync function getIdTokenResult(user, forceRefresh = false) {\n const userInternal = getModularInstance(user);\n const token = await userInternal.getIdToken(forceRefresh);\n const claims = _parseToken(token);\n _assert(claims && claims.exp && claims.auth_time && claims.iat, userInternal.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const firebase = typeof claims.firebase === 'object' ? claims.firebase : undefined;\n const signInProvider = firebase === null || firebase === void 0 ? void 0 : firebase['sign_in_provider'];\n return {\n claims,\n token,\n authTime: utcTimestampToDateString(secondsStringToMilliseconds(claims.auth_time)),\n issuedAtTime: utcTimestampToDateString(secondsStringToMilliseconds(claims.iat)),\n expirationTime: utcTimestampToDateString(secondsStringToMilliseconds(claims.exp)),\n signInProvider: signInProvider || null,\n signInSecondFactor: (firebase === null || firebase === void 0 ? void 0 : firebase['sign_in_second_factor']) || null\n };\n}\nfunction secondsStringToMilliseconds(seconds) {\n return Number(seconds) * 1000;\n}\nfunction _parseToken(token) {\n const [algorithm, payload, signature] = token.split('.');\n if (algorithm === undefined || payload === undefined || signature === undefined) {\n _logError('JWT malformed, contained fewer than 3 sections');\n return null;\n }\n try {\n const decoded = base64Decode(payload);\n if (!decoded) {\n _logError('Failed to decode base64 JWT payload');\n return null;\n }\n return JSON.parse(decoded);\n } catch (e) {\n _logError('Caught error parsing JWT payload as JSON', e === null || e === void 0 ? 
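/*\n     * A small usage sketch for the public `getIdTokenResult` helper defined above, assuming\n     * `auth.currentUser` is a signed-in user and the call runs in an async context; the `admin`\n     * custom claim is illustrative (custom claims are set by a backend, not by this SDK).\n     *\n     *   import { getIdTokenResult } from 'firebase/auth';\n     *\n     *   const result = await getIdTokenResult(auth.currentUser);\n     *   console.log(result.signInProvider, result.expirationTime);\n     *   if (result.claims.admin === true) {\n     *     // show privileged UI\n     *   }\n     */ 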
void 0 : e.toString());\n return null;\n }\n}\n/**\r\n * Extract expiresIn TTL from a token by subtracting the expiration from the issuance.\r\n */\nfunction _tokenExpiresIn(token) {\n const parsedToken = _parseToken(token);\n _assert(parsedToken, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n _assert(typeof parsedToken.exp !== 'undefined', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n _assert(typeof parsedToken.iat !== 'undefined', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return Number(parsedToken.exp) - Number(parsedToken.iat);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function _logoutIfInvalidated(user, promise, bypassAuthState = false) {\n if (bypassAuthState) {\n return promise;\n }\n try {\n return await promise;\n } catch (e) {\n if (e instanceof FirebaseError && isUserInvalidated(e)) {\n if (user.auth.currentUser === user) {\n await user.auth.signOut();\n }\n }\n throw e;\n }\n}\nfunction isUserInvalidated({\n code\n}) {\n return code === `auth/${\"user-disabled\" /* AuthErrorCode.USER_DISABLED */}` || code === `auth/${\"user-token-expired\" /* AuthErrorCode.TOKEN_EXPIRED */}`;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass ProactiveRefresh {\n constructor(user) {\n this.user = user;\n this.isRunning = false;\n // Node timers and browser timers return fundamentally different types.\n // We don't actually care what the value is but TS won't accept unknown and\n // we can't cast properly in both environments.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.timerId = null;\n this.errorBackoff = 30000 /* Duration.RETRY_BACKOFF_MIN */;\n }\n _start() {\n if (this.isRunning) {\n return;\n }\n this.isRunning = true;\n this.schedule();\n }\n _stop() {\n if (!this.isRunning) {\n return;\n }\n this.isRunning = false;\n if (this.timerId !== null) {\n clearTimeout(this.timerId);\n }\n }\n getInterval(wasError) {\n var _a;\n if (wasError) {\n const interval = this.errorBackoff;\n this.errorBackoff = Math.min(this.errorBackoff * 2, 960000 /* Duration.RETRY_BACKOFF_MAX */);\n return interval;\n } else {\n // Reset the error backoff\n this.errorBackoff = 30000 /* Duration.RETRY_BACKOFF_MIN */;\n const expTime = (_a = this.user.stsTokenManager.expirationTime) !== null && _a !== void 0 ? 
_a : 0;\n const interval = expTime - Date.now() - 300000 /* Duration.OFFSET */;\n return Math.max(0, interval);\n }\n }\n schedule(wasError = false) {\n if (!this.isRunning) {\n // Just in case...\n return;\n }\n const interval = this.getInterval(wasError);\n this.timerId = setTimeout(async () => {\n await this.iteration();\n }, interval);\n }\n async iteration() {\n try {\n await this.user.getIdToken(true);\n } catch (e) {\n // Only retry on network errors\n if ((e === null || e === void 0 ? void 0 : e.code) === `auth/${\"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */}`) {\n this.schedule(/* wasError */true);\n }\n return;\n }\n this.schedule();\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass UserMetadata {\n constructor(createdAt, lastLoginAt) {\n this.createdAt = createdAt;\n this.lastLoginAt = lastLoginAt;\n this._initializeTime();\n }\n _initializeTime() {\n this.lastSignInTime = utcTimestampToDateString(this.lastLoginAt);\n this.creationTime = utcTimestampToDateString(this.createdAt);\n }\n _copy(metadata) {\n this.createdAt = metadata.createdAt;\n this.lastLoginAt = metadata.lastLoginAt;\n this._initializeTime();\n }\n toJSON() {\n return {\n createdAt: this.createdAt,\n lastLoginAt: this.lastLoginAt\n };\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function _reloadWithoutSaving(user) {\n var _a;\n const auth = user.auth;\n const idToken = await user.getIdToken();\n const response = await _logoutIfInvalidated(user, getAccountInfo(auth, {\n idToken\n }));\n _assert(response === null || response === void 0 ? void 0 : response.users.length, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const coreAccount = response.users[0];\n user._notifyReloadListener(coreAccount);\n const newProviderData = ((_a = coreAccount.providerUserInfo) === null || _a === void 0 ? void 0 : _a.length) ? extractProviderData(coreAccount.providerUserInfo) : [];\n const providerData = mergeProviderData(user.providerData, newProviderData);\n // Preserves the non-nonymous status of the stored user, even if no more\n // credentials (federated or email/password) are linked to the user. 
If\n // the user was previously anonymous, then use provider data to update.\n // On the other hand, if it was not anonymous before, it should never be\n // considered anonymous now.\n const oldIsAnonymous = user.isAnonymous;\n const newIsAnonymous = !(user.email && coreAccount.passwordHash) && !(providerData === null || providerData === void 0 ? void 0 : providerData.length);\n const isAnonymous = !oldIsAnonymous ? false : newIsAnonymous;\n const updates = {\n uid: coreAccount.localId,\n displayName: coreAccount.displayName || null,\n photoURL: coreAccount.photoUrl || null,\n email: coreAccount.email || null,\n emailVerified: coreAccount.emailVerified || false,\n phoneNumber: coreAccount.phoneNumber || null,\n tenantId: coreAccount.tenantId || null,\n providerData,\n metadata: new UserMetadata(coreAccount.createdAt, coreAccount.lastLoginAt),\n isAnonymous\n };\n Object.assign(user, updates);\n}\n/**\r\n * Reloads user account data, if signed in.\r\n *\r\n * @param user - The user.\r\n *\r\n * @public\r\n */\nasync function reload(user) {\n const userInternal = getModularInstance(user);\n await _reloadWithoutSaving(userInternal);\n // Even though the current user hasn't changed, update\n // current user will trigger a persistence update w/ the\n // new info.\n await userInternal.auth._persistUserIfCurrent(userInternal);\n userInternal.auth._notifyListenersIfCurrent(userInternal);\n}\nfunction mergeProviderData(original, newData) {\n const deduped = original.filter(o => !newData.some(n => n.providerId === o.providerId));\n return [...deduped, ...newData];\n}\nfunction extractProviderData(providers) {\n return providers.map(_a => {\n var {\n providerId\n } = _a,\n provider = __rest(_a, [\"providerId\"]);\n return {\n providerId,\n uid: provider.rawId || '',\n displayName: provider.displayName || null,\n email: provider.email || null,\n phoneNumber: provider.phoneNumber || null,\n photoURL: provider.photoUrl || null\n };\n });\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function requestStsToken(auth, refreshToken) {\n const response = await _performFetchWithErrorHandling(auth, {}, async () => {\n const body = querystring({\n 'grant_type': 'refresh_token',\n 'refresh_token': refreshToken\n }).slice(1);\n const {\n tokenApiHost,\n apiKey\n } = auth.config;\n const url = _getFinalTarget(auth, tokenApiHost, \"/v1/token\" /* Endpoint.TOKEN */, `key=${apiKey}`);\n const headers = await auth._getAdditionalHeaders();\n headers[\"Content-Type\" /* HttpHeader.CONTENT_TYPE */] = 'application/x-www-form-urlencoded';\n return FetchProvider.fetch()(url, {\n method: \"POST\" /* HttpMethod.POST */,\n headers,\n body\n });\n });\n // The response comes back in snake_case. 
Convert to camel:\n return {\n accessToken: response.access_token,\n expiresIn: response.expires_in,\n refreshToken: response.refresh_token\n };\n}\nasync function revokeToken(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts:revokeToken\" /* Endpoint.REVOKE_TOKEN */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * We need to mark this class as internal explicitly to exclude it in the public typings, because\r\n * it references AuthInternal which has a circular dependency with UserInternal.\r\n *\r\n * @internal\r\n */\nclass StsTokenManager {\n constructor() {\n this.refreshToken = null;\n this.accessToken = null;\n this.expirationTime = null;\n }\n get isExpired() {\n return !this.expirationTime || Date.now() > this.expirationTime - 30000 /* Buffer.TOKEN_REFRESH */;\n }\n updateFromServerResponse(response) {\n _assert(response.idToken, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n _assert(typeof response.idToken !== 'undefined', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n _assert(typeof response.refreshToken !== 'undefined', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const expiresIn = 'expiresIn' in response && typeof response.expiresIn !== 'undefined' ? 
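\n/*\r\n * Note on this fallback (illustrative arithmetic): when the server response carries no\r\n * `expiresIn`, the TTL is recovered from the ID token itself as `exp - iat` (both are\r\n * seconds-since-epoch claims), which is what `_tokenExpiresIn` computes. For a token issued at\r\n * 1700000000 and expiring at 1700003600 the derived TTL is 3600 seconds, and\r\n * `updateTokensAndExpiration` then stores `Date.now() + 3600 * 1000` as `expirationTime`.\r\n */\n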
Number(response.expiresIn) : _tokenExpiresIn(response.idToken);\n this.updateTokensAndExpiration(response.idToken, response.refreshToken, expiresIn);\n }\n updateFromIdToken(idToken) {\n _assert(idToken.length !== 0, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const expiresIn = _tokenExpiresIn(idToken);\n this.updateTokensAndExpiration(idToken, null, expiresIn);\n }\n async getToken(auth, forceRefresh = false) {\n if (!forceRefresh && this.accessToken && !this.isExpired) {\n return this.accessToken;\n }\n _assert(this.refreshToken, auth, \"user-token-expired\" /* AuthErrorCode.TOKEN_EXPIRED */);\n if (this.refreshToken) {\n await this.refresh(auth, this.refreshToken);\n return this.accessToken;\n }\n return null;\n }\n clearRefreshToken() {\n this.refreshToken = null;\n }\n async refresh(auth, oldToken) {\n const {\n accessToken,\n refreshToken,\n expiresIn\n } = await requestStsToken(auth, oldToken);\n this.updateTokensAndExpiration(accessToken, refreshToken, Number(expiresIn));\n }\n updateTokensAndExpiration(accessToken, refreshToken, expiresInSec) {\n this.refreshToken = refreshToken || null;\n this.accessToken = accessToken || null;\n this.expirationTime = Date.now() + expiresInSec * 1000;\n }\n static fromJSON(appName, object) {\n const {\n refreshToken,\n accessToken,\n expirationTime\n } = object;\n const manager = new StsTokenManager();\n if (refreshToken) {\n _assert(typeof refreshToken === 'string', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */, {\n appName\n });\n manager.refreshToken = refreshToken;\n }\n if (accessToken) {\n _assert(typeof accessToken === 'string', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */, {\n appName\n });\n manager.accessToken = accessToken;\n }\n if (expirationTime) {\n _assert(typeof expirationTime === 'number', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */, {\n appName\n });\n manager.expirationTime = expirationTime;\n }\n return manager;\n }\n toJSON() {\n return {\n refreshToken: this.refreshToken,\n accessToken: this.accessToken,\n expirationTime: this.expirationTime\n };\n }\n _assign(stsTokenManager) {\n this.accessToken = stsTokenManager.accessToken;\n this.refreshToken = stsTokenManager.refreshToken;\n this.expirationTime = stsTokenManager.expirationTime;\n }\n _clone() {\n return Object.assign(new StsTokenManager(), this.toJSON());\n }\n _performRefresh() {\n return debugFail('not implemented');\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction assertStringOrUndefined(assertion, appName) {\n _assert(typeof assertion === 'string' || typeof assertion === 'undefined', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */, {\n appName\n });\n}\nclass UserImpl {\n constructor(_a) {\n var {\n uid,\n auth,\n stsTokenManager\n } = _a,\n opt = __rest(_a, [\"uid\", \"auth\", \"stsTokenManager\"]);\n // For the user object, provider is always Firebase.\n this.providerId = \"firebase\" /* 
ProviderId.FIREBASE */;\n this.proactiveRefresh = new ProactiveRefresh(this);\n this.reloadUserInfo = null;\n this.reloadListener = null;\n this.uid = uid;\n this.auth = auth;\n this.stsTokenManager = stsTokenManager;\n this.accessToken = stsTokenManager.accessToken;\n this.displayName = opt.displayName || null;\n this.email = opt.email || null;\n this.emailVerified = opt.emailVerified || false;\n this.phoneNumber = opt.phoneNumber || null;\n this.photoURL = opt.photoURL || null;\n this.isAnonymous = opt.isAnonymous || false;\n this.tenantId = opt.tenantId || null;\n this.providerData = opt.providerData ? [...opt.providerData] : [];\n this.metadata = new UserMetadata(opt.createdAt || undefined, opt.lastLoginAt || undefined);\n }\n async getIdToken(forceRefresh) {\n const accessToken = await _logoutIfInvalidated(this, this.stsTokenManager.getToken(this.auth, forceRefresh));\n _assert(accessToken, this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n if (this.accessToken !== accessToken) {\n this.accessToken = accessToken;\n await this.auth._persistUserIfCurrent(this);\n this.auth._notifyListenersIfCurrent(this);\n }\n return accessToken;\n }\n getIdTokenResult(forceRefresh) {\n return getIdTokenResult(this, forceRefresh);\n }\n reload() {\n return reload(this);\n }\n _assign(user) {\n if (this === user) {\n return;\n }\n _assert(this.uid === user.uid, this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n this.displayName = user.displayName;\n this.photoURL = user.photoURL;\n this.email = user.email;\n this.emailVerified = user.emailVerified;\n this.phoneNumber = user.phoneNumber;\n this.isAnonymous = user.isAnonymous;\n this.tenantId = user.tenantId;\n this.providerData = user.providerData.map(userInfo => Object.assign({}, userInfo));\n this.metadata._copy(user.metadata);\n this.stsTokenManager._assign(user.stsTokenManager);\n }\n _clone(auth) {\n const newUser = new UserImpl(Object.assign(Object.assign({}, this), {\n auth,\n stsTokenManager: this.stsTokenManager._clone()\n }));\n newUser.metadata._copy(this.metadata);\n return newUser;\n }\n _onReload(callback) {\n // There should only ever be one listener, and that is a single instance of MultiFactorUser\n _assert(!this.reloadListener, this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n this.reloadListener = callback;\n if (this.reloadUserInfo) {\n this._notifyReloadListener(this.reloadUserInfo);\n this.reloadUserInfo = null;\n }\n }\n _notifyReloadListener(userInfo) {\n if (this.reloadListener) {\n this.reloadListener(userInfo);\n } else {\n // If no listener is subscribed yet, save the result so it's available when they do subscribe\n this.reloadUserInfo = userInfo;\n }\n }\n _startProactiveRefresh() {\n this.proactiveRefresh._start();\n }\n _stopProactiveRefresh() {\n this.proactiveRefresh._stop();\n }\n async _updateTokensIfNecessary(response, reload = false) {\n let tokensRefreshed = false;\n if (response.idToken && response.idToken !== this.stsTokenManager.accessToken) {\n this.stsTokenManager.updateFromServerResponse(response);\n tokensRefreshed = true;\n }\n if (reload) {\n await _reloadWithoutSaving(this);\n }\n await this.auth._persistUserIfCurrent(this);\n if (tokensRefreshed) {\n this.auth._notifyListenersIfCurrent(this);\n }\n }\n async delete() {\n if (_isFirebaseServerApp(this.auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(this.auth));\n }\n const idToken = await this.getIdToken();\n await _logoutIfInvalidated(this, deleteAccount(this.auth, 
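\n/*\r\n * Behaviour sketch (illustrative; `user` is assumed to be a signed-in user of this Auth\r\n * instance): `getIdToken` above delegates to the StsTokenManager, which serves the cached\r\n * access token while it is still valid; only when the token actually rotates is the user\r\n * re-persisted and are the ID-token listeners notified:\r\n *\r\n *   const a = await user.getIdToken();      // may hit the token endpoint once\r\n *   const b = await user.getIdToken();      // served from cache, no persistence write\r\n *   const c = await user.getIdToken(true);  // forced refresh, re-persists + notifies listeners\r\n */\n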
{\n idToken\n }));\n this.stsTokenManager.clearRefreshToken();\n // TODO: Determine if cancellable-promises are necessary to use in this class so that delete()\n // cancels pending actions...\n return this.auth.signOut();\n }\n toJSON() {\n return Object.assign(Object.assign({\n uid: this.uid,\n email: this.email || undefined,\n emailVerified: this.emailVerified,\n displayName: this.displayName || undefined,\n isAnonymous: this.isAnonymous,\n photoURL: this.photoURL || undefined,\n phoneNumber: this.phoneNumber || undefined,\n tenantId: this.tenantId || undefined,\n providerData: this.providerData.map(userInfo => Object.assign({}, userInfo)),\n stsTokenManager: this.stsTokenManager.toJSON(),\n // Redirect event ID must be maintained in case there is a pending\n // redirect event.\n _redirectEventId: this._redirectEventId\n }, this.metadata.toJSON()), {\n // Required for compatibility with the legacy SDK (go/firebase-auth-sdk-persistence-parsing):\n apiKey: this.auth.config.apiKey,\n appName: this.auth.name\n });\n }\n get refreshToken() {\n return this.stsTokenManager.refreshToken || '';\n }\n static _fromJSON(auth, object) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n const displayName = (_a = object.displayName) !== null && _a !== void 0 ? _a : undefined;\n const email = (_b = object.email) !== null && _b !== void 0 ? _b : undefined;\n const phoneNumber = (_c = object.phoneNumber) !== null && _c !== void 0 ? _c : undefined;\n const photoURL = (_d = object.photoURL) !== null && _d !== void 0 ? _d : undefined;\n const tenantId = (_e = object.tenantId) !== null && _e !== void 0 ? _e : undefined;\n const _redirectEventId = (_f = object._redirectEventId) !== null && _f !== void 0 ? _f : undefined;\n const createdAt = (_g = object.createdAt) !== null && _g !== void 0 ? _g : undefined;\n const lastLoginAt = (_h = object.lastLoginAt) !== null && _h !== void 0 ? 
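\n/*\r\n * Persistence round-trip sketch (illustrative; `auth` is assumed to be the owning AuthImpl and\r\n * `user` one of its UserImpl instances): `toJSON` above produces the plain blob that\r\n * PersistenceUserManager writes to storage, and this `_fromJSON` rebuilds a UserImpl from it,\r\n * re-validating every field:\r\n *\r\n *   const blob = user.toJSON();              // { uid, stsTokenManager, apiKey, appName, ... }\r\n *   const restored = UserImpl._fromJSON(auth, blob);\r\n *   console.log(restored.uid === user.uid);  // true\r\n */\n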
_h : undefined;\n const {\n uid,\n emailVerified,\n isAnonymous,\n providerData,\n stsTokenManager: plainObjectTokenManager\n } = object;\n _assert(uid && plainObjectTokenManager, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const stsTokenManager = StsTokenManager.fromJSON(this.name, plainObjectTokenManager);\n _assert(typeof uid === 'string', auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n assertStringOrUndefined(displayName, auth.name);\n assertStringOrUndefined(email, auth.name);\n _assert(typeof emailVerified === 'boolean', auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n _assert(typeof isAnonymous === 'boolean', auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n assertStringOrUndefined(phoneNumber, auth.name);\n assertStringOrUndefined(photoURL, auth.name);\n assertStringOrUndefined(tenantId, auth.name);\n assertStringOrUndefined(_redirectEventId, auth.name);\n assertStringOrUndefined(createdAt, auth.name);\n assertStringOrUndefined(lastLoginAt, auth.name);\n const user = new UserImpl({\n uid,\n auth,\n email,\n emailVerified,\n displayName,\n isAnonymous,\n photoURL,\n phoneNumber,\n tenantId,\n stsTokenManager,\n createdAt,\n lastLoginAt\n });\n if (providerData && Array.isArray(providerData)) {\n user.providerData = providerData.map(userInfo => Object.assign({}, userInfo));\n }\n if (_redirectEventId) {\n user._redirectEventId = _redirectEventId;\n }\n return user;\n }\n /**\r\n * Initialize a User from an idToken server response\r\n * @param auth\r\n * @param idTokenResponse\r\n */\n static async _fromIdTokenResponse(auth, idTokenResponse, isAnonymous = false) {\n const stsTokenManager = new StsTokenManager();\n stsTokenManager.updateFromServerResponse(idTokenResponse);\n // Initialize the Firebase Auth user.\n const user = new UserImpl({\n uid: idTokenResponse.localId,\n auth,\n stsTokenManager,\n isAnonymous\n });\n // Updates the user info and data and resolves with a user instance.\n await _reloadWithoutSaving(user);\n return user;\n }\n /**\r\n * Initialize a User from an idToken server response\r\n * @param auth\r\n * @param idTokenResponse\r\n */\n static async _fromGetAccountInfoResponse(auth, response, idToken) {\n const coreAccount = response.users[0];\n _assert(coreAccount.localId !== undefined, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const providerData = coreAccount.providerUserInfo !== undefined ? extractProviderData(coreAccount.providerUserInfo) : [];\n const isAnonymous = !(coreAccount.email && coreAccount.passwordHash) && !(providerData === null || providerData === void 0 ? void 0 : providerData.length);\n const stsTokenManager = new StsTokenManager();\n stsTokenManager.updateFromIdToken(idToken);\n // Initialize the Firebase Auth user.\n const user = new UserImpl({\n uid: coreAccount.localId,\n auth,\n stsTokenManager,\n isAnonymous\n });\n // update the user with data from the GetAccountInfo response.\n const updates = {\n uid: coreAccount.localId,\n displayName: coreAccount.displayName || null,\n photoURL: coreAccount.photoUrl || null,\n email: coreAccount.email || null,\n emailVerified: coreAccount.emailVerified || false,\n phoneNumber: coreAccount.phoneNumber || null,\n tenantId: coreAccount.tenantId || null,\n providerData,\n metadata: new UserMetadata(coreAccount.createdAt, coreAccount.lastLoginAt),\n isAnonymous: !(coreAccount.email && coreAccount.passwordHash) && !(providerData === null || providerData === void 0 ? 
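\n/*\r\n * Derivation note (illustrative; `account` stands for the GetAccountInfo user record): an account\r\n * is treated as anonymous here only when it has neither an email/password credential (no `email`\r\n * + `passwordHash` pair on the server record) nor any federated provider entries. Written out on\r\n * its own:\r\n *\r\n *   const isAnon = !(account.email && account.passwordHash) && !(providerData && providerData.length);\r\n */\n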
void 0 : providerData.length)\n };\n Object.assign(user, updates);\n return user;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst instanceCache = new Map();\nfunction _getInstance(cls) {\n debugAssert(cls instanceof Function, 'Expected a class definition');\n let instance = instanceCache.get(cls);\n if (instance) {\n debugAssert(instance instanceof cls, 'Instance stored in cache mismatched with class');\n return instance;\n }\n instance = new cls();\n instanceCache.set(cls, instance);\n return instance;\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nlet InMemoryPersistence = /*#__PURE__*/(() => {\n class InMemoryPersistence {\n constructor() {\n this.type = \"NONE\" /* PersistenceType.NONE */;\n this.storage = {};\n }\n async _isAvailable() {\n return true;\n }\n async _set(key, value) {\n this.storage[key] = value;\n }\n async _get(key) {\n const value = this.storage[key];\n return value === undefined ? 
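\n/*\r\n * Usage sketch (illustrative): every persistence layer implements the same minimal async\r\n * `_isAvailable` / `_set` / `_get` / `_remove` / listener surface consumed by\r\n * PersistenceUserManager below. This in-memory variant keeps a plain object, so nothing survives\r\n * a reload and the storage-event listeners are deliberately no-ops:\r\n *\r\n *   const p = _getInstance(inMemoryPersistence);   // `inMemoryPersistence` is exported below\r\n *   await p._set('k', { hello: 'world' });\r\n *   await p._get('k');                             // -> { hello: 'world' } until the page reloads\r\n *   await p._remove('k');\r\n */\n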
null : value;\n }\n async _remove(key) {\n delete this.storage[key];\n }\n _addListener(_key, _listener) {\n // Listeners are not supported for in-memory storage since it cannot be shared across windows/workers\n return;\n }\n _removeListener(_key, _listener) {\n // Listeners are not supported for in-memory storage since it cannot be shared across windows/workers\n return;\n }\n }\n InMemoryPersistence.type = 'NONE';\n /**\r\n * An implementation of {@link Persistence} of type 'NONE'.\r\n *\r\n * @public\r\n */\n return InMemoryPersistence;\n})();\nconst inMemoryPersistence = InMemoryPersistence;\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction _persistenceKeyName(key, apiKey, appName) {\n return `${\"firebase\" /* Namespace.PERSISTENCE */}:${key}:${apiKey}:${appName}`;\n}\nclass PersistenceUserManager {\n constructor(persistence, auth, userKey) {\n this.persistence = persistence;\n this.auth = auth;\n this.userKey = userKey;\n const {\n config,\n name\n } = this.auth;\n this.fullUserKey = _persistenceKeyName(this.userKey, config.apiKey, name);\n this.fullPersistenceKey = _persistenceKeyName(\"persistence\" /* KeyName.PERSISTENCE_USER */, config.apiKey, name);\n this.boundEventHandler = auth._onStorageEvent.bind(auth);\n this.persistence._addListener(this.fullUserKey, this.boundEventHandler);\n }\n setCurrentUser(user) {\n return this.persistence._set(this.fullUserKey, user.toJSON());\n }\n async getCurrentUser() {\n const blob = await this.persistence._get(this.fullUserKey);\n return blob ? 
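\n/*\r\n * Key-format sketch (illustrative apiKey/appName values): `_persistenceKeyName` above namespaces\r\n * every entry as `firebase:<key>:<apiKey>:<appName>`, so the default app's current user lands\r\n * under something like:\r\n *\r\n *   _persistenceKeyName('authUser', 'AIza...', '[DEFAULT]')\r\n *   // -> 'firebase:authUser:AIza...:[DEFAULT]'\r\n */\n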
UserImpl._fromJSON(this.auth, blob) : null;\n }\n removeCurrentUser() {\n return this.persistence._remove(this.fullUserKey);\n }\n savePersistenceForRedirect() {\n return this.persistence._set(this.fullPersistenceKey, this.persistence.type);\n }\n async setPersistence(newPersistence) {\n if (this.persistence === newPersistence) {\n return;\n }\n const currentUser = await this.getCurrentUser();\n await this.removeCurrentUser();\n this.persistence = newPersistence;\n if (currentUser) {\n return this.setCurrentUser(currentUser);\n }\n }\n delete() {\n this.persistence._removeListener(this.fullUserKey, this.boundEventHandler);\n }\n static async create(auth, persistenceHierarchy, userKey = \"authUser\" /* KeyName.AUTH_USER */) {\n if (!persistenceHierarchy.length) {\n return new PersistenceUserManager(_getInstance(inMemoryPersistence), auth, userKey);\n }\n // Eliminate any persistences that are not available\n const availablePersistences = (await Promise.all(persistenceHierarchy.map(async persistence => {\n if (await persistence._isAvailable()) {\n return persistence;\n }\n return undefined;\n }))).filter(persistence => persistence);\n // Fall back to the first persistence listed, or in memory if none available\n let selectedPersistence = availablePersistences[0] || _getInstance(inMemoryPersistence);\n const key = _persistenceKeyName(userKey, auth.config.apiKey, auth.name);\n // Pull out the existing user, setting the chosen persistence to that\n // persistence if the user exists.\n let userToMigrate = null;\n // Note, here we check for a user in _all_ persistences, not just the\n // ones deemed available. If we can migrate a user out of a broken\n // persistence, we will (but only if that persistence supports migration).\n for (const persistence of persistenceHierarchy) {\n try {\n const blob = await persistence._get(key);\n if (blob) {\n const user = UserImpl._fromJSON(auth, blob); // throws for unparsable blob (wrong format)\n if (persistence !== selectedPersistence) {\n userToMigrate = user;\n }\n selectedPersistence = persistence;\n break;\n }\n } catch (_a) {}\n }\n // If we find the user in a persistence that does support migration, use\n // that migration path (of only persistences that support migration)\n const migrationHierarchy = availablePersistences.filter(p => p._shouldAllowMigration);\n // If the persistence does _not_ allow migration, just finish off here\n if (!selectedPersistence._shouldAllowMigration || !migrationHierarchy.length) {\n return new PersistenceUserManager(selectedPersistence, auth, userKey);\n }\n selectedPersistence = migrationHierarchy[0];\n if (userToMigrate) {\n // This normally shouldn't throw since chosenPersistence.isAvailable() is true, but if it does\n // we'll just let it bubble to surface the error.\n await selectedPersistence._set(key, userToMigrate.toJSON());\n }\n // Attempt to clear the key in other persistences but ignore errors. 
This helps prevent issues\n // such as users getting stuck with a previous account after signing out and refreshing the tab.\n await Promise.all(persistenceHierarchy.map(async persistence => {\n if (persistence !== selectedPersistence) {\n try {\n await persistence._remove(key);\n } catch (_a) {}\n }\n }));\n return new PersistenceUserManager(selectedPersistence, auth, userKey);\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Determine the browser for the purposes of reporting usage to the API\r\n */\nfunction _getBrowserName(userAgent) {\n const ua = userAgent.toLowerCase();\n if (ua.includes('opera/') || ua.includes('opr/') || ua.includes('opios/')) {\n return \"Opera\" /* BrowserName.OPERA */;\n } else if (_isIEMobile(ua)) {\n // Windows phone IEMobile browser.\n return \"IEMobile\" /* BrowserName.IEMOBILE */;\n } else if (ua.includes('msie') || ua.includes('trident/')) {\n return \"IE\" /* BrowserName.IE */;\n } else if (ua.includes('edge/')) {\n return \"Edge\" /* BrowserName.EDGE */;\n } else if (_isFirefox(ua)) {\n return \"Firefox\" /* BrowserName.FIREFOX */;\n } else if (ua.includes('silk/')) {\n return \"Silk\" /* BrowserName.SILK */;\n } else if (_isBlackBerry(ua)) {\n // Blackberry browser.\n return \"Blackberry\" /* BrowserName.BLACKBERRY */;\n } else if (_isWebOS(ua)) {\n // WebOS default browser.\n return \"Webos\" /* BrowserName.WEBOS */;\n } else if (_isSafari(ua)) {\n return \"Safari\" /* BrowserName.SAFARI */;\n } else if ((ua.includes('chrome/') || _isChromeIOS(ua)) && !ua.includes('edge/')) {\n return \"Chrome\" /* BrowserName.CHROME */;\n } else if (_isAndroid(ua)) {\n // Android stock browser.\n return \"Android\" /* BrowserName.ANDROID */;\n } else {\n // Most modern browsers have name/version at end of user agent string.\n const re = /([a-zA-Z\\d\\.]+)\\/[a-zA-Z\\d\\.]*$/;\n const matches = userAgent.match(re);\n if ((matches === null || matches === void 0 ? 
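\n/*\r\n * Mapping sketch (illustrative user-agent fragments): the chain above checks the more specific\r\n * browsers first, so an agent containing 'edge/' is reported as Edge even though it also\r\n * contains 'chrome/', and the trailing regex only handles agents that end in 'Name/version':\r\n *\r\n *   _getBrowserName('... Chrome/120.0 Safari/537.36')    // -> 'Chrome'\r\n *   _getBrowserName('... Chrome/120.0 Edge/120.0')       // -> 'Edge'\r\n *   _getBrowserName('... Version/17.0 Safari/605.1.15')  // -> 'Safari'\r\n */\n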
void 0 : matches.length) === 2) {\n return matches[1];\n }\n }\n return \"Other\" /* BrowserName.OTHER */;\n}\nfunction _isFirefox(ua = getUA()) {\n return /firefox\\//i.test(ua);\n}\nfunction _isSafari(userAgent = getUA()) {\n const ua = userAgent.toLowerCase();\n return ua.includes('safari/') && !ua.includes('chrome/') && !ua.includes('crios/') && !ua.includes('android');\n}\nfunction _isChromeIOS(ua = getUA()) {\n return /crios\\//i.test(ua);\n}\nfunction _isIEMobile(ua = getUA()) {\n return /iemobile/i.test(ua);\n}\nfunction _isAndroid(ua = getUA()) {\n return /android/i.test(ua);\n}\nfunction _isBlackBerry(ua = getUA()) {\n return /blackberry/i.test(ua);\n}\nfunction _isWebOS(ua = getUA()) {\n return /webos/i.test(ua);\n}\nfunction _isIOS(ua = getUA()) {\n return /iphone|ipad|ipod/i.test(ua) || /macintosh/i.test(ua) && /mobile/i.test(ua);\n}\nfunction _isIOS7Or8(ua = getUA()) {\n return /(iPad|iPhone|iPod).*OS 7_\\d/i.test(ua) || /(iPad|iPhone|iPod).*OS 8_\\d/i.test(ua);\n}\nfunction _isIOSStandalone(ua = getUA()) {\n var _a;\n return _isIOS(ua) && !!((_a = window.navigator) === null || _a === void 0 ? void 0 : _a.standalone);\n}\nfunction _isIE10() {\n return isIE() && document.documentMode === 10;\n}\nfunction _isMobileBrowser(ua = getUA()) {\n // TODO: implement getBrowserName equivalent for OS.\n return _isIOS(ua) || _isAndroid(ua) || _isWebOS(ua) || _isBlackBerry(ua) || /windows phone/i.test(ua) || _isIEMobile(ua);\n}\nfunction _isIframe() {\n try {\n // Check that the current window is not the top window.\n // If so, return true.\n return !!(window && window !== window.top);\n } catch (e) {\n return false;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/*\r\n * Determine the SDK version string\r\n */\nfunction _getClientVersion(clientPlatform, frameworks = []) {\n let reportedPlatform;\n switch (clientPlatform) {\n case \"Browser\" /* ClientPlatform.BROWSER */:\n // In a browser environment, report the browser name.\n reportedPlatform = _getBrowserName(getUA());\n break;\n case \"Worker\" /* ClientPlatform.WORKER */:\n // Technically a worker runs from a browser but we need to differentiate a\n // worker from a browser.\n // For example: Chrome-Worker/JsCore/4.9.1/FirebaseCore-web.\n reportedPlatform = `${_getBrowserName(getUA())}-${clientPlatform}`;\n break;\n default:\n reportedPlatform = clientPlatform;\n }\n const reportedFrameworks = frameworks.length ? 
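\n/*\r\n * Format sketch (illustrative version number): the resulting client identifier is\r\n * `<platform>/<implementation>/<sdk version>/<frameworks>`, e.g. for a plain browser session\r\n * with no framework registered something like `Chrome/JsCore/10.x.x/FirebaseCore-web`, and for\r\n * a worker context the platform part becomes `Chrome-Worker` as noted above.\r\n */\n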
frameworks.join(',') : 'FirebaseCore-web'; /* default value if no other framework is used */\n return `${reportedPlatform}/${\"JsCore\" /* ClientImplementation.CORE */}/${SDK_VERSION}/${reportedFrameworks}`;\n}\n\n/**\r\n * @license\r\n * Copyright 2022 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass AuthMiddlewareQueue {\n constructor(auth) {\n this.auth = auth;\n this.queue = [];\n }\n pushCallback(callback, onAbort) {\n // The callback could be sync or async. Wrap it into a\n // function that is always async.\n const wrappedCallback = user => new Promise((resolve, reject) => {\n try {\n const result = callback(user);\n // Either resolve with existing promise or wrap a non-promise\n // return value into a promise.\n resolve(result);\n } catch (e) {\n // Sync callback throws.\n reject(e);\n }\n });\n // Attach the onAbort if present\n wrappedCallback.onAbort = onAbort;\n this.queue.push(wrappedCallback);\n const index = this.queue.length - 1;\n return () => {\n // Unsubscribe. Replace with no-op. Do not remove from array, or it will disturb\n // indexing of other elements.\n this.queue[index] = () => Promise.resolve();\n };\n }\n async runMiddleware(nextUser) {\n if (this.auth.currentUser === nextUser) {\n return;\n }\n // While running the middleware, build a temporary stack of onAbort\n // callbacks to call if one middleware callback rejects.\n const onAbortStack = [];\n try {\n for (const beforeStateCallback of this.queue) {\n await beforeStateCallback(nextUser);\n // Only push the onAbort if the callback succeeds\n if (beforeStateCallback.onAbort) {\n onAbortStack.push(beforeStateCallback.onAbort);\n }\n }\n } catch (e) {\n // Run all onAbort, with separate try/catch to ignore any errors and\n // continue\n onAbortStack.reverse();\n for (const onAbort of onAbortStack) {\n try {\n onAbort();\n } catch (_) {\n /* swallow error */\n }\n }\n throw this.auth._errorFactory.create(\"login-blocked\" /* AuthErrorCode.LOGIN_BLOCKED */, {\n originalMessage: e === null || e === void 0 ? 
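\n/*\r\n * Usage sketch (illustrative; uses the public `beforeAuthStateChanged` wrapper over\r\n * `pushCallback` above): a middleware callback that throws or rejects aborts the pending user\r\n * change, runs the already-registered onAbort handlers in reverse order, and surfaces\r\n * `auth/login-blocked` to the caller:\r\n *\r\n *   import { getAuth, beforeAuthStateChanged } from 'firebase/auth';\r\n *\r\n *   beforeAuthStateChanged(getAuth(), user => {\r\n *     if (user && !user.emailVerified) {\r\n *       throw new Error('unverified users are blocked');\r\n *     }\r\n *   });\r\n */\n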
void 0 : e.message\n });\n }\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2023 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Fetches the password policy for the currently set tenant or the project if no tenant is set.\r\n *\r\n * @param auth Auth object.\r\n * @param request Password policy request.\r\n * @returns Password policy response.\r\n */\nasync function _getPasswordPolicy(auth, request = {}) {\n return _performApiRequest(auth, \"GET\" /* HttpMethod.GET */, \"/v2/passwordPolicy\" /* Endpoint.GET_PASSWORD_POLICY */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2023 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n// Minimum min password length enforced by the backend, even if no minimum length is set.\nconst MINIMUM_MIN_PASSWORD_LENGTH = 6;\n/**\r\n * Stores password policy requirements and provides password validation against the policy.\r\n *\r\n * @internal\r\n */\nclass PasswordPolicyImpl {\n constructor(response) {\n var _a, _b, _c, _d;\n // Only include custom strength options defined in the response.\n const responseOptions = response.customStrengthOptions;\n this.customStrengthOptions = {};\n // TODO: Remove once the backend is updated to include the minimum min password length instead of undefined when there is no minimum length set.\n this.customStrengthOptions.minPasswordLength = (_a = responseOptions.minPasswordLength) !== null && _a !== void 0 ? 
_a : MINIMUM_MIN_PASSWORD_LENGTH;\n if (responseOptions.maxPasswordLength) {\n this.customStrengthOptions.maxPasswordLength = responseOptions.maxPasswordLength;\n }\n if (responseOptions.containsLowercaseCharacter !== undefined) {\n this.customStrengthOptions.containsLowercaseLetter = responseOptions.containsLowercaseCharacter;\n }\n if (responseOptions.containsUppercaseCharacter !== undefined) {\n this.customStrengthOptions.containsUppercaseLetter = responseOptions.containsUppercaseCharacter;\n }\n if (responseOptions.containsNumericCharacter !== undefined) {\n this.customStrengthOptions.containsNumericCharacter = responseOptions.containsNumericCharacter;\n }\n if (responseOptions.containsNonAlphanumericCharacter !== undefined) {\n this.customStrengthOptions.containsNonAlphanumericCharacter = responseOptions.containsNonAlphanumericCharacter;\n }\n this.enforcementState = response.enforcementState;\n if (this.enforcementState === 'ENFORCEMENT_STATE_UNSPECIFIED') {\n this.enforcementState = 'OFF';\n }\n // Use an empty string if no non-alphanumeric characters are specified in the response.\n this.allowedNonAlphanumericCharacters = (_c = (_b = response.allowedNonAlphanumericCharacters) === null || _b === void 0 ? void 0 : _b.join('')) !== null && _c !== void 0 ? _c : '';\n this.forceUpgradeOnSignin = (_d = response.forceUpgradeOnSignin) !== null && _d !== void 0 ? _d : false;\n this.schemaVersion = response.schemaVersion;\n }\n validatePassword(password) {\n var _a, _b, _c, _d, _e, _f;\n const status = {\n isValid: true,\n passwordPolicy: this\n };\n // Check the password length and character options.\n this.validatePasswordLengthOptions(password, status);\n this.validatePasswordCharacterOptions(password, status);\n // Combine the status into single isValid property.\n status.isValid && (status.isValid = (_a = status.meetsMinPasswordLength) !== null && _a !== void 0 ? _a : true);\n status.isValid && (status.isValid = (_b = status.meetsMaxPasswordLength) !== null && _b !== void 0 ? _b : true);\n status.isValid && (status.isValid = (_c = status.containsLowercaseLetter) !== null && _c !== void 0 ? _c : true);\n status.isValid && (status.isValid = (_d = status.containsUppercaseLetter) !== null && _d !== void 0 ? _d : true);\n status.isValid && (status.isValid = (_e = status.containsNumericCharacter) !== null && _e !== void 0 ? _e : true);\n status.isValid && (status.isValid = (_f = status.containsNonAlphanumericCharacter) !== null && _f !== void 0 ? 
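\n/*\r\n * Usage sketch (illustrative policy; `policy` is assumed to be a PasswordPolicyImpl built from a\r\n * fetched response): per-requirement flags stay undefined for options the policy does not\r\n * enforce, and `isValid` is the conjunction of whichever flags were set:\r\n *\r\n *   // policy: min length 8, must contain an uppercase letter and a digit\r\n *   const status = policy.validatePassword('passw0rd');\r\n *   // status.meetsMinPasswordLength    -> true\r\n *   // status.containsNumericCharacter  -> true\r\n *   // status.containsUppercaseLetter   -> false\r\n *   // status.isValid                   -> false\r\n */\n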
_f : true);\n return status;\n }\n /**\r\n * Validates that the password meets the length options for the policy.\r\n *\r\n * @param password Password to validate.\r\n * @param status Validation status.\r\n */\n validatePasswordLengthOptions(password, status) {\n const minPasswordLength = this.customStrengthOptions.minPasswordLength;\n const maxPasswordLength = this.customStrengthOptions.maxPasswordLength;\n if (minPasswordLength) {\n status.meetsMinPasswordLength = password.length >= minPasswordLength;\n }\n if (maxPasswordLength) {\n status.meetsMaxPasswordLength = password.length <= maxPasswordLength;\n }\n }\n /**\r\n * Validates that the password meets the character options for the policy.\r\n *\r\n * @param password Password to validate.\r\n * @param status Validation status.\r\n */\n validatePasswordCharacterOptions(password, status) {\n // Assign statuses for requirements even if the password is an empty string.\n this.updatePasswordCharacterOptionsStatuses(status, /* containsLowercaseCharacter= */false, /* containsUppercaseCharacter= */false, /* containsNumericCharacter= */false, /* containsNonAlphanumericCharacter= */false);\n let passwordChar;\n for (let i = 0; i < password.length; i++) {\n passwordChar = password.charAt(i);\n this.updatePasswordCharacterOptionsStatuses(status, /* containsLowercaseCharacter= */passwordChar >= 'a' && passwordChar <= 'z', /* containsUppercaseCharacter= */passwordChar >= 'A' && passwordChar <= 'Z', /* containsNumericCharacter= */passwordChar >= '0' && passwordChar <= '9', /* containsNonAlphanumericCharacter= */this.allowedNonAlphanumericCharacters.includes(passwordChar));\n }\n }\n /**\r\n * Updates the running validation status with the statuses for the character options.\r\n * Expected to be called each time a character is processed to update each option status\r\n * based on the current character.\r\n *\r\n * @param status Validation status.\r\n * @param containsLowercaseCharacter Whether the character is a lowercase letter.\r\n * @param containsUppercaseCharacter Whether the character is an uppercase letter.\r\n * @param containsNumericCharacter Whether the character is a numeric character.\r\n * @param containsNonAlphanumericCharacter Whether the character is a non-alphanumeric character.\r\n */\n updatePasswordCharacterOptionsStatuses(status, containsLowercaseCharacter, containsUppercaseCharacter, containsNumericCharacter, containsNonAlphanumericCharacter) {\n if (this.customStrengthOptions.containsLowercaseLetter) {\n status.containsLowercaseLetter || (status.containsLowercaseLetter = containsLowercaseCharacter);\n }\n if (this.customStrengthOptions.containsUppercaseLetter) {\n status.containsUppercaseLetter || (status.containsUppercaseLetter = containsUppercaseCharacter);\n }\n if (this.customStrengthOptions.containsNumericCharacter) {\n status.containsNumericCharacter || (status.containsNumericCharacter = containsNumericCharacter);\n }\n if (this.customStrengthOptions.containsNonAlphanumericCharacter) {\n status.containsNonAlphanumericCharacter || (status.containsNonAlphanumericCharacter = containsNonAlphanumericCharacter);\n }\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the 
License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass AuthImpl {\n constructor(app, heartbeatServiceProvider, appCheckServiceProvider, config) {\n this.app = app;\n this.heartbeatServiceProvider = heartbeatServiceProvider;\n this.appCheckServiceProvider = appCheckServiceProvider;\n this.config = config;\n this.currentUser = null;\n this.emulatorConfig = null;\n this.operations = Promise.resolve();\n this.authStateSubscription = new Subscription(this);\n this.idTokenSubscription = new Subscription(this);\n this.beforeStateQueue = new AuthMiddlewareQueue(this);\n this.redirectUser = null;\n this.isProactiveRefreshEnabled = false;\n this.EXPECTED_PASSWORD_POLICY_SCHEMA_VERSION = 1;\n // Any network calls will set this to true and prevent subsequent emulator\n // initialization\n this._canInitEmulator = true;\n this._isInitialized = false;\n this._deleted = false;\n this._initializationPromise = null;\n this._popupRedirectResolver = null;\n this._errorFactory = _DEFAULT_AUTH_ERROR_FACTORY;\n this._agentRecaptchaConfig = null;\n this._tenantRecaptchaConfigs = {};\n this._projectPasswordPolicy = null;\n this._tenantPasswordPolicies = {};\n // Tracks the last notified UID for state change listeners to prevent\n // repeated calls to the callbacks. Undefined means it's never been\n // called, whereas null means it's been called with a signed out user\n this.lastNotifiedUid = undefined;\n this.languageCode = null;\n this.tenantId = null;\n this.settings = {\n appVerificationDisabledForTesting: false\n };\n this.frameworks = [];\n this.name = app.name;\n this.clientVersion = config.sdkClientVersion;\n }\n _initializeWithPersistence(persistenceHierarchy, popupRedirectResolver) {\n if (popupRedirectResolver) {\n this._popupRedirectResolver = _getInstance(popupRedirectResolver);\n }\n // Have to check for app deletion throughout initialization (after each\n // promise resolution)\n this._initializationPromise = this.queue(async () => {\n var _a, _b;\n if (this._deleted) {\n return;\n }\n this.persistenceManager = await PersistenceUserManager.create(this, persistenceHierarchy);\n if (this._deleted) {\n return;\n }\n // Initialize the resolver early if necessary (only applicable to web:\n // this will cause the iframe to load immediately in certain cases)\n if ((_a = this._popupRedirectResolver) === null || _a === void 0 ? void 0 : _a._shouldInitProactively) {\n // If this fails, don't halt auth loading\n try {\n await this._popupRedirectResolver._initialize(this);\n } catch (e) {\n /* Ignore the error */\n }\n }\n await this.initializeCurrentUser(popupRedirectResolver);\n this.lastNotifiedUid = ((_b = this.currentUser) === null || _b === void 0 ? 
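\n/*\r\n * Initialization sketch (illustrative; assumes an initialized FirebaseApp `app` and the public\r\n * `initializeAuth` entry point): the persistence hierarchy handed to `_initializeWithPersistence`\r\n * comes from the caller and is tried in order by PersistenceUserManager.create:\r\n *\r\n *   import { initializeAuth, indexedDBLocalPersistence, browserLocalPersistence } from 'firebase/auth';\r\n *\r\n *   const auth = initializeAuth(app, {\r\n *     persistence: [indexedDBLocalPersistence, browserLocalPersistence]\r\n *   });\r\n */\n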
void 0 : _b.uid) || null;\n if (this._deleted) {\n return;\n }\n this._isInitialized = true;\n });\n return this._initializationPromise;\n }\n /**\r\n * If the persistence is changed in another window, the user manager will let us know\r\n */\n async _onStorageEvent() {\n if (this._deleted) {\n return;\n }\n const user = await this.assertedPersistence.getCurrentUser();\n if (!this.currentUser && !user) {\n // No change, do nothing (was signed out and remained signed out).\n return;\n }\n // If the same user is to be synchronized.\n if (this.currentUser && user && this.currentUser.uid === user.uid) {\n // Data update, simply copy data changes.\n this._currentUser._assign(user);\n // If tokens changed from previous user tokens, this will trigger\n // notifyAuthListeners_.\n await this.currentUser.getIdToken();\n return;\n }\n // Update current Auth state. Either a new login or logout.\n // Skip blocking callbacks, they should not apply to a change in another tab.\n await this._updateCurrentUser(user, /* skipBeforeStateCallbacks */true);\n }\n async initializeCurrentUserFromIdToken(idToken) {\n try {\n const response = await getAccountInfo(this, {\n idToken\n });\n const user = await UserImpl._fromGetAccountInfoResponse(this, response, idToken);\n await this.directlySetCurrentUser(user);\n } catch (err) {\n console.warn('FirebaseServerApp could not login user with provided authIdToken: ', err);\n await this.directlySetCurrentUser(null);\n }\n }\n async initializeCurrentUser(popupRedirectResolver) {\n var _a;\n if (_isFirebaseServerApp(this.app)) {\n const idToken = this.app.settings.authIdToken;\n if (idToken) {\n // Start the auth operation in the next tick to allow a moment for the customer's app to\n // attach an emulator, if desired.\n return new Promise(resolve => {\n setTimeout(() => this.initializeCurrentUserFromIdToken(idToken).then(resolve, resolve));\n });\n } else {\n return this.directlySetCurrentUser(null);\n }\n }\n // First check to see if we have a pending redirect event.\n const previouslyStoredUser = await this.assertedPersistence.getCurrentUser();\n let futureCurrentUser = previouslyStoredUser;\n let needsTocheckMiddleware = false;\n if (popupRedirectResolver && this.config.authDomain) {\n await this.getOrInitRedirectPersistenceManager();\n const redirectUserEventId = (_a = this.redirectUser) === null || _a === void 0 ? void 0 : _a._redirectEventId;\n const storedUserEventId = futureCurrentUser === null || futureCurrentUser === void 0 ? void 0 : futureCurrentUser._redirectEventId;\n const result = await this.tryRedirectSignIn(popupRedirectResolver);\n // If the stored user (i.e. the old \"currentUser\") has a redirectId that\n // matches the redirect user, then we want to initially sign in with the\n // new user object from result.\n // TODO(samgho): More thoroughly test all of this\n if ((!redirectUserEventId || redirectUserEventId === storedUserEventId) && (result === null || result === void 0 ? void 0 : result.user)) {\n futureCurrentUser = result.user;\n needsTocheckMiddleware = true;\n }\n }\n // If no user in persistence, there is no current user. 
Set to null.\n if (!futureCurrentUser) {\n return this.directlySetCurrentUser(null);\n }\n if (!futureCurrentUser._redirectEventId) {\n // This isn't a redirect link operation, we can reload and bail.\n // First though, ensure that we check the middleware is happy.\n if (needsTocheckMiddleware) {\n try {\n await this.beforeStateQueue.runMiddleware(futureCurrentUser);\n } catch (e) {\n futureCurrentUser = previouslyStoredUser;\n // We know this is available since the bit is only set when the\n // resolver is available\n this._popupRedirectResolver._overrideRedirectResult(this, () => Promise.reject(e));\n }\n }\n if (futureCurrentUser) {\n return this.reloadAndSetCurrentUserOrClear(futureCurrentUser);\n } else {\n return this.directlySetCurrentUser(null);\n }\n }\n _assert(this._popupRedirectResolver, this, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n await this.getOrInitRedirectPersistenceManager();\n // If the redirect user's event ID matches the current user's event ID,\n // DO NOT reload the current user, otherwise they'll be cleared from storage.\n // This is important for the reauthenticateWithRedirect() flow.\n if (this.redirectUser && this.redirectUser._redirectEventId === futureCurrentUser._redirectEventId) {\n return this.directlySetCurrentUser(futureCurrentUser);\n }\n return this.reloadAndSetCurrentUserOrClear(futureCurrentUser);\n }\n async tryRedirectSignIn(redirectResolver) {\n // The redirect user needs to be checked (and signed in if available)\n // during auth initialization. All of the normal sign in and link/reauth\n // flows call back into auth and push things onto the promise queue. We\n // need to await the result of the redirect sign in *inside the promise\n // queue*. This presents a problem: we run into deadlock. See:\n // ┌> [Initialization] ─────┐\n // ┌> [] │\n // └─ [getRedirectResult] <─┘\n // where [] are tasks on the queue and arrows denote awaits\n // Initialization will never complete because it's waiting on something\n // that's waiting for initialization to complete!\n //\n // Instead, this method calls getRedirectResult() (stored in\n // _completeRedirectFn) with an optional parameter that instructs all of\n // the underlying auth operations to skip anything that mutates auth state.\n let result = null;\n try {\n // We know this._popupRedirectResolver is set since redirectResolver\n // is passed in. The _completeRedirectFn expects the unwrapped extern.\n result = await this._popupRedirectResolver._completeRedirectFn(this, redirectResolver, true);\n } catch (e) {\n // Swallow any errors here; the code can retrieve them in\n // getRedirectResult().\n await this._setRedirectUser(null);\n }\n return result;\n }\n async reloadAndSetCurrentUserOrClear(user) {\n try {\n await _reloadWithoutSaving(user);\n } catch (e) {\n if ((e === null || e === void 0 ? void 0 : e.code) !== `auth/${\"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */}`) {\n // Something's wrong with the user's token. 
Log them out and remove\n // them from storage\n return this.directlySetCurrentUser(null);\n }\n }\n return this.directlySetCurrentUser(user);\n }\n useDeviceLanguage() {\n this.languageCode = _getUserLanguage();\n }\n async _delete() {\n this._deleted = true;\n }\n async updateCurrentUser(userExtern) {\n if (_isFirebaseServerApp(this.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(this));\n }\n // The public updateCurrentUser method needs to make a copy of the user,\n // and also check that the project matches\n const user = userExtern ? getModularInstance(userExtern) : null;\n if (user) {\n _assert(user.auth.config.apiKey === this.config.apiKey, this, \"invalid-user-token\" /* AuthErrorCode.INVALID_AUTH */);\n }\n return this._updateCurrentUser(user && user._clone(this));\n }\n async _updateCurrentUser(user, skipBeforeStateCallbacks = false) {\n if (this._deleted) {\n return;\n }\n if (user) {\n _assert(this.tenantId === user.tenantId, this, \"tenant-id-mismatch\" /* AuthErrorCode.TENANT_ID_MISMATCH */);\n }\n if (!skipBeforeStateCallbacks) {\n await this.beforeStateQueue.runMiddleware(user);\n }\n return this.queue(async () => {\n await this.directlySetCurrentUser(user);\n this.notifyAuthListeners();\n });\n }\n async signOut() {\n if (_isFirebaseServerApp(this.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(this));\n }\n // Run first, to block _setRedirectUser() if any callbacks fail.\n await this.beforeStateQueue.runMiddleware(null);\n // Clear the redirect user when signOut is called\n if (this.redirectPersistenceManager || this._popupRedirectResolver) {\n await this._setRedirectUser(null);\n }\n // Prevent callbacks from being called again in _updateCurrentUser, as\n // they were already called in the first line.\n return this._updateCurrentUser(null, /* skipBeforeStateCallbacks */true);\n }\n setPersistence(persistence) {\n if (_isFirebaseServerApp(this.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(this));\n }\n return this.queue(async () => {\n await this.assertedPersistence.setPersistence(_getInstance(persistence));\n });\n }\n _getRecaptchaConfig() {\n if (this.tenantId == null) {\n return this._agentRecaptchaConfig;\n } else {\n return this._tenantRecaptchaConfigs[this.tenantId];\n }\n }\n async validatePassword(password) {\n if (!this._getPasswordPolicyInternal()) {\n await this._updatePasswordPolicy();\n }\n // Password policy will be defined after fetching.\n const passwordPolicy = this._getPasswordPolicyInternal();\n // Check that the policy schema version is supported by the SDK.\n // TODO: Update this logic to use a max supported policy schema version once we have multiple schema versions.\n if (passwordPolicy.schemaVersion !== this.EXPECTED_PASSWORD_POLICY_SCHEMA_VERSION) {\n return Promise.reject(this._errorFactory.create(\"unsupported-password-policy-schema-version\" /* AuthErrorCode.UNSUPPORTED_PASSWORD_POLICY_SCHEMA_VERSION */, {}));\n }\n return passwordPolicy.validatePassword(password);\n }\n _getPasswordPolicyInternal() {\n if (this.tenantId === null) {\n return this._projectPasswordPolicy;\n } else {\n return this._tenantPasswordPolicies[this.tenantId];\n }\n }\n async _updatePasswordPolicy() {\n const response = await _getPasswordPolicy(this);\n const passwordPolicy = new PasswordPolicyImpl(response);\n if (this.tenantId === null) {\n this._projectPasswordPolicy = passwordPolicy;\n } else {\n this._tenantPasswordPolicies[this.tenantId] = passwordPolicy;\n }\n }\n 
_getPersistence() {\n return this.assertedPersistence.persistence.type;\n }\n _updateErrorMap(errorMap) {\n this._errorFactory = new ErrorFactory('auth', 'Firebase', errorMap());\n }\n onAuthStateChanged(nextOrObserver, error, completed) {\n return this.registerStateListener(this.authStateSubscription, nextOrObserver, error, completed);\n }\n beforeAuthStateChanged(callback, onAbort) {\n return this.beforeStateQueue.pushCallback(callback, onAbort);\n }\n onIdTokenChanged(nextOrObserver, error, completed) {\n return this.registerStateListener(this.idTokenSubscription, nextOrObserver, error, completed);\n }\n authStateReady() {\n return new Promise((resolve, reject) => {\n if (this.currentUser) {\n resolve();\n } else {\n const unsubscribe = this.onAuthStateChanged(() => {\n unsubscribe();\n resolve();\n }, reject);\n }\n });\n }\n /**\r\n * Revokes the given access token. Currently only supports Apple OAuth access tokens.\r\n */\n async revokeAccessToken(token) {\n if (this.currentUser) {\n const idToken = await this.currentUser.getIdToken();\n // Generalize this to accept other providers once supported.\n const request = {\n providerId: 'apple.com',\n tokenType: \"ACCESS_TOKEN\" /* TokenType.ACCESS_TOKEN */,\n token,\n idToken\n };\n if (this.tenantId != null) {\n request.tenantId = this.tenantId;\n }\n await revokeToken(this, request);\n }\n }\n toJSON() {\n var _a;\n return {\n apiKey: this.config.apiKey,\n authDomain: this.config.authDomain,\n appName: this.name,\n currentUser: (_a = this._currentUser) === null || _a === void 0 ? void 0 : _a.toJSON()\n };\n }\n async _setRedirectUser(user, popupRedirectResolver) {\n const redirectManager = await this.getOrInitRedirectPersistenceManager(popupRedirectResolver);\n return user === null ? redirectManager.removeCurrentUser() : redirectManager.setCurrentUser(user);\n }\n async getOrInitRedirectPersistenceManager(popupRedirectResolver) {\n if (!this.redirectPersistenceManager) {\n const resolver = popupRedirectResolver && _getInstance(popupRedirectResolver) || this._popupRedirectResolver;\n _assert(resolver, this, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n this.redirectPersistenceManager = await PersistenceUserManager.create(this, [_getInstance(resolver._redirectPersistence)], \"redirectUser\" /* KeyName.REDIRECT_USER */);\n this.redirectUser = await this.redirectPersistenceManager.getCurrentUser();\n }\n return this.redirectPersistenceManager;\n }\n async _redirectUserForId(id) {\n var _a, _b;\n // Make sure we've cleared any pending persistence actions if we're not in\n // the initializer\n if (this._isInitialized) {\n await this.queue(async () => {});\n }\n if (((_a = this._currentUser) === null || _a === void 0 ? void 0 : _a._redirectEventId) === id) {\n return this._currentUser;\n }\n if (((_b = this.redirectUser) === null || _b === void 0 ? 
void 0 : _b._redirectEventId) === id) {\n return this.redirectUser;\n }\n return null;\n }\n async _persistUserIfCurrent(user) {\n if (user === this.currentUser) {\n return this.queue(async () => this.directlySetCurrentUser(user));\n }\n }\n /** Notifies listeners only if the user is current */\n _notifyListenersIfCurrent(user) {\n if (user === this.currentUser) {\n this.notifyAuthListeners();\n }\n }\n _key() {\n return `${this.config.authDomain}:${this.config.apiKey}:${this.name}`;\n }\n _startProactiveRefresh() {\n this.isProactiveRefreshEnabled = true;\n if (this.currentUser) {\n this._currentUser._startProactiveRefresh();\n }\n }\n _stopProactiveRefresh() {\n this.isProactiveRefreshEnabled = false;\n if (this.currentUser) {\n this._currentUser._stopProactiveRefresh();\n }\n }\n /** Returns the current user cast as the internal type */\n get _currentUser() {\n return this.currentUser;\n }\n notifyAuthListeners() {\n var _a, _b;\n if (!this._isInitialized) {\n return;\n }\n this.idTokenSubscription.next(this.currentUser);\n const currentUid = (_b = (_a = this.currentUser) === null || _a === void 0 ? void 0 : _a.uid) !== null && _b !== void 0 ? _b : null;\n if (this.lastNotifiedUid !== currentUid) {\n this.lastNotifiedUid = currentUid;\n this.authStateSubscription.next(this.currentUser);\n }\n }\n registerStateListener(subscription, nextOrObserver, error, completed) {\n if (this._deleted) {\n return () => {};\n }\n const cb = typeof nextOrObserver === 'function' ? nextOrObserver : nextOrObserver.next.bind(nextOrObserver);\n let isUnsubscribed = false;\n const promise = this._isInitialized ? Promise.resolve() : this._initializationPromise;\n _assert(promise, this, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n // The callback needs to be called asynchronously per the spec.\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n promise.then(() => {\n if (isUnsubscribed) {\n return;\n }\n cb(this.currentUser);\n });\n if (typeof nextOrObserver === 'function') {\n const unsubscribe = subscription.addObserver(nextOrObserver, error, completed);\n return () => {\n isUnsubscribed = true;\n unsubscribe();\n };\n } else {\n const unsubscribe = subscription.addObserver(nextOrObserver);\n return () => {\n isUnsubscribed = true;\n unsubscribe();\n };\n }\n }\n /**\r\n * Unprotected (from race conditions) method to set the current user. This\r\n * should only be called from within a queued callback. 
This is necessary\r\n * because the queue shouldn't rely on another queued callback.\r\n */\n async directlySetCurrentUser(user) {\n if (this.currentUser && this.currentUser !== user) {\n this._currentUser._stopProactiveRefresh();\n }\n if (user && this.isProactiveRefreshEnabled) {\n user._startProactiveRefresh();\n }\n this.currentUser = user;\n if (user) {\n await this.assertedPersistence.setCurrentUser(user);\n } else {\n await this.assertedPersistence.removeCurrentUser();\n }\n }\n queue(action) {\n // In case something errors, the callback still should be called in order\n // to keep the promise chain alive\n this.operations = this.operations.then(action, action);\n return this.operations;\n }\n get assertedPersistence() {\n _assert(this.persistenceManager, this, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return this.persistenceManager;\n }\n _logFramework(framework) {\n if (!framework || this.frameworks.includes(framework)) {\n return;\n }\n this.frameworks.push(framework);\n // Sort alphabetically so that \"FirebaseCore-web,FirebaseUI-web\" and\n // \"FirebaseUI-web,FirebaseCore-web\" aren't viewed as different.\n this.frameworks.sort();\n this.clientVersion = _getClientVersion(this.config.clientPlatform, this._getFrameworks());\n }\n _getFrameworks() {\n return this.frameworks;\n }\n async _getAdditionalHeaders() {\n var _a;\n // Additional headers on every request\n const headers = {\n [\"X-Client-Version\" /* HttpHeader.X_CLIENT_VERSION */]: this.clientVersion\n };\n if (this.app.options.appId) {\n headers[\"X-Firebase-gmpid\" /* HttpHeader.X_FIREBASE_GMPID */] = this.app.options.appId;\n }\n // If the heartbeat service exists, add the heartbeat string\n const heartbeatsHeader = await ((_a = this.heartbeatServiceProvider.getImmediate({\n optional: true\n })) === null || _a === void 0 ? void 0 : _a.getHeartbeatsHeader());\n if (heartbeatsHeader) {\n headers[\"X-Firebase-Client\" /* HttpHeader.X_FIREBASE_CLIENT */] = heartbeatsHeader;\n }\n // If the App Check service exists, add the App Check token in the headers\n const appCheckToken = await this._getAppCheckToken();\n if (appCheckToken) {\n headers[\"X-Firebase-AppCheck\" /* HttpHeader.X_FIREBASE_APP_CHECK */] = appCheckToken;\n }\n return headers;\n }\n async _getAppCheckToken() {\n var _a;\n const appCheckTokenResult = await ((_a = this.appCheckServiceProvider.getImmediate({\n optional: true\n })) === null || _a === void 0 ? void 0 : _a.getToken());\n if (appCheckTokenResult === null || appCheckTokenResult === void 0 ? void 0 : appCheckTokenResult.error) {\n // Context: appCheck.getToken() will never throw even if an error happened.\n // In the error case, a dummy token will be returned along with an error field describing\n // the error. In general, we shouldn't care about the error condition and just use\n // the token (actual or dummy) to send requests.\n _logWarn(`Error while retrieving App Check token: ${appCheckTokenResult.error}`);\n }\n return appCheckTokenResult === null || appCheckTokenResult === void 0 ? 
void 0 : appCheckTokenResult.token;\n }\n}\n/**\r\n * Method to be used to cast down to our private implmentation of Auth.\r\n * It will also handle unwrapping from the compat type if necessary\r\n *\r\n * @param auth Auth object passed in from developer\r\n */\nfunction _castAuth(auth) {\n return getModularInstance(auth);\n}\n/** Helper class to wrap subscriber logic */\nclass Subscription {\n constructor(auth) {\n this.auth = auth;\n this.observer = null;\n this.addObserver = createSubscribe(observer => this.observer = observer);\n }\n get next() {\n _assert(this.observer, this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return this.observer.next.bind(this.observer);\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nlet externalJSProvider = {\n async loadJS() {\n throw new Error('Unable to load external scripts');\n },\n recaptchaV2Script: '',\n recaptchaEnterpriseScript: '',\n gapiScript: ''\n};\nfunction _setExternalJSProvider(p) {\n externalJSProvider = p;\n}\nfunction _loadJS(url) {\n return externalJSProvider.loadJS(url);\n}\nfunction _recaptchaV2ScriptUrl() {\n return externalJSProvider.recaptchaV2Script;\n}\nfunction _recaptchaEnterpriseScriptUrl() {\n return externalJSProvider.recaptchaEnterpriseScript;\n}\nfunction _gapiScriptUrl() {\n return externalJSProvider.gapiScript;\n}\nfunction _generateCallbackName(prefix) {\n return `__${prefix}${Math.floor(Math.random() * 1000000)}`;\n}\n\n/* eslint-disable @typescript-eslint/no-require-imports */\nconst RECAPTCHA_ENTERPRISE_VERIFIER_TYPE = 'recaptcha-enterprise';\nconst FAKE_TOKEN = 'NO_RECAPTCHA';\nclass RecaptchaEnterpriseVerifier {\n /**\r\n *\r\n * @param authExtern - The corresponding Firebase {@link Auth} instance.\r\n *\r\n */\n constructor(authExtern) {\n /**\r\n * Identifies the type of application verifier (e.g. 
\"recaptcha-enterprise\").\r\n */\n this.type = RECAPTCHA_ENTERPRISE_VERIFIER_TYPE;\n this.auth = _castAuth(authExtern);\n }\n /**\r\n * Executes the verification process.\r\n *\r\n * @returns A Promise for a token that can be used to assert the validity of a request.\r\n */\n async verify(action = 'verify', forceRefresh = false) {\n async function retrieveSiteKey(auth) {\n if (!forceRefresh) {\n if (auth.tenantId == null && auth._agentRecaptchaConfig != null) {\n return auth._agentRecaptchaConfig.siteKey;\n }\n if (auth.tenantId != null && auth._tenantRecaptchaConfigs[auth.tenantId] !== undefined) {\n return auth._tenantRecaptchaConfigs[auth.tenantId].siteKey;\n }\n }\n return new Promise(async (resolve, reject) => {\n getRecaptchaConfig(auth, {\n clientType: \"CLIENT_TYPE_WEB\" /* RecaptchaClientType.WEB */,\n version: \"RECAPTCHA_ENTERPRISE\" /* RecaptchaVersion.ENTERPRISE */\n }).then(response => {\n if (response.recaptchaKey === undefined) {\n reject(new Error('recaptcha Enterprise site key undefined'));\n } else {\n const config = new RecaptchaConfig(response);\n if (auth.tenantId == null) {\n auth._agentRecaptchaConfig = config;\n } else {\n auth._tenantRecaptchaConfigs[auth.tenantId] = config;\n }\n return resolve(config.siteKey);\n }\n }).catch(error => {\n reject(error);\n });\n });\n }\n function retrieveRecaptchaToken(siteKey, resolve, reject) {\n const grecaptcha = window.grecaptcha;\n if (isEnterprise(grecaptcha)) {\n grecaptcha.enterprise.ready(() => {\n grecaptcha.enterprise.execute(siteKey, {\n action\n }).then(token => {\n resolve(token);\n }).catch(() => {\n resolve(FAKE_TOKEN);\n });\n });\n } else {\n reject(Error('No reCAPTCHA enterprise script loaded.'));\n }\n }\n return new Promise((resolve, reject) => {\n retrieveSiteKey(this.auth).then(siteKey => {\n if (!forceRefresh && isEnterprise(window.grecaptcha)) {\n retrieveRecaptchaToken(siteKey, resolve, reject);\n } else {\n if (typeof window === 'undefined') {\n reject(new Error('RecaptchaVerifier is only supported in browser'));\n return;\n }\n let url = _recaptchaEnterpriseScriptUrl();\n if (url.length !== 0) {\n url += siteKey;\n }\n _loadJS(url).then(() => {\n retrieveRecaptchaToken(siteKey, resolve, reject);\n }).catch(error => {\n reject(error);\n });\n }\n }).catch(error => {\n reject(error);\n });\n });\n }\n}\nasync function injectRecaptchaFields(auth, request, action, captchaResp = false) {\n const verifier = new RecaptchaEnterpriseVerifier(auth);\n let captchaResponse;\n try {\n captchaResponse = await verifier.verify(action);\n } catch (error) {\n captchaResponse = await verifier.verify(action, true);\n }\n const newRequest = Object.assign({}, request);\n if (!captchaResp) {\n Object.assign(newRequest, {\n captchaResponse\n });\n } else {\n Object.assign(newRequest, {\n 'captchaResp': captchaResponse\n });\n }\n Object.assign(newRequest, {\n 'clientType': \"CLIENT_TYPE_WEB\" /* RecaptchaClientType.WEB */\n });\n Object.assign(newRequest, {\n 'recaptchaVersion': \"RECAPTCHA_ENTERPRISE\" /* RecaptchaVersion.ENTERPRISE */\n });\n return newRequest;\n}\nasync function handleRecaptchaFlow(authInstance, request, actionName, actionMethod) {\n var _a;\n if ((_a = authInstance._getRecaptchaConfig()) === null || _a === void 0 ? 
void 0 : _a.isProviderEnabled(\"EMAIL_PASSWORD_PROVIDER\" /* RecaptchaProvider.EMAIL_PASSWORD_PROVIDER */)) {\n const requestWithRecaptcha = await injectRecaptchaFields(authInstance, request, actionName, actionName === \"getOobCode\" /* RecaptchaActionName.GET_OOB_CODE */);\n return actionMethod(authInstance, requestWithRecaptcha);\n } else {\n return actionMethod(authInstance, request).catch(async error => {\n if (error.code === `auth/${\"missing-recaptcha-token\" /* AuthErrorCode.MISSING_RECAPTCHA_TOKEN */}`) {\n console.log(`${actionName} is protected by reCAPTCHA Enterprise for this project. Automatically triggering the reCAPTCHA flow and restarting the flow.`);\n const requestWithRecaptcha = await injectRecaptchaFields(authInstance, request, actionName, actionName === \"getOobCode\" /* RecaptchaActionName.GET_OOB_CODE */);\n return actionMethod(authInstance, requestWithRecaptcha);\n } else {\n return Promise.reject(error);\n }\n });\n }\n}\nasync function _initializeRecaptchaConfig(auth) {\n const authInternal = _castAuth(auth);\n const response = await getRecaptchaConfig(authInternal, {\n clientType: \"CLIENT_TYPE_WEB\" /* RecaptchaClientType.WEB */,\n version: \"RECAPTCHA_ENTERPRISE\" /* RecaptchaVersion.ENTERPRISE */\n });\n const config = new RecaptchaConfig(response);\n if (authInternal.tenantId == null) {\n authInternal._agentRecaptchaConfig = config;\n } else {\n authInternal._tenantRecaptchaConfigs[authInternal.tenantId] = config;\n }\n if (config.isProviderEnabled(\"EMAIL_PASSWORD_PROVIDER\" /* RecaptchaProvider.EMAIL_PASSWORD_PROVIDER */)) {\n const verifier = new RecaptchaEnterpriseVerifier(authInternal);\n void verifier.verify();\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Initializes an {@link Auth} instance with fine-grained control over\r\n * {@link Dependencies}.\r\n *\r\n * @remarks\r\n *\r\n * This function allows more control over the {@link Auth} instance than\r\n * {@link getAuth}. `getAuth` uses platform-specific defaults to supply\r\n * the {@link Dependencies}. In general, `getAuth` is the easiest way to\r\n * initialize Auth and works for most use cases. 
Use `initializeAuth` if you\r\n * need control over which persistence layer is used, or to minimize bundle\r\n * size if you're not using either `signInWithPopup` or `signInWithRedirect`.\r\n *\r\n * For example, if your app only uses anonymous accounts and you only want\r\n * accounts saved for the current session, initialize `Auth` with:\r\n *\r\n * ```js\r\n * const auth = initializeAuth(app, {\r\n * persistence: browserSessionPersistence,\r\n * popupRedirectResolver: undefined,\r\n * });\r\n * ```\r\n *\r\n * @public\r\n */\nfunction initializeAuth(app, deps) {\n const provider = _getProvider(app, 'auth');\n if (provider.isInitialized()) {\n const auth = provider.getImmediate();\n const initialOptions = provider.getOptions();\n if (deepEqual(initialOptions, deps !== null && deps !== void 0 ? deps : {})) {\n return auth;\n } else {\n _fail(auth, \"already-initialized\" /* AuthErrorCode.ALREADY_INITIALIZED */);\n }\n }\n const auth = provider.initialize({\n options: deps\n });\n return auth;\n}\nfunction _initializeAuthInstance(auth, deps) {\n const persistence = (deps === null || deps === void 0 ? void 0 : deps.persistence) || [];\n const hierarchy = (Array.isArray(persistence) ? persistence : [persistence]).map(_getInstance);\n if (deps === null || deps === void 0 ? void 0 : deps.errorMap) {\n auth._updateErrorMap(deps.errorMap);\n }\n // This promise is intended to float; auth initialization happens in the\n // background, meanwhile the auth object may be used by the app.\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n auth._initializeWithPersistence(hierarchy, deps === null || deps === void 0 ? void 0 : deps.popupRedirectResolver);\n}\n\n/**\r\n * Changes the {@link Auth} instance to communicate with the Firebase Auth Emulator, instead of production\r\n * Firebase Auth services.\r\n *\r\n * @remarks\r\n * This must be called synchronously immediately following the first call to\r\n * {@link initializeAuth}. Do not use with production credentials as emulator\r\n * traffic is not encrypted.\r\n *\r\n *\r\n * @example\r\n * ```javascript\r\n * connectAuthEmulator(auth, 'http://127.0.0.1:9099', { disableWarnings: true });\r\n * ```\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param url - The URL at which the emulator is running (eg, 'http://localhost:9099').\r\n * @param options - Optional. `options.disableWarnings` defaults to `false`. Set it to\r\n * `true` to disable the warning banner attached to the DOM.\r\n *\r\n * @public\r\n */\nfunction connectAuthEmulator(auth, url, options) {\n const authInternal = _castAuth(auth);\n _assert(authInternal._canInitEmulator, authInternal, \"emulator-config-failed\" /* AuthErrorCode.EMULATOR_CONFIG_FAILED */);\n _assert(/^https?:\\/\\//.test(url), authInternal, \"invalid-emulator-scheme\" /* AuthErrorCode.INVALID_EMULATOR_SCHEME */);\n const disableWarnings = !!(options === null || options === void 0 ? void 0 : options.disableWarnings);\n const protocol = extractProtocol(url);\n const {\n host,\n port\n } = extractHostAndPort(url);\n const portStr = port === null ? 
'' : `:${port}`;\n // Always replace path with \"/\" (even if input url had no path at all, or had a different one).\n authInternal.config.emulator = {\n url: `${protocol}//${host}${portStr}/`\n };\n authInternal.settings.appVerificationDisabledForTesting = true;\n authInternal.emulatorConfig = Object.freeze({\n host,\n port,\n protocol: protocol.replace(':', ''),\n options: Object.freeze({\n disableWarnings\n })\n });\n if (!disableWarnings) {\n emitEmulatorWarning();\n }\n}\nfunction extractProtocol(url) {\n const protocolEnd = url.indexOf(':');\n return protocolEnd < 0 ? '' : url.substr(0, protocolEnd + 1);\n}\nfunction extractHostAndPort(url) {\n const protocol = extractProtocol(url);\n const authority = /(\\/\\/)?([^?#/]+)/.exec(url.substr(protocol.length)); // Between // and /, ? or #.\n if (!authority) {\n return {\n host: '',\n port: null\n };\n }\n const hostAndPort = authority[2].split('@').pop() || ''; // Strip out \"username:password@\".\n const bracketedIPv6 = /^(\\[[^\\]]+\\])(:|$)/.exec(hostAndPort);\n if (bracketedIPv6) {\n const host = bracketedIPv6[1];\n return {\n host,\n port: parsePort(hostAndPort.substr(host.length + 1))\n };\n } else {\n const [host, port] = hostAndPort.split(':');\n return {\n host,\n port: parsePort(port)\n };\n }\n}\nfunction parsePort(portStr) {\n if (!portStr) {\n return null;\n }\n const port = Number(portStr);\n if (isNaN(port)) {\n return null;\n }\n return port;\n}\nfunction emitEmulatorWarning() {\n function attachBanner() {\n const el = document.createElement('p');\n const sty = el.style;\n el.innerText = 'Running in emulator mode. Do not use with production credentials.';\n sty.position = 'fixed';\n sty.width = '100%';\n sty.backgroundColor = '#ffffff';\n sty.border = '.1em solid #000000';\n sty.color = '#b50000';\n sty.bottom = '0px';\n sty.left = '0px';\n sty.margin = '0px';\n sty.zIndex = '10000';\n sty.textAlign = 'center';\n el.classList.add('firebase-emulator-warning');\n document.body.appendChild(el);\n }\n if (typeof console !== 'undefined' && typeof console.info === 'function') {\n console.info('WARNING: You are using the Auth Emulator,' + ' which is intended for local testing only. 
Do not use with' + ' production credentials.');\n }\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n if (document.readyState === 'loading') {\n window.addEventListener('DOMContentLoaded', attachBanner);\n } else {\n attachBanner();\n }\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Interface that represents the credentials returned by an {@link AuthProvider}.\r\n *\r\n * @remarks\r\n * Implementations specify the details about each auth provider's credential requirements.\r\n *\r\n * @public\r\n */\nclass AuthCredential {\n /** @internal */\n constructor(\n /**\r\n * The authentication provider ID for the credential.\r\n *\r\n * @remarks\r\n * For example, 'facebook.com', or 'google.com'.\r\n */\n providerId,\n /**\r\n * The authentication sign in method for the credential.\r\n *\r\n * @remarks\r\n * For example, {@link SignInMethod}.EMAIL_PASSWORD, or\r\n * {@link SignInMethod}.EMAIL_LINK. This corresponds to the sign-in method\r\n * identifier as returned in {@link fetchSignInMethodsForEmail}.\r\n */\n signInMethod) {\n this.providerId = providerId;\n this.signInMethod = signInMethod;\n }\n /**\r\n * Returns a JSON-serializable representation of this object.\r\n *\r\n * @returns a JSON-serializable representation of this object.\r\n */\n toJSON() {\n return debugFail('not implemented');\n }\n /** @internal */\n _getIdTokenResponse(_auth) {\n return debugFail('not implemented');\n }\n /** @internal */\n _linkToIdToken(_auth, _idToken) {\n return debugFail('not implemented');\n }\n /** @internal */\n _getReauthenticationResolver(_auth) {\n return debugFail('not implemented');\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function resetPassword(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:resetPassword\" /* Endpoint.RESET_PASSWORD */, _addTidIfNecessary(auth, request));\n}\nasync function updateEmailPassword(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:update\" /* Endpoint.SET_ACCOUNT_INFO */, request);\n}\n// Used for linking an email/password account to an existing idToken. 
Uses the same request/response\n// format as updateEmailPassword.\nasync function linkEmailPassword(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signUp\" /* Endpoint.SIGN_UP */, request);\n}\nasync function applyActionCode$1(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:update\" /* Endpoint.SET_ACCOUNT_INFO */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function signInWithPassword(auth, request) {\n return _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signInWithPassword\" /* Endpoint.SIGN_IN_WITH_PASSWORD */, _addTidIfNecessary(auth, request));\n}\nasync function sendOobCode(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:sendOobCode\" /* Endpoint.SEND_OOB_CODE */, _addTidIfNecessary(auth, request));\n}\nasync function sendEmailVerification$1(auth, request) {\n return sendOobCode(auth, request);\n}\nasync function sendPasswordResetEmail$1(auth, request) {\n return sendOobCode(auth, request);\n}\nasync function sendSignInLinkToEmail$1(auth, request) {\n return sendOobCode(auth, request);\n}\nasync function verifyAndChangeEmail(auth, request) {\n return sendOobCode(auth, request);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function signInWithEmailLink$1(auth, request) {\n return _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signInWithEmailLink\" /* Endpoint.SIGN_IN_WITH_EMAIL_LINK */, _addTidIfNecessary(auth, request));\n}\nasync function signInWithEmailLinkForLinking(auth, request) {\n return _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signInWithEmailLink\" /* Endpoint.SIGN_IN_WITH_EMAIL_LINK */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * 
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Interface that represents the credentials returned by {@link EmailAuthProvider} for\r\n * {@link ProviderId}.PASSWORD\r\n *\r\n * @remarks\r\n * Covers both {@link SignInMethod}.EMAIL_PASSWORD and\r\n * {@link SignInMethod}.EMAIL_LINK.\r\n *\r\n * @public\r\n */\nclass EmailAuthCredential extends AuthCredential {\n /** @internal */\n constructor(/** @internal */\n _email, /** @internal */\n _password, signInMethod, /** @internal */\n _tenantId = null) {\n super(\"password\" /* ProviderId.PASSWORD */, signInMethod);\n this._email = _email;\n this._password = _password;\n this._tenantId = _tenantId;\n }\n /** @internal */\n static _fromEmailAndPassword(email, password) {\n return new EmailAuthCredential(email, password, \"password\" /* SignInMethod.EMAIL_PASSWORD */);\n }\n /** @internal */\n static _fromEmailAndCode(email, oobCode, tenantId = null) {\n return new EmailAuthCredential(email, oobCode, \"emailLink\" /* SignInMethod.EMAIL_LINK */, tenantId);\n }\n /** {@inheritdoc AuthCredential.toJSON} */\n toJSON() {\n return {\n email: this._email,\n password: this._password,\n signInMethod: this.signInMethod,\n tenantId: this._tenantId\n };\n }\n /**\r\n * Static method to deserialize a JSON representation of an object into an {@link AuthCredential}.\r\n *\r\n * @param json - Either `object` or the stringified representation of the object. When string is\r\n * provided, `JSON.parse` would be called first.\r\n *\r\n * @returns If the JSON input does not represent an {@link AuthCredential}, null is returned.\r\n */\n static fromJSON(json) {\n const obj = typeof json === 'string' ? JSON.parse(json) : json;\n if ((obj === null || obj === void 0 ? void 0 : obj.email) && (obj === null || obj === void 0 ? 
void 0 : obj.password)) {\n if (obj.signInMethod === \"password\" /* SignInMethod.EMAIL_PASSWORD */) {\n return this._fromEmailAndPassword(obj.email, obj.password);\n } else if (obj.signInMethod === \"emailLink\" /* SignInMethod.EMAIL_LINK */) {\n return this._fromEmailAndCode(obj.email, obj.password, obj.tenantId);\n }\n }\n return null;\n }\n /** @internal */\n async _getIdTokenResponse(auth) {\n switch (this.signInMethod) {\n case \"password\" /* SignInMethod.EMAIL_PASSWORD */:\n const request = {\n returnSecureToken: true,\n email: this._email,\n password: this._password,\n clientType: \"CLIENT_TYPE_WEB\" /* RecaptchaClientType.WEB */\n };\n return handleRecaptchaFlow(auth, request, \"signInWithPassword\" /* RecaptchaActionName.SIGN_IN_WITH_PASSWORD */, signInWithPassword);\n case \"emailLink\" /* SignInMethod.EMAIL_LINK */:\n return signInWithEmailLink$1(auth, {\n email: this._email,\n oobCode: this._password\n });\n default:\n _fail(auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n }\n }\n /** @internal */\n async _linkToIdToken(auth, idToken) {\n switch (this.signInMethod) {\n case \"password\" /* SignInMethod.EMAIL_PASSWORD */:\n const request = {\n idToken,\n returnSecureToken: true,\n email: this._email,\n password: this._password,\n clientType: \"CLIENT_TYPE_WEB\" /* RecaptchaClientType.WEB */\n };\n return handleRecaptchaFlow(auth, request, \"signUpPassword\" /* RecaptchaActionName.SIGN_UP_PASSWORD */, linkEmailPassword);\n case \"emailLink\" /* SignInMethod.EMAIL_LINK */:\n return signInWithEmailLinkForLinking(auth, {\n idToken,\n email: this._email,\n oobCode: this._password\n });\n default:\n _fail(auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n }\n }\n /** @internal */\n _getReauthenticationResolver(auth) {\n return this._getIdTokenResponse(auth);\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function signInWithIdp(auth, request) {\n return _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signInWithIdp\" /* Endpoint.SIGN_IN_WITH_IDP */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst IDP_REQUEST_URI$1 = 'http://localhost';\n/**\r\n * Represents the OAuth credentials returned by an {@link OAuthProvider}.\r\n *\r\n * @remarks\r\n * Implementations specify the details 
about each auth provider's credential requirements.\r\n *\r\n * @public\r\n */\nclass OAuthCredential extends AuthCredential {\n constructor() {\n super(...arguments);\n this.pendingToken = null;\n }\n /** @internal */\n static _fromParams(params) {\n const cred = new OAuthCredential(params.providerId, params.signInMethod);\n if (params.idToken || params.accessToken) {\n // OAuth 2 and either ID token or access token.\n if (params.idToken) {\n cred.idToken = params.idToken;\n }\n if (params.accessToken) {\n cred.accessToken = params.accessToken;\n }\n // Add nonce if available and no pendingToken is present.\n if (params.nonce && !params.pendingToken) {\n cred.nonce = params.nonce;\n }\n if (params.pendingToken) {\n cred.pendingToken = params.pendingToken;\n }\n } else if (params.oauthToken && params.oauthTokenSecret) {\n // OAuth 1 and OAuth token with token secret\n cred.accessToken = params.oauthToken;\n cred.secret = params.oauthTokenSecret;\n } else {\n _fail(\"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n }\n return cred;\n }\n /** {@inheritdoc AuthCredential.toJSON} */\n toJSON() {\n return {\n idToken: this.idToken,\n accessToken: this.accessToken,\n secret: this.secret,\n nonce: this.nonce,\n pendingToken: this.pendingToken,\n providerId: this.providerId,\n signInMethod: this.signInMethod\n };\n }\n /**\r\n * Static method to deserialize a JSON representation of an object into an\r\n * {@link AuthCredential}.\r\n *\r\n * @param json - Input can be either Object or the stringified representation of the object.\r\n * When string is provided, JSON.parse would be called first.\r\n *\r\n * @returns If the JSON input does not represent an {@link AuthCredential}, null is returned.\r\n */\n static fromJSON(json) {\n const obj = typeof json === 'string' ? 
JSON.parse(json) : json;\n const {\n providerId,\n signInMethod\n } = obj,\n rest = __rest(obj, [\"providerId\", \"signInMethod\"]);\n if (!providerId || !signInMethod) {\n return null;\n }\n const cred = new OAuthCredential(providerId, signInMethod);\n cred.idToken = rest.idToken || undefined;\n cred.accessToken = rest.accessToken || undefined;\n cred.secret = rest.secret;\n cred.nonce = rest.nonce;\n cred.pendingToken = rest.pendingToken || null;\n return cred;\n }\n /** @internal */\n _getIdTokenResponse(auth) {\n const request = this.buildRequest();\n return signInWithIdp(auth, request);\n }\n /** @internal */\n _linkToIdToken(auth, idToken) {\n const request = this.buildRequest();\n request.idToken = idToken;\n return signInWithIdp(auth, request);\n }\n /** @internal */\n _getReauthenticationResolver(auth) {\n const request = this.buildRequest();\n request.autoCreate = false;\n return signInWithIdp(auth, request);\n }\n buildRequest() {\n const request = {\n requestUri: IDP_REQUEST_URI$1,\n returnSecureToken: true\n };\n if (this.pendingToken) {\n request.pendingToken = this.pendingToken;\n } else {\n const postBody = {};\n if (this.idToken) {\n postBody['id_token'] = this.idToken;\n }\n if (this.accessToken) {\n postBody['access_token'] = this.accessToken;\n }\n if (this.secret) {\n postBody['oauth_token_secret'] = this.secret;\n }\n postBody['providerId'] = this.providerId;\n if (this.nonce && !this.pendingToken) {\n postBody['nonce'] = this.nonce;\n }\n request.postBody = querystring(postBody);\n }\n return request;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function sendPhoneVerificationCode(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:sendVerificationCode\" /* Endpoint.SEND_VERIFICATION_CODE */, _addTidIfNecessary(auth, request));\n}\nasync function signInWithPhoneNumber$1(auth, request) {\n return _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signInWithPhoneNumber\" /* Endpoint.SIGN_IN_WITH_PHONE_NUMBER */, _addTidIfNecessary(auth, request));\n}\nasync function linkWithPhoneNumber$1(auth, request) {\n const response = await _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signInWithPhoneNumber\" /* Endpoint.SIGN_IN_WITH_PHONE_NUMBER */, _addTidIfNecessary(auth, request));\n if (response.temporaryProof) {\n throw _makeTaggedError(auth, \"account-exists-with-different-credential\" /* AuthErrorCode.NEED_CONFIRMATION */, response);\n }\n return response;\n}\nconst VERIFY_PHONE_NUMBER_FOR_EXISTING_ERROR_MAP_ = {\n [\"USER_NOT_FOUND\" /* ServerError.USER_NOT_FOUND */]: \"user-not-found\" /* AuthErrorCode.USER_DELETED */\n};\nasync function verifyPhoneNumberForExisting(auth, request) {\n const apiRequest = Object.assign(Object.assign({}, request), {\n operation: 'REAUTH'\n });\n return _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, 
\"/v1/accounts:signInWithPhoneNumber\" /* Endpoint.SIGN_IN_WITH_PHONE_NUMBER */, _addTidIfNecessary(auth, apiRequest), VERIFY_PHONE_NUMBER_FOR_EXISTING_ERROR_MAP_);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Represents the credentials returned by {@link PhoneAuthProvider}.\r\n *\r\n * @public\r\n */\nclass PhoneAuthCredential extends AuthCredential {\n constructor(params) {\n super(\"phone\" /* ProviderId.PHONE */, \"phone\" /* SignInMethod.PHONE */);\n this.params = params;\n }\n /** @internal */\n static _fromVerification(verificationId, verificationCode) {\n return new PhoneAuthCredential({\n verificationId,\n verificationCode\n });\n }\n /** @internal */\n static _fromTokenResponse(phoneNumber, temporaryProof) {\n return new PhoneAuthCredential({\n phoneNumber,\n temporaryProof\n });\n }\n /** @internal */\n _getIdTokenResponse(auth) {\n return signInWithPhoneNumber$1(auth, this._makeVerificationRequest());\n }\n /** @internal */\n _linkToIdToken(auth, idToken) {\n return linkWithPhoneNumber$1(auth, Object.assign({\n idToken\n }, this._makeVerificationRequest()));\n }\n /** @internal */\n _getReauthenticationResolver(auth) {\n return verifyPhoneNumberForExisting(auth, this._makeVerificationRequest());\n }\n /** @internal */\n _makeVerificationRequest() {\n const {\n temporaryProof,\n phoneNumber,\n verificationId,\n verificationCode\n } = this.params;\n if (temporaryProof && phoneNumber) {\n return {\n temporaryProof,\n phoneNumber\n };\n }\n return {\n sessionInfo: verificationId,\n code: verificationCode\n };\n }\n /** {@inheritdoc AuthCredential.toJSON} */\n toJSON() {\n const obj = {\n providerId: this.providerId\n };\n if (this.params.phoneNumber) {\n obj.phoneNumber = this.params.phoneNumber;\n }\n if (this.params.temporaryProof) {\n obj.temporaryProof = this.params.temporaryProof;\n }\n if (this.params.verificationCode) {\n obj.verificationCode = this.params.verificationCode;\n }\n if (this.params.verificationId) {\n obj.verificationId = this.params.verificationId;\n }\n return obj;\n }\n /** Generates a phone credential based on a plain object or a JSON string. 
*/\n static fromJSON(json) {\n if (typeof json === 'string') {\n json = JSON.parse(json);\n }\n const {\n verificationId,\n verificationCode,\n phoneNumber,\n temporaryProof\n } = json;\n if (!verificationCode && !verificationId && !phoneNumber && !temporaryProof) {\n return null;\n }\n return new PhoneAuthCredential({\n verificationId,\n verificationCode,\n phoneNumber,\n temporaryProof\n });\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Maps the mode string in action code URL to Action Code Info operation.\r\n *\r\n * @param mode\r\n */\nfunction parseMode(mode) {\n switch (mode) {\n case 'recoverEmail':\n return \"RECOVER_EMAIL\" /* ActionCodeOperation.RECOVER_EMAIL */;\n case 'resetPassword':\n return \"PASSWORD_RESET\" /* ActionCodeOperation.PASSWORD_RESET */;\n case 'signIn':\n return \"EMAIL_SIGNIN\" /* ActionCodeOperation.EMAIL_SIGNIN */;\n case 'verifyEmail':\n return \"VERIFY_EMAIL\" /* ActionCodeOperation.VERIFY_EMAIL */;\n case 'verifyAndChangeEmail':\n return \"VERIFY_AND_CHANGE_EMAIL\" /* ActionCodeOperation.VERIFY_AND_CHANGE_EMAIL */;\n case 'revertSecondFactorAddition':\n return \"REVERT_SECOND_FACTOR_ADDITION\" /* ActionCodeOperation.REVERT_SECOND_FACTOR_ADDITION */;\n default:\n return null;\n }\n}\n/**\r\n * Helper to parse FDL links\r\n *\r\n * @param url\r\n */\nfunction parseDeepLink(url) {\n const link = querystringDecode(extractQuerystring(url))['link'];\n // Double link case (automatic redirect).\n const doubleDeepLink = link ? querystringDecode(extractQuerystring(link))['deep_link_id'] : null;\n // iOS custom scheme links.\n const iOSDeepLink = querystringDecode(extractQuerystring(url))['deep_link_id'];\n const iOSDoubleDeepLink = iOSDeepLink ? querystringDecode(extractQuerystring(iOSDeepLink))['link'] : null;\n return iOSDoubleDeepLink || iOSDeepLink || doubleDeepLink || link || url;\n}\n/**\r\n * A utility class to parse email action URLs such as password reset, email verification,\r\n * email link sign in, etc.\r\n *\r\n * @public\r\n */\nclass ActionCodeURL {\n /**\r\n * @param actionLink - The link from which to extract the URL.\r\n * @returns The {@link ActionCodeURL} object, or null if the link is invalid.\r\n *\r\n * @internal\r\n */\n constructor(actionLink) {\n var _a, _b, _c, _d, _e, _f;\n const searchParams = querystringDecode(extractQuerystring(actionLink));\n const apiKey = (_a = searchParams[\"apiKey\" /* QueryField.API_KEY */]) !== null && _a !== void 0 ? _a : null;\n const code = (_b = searchParams[\"oobCode\" /* QueryField.CODE */]) !== null && _b !== void 0 ? _b : null;\n const operation = parseMode((_c = searchParams[\"mode\" /* QueryField.MODE */]) !== null && _c !== void 0 ? 
_c : null);\n // Validate API key, code and mode.\n _assert(apiKey && code && operation, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n this.apiKey = apiKey;\n this.operation = operation;\n this.code = code;\n this.continueUrl = (_d = searchParams[\"continueUrl\" /* QueryField.CONTINUE_URL */]) !== null && _d !== void 0 ? _d : null;\n this.languageCode = (_e = searchParams[\"languageCode\" /* QueryField.LANGUAGE_CODE */]) !== null && _e !== void 0 ? _e : null;\n this.tenantId = (_f = searchParams[\"tenantId\" /* QueryField.TENANT_ID */]) !== null && _f !== void 0 ? _f : null;\n }\n /**\r\n * Parses the email action link string and returns an {@link ActionCodeURL} if the link is valid,\r\n * otherwise returns null.\r\n *\r\n * @param link - The email action link string.\r\n * @returns The {@link ActionCodeURL} object, or null if the link is invalid.\r\n *\r\n * @public\r\n */\n static parseLink(link) {\n const actionLink = parseDeepLink(link);\n try {\n return new ActionCodeURL(actionLink);\n } catch (_a) {\n return null;\n }\n }\n}\n/**\r\n * Parses the email action link string and returns an {@link ActionCodeURL} if\r\n * the link is valid, otherwise returns null.\r\n *\r\n * @public\r\n */\nfunction parseActionCodeURL(link) {\n return ActionCodeURL.parseLink(link);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Provider for generating {@link EmailAuthCredential}.\r\n *\r\n * @public\r\n */\nlet EmailAuthProvider = /*#__PURE__*/(() => {\n class EmailAuthProvider {\n constructor() {\n /**\r\n * Always set to {@link ProviderId}.PASSWORD, even for email link.\r\n */\n this.providerId = EmailAuthProvider.PROVIDER_ID;\n }\n /**\r\n * Initialize an {@link AuthCredential} using an email and password.\r\n *\r\n * @example\r\n * ```javascript\r\n * const authCredential = EmailAuthProvider.credential(email, password);\r\n * const userCredential = await signInWithCredential(auth, authCredential);\r\n * ```\r\n *\r\n * @example\r\n * ```javascript\r\n * const userCredential = await signInWithEmailAndPassword(auth, email, password);\r\n * ```\r\n *\r\n * @param email - Email address.\r\n * @param password - User account password.\r\n * @returns The auth provider credential.\r\n */\n static credential(email, password) {\n return EmailAuthCredential._fromEmailAndPassword(email, password);\n }\n /**\r\n * Initialize an {@link AuthCredential} using an email and an email link after a sign in with\r\n * email link operation.\r\n *\r\n * @example\r\n * ```javascript\r\n * const authCredential = EmailAuthProvider.credentialWithLink(auth, email, emailLink);\r\n * const userCredential = await signInWithCredential(auth, authCredential);\r\n * ```\r\n *\r\n * @example\r\n * ```javascript\r\n * await sendSignInLinkToEmail(auth, email);\r\n * // Obtain emailLink from user.\r\n * const userCredential = await signInWithEmailLink(auth, email, emailLink);\r\n * ```\r\n *\r\n * @param auth - The 
{@link Auth} instance used to verify the link.\r\n * @param email - Email address.\r\n * @param emailLink - Sign-in email link.\r\n * @returns - The auth provider credential.\r\n */\n static credentialWithLink(email, emailLink) {\n const actionCodeUrl = ActionCodeURL.parseLink(emailLink);\n _assert(actionCodeUrl, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n return EmailAuthCredential._fromEmailAndCode(email, actionCodeUrl.code, actionCodeUrl.tenantId);\n }\n }\n /**\r\n * Always set to {@link ProviderId}.PASSWORD, even for email link.\r\n */\n\n /**\r\n * Always set to {@link SignInMethod}.EMAIL_PASSWORD.\r\n */\n\n /**\r\n * Always set to {@link SignInMethod}.EMAIL_LINK.\r\n */\n EmailAuthProvider.PROVIDER_ID = \"password\" /* ProviderId.PASSWORD */;\n\n EmailAuthProvider.EMAIL_PASSWORD_SIGN_IN_METHOD = \"password\" /* SignInMethod.EMAIL_PASSWORD */;\n\n EmailAuthProvider.EMAIL_LINK_SIGN_IN_METHOD = \"emailLink\" /* SignInMethod.EMAIL_LINK */;\n\n /**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n /**\r\n * The base class for all Federated providers (OAuth (including OIDC), SAML).\r\n *\r\n * This class is not meant to be instantiated directly.\r\n *\r\n * @public\r\n */\n return EmailAuthProvider;\n})();\nclass FederatedAuthProvider {\n /**\r\n * Constructor for generic OAuth providers.\r\n *\r\n * @param providerId - Provider for which credentials should be generated.\r\n */\n constructor(providerId) {\n this.providerId = providerId;\n /** @internal */\n this.defaultLanguageCode = null;\n /** @internal */\n this.customParameters = {};\n }\n /**\r\n * Set the language gode.\r\n *\r\n * @param languageCode - language code\r\n */\n setDefaultLanguage(languageCode) {\n this.defaultLanguageCode = languageCode;\n }\n /**\r\n * Sets the OAuth custom parameters to pass in an OAuth request for popup and redirect sign-in\r\n * operations.\r\n *\r\n * @remarks\r\n * For a detailed list, check the reserved required OAuth 2.0 parameters such as `client_id`,\r\n * `redirect_uri`, `scope`, `response_type`, and `state` are not allowed and will be ignored.\r\n *\r\n * @param customOAuthParameters - The custom OAuth parameters to pass in the OAuth request.\r\n */\n setCustomParameters(customOAuthParameters) {\n this.customParameters = customOAuthParameters;\n return this;\n }\n /**\r\n * Retrieve the current list of {@link CustomParameters}.\r\n */\n getCustomParameters() {\n return this.customParameters;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS 
OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Common code to all OAuth providers. This is separate from the\r\n * {@link OAuthProvider} so that child providers (like\r\n * {@link GoogleAuthProvider}) don't inherit the `credential` instance method.\r\n * Instead, they rely on a static `credential` method.\r\n */\nclass BaseOAuthProvider extends FederatedAuthProvider {\n constructor() {\n super(...arguments);\n /** @internal */\n this.scopes = [];\n }\n /**\r\n * Add an OAuth scope to the credential.\r\n *\r\n * @param scope - Provider OAuth scope to add.\r\n */\n addScope(scope) {\n // If not already added, add scope to list.\n if (!this.scopes.includes(scope)) {\n this.scopes.push(scope);\n }\n return this;\n }\n /**\r\n * Retrieve the current list of OAuth scopes.\r\n */\n getScopes() {\n return [...this.scopes];\n }\n}\n/**\r\n * Provider for generating generic {@link OAuthCredential}.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a redirect.\r\n * const provider = new OAuthProvider('google.com');\r\n * // Start a sign in process for an unauthenticated user.\r\n * provider.addScope('profile');\r\n * provider.addScope('email');\r\n * await signInWithRedirect(auth, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * if (result) {\r\n * // This is the signed-in user\r\n * const user = result.user;\r\n * // This gives you a OAuth Access Token for the provider.\r\n * const credential = provider.credentialFromResult(auth, result);\r\n * const token = credential.accessToken;\r\n * }\r\n * ```\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a popup.\r\n * const provider = new OAuthProvider('google.com');\r\n * provider.addScope('profile');\r\n * provider.addScope('email');\r\n * const result = await signInWithPopup(auth, provider);\r\n *\r\n * // The signed-in user info.\r\n * const user = result.user;\r\n * // This gives you a OAuth Access Token for the provider.\r\n * const credential = provider.credentialFromResult(auth, result);\r\n * const token = credential.accessToken;\r\n * ```\r\n * @public\r\n */\nclass OAuthProvider extends BaseOAuthProvider {\n /**\r\n * Creates an {@link OAuthCredential} from a JSON string or a plain object.\r\n * @param json - A plain object or a JSON string\r\n */\n static credentialFromJSON(json) {\n const obj = typeof json === 'string' ? JSON.parse(json) : json;\n _assert('providerId' in obj && 'signInMethod' in obj, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n return OAuthCredential._fromParams(obj);\n }\n /**\r\n * Creates a {@link OAuthCredential} from a generic OAuth provider's access token or ID token.\r\n *\r\n * @remarks\r\n * The raw nonce is required when an ID token with a nonce field is provided. 
The SHA-256 hash of\r\n * the raw nonce must match the nonce field in the ID token.\r\n *\r\n * @example\r\n * ```javascript\r\n * // `googleUser` from the onsuccess Google Sign In callback.\r\n * // Initialize a generate OAuth provider with a `google.com` providerId.\r\n * const provider = new OAuthProvider('google.com');\r\n * const credential = provider.credential({\r\n * idToken: googleUser.getAuthResponse().id_token,\r\n * });\r\n * const result = await signInWithCredential(credential);\r\n * ```\r\n *\r\n * @param params - Either the options object containing the ID token, access token and raw nonce\r\n * or the ID token string.\r\n */\n credential(params) {\n return this._credential(Object.assign(Object.assign({}, params), {\n nonce: params.rawNonce\n }));\n }\n /** An internal credential method that accepts more permissive options */\n _credential(params) {\n _assert(params.idToken || params.accessToken, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n // For OAuthCredential, sign in method is same as providerId.\n return OAuthCredential._fromParams(Object.assign(Object.assign({}, params), {\n providerId: this.providerId,\n signInMethod: this.providerId\n }));\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link UserCredential}.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromResult(userCredential) {\n return OAuthProvider.oauthCredentialFromTaggedObject(userCredential);\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link AuthError} which was\r\n * thrown during a sign-in, link, or reauthenticate operation.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromError(error) {\n return OAuthProvider.oauthCredentialFromTaggedObject(error.customData || {});\n }\n static oauthCredentialFromTaggedObject({\n _tokenResponse: tokenResponse\n }) {\n if (!tokenResponse) {\n return null;\n }\n const {\n oauthIdToken,\n oauthAccessToken,\n oauthTokenSecret,\n pendingToken,\n nonce,\n providerId\n } = tokenResponse;\n if (!oauthAccessToken && !oauthTokenSecret && !oauthIdToken && !pendingToken) {\n return null;\n }\n if (!providerId) {\n return null;\n }\n try {\n return new OAuthProvider(providerId)._credential({\n idToken: oauthIdToken,\n accessToken: oauthAccessToken,\n nonce,\n pendingToken\n });\n } catch (e) {\n return null;\n }\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Provider for generating an {@link OAuthCredential} for {@link ProviderId}.FACEBOOK.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a redirect.\r\n * const provider = new FacebookAuthProvider();\r\n * // Start a sign in process for an unauthenticated user.\r\n * provider.addScope('user_birthday');\r\n * await signInWithRedirect(auth, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from 
the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * if (result) {\r\n * // This is the signed-in user\r\n * const user = result.user;\r\n * // This gives you a Facebook Access Token.\r\n * const credential = FacebookAuthProvider.credentialFromResult(result);\r\n * const token = credential.accessToken;\r\n * }\r\n * ```\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a popup.\r\n * const provider = new FacebookAuthProvider();\r\n * provider.addScope('user_birthday');\r\n * const result = await signInWithPopup(auth, provider);\r\n *\r\n * // The signed-in user info.\r\n * const user = result.user;\r\n * // This gives you a Facebook Access Token.\r\n * const credential = FacebookAuthProvider.credentialFromResult(result);\r\n * const token = credential.accessToken;\r\n * ```\r\n *\r\n * @public\r\n */\nlet FacebookAuthProvider = /*#__PURE__*/(() => {\n class FacebookAuthProvider extends BaseOAuthProvider {\n constructor() {\n super(\"facebook.com\" /* ProviderId.FACEBOOK */);\n }\n /**\r\n * Creates a credential for Facebook.\r\n *\r\n * @example\r\n * ```javascript\r\n * // `event` from the Facebook auth.authResponseChange callback.\r\n * const credential = FacebookAuthProvider.credential(event.authResponse.accessToken);\r\n * const result = await signInWithCredential(credential);\r\n * ```\r\n *\r\n * @param accessToken - Facebook access token.\r\n */\n static credential(accessToken) {\n return OAuthCredential._fromParams({\n providerId: FacebookAuthProvider.PROVIDER_ID,\n signInMethod: FacebookAuthProvider.FACEBOOK_SIGN_IN_METHOD,\n accessToken\n });\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link UserCredential}.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromResult(userCredential) {\n return FacebookAuthProvider.credentialFromTaggedObject(userCredential);\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link AuthError} which was\r\n * thrown during a sign-in, link, or reauthenticate operation.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromError(error) {\n return FacebookAuthProvider.credentialFromTaggedObject(error.customData || {});\n }\n static credentialFromTaggedObject({\n _tokenResponse: tokenResponse\n }) {\n if (!tokenResponse || !('oauthAccessToken' in tokenResponse)) {\n return null;\n }\n if (!tokenResponse.oauthAccessToken) {\n return null;\n }\n try {\n return FacebookAuthProvider.credential(tokenResponse.oauthAccessToken);\n } catch (_a) {\n return null;\n }\n }\n }\n /** Always set to {@link SignInMethod}.FACEBOOK. */\n\n /** Always set to {@link ProviderId}.FACEBOOK. 
*/FacebookAuthProvider.FACEBOOK_SIGN_IN_METHOD = \"facebook.com\" /* SignInMethod.FACEBOOK */;\n\n FacebookAuthProvider.PROVIDER_ID = \"facebook.com\" /* ProviderId.FACEBOOK */;\n\n /**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n /**\r\n * Provider for generating an {@link OAuthCredential} for {@link ProviderId}.GOOGLE.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a redirect.\r\n * const provider = new GoogleAuthProvider();\r\n * // Start a sign in process for an unauthenticated user.\r\n * provider.addScope('profile');\r\n * provider.addScope('email');\r\n * await signInWithRedirect(auth, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * if (result) {\r\n * // This is the signed-in user\r\n * const user = result.user;\r\n * // This gives you a Google Access Token.\r\n * const credential = GoogleAuthProvider.credentialFromResult(result);\r\n * const token = credential.accessToken;\r\n * }\r\n * ```\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a popup.\r\n * const provider = new GoogleAuthProvider();\r\n * provider.addScope('profile');\r\n * provider.addScope('email');\r\n * const result = await signInWithPopup(auth, provider);\r\n *\r\n * // The signed-in user info.\r\n * const user = result.user;\r\n * // This gives you a Google Access Token.\r\n * const credential = GoogleAuthProvider.credentialFromResult(result);\r\n * const token = credential.accessToken;\r\n * ```\r\n *\r\n * @public\r\n */\n return FacebookAuthProvider;\n})();\nlet GoogleAuthProvider = /*#__PURE__*/(() => {\n class GoogleAuthProvider extends BaseOAuthProvider {\n constructor() {\n super(\"google.com\" /* ProviderId.GOOGLE */);\n this.addScope('profile');\n }\n /**\r\n * Creates a credential for Google. 
At least one of ID token and access token is required.\r\n *\r\n * @example\r\n * ```javascript\r\n * // \\`googleUser\\` from the onsuccess Google Sign In callback.\r\n * const credential = GoogleAuthProvider.credential(googleUser.getAuthResponse().id_token);\r\n * const result = await signInWithCredential(credential);\r\n * ```\r\n *\r\n * @param idToken - Google ID token.\r\n * @param accessToken - Google access token.\r\n */\n static credential(idToken, accessToken) {\n return OAuthCredential._fromParams({\n providerId: GoogleAuthProvider.PROVIDER_ID,\n signInMethod: GoogleAuthProvider.GOOGLE_SIGN_IN_METHOD,\n idToken,\n accessToken\n });\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link UserCredential}.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromResult(userCredential) {\n return GoogleAuthProvider.credentialFromTaggedObject(userCredential);\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link AuthError} which was\r\n * thrown during a sign-in, link, or reauthenticate operation.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromError(error) {\n return GoogleAuthProvider.credentialFromTaggedObject(error.customData || {});\n }\n static credentialFromTaggedObject({\n _tokenResponse: tokenResponse\n }) {\n if (!tokenResponse) {\n return null;\n }\n const {\n oauthIdToken,\n oauthAccessToken\n } = tokenResponse;\n if (!oauthIdToken && !oauthAccessToken) {\n // This could be an oauth 1 credential or a phone credential\n return null;\n }\n try {\n return GoogleAuthProvider.credential(oauthIdToken, oauthAccessToken);\n } catch (_a) {\n return null;\n }\n }\n }\n /** Always set to {@link SignInMethod}.GOOGLE. */\n\n /** Always set to {@link ProviderId}.GOOGLE. 
*/GoogleAuthProvider.GOOGLE_SIGN_IN_METHOD = \"google.com\" /* SignInMethod.GOOGLE */;\n\n GoogleAuthProvider.PROVIDER_ID = \"google.com\" /* ProviderId.GOOGLE */;\n\n /**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n /**\r\n * Provider for generating an {@link OAuthCredential} for {@link ProviderId}.GITHUB.\r\n *\r\n * @remarks\r\n * GitHub requires an OAuth 2.0 redirect, so you can either handle the redirect directly, or use\r\n * the {@link signInWithPopup} handler:\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a redirect.\r\n * const provider = new GithubAuthProvider();\r\n * // Start a sign in process for an unauthenticated user.\r\n * provider.addScope('repo');\r\n * await signInWithRedirect(auth, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * if (result) {\r\n * // This is the signed-in user\r\n * const user = result.user;\r\n * // This gives you a Github Access Token.\r\n * const credential = GithubAuthProvider.credentialFromResult(result);\r\n * const token = credential.accessToken;\r\n * }\r\n * ```\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a popup.\r\n * const provider = new GithubAuthProvider();\r\n * provider.addScope('repo');\r\n * const result = await signInWithPopup(auth, provider);\r\n *\r\n * // The signed-in user info.\r\n * const user = result.user;\r\n * // This gives you a Github Access Token.\r\n * const credential = GithubAuthProvider.credentialFromResult(result);\r\n * const token = credential.accessToken;\r\n * ```\r\n * @public\r\n */\n return GoogleAuthProvider;\n})();\nlet GithubAuthProvider = /*#__PURE__*/(() => {\n class GithubAuthProvider extends BaseOAuthProvider {\n constructor() {\n super(\"github.com\" /* ProviderId.GITHUB */);\n }\n /**\r\n * Creates a credential for Github.\r\n *\r\n * @param accessToken - Github access token.\r\n */\n static credential(accessToken) {\n return OAuthCredential._fromParams({\n providerId: GithubAuthProvider.PROVIDER_ID,\n signInMethod: GithubAuthProvider.GITHUB_SIGN_IN_METHOD,\n accessToken\n });\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link UserCredential}.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromResult(userCredential) {\n return GithubAuthProvider.credentialFromTaggedObject(userCredential);\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link AuthError} which was\r\n * thrown during a sign-in, link, or reauthenticate operation.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromError(error) {\n return GithubAuthProvider.credentialFromTaggedObject(error.customData || {});\n }\n static credentialFromTaggedObject({\n _tokenResponse: tokenResponse\n }) 
{\n if (!tokenResponse || !('oauthAccessToken' in tokenResponse)) {\n return null;\n }\n if (!tokenResponse.oauthAccessToken) {\n return null;\n }\n try {\n return GithubAuthProvider.credential(tokenResponse.oauthAccessToken);\n } catch (_a) {\n return null;\n }\n }\n }\n /** Always set to {@link SignInMethod}.GITHUB. */\n\n /** Always set to {@link ProviderId}.GITHUB. */GithubAuthProvider.GITHUB_SIGN_IN_METHOD = \"github.com\" /* SignInMethod.GITHUB */;\n\n GithubAuthProvider.PROVIDER_ID = \"github.com\" /* ProviderId.GITHUB */;\n\n /**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n return GithubAuthProvider;\n})();\nconst IDP_REQUEST_URI = 'http://localhost';\n/**\r\n * @public\r\n */\nclass SAMLAuthCredential extends AuthCredential {\n /** @internal */\n constructor(providerId, pendingToken) {\n super(providerId, providerId);\n this.pendingToken = pendingToken;\n }\n /** @internal */\n _getIdTokenResponse(auth) {\n const request = this.buildRequest();\n return signInWithIdp(auth, request);\n }\n /** @internal */\n _linkToIdToken(auth, idToken) {\n const request = this.buildRequest();\n request.idToken = idToken;\n return signInWithIdp(auth, request);\n }\n /** @internal */\n _getReauthenticationResolver(auth) {\n const request = this.buildRequest();\n request.autoCreate = false;\n return signInWithIdp(auth, request);\n }\n /** {@inheritdoc AuthCredential.toJSON} */\n toJSON() {\n return {\n signInMethod: this.signInMethod,\n providerId: this.providerId,\n pendingToken: this.pendingToken\n };\n }\n /**\r\n * Static method to deserialize a JSON representation of an object into an\r\n * {@link AuthCredential}.\r\n *\r\n * @param json - Input can be either Object or the stringified representation of the object.\r\n * When string is provided, JSON.parse would be called first.\r\n *\r\n * @returns If the JSON input does not represent an {@link AuthCredential}, null is returned.\r\n */\n static fromJSON(json) {\n const obj = typeof json === 'string' ? 
JSON.parse(json) : json;\n const {\n providerId,\n signInMethod,\n pendingToken\n } = obj;\n if (!providerId || !signInMethod || !pendingToken || providerId !== signInMethod) {\n return null;\n }\n return new SAMLAuthCredential(providerId, pendingToken);\n }\n /**\r\n * Helper static method to avoid exposing the constructor to end users.\r\n *\r\n * @internal\r\n */\n static _create(providerId, pendingToken) {\n return new SAMLAuthCredential(providerId, pendingToken);\n }\n buildRequest() {\n return {\n requestUri: IDP_REQUEST_URI,\n returnSecureToken: true,\n pendingToken: this.pendingToken\n };\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst SAML_PROVIDER_PREFIX = 'saml.';\n/**\r\n * An {@link AuthProvider} for SAML.\r\n *\r\n * @public\r\n */\nclass SAMLAuthProvider extends FederatedAuthProvider {\n /**\r\n * Constructor. The providerId must start with \"saml.\"\r\n * @param providerId - SAML provider ID.\r\n */\n constructor(providerId) {\n _assert(providerId.startsWith(SAML_PROVIDER_PREFIX), \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n super(providerId);\n }\n /**\r\n * Generates an {@link AuthCredential} from a {@link UserCredential} after a\r\n * successful SAML flow completes.\r\n *\r\n * @remarks\r\n *\r\n * For example, to get an {@link AuthCredential}, you could write the\r\n * following code:\r\n *\r\n * ```js\r\n * const userCredential = await signInWithPopup(auth, samlProvider);\r\n * const credential = SAMLAuthProvider.credentialFromResult(userCredential);\r\n * ```\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromResult(userCredential) {\n return SAMLAuthProvider.samlCredentialFromTaggedObject(userCredential);\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link AuthError} which was\r\n * thrown during a sign-in, link, or reauthenticate operation.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromError(error) {\n return SAMLAuthProvider.samlCredentialFromTaggedObject(error.customData || {});\n }\n /**\r\n * Creates an {@link AuthCredential} from a JSON string or a plain object.\r\n * @param json - A plain object or a JSON string\r\n */\n static credentialFromJSON(json) {\n const credential = SAMLAuthCredential.fromJSON(json);\n _assert(credential, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n return credential;\n }\n static samlCredentialFromTaggedObject({\n _tokenResponse: tokenResponse\n }) {\n if (!tokenResponse) {\n return null;\n }\n const {\n pendingToken,\n providerId\n } = tokenResponse;\n if (!pendingToken || !providerId) {\n return null;\n }\n try {\n return SAMLAuthCredential._create(providerId, pendingToken);\n } catch (e) {\n return null;\n }\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in 
compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Provider for generating an {@link OAuthCredential} for {@link ProviderId}.TWITTER.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a redirect.\r\n * const provider = new TwitterAuthProvider();\r\n * // Start a sign in process for an unauthenticated user.\r\n * await signInWithRedirect(auth, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * if (result) {\r\n * // This is the signed-in user\r\n * const user = result.user;\r\n * // This gives you a Twitter Access Token and Secret.\r\n * const credential = TwitterAuthProvider.credentialFromResult(result);\r\n * const token = credential.accessToken;\r\n * const secret = credential.secret;\r\n * }\r\n * ```\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a popup.\r\n * const provider = new TwitterAuthProvider();\r\n * const result = await signInWithPopup(auth, provider);\r\n *\r\n * // The signed-in user info.\r\n * const user = result.user;\r\n * // This gives you a Twitter Access Token and Secret.\r\n * const credential = TwitterAuthProvider.credentialFromResult(result);\r\n * const token = credential.accessToken;\r\n * const secret = credential.secret;\r\n * ```\r\n *\r\n * @public\r\n */\nlet TwitterAuthProvider = /*#__PURE__*/(() => {\n class TwitterAuthProvider extends BaseOAuthProvider {\n constructor() {\n super(\"twitter.com\" /* ProviderId.TWITTER */);\n }\n /**\r\n * Creates a credential for Twitter.\r\n *\r\n * @param token - Twitter access token.\r\n * @param secret - Twitter secret.\r\n */\n static credential(token, secret) {\n return OAuthCredential._fromParams({\n providerId: TwitterAuthProvider.PROVIDER_ID,\n signInMethod: TwitterAuthProvider.TWITTER_SIGN_IN_METHOD,\n oauthToken: token,\n oauthTokenSecret: secret\n });\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link UserCredential}.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromResult(userCredential) {\n return TwitterAuthProvider.credentialFromTaggedObject(userCredential);\n }\n /**\r\n * Used to extract the underlying {@link OAuthCredential} from a {@link AuthError} which was\r\n * thrown during a sign-in, link, or reauthenticate operation.\r\n *\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromError(error) {\n return TwitterAuthProvider.credentialFromTaggedObject(error.customData || {});\n }\n static credentialFromTaggedObject({\n _tokenResponse: tokenResponse\n }) {\n if (!tokenResponse) {\n return null;\n }\n const {\n oauthAccessToken,\n oauthTokenSecret\n } = tokenResponse;\n if (!oauthAccessToken || !oauthTokenSecret) {\n return null;\n }\n try {\n return TwitterAuthProvider.credential(oauthAccessToken, oauthTokenSecret);\n } catch (_a) {\n return null;\n }\n }\n }\n /** Always set to {@link SignInMethod}.TWITTER. */\n\n /** Always set to {@link ProviderId}.TWITTER. 
*/TwitterAuthProvider.TWITTER_SIGN_IN_METHOD = \"twitter.com\" /* SignInMethod.TWITTER */;\n\n TwitterAuthProvider.PROVIDER_ID = \"twitter.com\" /* ProviderId.TWITTER */;\n\n /**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n return TwitterAuthProvider;\n})();\nasync function signUp(auth, request) {\n return _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signUp\" /* Endpoint.SIGN_UP */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass UserCredentialImpl {\n constructor(params) {\n this.user = params.user;\n this.providerId = params.providerId;\n this._tokenResponse = params._tokenResponse;\n this.operationType = params.operationType;\n }\n static async _fromIdTokenResponse(auth, operationType, idTokenResponse, isAnonymous = false) {\n const user = await UserImpl._fromIdTokenResponse(auth, idTokenResponse, isAnonymous);\n const providerId = providerIdForResponse(idTokenResponse);\n const userCred = new UserCredentialImpl({\n user,\n providerId,\n _tokenResponse: idTokenResponse,\n operationType\n });\n return userCred;\n }\n static async _forOperation(user, operationType, response) {\n await user._updateTokensIfNecessary(response, /* reload */true);\n const providerId = providerIdForResponse(response);\n return new UserCredentialImpl({\n user,\n providerId,\n _tokenResponse: response,\n operationType\n });\n }\n}\nfunction providerIdForResponse(response) {\n if (response.providerId) {\n return response.providerId;\n }\n if ('phoneNumber' in response) {\n return \"phone\" /* ProviderId.PHONE */;\n }\n return null;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Asynchronously signs in as an anonymous user.\r\n *\r\n * 
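A minimal usage sketch (illustrative only; 'auth' is assumed to come from getAuth() in application code):\r\n *\r\n * ```javascript\r\n * // Creates a new anonymous user, or returns the one already signed in.\r\n * const { user } = await signInAnonymously(auth);\r\n * console.log(user.uid, user.isAnonymous); // isAnonymous is true\r\n * ```\r\n *\r\n * 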
@remarks\r\n * If there is already an anonymous user signed in, that user will be returned; otherwise, a\r\n * new anonymous user identity will be created and returned.\r\n *\r\n * This method is not supported by {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n *\r\n * @public\r\n */\nasync function signInAnonymously(auth) {\n var _a;\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const authInternal = _castAuth(auth);\n await authInternal._initializationPromise;\n if ((_a = authInternal.currentUser) === null || _a === void 0 ? void 0 : _a.isAnonymous) {\n // If an anonymous user is already signed in, no need to sign them in again.\n return new UserCredentialImpl({\n user: authInternal.currentUser,\n providerId: null,\n operationType: \"signIn\" /* OperationType.SIGN_IN */\n });\n }\n const response = await signUp(authInternal, {\n returnSecureToken: true\n });\n const userCredential = await UserCredentialImpl._fromIdTokenResponse(authInternal, \"signIn\" /* OperationType.SIGN_IN */, response, true);\n await authInternal._updateCurrentUser(userCredential.user);\n return userCredential;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass MultiFactorError extends FirebaseError {\n constructor(auth, error, operationType, user) {\n var _a;\n super(error.code, error.message);\n this.operationType = operationType;\n this.user = user;\n // https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work\n Object.setPrototypeOf(this, MultiFactorError.prototype);\n this.customData = {\n appName: auth.name,\n tenantId: (_a = auth.tenantId) !== null && _a !== void 0 ? _a : undefined,\n _serverResponse: error.customData._serverResponse,\n operationType\n };\n }\n static _fromErrorAndOperation(auth, error, operationType, user) {\n return new MultiFactorError(auth, error, operationType, user);\n }\n}\nfunction _processCredentialSavingMfaContextIfNecessary(auth, operationType, credential, user) {\n const idTokenProvider = operationType === \"reauthenticate\" /* OperationType.REAUTHENTICATE */ ? 
credential._getReauthenticationResolver(auth) : credential._getIdTokenResponse(auth);\n return idTokenProvider.catch(error => {\n if (error.code === `auth/${\"multi-factor-auth-required\" /* AuthErrorCode.MFA_REQUIRED */}`) {\n throw MultiFactorError._fromErrorAndOperation(auth, error, operationType, user);\n }\n throw error;\n });\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Takes a set of UserInfo provider data and converts it to a set of names\r\n */\nfunction providerDataAsNames(providerData) {\n return new Set(providerData.map(({\n providerId\n }) => providerId).filter(pid => !!pid));\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Unlinks a provider from a user account.\r\n *\r\n * @param user - The user.\r\n * @param providerId - The provider to unlink.\r\n *\r\n * @public\r\n */\nasync function unlink(user, providerId) {\n const userInternal = getModularInstance(user);\n await _assertLinkedStatus(true, userInternal, providerId);\n const {\n providerUserInfo\n } = await deleteLinkedAccounts(userInternal.auth, {\n idToken: await userInternal.getIdToken(),\n deleteProvider: [providerId]\n });\n const providersLeft = providerDataAsNames(providerUserInfo || []);\n userInternal.providerData = userInternal.providerData.filter(pd => providersLeft.has(pd.providerId));\n if (!providersLeft.has(\"phone\" /* ProviderId.PHONE */)) {\n userInternal.phoneNumber = null;\n }\n await userInternal.auth._persistUserIfCurrent(userInternal);\n return userInternal;\n}\nasync function _link$1(user, credential, bypassAuthState = false) {\n const response = await _logoutIfInvalidated(user, credential._linkToIdToken(user.auth, await user.getIdToken()), bypassAuthState);\n return UserCredentialImpl._forOperation(user, \"link\" /* OperationType.LINK */, response);\n}\nasync function _assertLinkedStatus(expected, user, provider) {\n await _reloadWithoutSaving(user);\n const providerIds = providerDataAsNames(user.providerData);\n const code = expected === false ? 
\"provider-already-linked\" /* AuthErrorCode.PROVIDER_ALREADY_LINKED */ : \"no-such-provider\" /* AuthErrorCode.NO_SUCH_PROVIDER */;\n _assert(providerIds.has(provider) === expected, user.auth, code);\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function _reauthenticate(user, credential, bypassAuthState = false) {\n const {\n auth\n } = user;\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const operationType = \"reauthenticate\" /* OperationType.REAUTHENTICATE */;\n try {\n const response = await _logoutIfInvalidated(user, _processCredentialSavingMfaContextIfNecessary(auth, operationType, credential, user), bypassAuthState);\n _assert(response.idToken, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const parsed = _parseToken(response.idToken);\n _assert(parsed, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const {\n sub: localId\n } = parsed;\n _assert(user.uid === localId, auth, \"user-mismatch\" /* AuthErrorCode.USER_MISMATCH */);\n return UserCredentialImpl._forOperation(user, operationType, response);\n } catch (e) {\n // Convert user deleted error into user mismatch\n if ((e === null || e === void 0 ? 
void 0 : e.code) === `auth/${\"user-not-found\" /* AuthErrorCode.USER_DELETED */}`) {\n _fail(auth, \"user-mismatch\" /* AuthErrorCode.USER_MISMATCH */);\n }\n throw e;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function _signInWithCredential(auth, credential, bypassAuthState = false) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const operationType = \"signIn\" /* OperationType.SIGN_IN */;\n const response = await _processCredentialSavingMfaContextIfNecessary(auth, operationType, credential);\n const userCredential = await UserCredentialImpl._fromIdTokenResponse(auth, operationType, response);\n if (!bypassAuthState) {\n await auth._updateCurrentUser(userCredential.user);\n }\n return userCredential;\n}\n/**\r\n * Asynchronously signs in with the given credentials.\r\n *\r\n * @remarks\r\n * An {@link AuthProvider} can be used to generate the credential.\r\n *\r\n * This method is not supported by {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param credential - The auth credential.\r\n *\r\n * @public\r\n */\nasync function signInWithCredential(auth, credential) {\n return _signInWithCredential(_castAuth(auth), credential);\n}\n/**\r\n * Links the user account with the given credentials.\r\n *\r\n * @remarks\r\n * An {@link AuthProvider} can be used to generate the credential.\r\n *\r\n * @param user - The user.\r\n * @param credential - The auth credential.\r\n *\r\n * @public\r\n */\nasync function linkWithCredential(user, credential) {\n const userInternal = getModularInstance(user);\n await _assertLinkedStatus(false, userInternal, credential.providerId);\n return _link$1(userInternal, credential);\n}\n/**\r\n * Re-authenticates a user using a fresh credential.\r\n *\r\n * @remarks\r\n * Use before operations such as {@link updatePassword} that require tokens from recent sign-in\r\n * attempts. 
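A minimal sketch of that flow (illustrative; 'currentPassword' and 'newPassword' are placeholder values supplied by the app):\r\n *\r\n * ```javascript\r\n * // Reauthenticate with a fresh email/password credential, then update the password.\r\n * const credential = EmailAuthProvider.credential(user.email, currentPassword);\r\n * await reauthenticateWithCredential(user, credential);\r\n * await updatePassword(user, newPassword);\r\n * ```\r\n *\r\n * 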
This method can be used to recover from a `CREDENTIAL_TOO_OLD_LOGIN_AGAIN` error\r\n * or a `TOKEN_EXPIRED` error.\r\n *\r\n * This method is not supported on any {@link User} signed in by {@link Auth} instances\r\n * created with a {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @param user - The user.\r\n * @param credential - The auth credential.\r\n *\r\n * @public\r\n */\nasync function reauthenticateWithCredential(user, credential) {\n return _reauthenticate(getModularInstance(user), credential);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function signInWithCustomToken$1(auth, request) {\n return _performSignInRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:signInWithCustomToken\" /* Endpoint.SIGN_IN_WITH_CUSTOM_TOKEN */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Asynchronously signs in using a custom token.\r\n *\r\n * @remarks\r\n * Custom tokens are used to integrate Firebase Auth with existing auth systems, and must\r\n * be generated by an auth backend using the\r\n * {@link https://firebase.google.com/docs/reference/admin/node/admin.auth.Auth#createcustomtoken | createCustomToken}\r\n * method in the {@link https://firebase.google.com/docs/auth/admin | Admin SDK} .\r\n *\r\n * Fails with an error if the token is invalid, expired, or not accepted by the Firebase Auth service.\r\n *\r\n * This method is not supported by {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param customToken - The custom token to sign in with.\r\n *\r\n * @public\r\n */\nasync function signInWithCustomToken(auth, customToken) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const authInternal = _castAuth(auth);\n const response = await signInWithCustomToken$1(authInternal, {\n token: customToken,\n returnSecureToken: true\n });\n const cred = await UserCredentialImpl._fromIdTokenResponse(authInternal, \"signIn\" /* OperationType.SIGN_IN */, response);\n await authInternal._updateCurrentUser(cred.user);\n return cred;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n 
* you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass MultiFactorInfoImpl {\n constructor(factorId, response) {\n this.factorId = factorId;\n this.uid = response.mfaEnrollmentId;\n this.enrollmentTime = new Date(response.enrolledAt).toUTCString();\n this.displayName = response.displayName;\n }\n static _fromServerResponse(auth, enrollment) {\n if ('phoneInfo' in enrollment) {\n return PhoneMultiFactorInfoImpl._fromServerResponse(auth, enrollment);\n } else if ('totpInfo' in enrollment) {\n return TotpMultiFactorInfoImpl._fromServerResponse(auth, enrollment);\n }\n return _fail(auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n }\n}\nclass PhoneMultiFactorInfoImpl extends MultiFactorInfoImpl {\n constructor(response) {\n super(\"phone\" /* FactorId.PHONE */, response);\n this.phoneNumber = response.phoneInfo;\n }\n static _fromServerResponse(_auth, enrollment) {\n return new PhoneMultiFactorInfoImpl(enrollment);\n }\n}\nclass TotpMultiFactorInfoImpl extends MultiFactorInfoImpl {\n constructor(response) {\n super(\"totp\" /* FactorId.TOTP */, response);\n }\n static _fromServerResponse(_auth, enrollment) {\n return new TotpMultiFactorInfoImpl(enrollment);\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction _setActionCodeSettingsOnRequest(auth, request, actionCodeSettings) {\n var _a;\n _assert(((_a = actionCodeSettings.url) === null || _a === void 0 ? 
void 0 : _a.length) > 0, auth, \"invalid-continue-uri\" /* AuthErrorCode.INVALID_CONTINUE_URI */);\n _assert(typeof actionCodeSettings.dynamicLinkDomain === 'undefined' || actionCodeSettings.dynamicLinkDomain.length > 0, auth, \"invalid-dynamic-link-domain\" /* AuthErrorCode.INVALID_DYNAMIC_LINK_DOMAIN */);\n request.continueUrl = actionCodeSettings.url;\n request.dynamicLinkDomain = actionCodeSettings.dynamicLinkDomain;\n request.canHandleCodeInApp = actionCodeSettings.handleCodeInApp;\n if (actionCodeSettings.iOS) {\n _assert(actionCodeSettings.iOS.bundleId.length > 0, auth, \"missing-ios-bundle-id\" /* AuthErrorCode.MISSING_IOS_BUNDLE_ID */);\n request.iOSBundleId = actionCodeSettings.iOS.bundleId;\n }\n if (actionCodeSettings.android) {\n _assert(actionCodeSettings.android.packageName.length > 0, auth, \"missing-android-pkg-name\" /* AuthErrorCode.MISSING_ANDROID_PACKAGE_NAME */);\n request.androidInstallApp = actionCodeSettings.android.installApp;\n request.androidMinimumVersionCode = actionCodeSettings.android.minimumVersion;\n request.androidPackageName = actionCodeSettings.android.packageName;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Updates the password policy cached in the {@link Auth} instance if a policy is already\r\n * cached for the project or tenant.\r\n *\r\n * @remarks\r\n * We only fetch the password policy if the password did not meet policy requirements and\r\n * there is an existing policy cached. A developer must call validatePassword at least\r\n * once for the cache to be automatically updated.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n *\r\n * @private\r\n */\nasync function recachePasswordPolicy(auth) {\n const authInternal = _castAuth(auth);\n if (authInternal._getPasswordPolicyInternal()) {\n await authInternal._updatePasswordPolicy();\n }\n}\n/**\r\n * Sends a password reset email to the given email address. 
This method does not throw an error when\r\n * there's no user account with the given email address and\r\n * {@link https://cloud.google.com/identity-platform/docs/admin/email-enumeration-protection | Email Enumeration Protection}\r\n * is enabled.\r\n *\r\n * @remarks\r\n * To complete the password reset, call {@link confirmPasswordReset} with the code supplied in\r\n * the email sent to the user, along with the new password specified by the user.\r\n *\r\n * @example\r\n * ```javascript\r\n * const actionCodeSettings = {\r\n * url: 'https://www.example.com/?email=user@example.com',\r\n * iOS: {\r\n * bundleId: 'com.example.ios'\r\n * },\r\n * android: {\r\n * packageName: 'com.example.android',\r\n * installApp: true,\r\n * minimumVersion: '12'\r\n * },\r\n * handleCodeInApp: true\r\n * };\r\n * await sendPasswordResetEmail(auth, 'user@example.com', actionCodeSettings);\r\n * // Obtain code from user.\r\n * await confirmPasswordReset('user@example.com', code);\r\n * ```\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param email - The user's email address.\r\n * @param actionCodeSettings - The {@link ActionCodeSettings}.\r\n *\r\n * @public\r\n */\nasync function sendPasswordResetEmail(auth, email, actionCodeSettings) {\n const authInternal = _castAuth(auth);\n const request = {\n requestType: \"PASSWORD_RESET\" /* ActionCodeOperation.PASSWORD_RESET */,\n email,\n clientType: \"CLIENT_TYPE_WEB\" /* RecaptchaClientType.WEB */\n };\n if (actionCodeSettings) {\n _setActionCodeSettingsOnRequest(authInternal, request, actionCodeSettings);\n }\n await handleRecaptchaFlow(authInternal, request, \"getOobCode\" /* RecaptchaActionName.GET_OOB_CODE */, sendPasswordResetEmail$1);\n}\n/**\r\n * Completes the password reset process, given a confirmation code and new password.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param oobCode - A confirmation code sent to the user.\r\n * @param newPassword - The new password.\r\n *\r\n * @public\r\n */\nasync function confirmPasswordReset(auth, oobCode, newPassword) {\n await resetPassword(getModularInstance(auth), {\n oobCode,\n newPassword\n }).catch(async error => {\n if (error.code === `auth/${\"password-does-not-meet-requirements\" /* AuthErrorCode.PASSWORD_DOES_NOT_MEET_REQUIREMENTS */}`) {\n void recachePasswordPolicy(auth);\n }\n throw error;\n });\n // Do not return the email.\n}\n/**\r\n * Applies a verification code sent to the user by email or other out-of-band mechanism.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param oobCode - A verification code sent to the user.\r\n *\r\n * @public\r\n */\nasync function applyActionCode(auth, oobCode) {\n await applyActionCode$1(getModularInstance(auth), {\n oobCode\n });\n}\n/**\r\n * Checks a verification code sent to the user by email or other out-of-band mechanism.\r\n *\r\n * @returns metadata about the code.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param oobCode - A verification code sent to the user.\r\n *\r\n * @public\r\n */\nasync function checkActionCode(auth, oobCode) {\n const authModular = getModularInstance(auth);\n const response = await resetPassword(authModular, {\n oobCode\n });\n // Email could be empty only if the request type is EMAIL_SIGNIN or\n // VERIFY_AND_CHANGE_EMAIL.\n // New email should not be empty if the request type is\n // VERIFY_AND_CHANGE_EMAIL.\n // Multi-factor info could not be empty if the request type is\n // REVERT_SECOND_FACTOR_ADDITION.\n const operation = response.requestType;\n _assert(operation, 
authModular, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n switch (operation) {\n case \"EMAIL_SIGNIN\" /* ActionCodeOperation.EMAIL_SIGNIN */:\n break;\n case \"VERIFY_AND_CHANGE_EMAIL\" /* ActionCodeOperation.VERIFY_AND_CHANGE_EMAIL */:\n _assert(response.newEmail, authModular, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n break;\n case \"REVERT_SECOND_FACTOR_ADDITION\" /* ActionCodeOperation.REVERT_SECOND_FACTOR_ADDITION */:\n _assert(response.mfaInfo, authModular, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n // fall through\n default:\n _assert(response.email, authModular, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n }\n // The multi-factor info for revert second factor addition\n let multiFactorInfo = null;\n if (response.mfaInfo) {\n multiFactorInfo = MultiFactorInfoImpl._fromServerResponse(_castAuth(authModular), response.mfaInfo);\n }\n return {\n data: {\n email: (response.requestType === \"VERIFY_AND_CHANGE_EMAIL\" /* ActionCodeOperation.VERIFY_AND_CHANGE_EMAIL */ ? response.newEmail : response.email) || null,\n previousEmail: (response.requestType === \"VERIFY_AND_CHANGE_EMAIL\" /* ActionCodeOperation.VERIFY_AND_CHANGE_EMAIL */ ? response.email : response.newEmail) || null,\n multiFactorInfo\n },\n operation\n };\n}\n/**\r\n * Checks a password reset code sent to the user by email or other out-of-band mechanism.\r\n *\r\n * @returns the user's email address if valid.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param code - A verification code sent to the user.\r\n *\r\n * @public\r\n */\nasync function verifyPasswordResetCode(auth, code) {\n const {\n data\n } = await checkActionCode(getModularInstance(auth), code);\n // Email should always be present since a code was sent to it\n return data.email;\n}\n/**\r\n * Creates a new user account associated with the specified email address and password.\r\n *\r\n * @remarks\r\n * On successful creation of the user account, this user will also be signed in to your application.\r\n *\r\n * User account creation can fail if the account already exists or the password is invalid.\r\n *\r\n * This method is not supported on {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * Note: The email address acts as a unique identifier for the user and enables an email-based\r\n * password reset. 
This function will create a new user account and set the initial user password.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param email - The user's email address.\r\n * @param password - The user's chosen password.\r\n *\r\n * @public\r\n */\nasync function createUserWithEmailAndPassword(auth, email, password) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const authInternal = _castAuth(auth);\n const request = {\n returnSecureToken: true,\n email,\n password,\n clientType: \"CLIENT_TYPE_WEB\" /* RecaptchaClientType.WEB */\n };\n const signUpResponse = handleRecaptchaFlow(authInternal, request, \"signUpPassword\" /* RecaptchaActionName.SIGN_UP_PASSWORD */, signUp);\n const response = await signUpResponse.catch(error => {\n if (error.code === `auth/${\"password-does-not-meet-requirements\" /* AuthErrorCode.PASSWORD_DOES_NOT_MEET_REQUIREMENTS */}`) {\n void recachePasswordPolicy(auth);\n }\n throw error;\n });\n const userCredential = await UserCredentialImpl._fromIdTokenResponse(authInternal, \"signIn\" /* OperationType.SIGN_IN */, response);\n await authInternal._updateCurrentUser(userCredential.user);\n return userCredential;\n}\n/**\r\n * Asynchronously signs in using an email and password.\r\n *\r\n * @remarks\r\n * Fails with an error if the email address and password do not match. When\r\n * {@link https://cloud.google.com/identity-platform/docs/admin/email-enumeration-protection | Email Enumeration Protection}\r\n * is enabled, this method fails with \"auth/invalid-credential\" in case of an invalid\r\n * email/password.\r\n *\r\n * This method is not supported on {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * Note: The user's password is NOT the password used to access the user's email account. The\r\n * email address serves as a unique identifier for the user, and the password is used to access\r\n * the user's account in your Firebase project. 
See also: {@link createUserWithEmailAndPassword}.\r\n *\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param email - The users email address.\r\n * @param password - The users password.\r\n *\r\n * @public\r\n */\nfunction signInWithEmailAndPassword(auth, email, password) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n return signInWithCredential(getModularInstance(auth), EmailAuthProvider.credential(email, password)).catch(async error => {\n if (error.code === `auth/${\"password-does-not-meet-requirements\" /* AuthErrorCode.PASSWORD_DOES_NOT_MEET_REQUIREMENTS */}`) {\n void recachePasswordPolicy(auth);\n }\n throw error;\n });\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Sends a sign-in email link to the user with the specified email.\r\n *\r\n * @remarks\r\n * The sign-in operation has to always be completed in the app unlike other out of band email\r\n * actions (password reset and email verifications). This is because, at the end of the flow,\r\n * the user is expected to be signed in and their Auth state persisted within the app.\r\n *\r\n * To complete sign in with the email link, call {@link signInWithEmailLink} with the email\r\n * address and the email link supplied in the email sent to the user.\r\n *\r\n * @example\r\n * ```javascript\r\n * const actionCodeSettings = {\r\n * url: 'https://www.example.com/?email=user@example.com',\r\n * iOS: {\r\n * bundleId: 'com.example.ios'\r\n * },\r\n * android: {\r\n * packageName: 'com.example.android',\r\n * installApp: true,\r\n * minimumVersion: '12'\r\n * },\r\n * handleCodeInApp: true\r\n * };\r\n * await sendSignInLinkToEmail(auth, 'user@example.com', actionCodeSettings);\r\n * // Obtain emailLink from the user.\r\n * if(isSignInWithEmailLink(auth, emailLink)) {\r\n * await signInWithEmailLink(auth, 'user@example.com', emailLink);\r\n * }\r\n * ```\r\n *\r\n * @param authInternal - The {@link Auth} instance.\r\n * @param email - The user's email address.\r\n * @param actionCodeSettings - The {@link ActionCodeSettings}.\r\n *\r\n * @public\r\n */\nasync function sendSignInLinkToEmail(auth, email, actionCodeSettings) {\n const authInternal = _castAuth(auth);\n const request = {\n requestType: \"EMAIL_SIGNIN\" /* ActionCodeOperation.EMAIL_SIGNIN */,\n email,\n clientType: \"CLIENT_TYPE_WEB\" /* RecaptchaClientType.WEB */\n };\n function setActionCodeSettings(request, actionCodeSettings) {\n _assert(actionCodeSettings.handleCodeInApp, authInternal, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n if (actionCodeSettings) {\n _setActionCodeSettingsOnRequest(authInternal, request, actionCodeSettings);\n }\n }\n setActionCodeSettings(request, actionCodeSettings);\n await handleRecaptchaFlow(authInternal, request, \"getOobCode\" /* RecaptchaActionName.GET_OOB_CODE */, sendSignInLinkToEmail$1);\n}\n/**\r\n * Checks if an incoming 
link is a sign-in with email link suitable for {@link signInWithEmailLink}.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param emailLink - The link sent to the user's email address.\r\n *\r\n * @public\r\n */\nfunction isSignInWithEmailLink(auth, emailLink) {\n const actionCodeUrl = ActionCodeURL.parseLink(emailLink);\n return (actionCodeUrl === null || actionCodeUrl === void 0 ? void 0 : actionCodeUrl.operation) === \"EMAIL_SIGNIN\" /* ActionCodeOperation.EMAIL_SIGNIN */;\n}\n/**\r\n * Asynchronously signs in using an email and sign-in email link.\r\n *\r\n * @remarks\r\n * If no link is passed, the link is inferred from the current URL.\r\n *\r\n * Fails with an error if the email address is invalid or OTP in email link expires.\r\n *\r\n * This method is not supported by {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * Note: Confirm the link is a sign-in email link before calling this method firebase.auth.Auth.isSignInWithEmailLink.\r\n *\r\n * @example\r\n * ```javascript\r\n * const actionCodeSettings = {\r\n * url: 'https://www.example.com/?email=user@example.com',\r\n * iOS: {\r\n * bundleId: 'com.example.ios'\r\n * },\r\n * android: {\r\n * packageName: 'com.example.android',\r\n * installApp: true,\r\n * minimumVersion: '12'\r\n * },\r\n * handleCodeInApp: true\r\n * };\r\n * await sendSignInLinkToEmail(auth, 'user@example.com', actionCodeSettings);\r\n * // Obtain emailLink from the user.\r\n * if(isSignInWithEmailLink(auth, emailLink)) {\r\n * await signInWithEmailLink(auth, 'user@example.com', emailLink);\r\n * }\r\n * ```\r\n *\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param email - The user's email address.\r\n * @param emailLink - The link sent to the user's email address.\r\n *\r\n * @public\r\n */\nasync function signInWithEmailLink(auth, email, emailLink) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const authModular = getModularInstance(auth);\n const credential = EmailAuthProvider.credentialWithLink(email, emailLink || _getCurrentUrl());\n // Check if the tenant ID in the email link matches the tenant ID on Auth\n // instance.\n _assert(credential._tenantId === (authModular.tenantId || null), authModular, \"tenant-id-mismatch\" /* AuthErrorCode.TENANT_ID_MISMATCH */);\n return signInWithCredential(authModular, credential);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function createAuthUri(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:createAuthUri\" /* Endpoint.CREATE_AUTH_URI */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of 
the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Gets the list of possible sign in methods for the given email address. This method returns an\r\n * empty list when\r\n * {@link https://cloud.google.com/identity-platform/docs/admin/email-enumeration-protection | Email Enumeration Protection}\r\n * is enabled, irrespective of the number of authentication methods available for the given email.\r\n *\r\n * @remarks\r\n * This is useful to differentiate methods of sign-in for the same provider, eg.\r\n * {@link EmailAuthProvider} which has 2 methods of sign-in,\r\n * {@link SignInMethod}.EMAIL_PASSWORD and\r\n * {@link SignInMethod}.EMAIL_LINK.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param email - The user's email address.\r\n *\r\n * Deprecated. Migrating off of this method is recommended as a security best-practice.\r\n * Learn more in the Identity Platform documentation for\r\n * {@link https://cloud.google.com/identity-platform/docs/admin/email-enumeration-protection | Email Enumeration Protection}.\r\n * @public\r\n */\nasync function fetchSignInMethodsForEmail(auth, email) {\n // createAuthUri returns an error if continue URI is not http or https.\n // For environments like Cordova, Chrome extensions, native frameworks, file\n // systems, etc, use http://localhost as continue URL.\n const continueUri = _isHttpOrHttps() ? _getCurrentUrl() : 'http://localhost';\n const request = {\n identifier: email,\n continueUri\n };\n const {\n signinMethods\n } = await createAuthUri(getModularInstance(auth), request);\n return signinMethods || [];\n}\n/**\r\n * Sends a verification email to a user.\r\n *\r\n * @remarks\r\n * The verification process is completed by calling {@link applyActionCode}.\r\n *\r\n * @example\r\n * ```javascript\r\n * const actionCodeSettings = {\r\n * url: 'https://www.example.com/?email=user@example.com',\r\n * iOS: {\r\n * bundleId: 'com.example.ios'\r\n * },\r\n * android: {\r\n * packageName: 'com.example.android',\r\n * installApp: true,\r\n * minimumVersion: '12'\r\n * },\r\n * handleCodeInApp: true\r\n * };\r\n * await sendEmailVerification(user, actionCodeSettings);\r\n * // Obtain code from the user.\r\n * await applyActionCode(auth, code);\r\n * ```\r\n *\r\n * @param user - The user.\r\n * @param actionCodeSettings - The {@link ActionCodeSettings}.\r\n *\r\n * @public\r\n */\nasync function sendEmailVerification(user, actionCodeSettings) {\n const userInternal = getModularInstance(user);\n const idToken = await user.getIdToken();\n const request = {\n requestType: \"VERIFY_EMAIL\" /* ActionCodeOperation.VERIFY_EMAIL */,\n idToken\n };\n if (actionCodeSettings) {\n _setActionCodeSettingsOnRequest(userInternal.auth, request, actionCodeSettings);\n }\n const {\n email\n } = await sendEmailVerification$1(userInternal.auth, request);\n if (email !== user.email) {\n await user.reload();\n }\n}\n/**\r\n * Sends a verification email to a new email address.\r\n *\r\n * @remarks\r\n * The user's email will be updated to the new one after being verified.\r\n *\r\n * If you have a custom email action handler, you can complete the verification process by calling\r\n * {@link 
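// Illustrative sketch (hypothetical application helper): combines fetchSignInMethodsForEmail
// and sendEmailVerification as defined above. The string values checked are assumed to be
// the SignInMethod.EMAIL_PASSWORD and SignInMethod.EMAIL_LINK constants; with Email
// Enumeration Protection enabled the returned list is always empty.
async function exampleDescribeEmailAccount(auth, email, user) {
  const methods = await fetchSignInMethodsForEmail(auth, email);
  const hasPassword = methods.includes('password');
  const usesEmailLink = methods.includes('emailLink');
  // Independently, ask the backend to mail a verification link to the signed-in user;
  // the flow is completed later with applyActionCode(auth, code).
  await sendEmailVerification(user);
  return { hasPassword, usesEmailLink };
}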
applyActionCode}.\r\n *\r\n * @example\r\n * ```javascript\r\n * const actionCodeSettings = {\r\n * url: 'https://www.example.com/?email=user@example.com',\r\n * iOS: {\r\n * bundleId: 'com.example.ios'\r\n * },\r\n * android: {\r\n * packageName: 'com.example.android',\r\n * installApp: true,\r\n * minimumVersion: '12'\r\n * },\r\n * handleCodeInApp: true\r\n * };\r\n * await verifyBeforeUpdateEmail(user, 'newemail@example.com', actionCodeSettings);\r\n * // Obtain code from the user.\r\n * await applyActionCode(auth, code);\r\n * ```\r\n *\r\n * @param user - The user.\r\n * @param newEmail - The new email address to be verified before update.\r\n * @param actionCodeSettings - The {@link ActionCodeSettings}.\r\n *\r\n * @public\r\n */\nasync function verifyBeforeUpdateEmail(user, newEmail, actionCodeSettings) {\n const userInternal = getModularInstance(user);\n const idToken = await user.getIdToken();\n const request = {\n requestType: \"VERIFY_AND_CHANGE_EMAIL\" /* ActionCodeOperation.VERIFY_AND_CHANGE_EMAIL */,\n idToken,\n newEmail\n };\n if (actionCodeSettings) {\n _setActionCodeSettingsOnRequest(userInternal.auth, request, actionCodeSettings);\n }\n const {\n email\n } = await verifyAndChangeEmail(userInternal.auth, request);\n if (email !== user.email) {\n // If the local copy of the email on user is outdated, reload the\n // user.\n await user.reload();\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function updateProfile$1(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v1/accounts:update\" /* Endpoint.SET_ACCOUNT_INFO */, request);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Updates a user's profile data.\r\n *\r\n * @param user - The user.\r\n * @param profile - The profile's `displayName` and `photoURL` to update.\r\n *\r\n * @public\r\n */\nasync function updateProfile(user, {\n displayName,\n photoURL: photoUrl\n}) {\n if (displayName === undefined && photoUrl === undefined) {\n return;\n }\n const userInternal = getModularInstance(user);\n const idToken = await userInternal.getIdToken();\n const profileRequest = {\n idToken,\n displayName,\n photoUrl,\n returnSecureToken: true\n };\n const response = await _logoutIfInvalidated(userInternal, updateProfile$1(userInternal.auth, profileRequest));\n 
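// Illustrative sketch (hypothetical helper): the verify-before-update flow defined above.
// applyActionCode is the completion call referenced in the JSDoc; `code` stands in for the
// action code the user obtains from the verification mail or a custom handler.
async function exampleChangeEmailWithVerification(auth, user, newEmail, code) {
  // Sends the verification mail to the new address; actionCodeSettings may be passed as a
  // third argument when a continue URL or in-app handling is needed.
  await verifyBeforeUpdateEmail(user, newEmail);
  // Applying the code finalizes the address change on the backend.
  await applyActionCode(auth, code);
  // Reload so the local User object picks up the new email.
  await user.reload();
}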
userInternal.displayName = response.displayName || null;\n userInternal.photoURL = response.photoUrl || null;\n // Update the password provider as well\n const passwordProvider = userInternal.providerData.find(({\n providerId\n }) => providerId === \"password\" /* ProviderId.PASSWORD */);\n if (passwordProvider) {\n passwordProvider.displayName = userInternal.displayName;\n passwordProvider.photoURL = userInternal.photoURL;\n }\n await userInternal._updateTokensIfNecessary(response);\n}\n/**\r\n * Updates the user's email address.\r\n *\r\n * @remarks\r\n * An email will be sent to the original email address (if it was set) that allows to revoke the\r\n * email address change, in order to protect them from account hijacking.\r\n *\r\n * This method is not supported on any {@link User} signed in by {@link Auth} instances\r\n * created with a {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * Important: this is a security sensitive operation that requires the user to have recently signed\r\n * in. If this requirement isn't met, ask the user to authenticate again and then call\r\n * {@link reauthenticateWithCredential}.\r\n *\r\n * @param user - The user.\r\n * @param newEmail - The new email address.\r\n *\r\n * Throws \"auth/operation-not-allowed\" error when\r\n * {@link https://cloud.google.com/identity-platform/docs/admin/email-enumeration-protection | Email Enumeration Protection}\r\n * is enabled.\r\n * Deprecated - Use {@link verifyBeforeUpdateEmail} instead.\r\n *\r\n * @public\r\n */\nfunction updateEmail(user, newEmail) {\n const userInternal = getModularInstance(user);\n if (_isFirebaseServerApp(userInternal.auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(userInternal.auth));\n }\n return updateEmailOrPassword(userInternal, newEmail, null);\n}\n/**\r\n * Updates the user's password.\r\n *\r\n * @remarks\r\n * Important: this is a security sensitive operation that requires the user to have recently signed\r\n * in. 
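// Illustrative sketch (hypothetical helper): updateProfile as implemented above patches
// displayName/photoURL via accounts:update and mirrors the result onto the local user and
// its password provider entry, so the new values are readable immediately.
async function exampleUpdateDisplayName(user, displayName, photoURL) {
  // Fields left undefined are not sent, so they stay unchanged on the account.
  await updateProfile(user, { displayName, photoURL });
  return { displayName: user.displayName, photoURL: user.photoURL };
}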
If this requirement isn't met, ask the user to authenticate again and then call\r\n * {@link reauthenticateWithCredential}.\r\n *\r\n * @param user - The user.\r\n * @param newPassword - The new password.\r\n *\r\n * @public\r\n */\nfunction updatePassword(user, newPassword) {\n return updateEmailOrPassword(getModularInstance(user), null, newPassword);\n}\nasync function updateEmailOrPassword(user, email, password) {\n const {\n auth\n } = user;\n const idToken = await user.getIdToken();\n const request = {\n idToken,\n returnSecureToken: true\n };\n if (email) {\n request.email = email;\n }\n if (password) {\n request.password = password;\n }\n const response = await _logoutIfInvalidated(user, updateEmailPassword(auth, request));\n await user._updateTokensIfNecessary(response, /* reload */true);\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Parse the `AdditionalUserInfo` from the ID token response.\r\n *\r\n */\nfunction _fromIdTokenResponse(idTokenResponse) {\n var _a, _b;\n if (!idTokenResponse) {\n return null;\n }\n const {\n providerId\n } = idTokenResponse;\n const profile = idTokenResponse.rawUserInfo ? JSON.parse(idTokenResponse.rawUserInfo) : {};\n const isNewUser = idTokenResponse.isNewUser || idTokenResponse.kind === \"identitytoolkit#SignupNewUserResponse\" /* IdTokenResponseKind.SignupNewUser */;\n if (!providerId && (idTokenResponse === null || idTokenResponse === void 0 ? void 0 : idTokenResponse.idToken)) {\n const signInProvider = (_b = (_a = _parseToken(idTokenResponse.idToken)) === null || _a === void 0 ? void 0 : _a.firebase) === null || _b === void 0 ? void 0 : _b['sign_in_provider'];\n if (signInProvider) {\n const filteredProviderId = signInProvider !== \"anonymous\" /* ProviderId.ANONYMOUS */ && signInProvider !== \"custom\" /* ProviderId.CUSTOM */ ? 
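// Illustrative sketch (hypothetical helper): updatePassword above is security sensitive, so
// this retries after reauthentication when the backend reports a stale sign-in.
// EmailAuthProvider.credential and reauthenticateWithCredential are the calls the JSDoc
// points to; the 'auth/requires-recent-login' error code is assumed for that failure.
async function exampleUpdatePasswordWithReauth(user, currentPassword, newPassword) {
  try {
    await updatePassword(user, newPassword);
  } catch (e) {
    if (e.code !== 'auth/requires-recent-login') {
      throw e;
    }
    const credential = EmailAuthProvider.credential(user.email, currentPassword);
    await reauthenticateWithCredential(user, credential);
    await updatePassword(user, newPassword);
  }
}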
signInProvider : null;\n // Uses generic class in accordance with the legacy SDK.\n return new GenericAdditionalUserInfo(isNewUser, filteredProviderId);\n }\n }\n if (!providerId) {\n return null;\n }\n switch (providerId) {\n case \"facebook.com\" /* ProviderId.FACEBOOK */:\n return new FacebookAdditionalUserInfo(isNewUser, profile);\n case \"github.com\" /* ProviderId.GITHUB */:\n return new GithubAdditionalUserInfo(isNewUser, profile);\n case \"google.com\" /* ProviderId.GOOGLE */:\n return new GoogleAdditionalUserInfo(isNewUser, profile);\n case \"twitter.com\" /* ProviderId.TWITTER */:\n return new TwitterAdditionalUserInfo(isNewUser, profile, idTokenResponse.screenName || null);\n case \"custom\" /* ProviderId.CUSTOM */:\n case \"anonymous\" /* ProviderId.ANONYMOUS */:\n return new GenericAdditionalUserInfo(isNewUser, null);\n default:\n return new GenericAdditionalUserInfo(isNewUser, providerId, profile);\n }\n}\nclass GenericAdditionalUserInfo {\n constructor(isNewUser, providerId, profile = {}) {\n this.isNewUser = isNewUser;\n this.providerId = providerId;\n this.profile = profile;\n }\n}\nclass FederatedAdditionalUserInfoWithUsername extends GenericAdditionalUserInfo {\n constructor(isNewUser, providerId, profile, username) {\n super(isNewUser, providerId, profile);\n this.username = username;\n }\n}\nclass FacebookAdditionalUserInfo extends GenericAdditionalUserInfo {\n constructor(isNewUser, profile) {\n super(isNewUser, \"facebook.com\" /* ProviderId.FACEBOOK */, profile);\n }\n}\nclass GithubAdditionalUserInfo extends FederatedAdditionalUserInfoWithUsername {\n constructor(isNewUser, profile) {\n super(isNewUser, \"github.com\" /* ProviderId.GITHUB */, profile, typeof (profile === null || profile === void 0 ? void 0 : profile.login) === 'string' ? profile === null || profile === void 0 ? 
void 0 : profile.login : null);\n }\n}\nclass GoogleAdditionalUserInfo extends GenericAdditionalUserInfo {\n constructor(isNewUser, profile) {\n super(isNewUser, \"google.com\" /* ProviderId.GOOGLE */, profile);\n }\n}\nclass TwitterAdditionalUserInfo extends FederatedAdditionalUserInfoWithUsername {\n constructor(isNewUser, profile, screenName) {\n super(isNewUser, \"twitter.com\" /* ProviderId.TWITTER */, profile, screenName);\n }\n}\n/**\r\n * Extracts provider specific {@link AdditionalUserInfo} for the given credential.\r\n *\r\n * @param userCredential - The user credential.\r\n *\r\n * @public\r\n */\nfunction getAdditionalUserInfo(userCredential) {\n const {\n user,\n _tokenResponse\n } = userCredential;\n if (user.isAnonymous && !_tokenResponse) {\n // Handle the special case where signInAnonymously() gets called twice.\n // No network call is made so there's nothing to actually fill this in\n return {\n providerId: null,\n isNewUser: false,\n profile: null\n };\n }\n return _fromIdTokenResponse(_tokenResponse);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n// Non-optional auth methods.\n/**\r\n * Changes the type of persistence on the {@link Auth} instance for the currently saved\r\n * `Auth` session and applies this type of persistence for future sign-in requests, including\r\n * sign-in with redirect requests.\r\n *\r\n * @remarks\r\n * This makes it easy for a user signing in to specify whether their session should be\r\n * remembered or not. It also makes it easier to never persist the `Auth` state for applications\r\n * that are shared by other users or have sensitive data.\r\n *\r\n * This method does not work in a Node.js environment or with {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```javascript\r\n * setPersistence(auth, browserSessionPersistence);\r\n * ```\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param persistence - The {@link Persistence} to use.\r\n * @returns A `Promise` that resolves once the persistence change has completed\r\n *\r\n * @public\r\n */\nfunction setPersistence(auth, persistence) {\n return getModularInstance(auth).setPersistence(persistence);\n}\n/**\r\n * Loads the reCAPTCHA configuration into the `Auth` instance.\r\n *\r\n * @remarks\r\n * This will load the reCAPTCHA config, which indicates whether the reCAPTCHA\r\n * verification flow should be triggered for each auth provider, into the\r\n * current Auth session.\r\n *\r\n * If initializeRecaptchaConfig() is not invoked, the auth flow will always start\r\n * without reCAPTCHA verification. 
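// Illustrative sketch (hypothetical helper): getAdditionalUserInfo above extracts the
// provider-specific profile from a UserCredential returned by any sign-in call in this file
// (for example signInWithEmailLink earlier).
function exampleInspectSignInResult(userCredential) {
  const info = getAdditionalUserInfo(userCredential);
  if (info && info.isNewUser) {
    // info.profile is the raw provider profile (e.g. the GitHub login or Twitter screen
    // name used by the provider-specific classes above).
    return { providerId: info.providerId, profile: info.profile };
  }
  return null;
}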
If the provider is configured to require reCAPTCHA\r\n * verification, the SDK will transparently load the reCAPTCHA config and restart the\r\n * auth flows.\r\n *\r\n * Thus, by calling this optional method, you will reduce the latency of future auth flows.\r\n * Loading the reCAPTCHA config early will also enhance the signal collected by reCAPTCHA.\r\n *\r\n * This method does not work in a Node.js environment.\r\n *\r\n * @example\r\n * ```javascript\r\n * initializeRecaptchaConfig(auth);\r\n * ```\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n *\r\n * @public\r\n */\nfunction initializeRecaptchaConfig(auth) {\n return _initializeRecaptchaConfig(auth);\n}\n/**\r\n * Validates the password against the password policy configured for the project or tenant.\r\n *\r\n * @remarks\r\n * If no tenant ID is set on the `Auth` instance, then this method will use the password\r\n * policy configured for the project. Otherwise, this method will use the policy configured\r\n * for the tenant. If a password policy has not been configured, then the default policy\r\n * configured for all projects will be used.\r\n *\r\n * If an auth flow fails because a submitted password does not meet the password policy\r\n * requirements and this method has previously been called, then this method will use the\r\n * most recent policy available when called again.\r\n *\r\n * @example\r\n * ```javascript\r\n * validatePassword(auth, 'some-password');\r\n * ```\r\n *\r\n * @param auth The {@link Auth} instance.\r\n * @param password The password to validate.\r\n *\r\n * @public\r\n */\nasync function validatePassword(auth, password) {\n const authInternal = _castAuth(auth);\n return authInternal.validatePassword(password);\n}\n/**\r\n * Adds an observer for changes to the signed-in user's ID token.\r\n *\r\n * @remarks\r\n * This includes sign-in, sign-out, and token refresh events.\r\n * This will not be triggered automatically upon ID token expiration. Use {@link User.getIdToken} to refresh the ID token.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param nextOrObserver - callback triggered on change.\r\n * @param error - Deprecated. This callback is never triggered. Errors\r\n * on signing in/out can be caught in promises returned from\r\n * sign-in/sign-out functions.\r\n * @param completed - Deprecated. This callback is never triggered.\r\n *\r\n * @public\r\n */\nfunction onIdTokenChanged(auth, nextOrObserver, error, completed) {\n return getModularInstance(auth).onIdTokenChanged(nextOrObserver, error, completed);\n}\n/**\r\n * Adds a blocking callback that runs before an auth state change\r\n * sets a new user.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param callback - callback triggered before new user value is set.\r\n * If this throws, it blocks the user from being set.\r\n * @param onAbort - callback triggered if a later `beforeAuthStateChanged()`\r\n * callback throws, allowing you to undo any side effects.\r\n */\nfunction beforeAuthStateChanged(auth, callback, onAbort) {\n return getModularInstance(auth).beforeAuthStateChanged(callback, onAbort);\n}\n/**\r\n * Adds an observer for changes to the user's sign-in state.\r\n *\r\n * @remarks\r\n * To keep the old behavior, see {@link onIdTokenChanged}.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param nextOrObserver - callback triggered on change.\r\n * @param error - Deprecated. This callback is never triggered. 
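// Illustrative sketch (hypothetical helper): validatePassword above resolves with a status
// object describing the policy checks; the isValid field used here is assumed to summarize
// whether every requirement was met.
async function examplePasswordGate(auth, candidatePassword) {
  const status = await validatePassword(auth, candidatePassword);
  // Reject the form client-side before attempting account creation or a password update.
  return status.isValid;
}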
Errors\r\n * on signing in/out can be caught in promises returned from\r\n * sign-in/sign-out functions.\r\n * @param completed - Deprecated. This callback is never triggered.\r\n *\r\n * @public\r\n */\nfunction onAuthStateChanged(auth, nextOrObserver, error, completed) {\n return getModularInstance(auth).onAuthStateChanged(nextOrObserver, error, completed);\n}\n/**\r\n * Sets the current language to the default device/browser preference.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n *\r\n * @public\r\n */\nfunction useDeviceLanguage(auth) {\n getModularInstance(auth).useDeviceLanguage();\n}\n/**\r\n * Asynchronously sets the provided user as {@link Auth.currentUser} on the\r\n * {@link Auth} instance.\r\n *\r\n * @remarks\r\n * A new instance copy of the user provided will be made and set as currentUser.\r\n *\r\n * This will trigger {@link onAuthStateChanged} and {@link onIdTokenChanged} listeners\r\n * like other sign in methods.\r\n *\r\n * The operation fails with an error if the user to be updated belongs to a different Firebase\r\n * project.\r\n *\r\n * This method is not supported by {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param user - The new {@link User}.\r\n *\r\n * @public\r\n */\nfunction updateCurrentUser(auth, user) {\n return getModularInstance(auth).updateCurrentUser(user);\n}\n/**\r\n * Signs out the current user.\r\n *\r\n * @remarks\r\n * This method is not supported by {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n *\r\n * @public\r\n */\nfunction signOut(auth) {\n return getModularInstance(auth).signOut();\n}\n/**\r\n * Revokes the given access token. Currently only supports Apple OAuth access tokens.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param token - The Apple OAuth access token.\r\n *\r\n * @public\r\n */\nfunction revokeAccessToken(auth, token) {\n const authInternal = _castAuth(auth);\n return authInternal.revokeAccessToken(token);\n}\n/**\r\n * Deletes and signs out the user.\r\n *\r\n * @remarks\r\n * Important: this is a security-sensitive operation that requires the user to have recently\r\n * signed in. 
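// Illustrative sketch (hypothetical wiring): onAuthStateChanged and signOut as defined here.
// The observer fires on sign-in and sign-out; the returned teardown detaches the observer
// and then signs the current user out.
function exampleObserveSession(auth, onUser) {
  const unsubscribe = onAuthStateChanged(auth, user => {
    // `user` is null when signed out, otherwise the current User.
    onUser(user);
  });
  return async function teardown() {
    unsubscribe();
    await signOut(auth);
  };
}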
If this requirement isn't met, ask the user to authenticate again and then call\r\n * {@link reauthenticateWithCredential}.\r\n *\r\n * @param user - The user.\r\n *\r\n * @public\r\n */\nasync function deleteUser(user) {\n return getModularInstance(user).delete();\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass MultiFactorSessionImpl {\n constructor(type, credential, user) {\n this.type = type;\n this.credential = credential;\n this.user = user;\n }\n static _fromIdtoken(idToken, user) {\n return new MultiFactorSessionImpl(\"enroll\" /* MultiFactorSessionType.ENROLL */, idToken, user);\n }\n static _fromMfaPendingCredential(mfaPendingCredential) {\n return new MultiFactorSessionImpl(\"signin\" /* MultiFactorSessionType.SIGN_IN */, mfaPendingCredential);\n }\n toJSON() {\n const key = this.type === \"enroll\" /* MultiFactorSessionType.ENROLL */ ? 'idToken' : 'pendingCredential';\n return {\n multiFactorSession: {\n [key]: this.credential\n }\n };\n }\n static fromJSON(obj) {\n var _a, _b;\n if (obj === null || obj === void 0 ? void 0 : obj.multiFactorSession) {\n if ((_a = obj.multiFactorSession) === null || _a === void 0 ? void 0 : _a.pendingCredential) {\n return MultiFactorSessionImpl._fromMfaPendingCredential(obj.multiFactorSession.pendingCredential);\n } else if ((_b = obj.multiFactorSession) === null || _b === void 0 ? 
void 0 : _b.idToken) {\n return MultiFactorSessionImpl._fromIdtoken(obj.multiFactorSession.idToken);\n }\n }\n return null;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass MultiFactorResolverImpl {\n constructor(session, hints, signInResolver) {\n this.session = session;\n this.hints = hints;\n this.signInResolver = signInResolver;\n }\n /** @internal */\n static _fromError(authExtern, error) {\n const auth = _castAuth(authExtern);\n const serverResponse = error.customData._serverResponse;\n const hints = (serverResponse.mfaInfo || []).map(enrollment => MultiFactorInfoImpl._fromServerResponse(auth, enrollment));\n _assert(serverResponse.mfaPendingCredential, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const session = MultiFactorSessionImpl._fromMfaPendingCredential(serverResponse.mfaPendingCredential);\n return new MultiFactorResolverImpl(session, hints, async assertion => {\n const mfaResponse = await assertion._process(auth, session);\n // Clear out the unneeded fields from the old login response\n delete serverResponse.mfaInfo;\n delete serverResponse.mfaPendingCredential;\n // Use in the new token & refresh token in the old response\n const idTokenResponse = Object.assign(Object.assign({}, serverResponse), {\n idToken: mfaResponse.idToken,\n refreshToken: mfaResponse.refreshToken\n });\n // TODO: we should collapse this switch statement into UserCredentialImpl._forOperation and have it support the SIGN_IN case\n switch (error.operationType) {\n case \"signIn\" /* OperationType.SIGN_IN */:\n const userCredential = await UserCredentialImpl._fromIdTokenResponse(auth, error.operationType, idTokenResponse);\n await auth._updateCurrentUser(userCredential.user);\n return userCredential;\n case \"reauthenticate\" /* OperationType.REAUTHENTICATE */:\n _assert(error.user, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return UserCredentialImpl._forOperation(error.user, error.operationType, idTokenResponse);\n default:\n _fail(auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n }\n });\n }\n async resolveSignIn(assertionExtern) {\n const assertion = assertionExtern;\n return this.signInResolver(assertion);\n }\n}\n/**\r\n * Provides a {@link MultiFactorResolver} suitable for completion of a\r\n * multi-factor flow.\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param error - The {@link MultiFactorError} raised during a sign-in, or\r\n * reauthentication operation.\r\n *\r\n * @public\r\n */\nfunction getMultiFactorResolver(auth, error) {\n var _a;\n const authModular = getModularInstance(auth);\n const errorInternal = error;\n _assert(error.customData.operationType, authModular, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n _assert((_a = errorInternal.customData._serverResponse) === null || _a === void 0 ? 
void 0 : _a.mfaPendingCredential, authModular, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n return MultiFactorResolverImpl._fromError(authModular, errorInternal);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction startEnrollPhoneMfa(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts/mfaEnrollment:start\" /* Endpoint.START_MFA_ENROLLMENT */, _addTidIfNecessary(auth, request));\n}\nfunction finalizeEnrollPhoneMfa(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts/mfaEnrollment:finalize\" /* Endpoint.FINALIZE_MFA_ENROLLMENT */, _addTidIfNecessary(auth, request));\n}\nfunction startEnrollTotpMfa(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts/mfaEnrollment:start\" /* Endpoint.START_MFA_ENROLLMENT */, _addTidIfNecessary(auth, request));\n}\nfunction finalizeEnrollTotpMfa(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts/mfaEnrollment:finalize\" /* Endpoint.FINALIZE_MFA_ENROLLMENT */, _addTidIfNecessary(auth, request));\n}\nfunction withdrawMfa(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts/mfaEnrollment:withdraw\" /* Endpoint.WITHDRAW_MFA */, _addTidIfNecessary(auth, request));\n}\nclass MultiFactorUserImpl {\n constructor(user) {\n this.user = user;\n this.enrolledFactors = [];\n user._onReload(userInfo => {\n if (userInfo.mfaInfo) {\n this.enrolledFactors = userInfo.mfaInfo.map(enrollment => MultiFactorInfoImpl._fromServerResponse(user.auth, enrollment));\n }\n });\n }\n static _fromUser(user) {\n return new MultiFactorUserImpl(user);\n }\n async getSession() {\n return MultiFactorSessionImpl._fromIdtoken(await this.user.getIdToken(), this.user);\n }\n async enroll(assertionExtern, displayName) {\n const assertion = assertionExtern;\n const session = await this.getSession();\n const finalizeMfaResponse = await _logoutIfInvalidated(this.user, assertion._process(this.user.auth, session, displayName));\n // New tokens will be issued after enrollment of the new second factors.\n // They need to be updated on the user.\n await this.user._updateTokensIfNecessary(finalizeMfaResponse);\n // The user needs to be reloaded to get the new multi-factor information\n // from server. USER_RELOADED event will be triggered and `enrolledFactors`\n // will be updated.\n return this.user.reload();\n }\n async unenroll(infoOrUid) {\n const mfaEnrollmentId = typeof infoOrUid === 'string' ? 
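// Illustrative sketch (hypothetical flow): completing sign-in after the first factor fails
// with the assumed 'auth/multi-factor-auth-required' error code. getMultiFactorResolver and
// resolver.resolveSignIn are defined above; how the MultiFactorAssertion is produced
// (SMS code, TOTP, ...) is left to the caller as the hypothetical `buildAssertion`.
async function exampleResolveSecondFactor(auth, firstFactorSignIn, buildAssertion) {
  try {
    return await firstFactorSignIn();
  } catch (error) {
    if (error.code !== 'auth/multi-factor-auth-required') {
      throw error;
    }
    const resolver = getMultiFactorResolver(auth, error);
    // resolver.hints lists the user's enrolled second factors; verify one of them.
    const assertion = await buildAssertion(resolver.hints[0], resolver.session);
    return resolver.resolveSignIn(assertion);
  }
}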
infoOrUid : infoOrUid.uid;\n const idToken = await this.user.getIdToken();\n try {\n const idTokenResponse = await _logoutIfInvalidated(this.user, withdrawMfa(this.user.auth, {\n idToken,\n mfaEnrollmentId\n }));\n // Remove the second factor from the user's list.\n this.enrolledFactors = this.enrolledFactors.filter(({\n uid\n }) => uid !== mfaEnrollmentId);\n // Depending on whether the backend decided to revoke the user's session,\n // the tokenResponse may be empty. If the tokens were not updated (and they\n // are now invalid), reloading the user will discover this and invalidate\n // the user's state accordingly.\n await this.user._updateTokensIfNecessary(idTokenResponse);\n await this.user.reload();\n } catch (e) {\n throw e;\n }\n }\n}\nconst multiFactorUserCache = new WeakMap();\n/**\r\n * The {@link MultiFactorUser} corresponding to the user.\r\n *\r\n * @remarks\r\n * This is used to access all multi-factor properties and operations related to the user.\r\n *\r\n * @param user - The user.\r\n *\r\n * @public\r\n */\nfunction multiFactor(user) {\n const userModular = getModularInstance(user);\n if (!multiFactorUserCache.has(userModular)) {\n multiFactorUserCache.set(userModular, MultiFactorUserImpl._fromUser(userModular));\n }\n return multiFactorUserCache.get(userModular);\n}\nconst STORAGE_AVAILABLE_KEY = '__sak';\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n// There are two different browser persistence types: local and session.\n// Both have the same implementation but use a different underlying storage\n// object.\nclass BrowserPersistenceClass {\n constructor(storageRetriever, type) {\n this.storageRetriever = storageRetriever;\n this.type = type;\n }\n _isAvailable() {\n try {\n if (!this.storage) {\n return Promise.resolve(false);\n }\n this.storage.setItem(STORAGE_AVAILABLE_KEY, '1');\n this.storage.removeItem(STORAGE_AVAILABLE_KEY);\n return Promise.resolve(true);\n } catch (_a) {\n return Promise.resolve(false);\n }\n }\n _set(key, value) {\n this.storage.setItem(key, JSON.stringify(value));\n return Promise.resolve();\n }\n _get(key) {\n const json = this.storage.getItem(key);\n return Promise.resolve(json ? 
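// Illustrative sketch (hypothetical helper): the multiFactor() accessor above exposes the
// enrolled second factors and unenrollment. Withdrawing a factor may revoke the session, in
// which case the reload performed inside unenroll() signs the user out.
async function exampleUnenrollFirstFactor(user) {
  const mfaUser = multiFactor(user);
  const factors = mfaUser.enrolledFactors;
  if (factors.length === 0) {
    return null;
  }
  // unenroll accepts either the MultiFactorInfo object or its uid.
  await mfaUser.unenroll(factors[0]);
  return factors[0].uid;
}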
JSON.parse(json) : null);\n }\n _remove(key) {\n this.storage.removeItem(key);\n return Promise.resolve();\n }\n get storage() {\n return this.storageRetriever();\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction _iframeCannotSyncWebStorage() {\n const ua = getUA();\n return _isSafari(ua) || _isIOS(ua);\n}\n// The polling period in case events are not supported\nconst _POLLING_INTERVAL_MS$1 = 1000;\n// The IE 10 localStorage cross tab synchronization delay in milliseconds\nconst IE10_LOCAL_STORAGE_SYNC_DELAY = 10;\nlet BrowserLocalPersistence = /*#__PURE__*/(() => {\n class BrowserLocalPersistence extends BrowserPersistenceClass {\n constructor() {\n super(() => window.localStorage, \"LOCAL\" /* PersistenceType.LOCAL */);\n this.boundEventHandler = (event, poll) => this.onStorageEvent(event, poll);\n this.listeners = {};\n this.localCache = {};\n // setTimeout return value is platform specific\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.pollTimer = null;\n // Safari or iOS browser and embedded in an iframe.\n this.safariLocalStorageNotSynced = _iframeCannotSyncWebStorage() && _isIframe();\n // Whether to use polling instead of depending on window events\n this.fallbackToPolling = _isMobileBrowser();\n this._shouldAllowMigration = true;\n }\n forAllChangedKeys(cb) {\n // Check all keys with listeners on them.\n for (const key of Object.keys(this.listeners)) {\n // Get value from localStorage.\n const newValue = this.storage.getItem(key);\n const oldValue = this.localCache[key];\n // If local map value does not match, trigger listener with storage event.\n // Differentiate this simulated event from the real storage event.\n if (newValue !== oldValue) {\n cb(key, oldValue, newValue);\n }\n }\n }\n onStorageEvent(event, poll = false) {\n // Key would be null in some situations, like when localStorage is cleared\n if (!event.key) {\n this.forAllChangedKeys((key, _oldValue, newValue) => {\n this.notifyListeners(key, newValue);\n });\n return;\n }\n const key = event.key;\n // Check the mechanism how this event was detected.\n // The first event will dictate the mechanism to be used.\n if (poll) {\n // Environment detects storage changes via polling.\n // Remove storage event listener to prevent possible event duplication.\n this.detachListener();\n } else {\n // Environment detects storage changes via storage event listener.\n // Remove polling listener to prevent possible event duplication.\n this.stopPolling();\n }\n // Safari embedded iframe. 
Storage event will trigger with the delta\n // changes but no changes will be applied to the iframe localStorage.\n if (this.safariLocalStorageNotSynced) {\n // Get current iframe page value.\n const storedValue = this.storage.getItem(key);\n // Value not synchronized, synchronize manually.\n if (event.newValue !== storedValue) {\n if (event.newValue !== null) {\n // Value changed from current value.\n this.storage.setItem(key, event.newValue);\n } else {\n // Current value deleted.\n this.storage.removeItem(key);\n }\n } else if (this.localCache[key] === event.newValue && !poll) {\n // Already detected and processed, do not trigger listeners again.\n return;\n }\n }\n const triggerListeners = () => {\n // Keep local map up to date in case storage event is triggered before\n // poll.\n const storedValue = this.storage.getItem(key);\n if (!poll && this.localCache[key] === storedValue) {\n // Real storage event which has already been detected, do nothing.\n // This seems to trigger in some IE browsers for some reason.\n return;\n }\n this.notifyListeners(key, storedValue);\n };\n const storedValue = this.storage.getItem(key);\n if (_isIE10() && storedValue !== event.newValue && event.newValue !== event.oldValue) {\n // IE 10 has this weird bug where a storage event would trigger with the\n // correct key, oldValue and newValue but localStorage.getItem(key) does\n // not yield the updated value until a few milliseconds. This ensures\n // this recovers from that situation.\n setTimeout(triggerListeners, IE10_LOCAL_STORAGE_SYNC_DELAY);\n } else {\n triggerListeners();\n }\n }\n notifyListeners(key, value) {\n this.localCache[key] = value;\n const listeners = this.listeners[key];\n if (listeners) {\n for (const listener of Array.from(listeners)) {\n listener(value ? JSON.parse(value) : value);\n }\n }\n }\n startPolling() {\n this.stopPolling();\n this.pollTimer = setInterval(() => {\n this.forAllChangedKeys((key, oldValue, newValue) => {\n this.onStorageEvent(new StorageEvent('storage', {\n key,\n oldValue,\n newValue\n }), /* poll */true);\n });\n }, _POLLING_INTERVAL_MS$1);\n }\n stopPolling() {\n if (this.pollTimer) {\n clearInterval(this.pollTimer);\n this.pollTimer = null;\n }\n }\n attachListener() {\n window.addEventListener('storage', this.boundEventHandler);\n }\n detachListener() {\n window.removeEventListener('storage', this.boundEventHandler);\n }\n _addListener(key, listener) {\n if (Object.keys(this.listeners).length === 0) {\n // Whether browser can detect storage event when it had already been pushed to the background.\n // This may happen in some mobile browsers. 
A localStorage change in the foreground window\n // will not be detected in the background window via the storage event.\n // This was detected in iOS 7.x mobile browsers\n if (this.fallbackToPolling) {\n this.startPolling();\n } else {\n this.attachListener();\n }\n }\n if (!this.listeners[key]) {\n this.listeners[key] = new Set();\n // Populate the cache to avoid spuriously triggering on first poll.\n this.localCache[key] = this.storage.getItem(key);\n }\n this.listeners[key].add(listener);\n }\n _removeListener(key, listener) {\n if (this.listeners[key]) {\n this.listeners[key].delete(listener);\n if (this.listeners[key].size === 0) {\n delete this.listeners[key];\n }\n }\n if (Object.keys(this.listeners).length === 0) {\n this.detachListener();\n this.stopPolling();\n }\n }\n // Update local cache on base operations:\n async _set(key, value) {\n await super._set(key, value);\n this.localCache[key] = JSON.stringify(value);\n }\n async _get(key) {\n const value = await super._get(key);\n this.localCache[key] = JSON.stringify(value);\n return value;\n }\n async _remove(key) {\n await super._remove(key);\n delete this.localCache[key];\n }\n }\n BrowserLocalPersistence.type = 'LOCAL';\n /**\r\n * An implementation of {@link Persistence} of type `LOCAL` using `localStorage`\r\n * for the underlying storage.\r\n *\r\n * @public\r\n */\n return BrowserLocalPersistence;\n})();\nconst browserLocalPersistence = BrowserLocalPersistence;\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nlet BrowserSessionPersistence = /*#__PURE__*/(() => {\n class BrowserSessionPersistence extends BrowserPersistenceClass {\n constructor() {\n super(() => window.sessionStorage, \"SESSION\" /* PersistenceType.SESSION */);\n }\n _addListener(_key, _listener) {\n // Listeners are not supported for session storage since it cannot be shared across windows\n return;\n }\n _removeListener(_key, _listener) {\n // Listeners are not supported for session storage since it cannot be shared across windows\n return;\n }\n }\n BrowserSessionPersistence.type = 'SESSION';\n /**\r\n * An implementation of {@link Persistence} of `SESSION` using `sessionStorage`\r\n * for the underlying storage.\r\n *\r\n * @public\r\n */\n return BrowserSessionPersistence;\n})();\nconst browserSessionPersistence = BrowserSessionPersistence;\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations 
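// Illustrative sketch (hypothetical usage): choosing between the two browser persistences
// defined above. browserLocalPersistence (localStorage) survives tab and window restarts and
// syncs across tabs; browserSessionPersistence (sessionStorage) is scoped to the tab and, as
// noted above, supports no cross-window listeners.
async function exampleChoosePersistence(auth, rememberMe) {
  await setPersistence(auth, rememberMe ? browserLocalPersistence : browserSessionPersistence);
  // Sign-ins performed after this call, including redirect flows, use the chosen persistence.
}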
under the License.\r\n */\n/**\r\n * Shim for Promise.allSettled, note the slightly different format of `fulfilled` vs `status`.\r\n *\r\n * @param promises - Array of promises to wait on.\r\n */\nfunction _allSettled(promises) {\n return Promise.all(promises.map(async promise => {\n try {\n const value = await promise;\n return {\n fulfilled: true,\n value\n };\n } catch (reason) {\n return {\n fulfilled: false,\n reason\n };\n }\n }));\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Interface class for receiving messages.\r\n *\r\n */\nlet Receiver = /*#__PURE__*/(() => {\n class Receiver {\n constructor(eventTarget) {\n this.eventTarget = eventTarget;\n this.handlersMap = {};\n this.boundEventHandler = this.handleEvent.bind(this);\n }\n /**\r\n * Obtain an instance of a Receiver for a given event target, if none exists it will be created.\r\n *\r\n * @param eventTarget - An event target (such as window or self) through which the underlying\r\n * messages will be received.\r\n */\n static _getInstance(eventTarget) {\n // The results are stored in an array since objects can't be keys for other\n // objects. In addition, setting a unique property on an event target as a\n // hash map key may not be allowed due to CORS restrictions.\n const existingInstance = this.receivers.find(receiver => receiver.isListeningto(eventTarget));\n if (existingInstance) {\n return existingInstance;\n }\n const newInstance = new Receiver(eventTarget);\n this.receivers.push(newInstance);\n return newInstance;\n }\n isListeningto(eventTarget) {\n return this.eventTarget === eventTarget;\n }\n /**\r\n * Fans out a MessageEvent to the appropriate listeners.\r\n *\r\n * @remarks\r\n * Sends an {@link Status.ACK} upon receipt and a {@link Status.DONE} once all handlers have\r\n * finished processing.\r\n *\r\n * @param event - The MessageEvent.\r\n *\r\n */\n async handleEvent(event) {\n const messageEvent = event;\n const {\n eventId,\n eventType,\n data\n } = messageEvent.data;\n const handlers = this.handlersMap[eventType];\n if (!(handlers === null || handlers === void 0 ? 
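// Illustrative sketch: the _allSettled shim above never rejects, so callers can inspect every
// outcome through the { fulfilled, value } / { fulfilled, reason } shape instead of wrapping
// each promise in try/catch. This is an internal helper; the snippet only documents the shape.
async function exampleAllSettledShape() {
  const results = await _allSettled([Promise.resolve(1), Promise.reject(new Error('boom'))]);
  // results[0] is { fulfilled: true, value: 1 }
  // results[1] is { fulfilled: false, reason: Error('boom') }
  return results.filter(r => r.fulfilled).map(r => r.value);
}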
void 0 : handlers.size)) {\n return;\n }\n messageEvent.ports[0].postMessage({\n status: \"ack\" /* _Status.ACK */,\n eventId,\n eventType\n });\n const promises = Array.from(handlers).map(async handler => handler(messageEvent.origin, data));\n const response = await _allSettled(promises);\n messageEvent.ports[0].postMessage({\n status: \"done\" /* _Status.DONE */,\n eventId,\n eventType,\n response\n });\n }\n /**\r\n * Subscribe an event handler for a particular event.\r\n *\r\n * @param eventType - Event name to subscribe to.\r\n * @param eventHandler - The event handler which should receive the events.\r\n *\r\n */\n _subscribe(eventType, eventHandler) {\n if (Object.keys(this.handlersMap).length === 0) {\n this.eventTarget.addEventListener('message', this.boundEventHandler);\n }\n if (!this.handlersMap[eventType]) {\n this.handlersMap[eventType] = new Set();\n }\n this.handlersMap[eventType].add(eventHandler);\n }\n /**\r\n * Unsubscribe an event handler from a particular event.\r\n *\r\n * @param eventType - Event name to unsubscribe from.\r\n * @param eventHandler - Optinoal event handler, if none provided, unsubscribe all handlers on this event.\r\n *\r\n */\n _unsubscribe(eventType, eventHandler) {\n if (this.handlersMap[eventType] && eventHandler) {\n this.handlersMap[eventType].delete(eventHandler);\n }\n if (!eventHandler || this.handlersMap[eventType].size === 0) {\n delete this.handlersMap[eventType];\n }\n if (Object.keys(this.handlersMap).length === 0) {\n this.eventTarget.removeEventListener('message', this.boundEventHandler);\n }\n }\n }\n Receiver.receivers = [];\n\n /**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n return Receiver;\n})();\nfunction _generateEventId(prefix = '', digits = 10) {\n let random = '';\n for (let i = 0; i < digits; i++) {\n random += Math.floor(Math.random() * 10);\n }\n return prefix + random;\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Interface for sending messages and waiting for a completion response.\r\n *\r\n */\nclass Sender {\n constructor(target) {\n this.target = target;\n this.handlers = new Set();\n }\n /**\r\n * Unsubscribe the handler and remove it from our tracking Set.\r\n *\r\n * @param handler - The handler to unsubscribe.\r\n */\n removeMessageHandler(handler) {\n if (handler.messageChannel) {\n 
handler.messageChannel.port1.removeEventListener('message', handler.onMessage);\n handler.messageChannel.port1.close();\n }\n this.handlers.delete(handler);\n }\n /**\r\n * Send a message to the Receiver located at {@link target}.\r\n *\r\n * @remarks\r\n * We'll first wait a bit for an ACK , if we get one we will wait significantly longer until the\r\n * receiver has had a chance to fully process the event.\r\n *\r\n * @param eventType - Type of event to send.\r\n * @param data - The payload of the event.\r\n * @param timeout - Timeout for waiting on an ACK from the receiver.\r\n *\r\n * @returns An array of settled promises from all the handlers that were listening on the receiver.\r\n */\n async _send(eventType, data, timeout = 50 /* _TimeoutDuration.ACK */) {\n const messageChannel = typeof MessageChannel !== 'undefined' ? new MessageChannel() : null;\n if (!messageChannel) {\n throw new Error(\"connection_unavailable\" /* _MessageError.CONNECTION_UNAVAILABLE */);\n }\n // Node timers and browser timers return fundamentally different types.\n // We don't actually care what the value is but TS won't accept unknown and\n // we can't cast properly in both environments.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n let completionTimer;\n let handler;\n return new Promise((resolve, reject) => {\n const eventId = _generateEventId('', 20);\n messageChannel.port1.start();\n const ackTimer = setTimeout(() => {\n reject(new Error(\"unsupported_event\" /* _MessageError.UNSUPPORTED_EVENT */));\n }, timeout);\n handler = {\n messageChannel,\n onMessage(event) {\n const messageEvent = event;\n if (messageEvent.data.eventId !== eventId) {\n return;\n }\n switch (messageEvent.data.status) {\n case \"ack\" /* _Status.ACK */:\n // The receiver should ACK first.\n clearTimeout(ackTimer);\n completionTimer = setTimeout(() => {\n reject(new Error(\"timeout\" /* _MessageError.TIMEOUT */));\n }, 3000 /* _TimeoutDuration.COMPLETION */);\n break;\n case \"done\" /* _Status.DONE */:\n // Once the receiver's handlers are finished we will get the results.\n clearTimeout(completionTimer);\n resolve(messageEvent.data.response);\n break;\n default:\n clearTimeout(ackTimer);\n clearTimeout(completionTimer);\n reject(new Error(\"invalid_response\" /* _MessageError.INVALID_RESPONSE */));\n break;\n }\n }\n };\n this.handlers.add(handler);\n messageChannel.port1.addEventListener('message', handler.onMessage);\n this.target.postMessage({\n eventType,\n eventId,\n data\n }, [messageChannel.port2]);\n }).finally(() => {\n if (handler) {\n this.removeMessageHandler(handler);\n }\n });\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Lazy accessor for window, since the compat layer won't tree shake this out,\r\n * we need to make sure not to mess with window unless we have to\r\n */\nfunction _window() {\n return window;\n}\nfunction _setWindowLocation(url) {\n _window().location.href = 
url;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC.\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction _isWorker() {\n return typeof _window()['WorkerGlobalScope'] !== 'undefined' && typeof _window()['importScripts'] === 'function';\n}\nasync function _getActiveServiceWorker() {\n if (!(navigator === null || navigator === void 0 ? void 0 : navigator.serviceWorker)) {\n return null;\n }\n try {\n const registration = await navigator.serviceWorker.ready;\n return registration.active;\n } catch (_a) {\n return null;\n }\n}\nfunction _getServiceWorkerController() {\n var _a;\n return ((_a = navigator === null || navigator === void 0 ? void 0 : navigator.serviceWorker) === null || _a === void 0 ? void 0 : _a.controller) || null;\n}\nfunction _getWorkerGlobalScope() {\n return _isWorker() ? self : null;\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst DB_NAME = 'firebaseLocalStorageDb';\nconst DB_VERSION = 1;\nconst DB_OBJECTSTORE_NAME = 'firebaseLocalStorage';\nconst DB_DATA_KEYPATH = 'fbase_key';\n/**\r\n * Promise wrapper for IDBRequest\r\n *\r\n * Unfortunately we can't cleanly extend Promise since promises are not callable in ES6\r\n *\r\n */\nclass DBPromise {\n constructor(request) {\n this.request = request;\n }\n toPromise() {\n return new Promise((resolve, reject) => {\n this.request.addEventListener('success', () => {\n resolve(this.request.result);\n });\n this.request.addEventListener('error', () => {\n reject(this.request.error);\n });\n });\n }\n}\nfunction getObjectStore(db, isReadWrite) {\n return db.transaction([DB_OBJECTSTORE_NAME], isReadWrite ? 'readwrite' : 'readonly').objectStore(DB_OBJECTSTORE_NAME);\n}\nfunction _deleteDatabase() {\n const request = indexedDB.deleteDatabase(DB_NAME);\n return new DBPromise(request).toPromise();\n}\nfunction _openDatabase() {\n const request = indexedDB.open(DB_NAME, DB_VERSION);\n return new Promise((resolve, reject) => {\n request.addEventListener('error', () => {\n reject(request.error);\n });\n request.addEventListener('upgradeneeded', () => {\n const db = request.result;\n try {\n db.createObjectStore(DB_OBJECTSTORE_NAME, {\n keyPath: DB_DATA_KEYPATH\n });\n } catch (e) {\n reject(e);\n }\n });\n request.addEventListener('success', async () => {\n const db = request.result;\n // Strange bug that occurs in Firefox when multiple tabs are opened at the\n // same time. 
The only way to recover seems to be deleting the database\n // and re-initializing it.\n // https://github.com/firebase/firebase-js-sdk/issues/634\n if (!db.objectStoreNames.contains(DB_OBJECTSTORE_NAME)) {\n // Need to close the database or else you get a `blocked` event\n db.close();\n await _deleteDatabase();\n resolve(await _openDatabase());\n } else {\n resolve(db);\n }\n });\n });\n}\nasync function _putObject(db, key, value) {\n const request = getObjectStore(db, true).put({\n [DB_DATA_KEYPATH]: key,\n value\n });\n return new DBPromise(request).toPromise();\n}\nasync function getObject(db, key) {\n const request = getObjectStore(db, false).get(key);\n const data = await new DBPromise(request).toPromise();\n return data === undefined ? null : data.value;\n}\nfunction _deleteObject(db, key) {\n const request = getObjectStore(db, true).delete(key);\n return new DBPromise(request).toPromise();\n}\nconst _POLLING_INTERVAL_MS = 800;\nconst _TRANSACTION_RETRY_COUNT = 3;\nlet IndexedDBLocalPersistence = /*#__PURE__*/(() => {\n class IndexedDBLocalPersistence {\n constructor() {\n this.type = \"LOCAL\" /* PersistenceType.LOCAL */;\n this._shouldAllowMigration = true;\n this.listeners = {};\n this.localCache = {};\n // setTimeout return value is platform specific\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.pollTimer = null;\n this.pendingWrites = 0;\n this.receiver = null;\n this.sender = null;\n this.serviceWorkerReceiverAvailable = false;\n this.activeServiceWorker = null;\n // Fire & forget the service worker registration as it may never resolve\n this._workerInitializationPromise = this.initializeServiceWorkerMessaging().then(() => {}, () => {});\n }\n async _openDb() {\n if (this.db) {\n return this.db;\n }\n this.db = await _openDatabase();\n return this.db;\n }\n async _withRetries(op) {\n let numAttempts = 0;\n while (true) {\n try {\n const db = await this._openDb();\n return await op(db);\n } catch (e) {\n if (numAttempts++ > _TRANSACTION_RETRY_COUNT) {\n throw e;\n }\n if (this.db) {\n this.db.close();\n this.db = undefined;\n }\n // TODO: consider adding exponential backoff\n }\n }\n }\n /**\r\n * IndexedDB events do not propagate from the main window to the worker context. We rely on a\r\n * postMessage interface to send these events to the worker ourselves.\r\n */\n async initializeServiceWorkerMessaging() {\n return _isWorker() ? 
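// Illustrative sketch: DBPromise above adapts a one-shot IDBRequest to a Promise via its
// success/error events. The database and store names here are hypothetical, not the SDK's
// firebaseLocalStorageDb, and upgradeneeded is handled separately because it fires before
// the success event.
function exampleOpenDatabase() {
  const request = indexedDB.open('example-db', 1);
  request.addEventListener('upgradeneeded', () => {
    request.result.createObjectStore('example-store', { keyPath: 'id' });
  });
  return new DBPromise(request).toPromise();
}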
this.initializeReceiver() : this.initializeSender();\n }\n /**\r\n * As the worker we should listen to events from the main window.\r\n */\n async initializeReceiver() {\n this.receiver = Receiver._getInstance(_getWorkerGlobalScope());\n // Refresh from persistence if we receive a KeyChanged message.\n this.receiver._subscribe(\"keyChanged\" /* _EventType.KEY_CHANGED */, async (_origin, data) => {\n const keys = await this._poll();\n return {\n keyProcessed: keys.includes(data.key)\n };\n });\n // Let the sender know that we are listening so they give us more timeout.\n this.receiver._subscribe(\"ping\" /* _EventType.PING */, async (_origin, _data) => {\n return [\"keyChanged\" /* _EventType.KEY_CHANGED */];\n });\n }\n /**\r\n * As the main window, we should let the worker know when keys change (set and remove).\r\n *\r\n * @remarks\r\n * {@link https://developer.mozilla.org/en-US/docs/Web/API/ServiceWorkerContainer/ready | ServiceWorkerContainer.ready}\r\n * may not resolve.\r\n */\n async initializeSender() {\n var _a, _b;\n // Check to see if there's an active service worker.\n this.activeServiceWorker = await _getActiveServiceWorker();\n if (!this.activeServiceWorker) {\n return;\n }\n this.sender = new Sender(this.activeServiceWorker);\n // Ping the service worker to check what events they can handle.\n const results = await this.sender._send(\"ping\" /* _EventType.PING */, {}, 800 /* _TimeoutDuration.LONG_ACK */);\n if (!results) {\n return;\n }\n if (((_a = results[0]) === null || _a === void 0 ? void 0 : _a.fulfilled) && ((_b = results[0]) === null || _b === void 0 ? void 0 : _b.value.includes(\"keyChanged\" /* _EventType.KEY_CHANGED */))) {\n this.serviceWorkerReceiverAvailable = true;\n }\n }\n /**\r\n * Let the worker know about a changed key, the exact key doesn't technically matter since the\r\n * worker will just trigger a full sync anyway.\r\n *\r\n * @remarks\r\n * For now, we only support one service worker per page.\r\n *\r\n * @param key - Storage key which changed.\r\n */\n async notifyServiceWorker(key) {\n if (!this.sender || !this.activeServiceWorker || _getServiceWorkerController() !== this.activeServiceWorker) {\n return;\n }\n try {\n await this.sender._send(\"keyChanged\" /* _EventType.KEY_CHANGED */, {\n key\n },\n // Use long timeout if receiver has previously responded to a ping from us.\n this.serviceWorkerReceiverAvailable ? 800 /* _TimeoutDuration.LONG_ACK */ : 50 /* _TimeoutDuration.ACK */);\n } catch (_a) {\n // This is a best effort approach. 
Ignore errors.\n }\n }\n async _isAvailable() {\n try {\n if (!indexedDB) {\n return false;\n }\n const db = await _openDatabase();\n await _putObject(db, STORAGE_AVAILABLE_KEY, '1');\n await _deleteObject(db, STORAGE_AVAILABLE_KEY);\n return true;\n } catch (_a) {}\n return false;\n }\n async _withPendingWrite(write) {\n this.pendingWrites++;\n try {\n await write();\n } finally {\n this.pendingWrites--;\n }\n }\n async _set(key, value) {\n return this._withPendingWrite(async () => {\n await this._withRetries(db => _putObject(db, key, value));\n this.localCache[key] = value;\n return this.notifyServiceWorker(key);\n });\n }\n async _get(key) {\n const obj = await this._withRetries(db => getObject(db, key));\n this.localCache[key] = obj;\n return obj;\n }\n async _remove(key) {\n return this._withPendingWrite(async () => {\n await this._withRetries(db => _deleteObject(db, key));\n delete this.localCache[key];\n return this.notifyServiceWorker(key);\n });\n }\n async _poll() {\n // TODO: check if we need to fallback if getAll is not supported\n const result = await this._withRetries(db => {\n const getAllRequest = getObjectStore(db, false).getAll();\n return new DBPromise(getAllRequest).toPromise();\n });\n if (!result) {\n return [];\n }\n // If we have pending writes in progress abort, we'll get picked up on the next poll\n if (this.pendingWrites !== 0) {\n return [];\n }\n const keys = [];\n const keysInResult = new Set();\n if (result.length !== 0) {\n for (const {\n fbase_key: key,\n value\n } of result) {\n keysInResult.add(key);\n if (JSON.stringify(this.localCache[key]) !== JSON.stringify(value)) {\n this.notifyListeners(key, value);\n keys.push(key);\n }\n }\n }\n for (const localKey of Object.keys(this.localCache)) {\n if (this.localCache[localKey] && !keysInResult.has(localKey)) {\n // Deleted\n this.notifyListeners(localKey, null);\n keys.push(localKey);\n }\n }\n return keys;\n }\n notifyListeners(key, newValue) {\n this.localCache[key] = newValue;\n const listeners = this.listeners[key];\n if (listeners) {\n for (const listener of Array.from(listeners)) {\n listener(newValue);\n }\n }\n }\n startPolling() {\n this.stopPolling();\n this.pollTimer = setInterval(async () => this._poll(), _POLLING_INTERVAL_MS);\n }\n stopPolling() {\n if (this.pollTimer) {\n clearInterval(this.pollTimer);\n this.pollTimer = null;\n }\n }\n _addListener(key, listener) {\n if (Object.keys(this.listeners).length === 0) {\n this.startPolling();\n }\n if (!this.listeners[key]) {\n this.listeners[key] = new Set();\n // Populate the cache to avoid spuriously triggering on first poll.\n void this._get(key); // This can happen in the background async and we can return immediately.\n }\n this.listeners[key].add(listener);\n }\n _removeListener(key, listener) {\n if (this.listeners[key]) {\n this.listeners[key].delete(listener);\n if (this.listeners[key].size === 0) {\n delete this.listeners[key];\n }\n }\n if (Object.keys(this.listeners).length === 0) {\n this.stopPolling();\n }\n }\n }\n IndexedDBLocalPersistence.type = 'LOCAL';\n /**\r\n * An implementation of {@link Persistence} of type `LOCAL` using `indexedDB`\r\n * for the underlying storage.\r\n *\r\n * @public\r\n */\n return IndexedDBLocalPersistence;\n})();\nconst indexedDBLocalPersistence = IndexedDBLocalPersistence;\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of 
the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction startSignInPhoneMfa(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts/mfaSignIn:start\" /* Endpoint.START_MFA_SIGN_IN */, _addTidIfNecessary(auth, request));\n}\nfunction finalizeSignInPhoneMfa(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts/mfaSignIn:finalize\" /* Endpoint.FINALIZE_MFA_SIGN_IN */, _addTidIfNecessary(auth, request));\n}\nfunction finalizeSignInTotpMfa(auth, request) {\n return _performApiRequest(auth, \"POST\" /* HttpMethod.POST */, \"/v2/accounts/mfaSignIn:finalize\" /* Endpoint.FINALIZE_MFA_SIGN_IN */, _addTidIfNecessary(auth, request));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst _SOLVE_TIME_MS = 500;\nconst _EXPIRATION_TIME_MS = 60000;\nconst _WIDGET_ID_START = 1000000000000;\nclass MockReCaptcha {\n constructor(auth) {\n this.auth = auth;\n this.counter = _WIDGET_ID_START;\n this._widgets = new Map();\n }\n render(container, parameters) {\n const id = this.counter;\n this._widgets.set(id, new MockWidget(container, this.auth.name, parameters || {}));\n this.counter++;\n return id;\n }\n reset(optWidgetId) {\n var _a;\n const id = optWidgetId || _WIDGET_ID_START;\n void ((_a = this._widgets.get(id)) === null || _a === void 0 ? void 0 : _a.delete());\n this._widgets.delete(id);\n }\n getResponse(optWidgetId) {\n var _a;\n const id = optWidgetId || _WIDGET_ID_START;\n return ((_a = this._widgets.get(id)) === null || _a === void 0 ? void 0 : _a.getResponse()) || '';\n }\n async execute(optWidgetId) {\n var _a;\n const id = optWidgetId || _WIDGET_ID_START;\n void ((_a = this._widgets.get(id)) === null || _a === void 0 ? void 0 : _a.execute());\n return '';\n }\n}\nclass MockWidget {\n constructor(containerOrId, appName, params) {\n this.params = params;\n this.timerId = null;\n this.deleted = false;\n this.responseToken = null;\n this.clickHandler = () => {\n this.execute();\n };\n const container = typeof containerOrId === 'string' ? 
document.getElementById(containerOrId) : containerOrId;\n _assert(container, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */, {\n appName\n });\n this.container = container;\n this.isVisible = this.params.size !== 'invisible';\n if (this.isVisible) {\n this.execute();\n } else {\n this.container.addEventListener('click', this.clickHandler);\n }\n }\n getResponse() {\n this.checkIfDeleted();\n return this.responseToken;\n }\n delete() {\n this.checkIfDeleted();\n this.deleted = true;\n if (this.timerId) {\n clearTimeout(this.timerId);\n this.timerId = null;\n }\n this.container.removeEventListener('click', this.clickHandler);\n }\n execute() {\n this.checkIfDeleted();\n if (this.timerId) {\n return;\n }\n this.timerId = window.setTimeout(() => {\n this.responseToken = generateRandomAlphaNumericString(50);\n const {\n callback,\n 'expired-callback': expiredCallback\n } = this.params;\n if (callback) {\n try {\n callback(this.responseToken);\n } catch (e) {}\n }\n this.timerId = window.setTimeout(() => {\n this.timerId = null;\n this.responseToken = null;\n if (expiredCallback) {\n try {\n expiredCallback();\n } catch (e) {}\n }\n if (this.isVisible) {\n this.execute();\n }\n }, _EXPIRATION_TIME_MS);\n }, _SOLVE_TIME_MS);\n }\n checkIfDeleted() {\n if (this.deleted) {\n throw new Error('reCAPTCHA mock was already deleted!');\n }\n }\n}\nfunction generateRandomAlphaNumericString(len) {\n const chars = [];\n const allowedChars = '1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';\n for (let i = 0; i < len; i++) {\n chars.push(allowedChars.charAt(Math.floor(Math.random() * allowedChars.length)));\n }\n return chars.join('');\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n// ReCaptcha will load using the same callback, so the callback function needs\n// to be kept around\nconst _JSLOAD_CALLBACK = _generateCallbackName('rcb');\nconst NETWORK_TIMEOUT_DELAY = new Delay(30000, 60000);\n/**\r\n * Loader for the GReCaptcha library. There should only ever be one of this.\r\n */\nclass ReCaptchaLoaderImpl {\n constructor() {\n var _a;\n this.hostLanguage = '';\n this.counter = 0;\n /**\r\n * Check for `render()` method. `window.grecaptcha` will exist if the Enterprise\r\n * version of the ReCAPTCHA script was loaded by someone else (e.g. App Check) but\r\n * `window.grecaptcha.render()` will not. Another load will add it.\r\n */\n this.librarySeparatelyLoaded = !!((_a = _window().grecaptcha) === null || _a === void 0 ? 
void 0 : _a.render);\n  }\n  load(auth, hl = '') {\n    _assert(isHostLanguageValid(hl), auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n    if (this.shouldResolveImmediately(hl) && isV2(_window().grecaptcha)) {\n      return Promise.resolve(_window().grecaptcha);\n    }\n    return new Promise((resolve, reject) => {\n      const networkTimeout = _window().setTimeout(() => {\n        reject(_createError(auth, \"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */));\n      }, NETWORK_TIMEOUT_DELAY.get());\n      _window()[_JSLOAD_CALLBACK] = () => {\n        _window().clearTimeout(networkTimeout);\n        delete _window()[_JSLOAD_CALLBACK];\n        const recaptcha = _window().grecaptcha;\n        if (!recaptcha || !isV2(recaptcha)) {\n          reject(_createError(auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */));\n          return;\n        }\n        // Wrap the grecaptcha render function so that we know if the developer has\n        // called it separately\n        const render = recaptcha.render;\n        recaptcha.render = (container, params) => {\n          const widgetId = render(container, params);\n          this.counter++;\n          return widgetId;\n        };\n        this.hostLanguage = hl;\n        resolve(recaptcha);\n      };\n      const url = `${_recaptchaV2ScriptUrl()}?${querystring({\n        onload: _JSLOAD_CALLBACK,\n        render: 'explicit',\n        hl\n      })}`;\n      _loadJS(url).catch(() => {\n        clearTimeout(networkTimeout);\n        reject(_createError(auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */));\n      });\n    });\n  }\n  clearedOneInstance() {\n    this.counter--;\n  }\n  shouldResolveImmediately(hl) {\n    var _a;\n    // We can resolve immediately if:\n    //   • grecaptcha is already defined AND (\n    //     1. the requested language codes are the same OR\n    //     2. there exists already a ReCaptcha on the page\n    //     3. the library was already loaded by the app\n    // In cases (2) and (3), we _can't_ reload as it would break the recaptchas\n    // that are already in the page\n    return !!((_a = _window().grecaptcha) === null || _a === void 0 ? void 0 : _a.render) && (hl === this.hostLanguage || this.counter > 0 || this.librarySeparatelyLoaded);\n  }\n}\nfunction isHostLanguageValid(hl) {\n  return hl.length <= 6 && /^\\s*[a-zA-Z0-9\\-]*\\s*$/.test(hl);\n}\nclass MockReCaptchaLoaderImpl {\n  async load(auth) {\n    return new MockReCaptcha(auth);\n  }\n  clearedOneInstance() {}\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst RECAPTCHA_VERIFIER_TYPE = 'recaptcha';\nconst DEFAULT_PARAMS = {\n  theme: 'light',\n  type: 'image'\n};\n/**\r\n * An {@link https://www.google.com/recaptcha/ | reCAPTCHA}-based application verifier.\r\n *\r\n * @remarks\r\n * `RecaptchaVerifier` does not work in a Node.js environment.\r\n *\r\n * @public\r\n */\nclass RecaptchaVerifier {\n  /**\r\n * @param authExtern - The corresponding Firebase {@link Auth} instance.\r\n *\r\n * @param containerOrId - The reCAPTCHA container parameter.\r\n *\r\n * @remarks\r\n * This has a different meaning depending on whether the reCAPTCHA is hidden or visible. 
For a\r\n * visible reCAPTCHA the container must be empty. If a string is used, it has to correspond to\r\n * an element ID. The corresponding element must also be in the DOM at the time of\r\n * initialization.\r\n *\r\n * @param parameters - The optional reCAPTCHA parameters.\r\n *\r\n * @remarks\r\n * Check the reCAPTCHA docs for a comprehensive list. All parameters are accepted except for\r\n * the sitekey. Firebase Auth backend provisions a reCAPTCHA for each project and will\r\n * configure this upon rendering. For an invisible reCAPTCHA, a size key must have the value\r\n * 'invisible'.\r\n */\n  constructor(authExtern, containerOrId, parameters = Object.assign({}, DEFAULT_PARAMS)) {\n    this.parameters = parameters;\n    /**\r\n * The application verifier type.\r\n *\r\n * @remarks\r\n * For a reCAPTCHA verifier, this is 'recaptcha'.\r\n */\n    this.type = RECAPTCHA_VERIFIER_TYPE;\n    this.destroyed = false;\n    this.widgetId = null;\n    this.tokenChangeListeners = new Set();\n    this.renderPromise = null;\n    this.recaptcha = null;\n    this.auth = _castAuth(authExtern);\n    this.isInvisible = this.parameters.size === 'invisible';\n    _assert(typeof document !== 'undefined', this.auth, \"operation-not-supported-in-this-environment\" /* AuthErrorCode.OPERATION_NOT_SUPPORTED */);\n    const container = typeof containerOrId === 'string' ? document.getElementById(containerOrId) : containerOrId;\n    _assert(container, this.auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n    this.container = container;\n    this.parameters.callback = this.makeTokenCallback(this.parameters.callback);\n    this._recaptchaLoader = this.auth.settings.appVerificationDisabledForTesting ? new MockReCaptchaLoaderImpl() : new ReCaptchaLoaderImpl();\n    this.validateStartingState();\n    // TODO: Figure out if sdk version is needed\n  }\n  /**\r\n * Waits for the user to solve the reCAPTCHA and resolves with the reCAPTCHA token.\r\n *\r\n * @returns A Promise for the reCAPTCHA token.\r\n */\n  async verify() {\n    this.assertNotDestroyed();\n    const id = await this.render();\n    const recaptcha = this.getAssertedRecaptcha();\n    const response = recaptcha.getResponse(id);\n    if (response) {\n      return response;\n    }\n    return new Promise(resolve => {\n      const tokenChange = token => {\n        if (!token) {\n          return; // Ignore token expirations.\n        }\n        this.tokenChangeListeners.delete(tokenChange);\n        resolve(token);\n      };\n      this.tokenChangeListeners.add(tokenChange);\n      if (this.isInvisible) {\n        recaptcha.execute(id);\n      }\n    });\n  }\n  /**\r\n * Renders the reCAPTCHA widget on the page.\r\n *\r\n * @returns A Promise that resolves with the reCAPTCHA widget ID.\r\n */\n  render() {\n    try {\n      this.assertNotDestroyed();\n    } catch (e) {\n      // This method returns a promise. 
Since it's not async (we want to return the\n // _same_ promise if rendering is still occurring), the API surface should\n // reject with the error rather than just throw\n return Promise.reject(e);\n }\n if (this.renderPromise) {\n return this.renderPromise;\n }\n this.renderPromise = this.makeRenderPromise().catch(e => {\n this.renderPromise = null;\n throw e;\n });\n return this.renderPromise;\n }\n /** @internal */\n _reset() {\n this.assertNotDestroyed();\n if (this.widgetId !== null) {\n this.getAssertedRecaptcha().reset(this.widgetId);\n }\n }\n /**\r\n * Clears the reCAPTCHA widget from the page and destroys the instance.\r\n */\n clear() {\n this.assertNotDestroyed();\n this.destroyed = true;\n this._recaptchaLoader.clearedOneInstance();\n if (!this.isInvisible) {\n this.container.childNodes.forEach(node => {\n this.container.removeChild(node);\n });\n }\n }\n validateStartingState() {\n _assert(!this.parameters.sitekey, this.auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n _assert(this.isInvisible || !this.container.hasChildNodes(), this.auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n _assert(typeof document !== 'undefined', this.auth, \"operation-not-supported-in-this-environment\" /* AuthErrorCode.OPERATION_NOT_SUPPORTED */);\n }\n makeTokenCallback(existing) {\n return token => {\n this.tokenChangeListeners.forEach(listener => listener(token));\n if (typeof existing === 'function') {\n existing(token);\n } else if (typeof existing === 'string') {\n const globalFunc = _window()[existing];\n if (typeof globalFunc === 'function') {\n globalFunc(token);\n }\n }\n };\n }\n assertNotDestroyed() {\n _assert(!this.destroyed, this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n }\n async makeRenderPromise() {\n await this.init();\n if (!this.widgetId) {\n let container = this.container;\n if (!this.isInvisible) {\n const guaranteedEmpty = document.createElement('div');\n container.appendChild(guaranteedEmpty);\n container = guaranteedEmpty;\n }\n this.widgetId = this.getAssertedRecaptcha().render(container, this.parameters);\n }\n return this.widgetId;\n }\n async init() {\n _assert(_isHttpOrHttps() && !_isWorker(), this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n await domReady();\n this.recaptcha = await this._recaptchaLoader.load(this.auth, this.auth.languageCode || undefined);\n const siteKey = await getRecaptchaParams(this.auth);\n _assert(siteKey, this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n this.parameters.sitekey = siteKey;\n }\n getAssertedRecaptcha() {\n _assert(this.recaptcha, this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return this.recaptcha;\n }\n}\nfunction domReady() {\n let resolver = null;\n return new Promise(resolve => {\n if (document.readyState === 'complete') {\n resolve();\n return;\n }\n // Document not ready, wait for load before resolving.\n // Save resolver, so we can remove listener in case it was externally\n // cancelled.\n resolver = () => resolve();\n window.addEventListener('load', resolver);\n }).catch(e => {\n if (resolver) {\n window.removeEventListener('load', resolver);\n }\n throw e;\n });\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable 
law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass ConfirmationResultImpl {\n constructor(verificationId, onConfirmation) {\n this.verificationId = verificationId;\n this.onConfirmation = onConfirmation;\n }\n confirm(verificationCode) {\n const authCredential = PhoneAuthCredential._fromVerification(this.verificationId, verificationCode);\n return this.onConfirmation(authCredential);\n }\n}\n/**\r\n * Asynchronously signs in using a phone number.\r\n *\r\n * @remarks\r\n * This method sends a code via SMS to the given\r\n * phone number, and returns a {@link ConfirmationResult}. After the user\r\n * provides the code sent to their phone, call {@link ConfirmationResult.confirm}\r\n * with the code to sign the user in.\r\n *\r\n * For abuse prevention, this method also requires a {@link ApplicationVerifier}.\r\n * This SDK includes a reCAPTCHA-based implementation, {@link RecaptchaVerifier}.\r\n * This function can work on other platforms that do not support the\r\n * {@link RecaptchaVerifier} (like React Native), but you need to use a\r\n * third-party {@link ApplicationVerifier} implementation.\r\n *\r\n * This method does not work in a Node.js environment or with {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```javascript\r\n * // 'recaptcha-container' is the ID of an element in the DOM.\r\n * const applicationVerifier = new firebase.auth.RecaptchaVerifier('recaptcha-container');\r\n * const confirmationResult = await signInWithPhoneNumber(auth, phoneNumber, applicationVerifier);\r\n * // Obtain a verificationCode from the user.\r\n * const credential = await confirmationResult.confirm(verificationCode);\r\n * ```\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param phoneNumber - The user's phone number in E.164 format (e.g. +16505550101).\r\n * @param appVerifier - The {@link ApplicationVerifier}.\r\n *\r\n * @public\r\n */\nasync function signInWithPhoneNumber(auth, phoneNumber, appVerifier) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const authInternal = _castAuth(auth);\n const verificationId = await _verifyPhoneNumber(authInternal, phoneNumber, getModularInstance(appVerifier));\n return new ConfirmationResultImpl(verificationId, cred => signInWithCredential(authInternal, cred));\n}\n/**\r\n * Links the user account with the given phone number.\r\n *\r\n * @remarks\r\n * This method does not work in a Node.js environment.\r\n *\r\n * @param user - The user.\r\n * @param phoneNumber - The user's phone number in E.164 format (e.g. 
+16505550101).\r\n * @param appVerifier - The {@link ApplicationVerifier}.\r\n *\r\n * @public\r\n */\nasync function linkWithPhoneNumber(user, phoneNumber, appVerifier) {\n const userInternal = getModularInstance(user);\n await _assertLinkedStatus(false, userInternal, \"phone\" /* ProviderId.PHONE */);\n const verificationId = await _verifyPhoneNumber(userInternal.auth, phoneNumber, getModularInstance(appVerifier));\n return new ConfirmationResultImpl(verificationId, cred => linkWithCredential(userInternal, cred));\n}\n/**\r\n * Re-authenticates a user using a fresh phone credential.\r\n *\r\n * @remarks\r\n * Use before operations such as {@link updatePassword} that require tokens from recent sign-in attempts.\r\n *\r\n * This method does not work in a Node.js environment or on any {@link User} signed in by\r\n * {@link Auth} instances created with a {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @param user - The user.\r\n * @param phoneNumber - The user's phone number in E.164 format (e.g. +16505550101).\r\n * @param appVerifier - The {@link ApplicationVerifier}.\r\n *\r\n * @public\r\n */\nasync function reauthenticateWithPhoneNumber(user, phoneNumber, appVerifier) {\n const userInternal = getModularInstance(user);\n if (_isFirebaseServerApp(userInternal.auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(userInternal.auth));\n }\n const verificationId = await _verifyPhoneNumber(userInternal.auth, phoneNumber, getModularInstance(appVerifier));\n return new ConfirmationResultImpl(verificationId, cred => reauthenticateWithCredential(userInternal, cred));\n}\n/**\r\n * Returns a verification ID to be used in conjunction with the SMS code that is sent.\r\n *\r\n */\nasync function _verifyPhoneNumber(auth, options, verifier) {\n var _a;\n const recaptchaToken = await verifier.verify();\n try {\n _assert(typeof recaptchaToken === 'string', auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n _assert(verifier.type === RECAPTCHA_VERIFIER_TYPE, auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n let phoneInfoOptions;\n if (typeof options === 'string') {\n phoneInfoOptions = {\n phoneNumber: options\n };\n } else {\n phoneInfoOptions = options;\n }\n if ('session' in phoneInfoOptions) {\n const session = phoneInfoOptions.session;\n if ('phoneNumber' in phoneInfoOptions) {\n _assert(session.type === \"enroll\" /* MultiFactorSessionType.ENROLL */, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const response = await startEnrollPhoneMfa(auth, {\n idToken: session.credential,\n phoneEnrollmentInfo: {\n phoneNumber: phoneInfoOptions.phoneNumber,\n recaptchaToken\n }\n });\n return response.phoneSessionInfo.sessionInfo;\n } else {\n _assert(session.type === \"signin\" /* MultiFactorSessionType.SIGN_IN */, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const mfaEnrollmentId = ((_a = phoneInfoOptions.multiFactorHint) === null || _a === void 0 ? 
void 0 : _a.uid) || phoneInfoOptions.multiFactorUid;\n _assert(mfaEnrollmentId, auth, \"missing-multi-factor-info\" /* AuthErrorCode.MISSING_MFA_INFO */);\n const response = await startSignInPhoneMfa(auth, {\n mfaPendingCredential: session.credential,\n mfaEnrollmentId,\n phoneSignInInfo: {\n recaptchaToken\n }\n });\n return response.phoneResponseInfo.sessionInfo;\n }\n } else {\n const {\n sessionInfo\n } = await sendPhoneVerificationCode(auth, {\n phoneNumber: phoneInfoOptions.phoneNumber,\n recaptchaToken\n });\n return sessionInfo;\n }\n } finally {\n verifier._reset();\n }\n}\n/**\r\n * Updates the user's phone number.\r\n *\r\n * @remarks\r\n * This method does not work in a Node.js environment or on any {@link User} signed in by\r\n * {@link Auth} instances created with a {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```\r\n * // 'recaptcha-container' is the ID of an element in the DOM.\r\n * const applicationVerifier = new RecaptchaVerifier('recaptcha-container');\r\n * const provider = new PhoneAuthProvider(auth);\r\n * const verificationId = await provider.verifyPhoneNumber('+16505550101', applicationVerifier);\r\n * // Obtain the verificationCode from the user.\r\n * const phoneCredential = PhoneAuthProvider.credential(verificationId, verificationCode);\r\n * await updatePhoneNumber(user, phoneCredential);\r\n * ```\r\n *\r\n * @param user - The user.\r\n * @param credential - A credential authenticating the new phone number.\r\n *\r\n * @public\r\n */\nasync function updatePhoneNumber(user, credential) {\n const userInternal = getModularInstance(user);\n if (_isFirebaseServerApp(userInternal.auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(userInternal.auth));\n }\n await _link$1(userInternal, credential);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Provider for generating an {@link PhoneAuthCredential}.\r\n *\r\n * @remarks\r\n * `PhoneAuthProvider` does not work in a Node.js environment.\r\n *\r\n * @example\r\n * ```javascript\r\n * // 'recaptcha-container' is the ID of an element in the DOM.\r\n * const applicationVerifier = new RecaptchaVerifier('recaptcha-container');\r\n * const provider = new PhoneAuthProvider(auth);\r\n * const verificationId = await provider.verifyPhoneNumber('+16505550101', applicationVerifier);\r\n * // Obtain the verificationCode from the user.\r\n * const phoneCredential = PhoneAuthProvider.credential(verificationId, verificationCode);\r\n * const userCredential = await signInWithCredential(auth, phoneCredential);\r\n * ```\r\n *\r\n * @public\r\n */\nlet PhoneAuthProvider = /*#__PURE__*/(() => {\n class PhoneAuthProvider {\n /**\r\n * @param auth - The Firebase {@link Auth} instance in which sign-ins should occur.\r\n *\r\n */\n constructor(auth) {\n /** Always set to {@link ProviderId}.PHONE. 
*/\n this.providerId = PhoneAuthProvider.PROVIDER_ID;\n this.auth = _castAuth(auth);\n }\n /**\r\n *\r\n * Starts a phone number authentication flow by sending a verification code to the given phone\r\n * number.\r\n *\r\n * @example\r\n * ```javascript\r\n * const provider = new PhoneAuthProvider(auth);\r\n * const verificationId = await provider.verifyPhoneNumber(phoneNumber, applicationVerifier);\r\n * // Obtain verificationCode from the user.\r\n * const authCredential = PhoneAuthProvider.credential(verificationId, verificationCode);\r\n * const userCredential = await signInWithCredential(auth, authCredential);\r\n * ```\r\n *\r\n * @example\r\n * An alternative flow is provided using the `signInWithPhoneNumber` method.\r\n * ```javascript\r\n * const confirmationResult = signInWithPhoneNumber(auth, phoneNumber, applicationVerifier);\r\n * // Obtain verificationCode from the user.\r\n * const userCredential = confirmationResult.confirm(verificationCode);\r\n * ```\r\n *\r\n * @param phoneInfoOptions - The user's {@link PhoneInfoOptions}. The phone number should be in\r\n * E.164 format (e.g. +16505550101).\r\n * @param applicationVerifier - For abuse prevention, this method also requires a\r\n * {@link ApplicationVerifier}. This SDK includes a reCAPTCHA-based implementation,\r\n * {@link RecaptchaVerifier}.\r\n *\r\n * @returns A Promise for a verification ID that can be passed to\r\n * {@link PhoneAuthProvider.credential} to identify this flow..\r\n */\n verifyPhoneNumber(phoneOptions, applicationVerifier) {\n return _verifyPhoneNumber(this.auth, phoneOptions, getModularInstance(applicationVerifier));\n }\n /**\r\n * Creates a phone auth credential, given the verification ID from\r\n * {@link PhoneAuthProvider.verifyPhoneNumber} and the code that was sent to the user's\r\n * mobile device.\r\n *\r\n * @example\r\n * ```javascript\r\n * const provider = new PhoneAuthProvider(auth);\r\n * const verificationId = provider.verifyPhoneNumber(phoneNumber, applicationVerifier);\r\n * // Obtain verificationCode from the user.\r\n * const authCredential = PhoneAuthProvider.credential(verificationId, verificationCode);\r\n * const userCredential = signInWithCredential(auth, authCredential);\r\n * ```\r\n *\r\n * @example\r\n * An alternative flow is provided using the `signInWithPhoneNumber` method.\r\n * ```javascript\r\n * const confirmationResult = await signInWithPhoneNumber(auth, phoneNumber, applicationVerifier);\r\n * // Obtain verificationCode from the user.\r\n * const userCredential = await confirmationResult.confirm(verificationCode);\r\n * ```\r\n *\r\n * @param verificationId - The verification ID returned from {@link PhoneAuthProvider.verifyPhoneNumber}.\r\n * @param verificationCode - The verification code sent to the user's mobile device.\r\n *\r\n * @returns The auth provider credential.\r\n */\n static credential(verificationId, verificationCode) {\n return PhoneAuthCredential._fromVerification(verificationId, verificationCode);\n }\n /**\r\n * Generates an {@link AuthCredential} from a {@link UserCredential}.\r\n * @param userCredential - The user credential.\r\n */\n static credentialFromResult(userCredential) {\n const credential = userCredential;\n return PhoneAuthProvider.credentialFromTaggedObject(credential);\n }\n /**\r\n * Returns an {@link AuthCredential} when passed an error.\r\n *\r\n * @remarks\r\n *\r\n * This method works for errors like\r\n * `auth/account-exists-with-different-credentials`. 
This is useful for\r\n * recovering when attempting to set a user's phone number but the number\r\n * in question is already tied to another account. For example, the following\r\n * code tries to update the current user's phone number, and if that\r\n * fails, links the user with the account associated with that number:\r\n *\r\n * ```js\r\n * const provider = new PhoneAuthProvider(auth);\r\n * const verificationId = await provider.verifyPhoneNumber(number, verifier);\r\n * try {\r\n * const code = ''; // Prompt the user for the verification code\r\n * await updatePhoneNumber(\r\n * auth.currentUser,\r\n * PhoneAuthProvider.credential(verificationId, code));\r\n * } catch (e) {\r\n * if ((e as FirebaseError)?.code === 'auth/account-exists-with-different-credential') {\r\n * const cred = PhoneAuthProvider.credentialFromError(e);\r\n * await linkWithCredential(auth.currentUser, cred);\r\n * }\r\n * }\r\n *\r\n * // At this point, auth.currentUser.phoneNumber === number.\r\n * ```\r\n *\r\n * @param error - The error to generate a credential from.\r\n */\n static credentialFromError(error) {\n return PhoneAuthProvider.credentialFromTaggedObject(error.customData || {});\n }\n static credentialFromTaggedObject({\n _tokenResponse: tokenResponse\n }) {\n if (!tokenResponse) {\n return null;\n }\n const {\n phoneNumber,\n temporaryProof\n } = tokenResponse;\n if (phoneNumber && temporaryProof) {\n return PhoneAuthCredential._fromTokenResponse(phoneNumber, temporaryProof);\n }\n return null;\n }\n }\n /** Always set to {@link ProviderId}.PHONE. */\n\n /** Always set to {@link SignInMethod}.PHONE. */PhoneAuthProvider.PROVIDER_ID = \"phone\" /* ProviderId.PHONE */;\n\n PhoneAuthProvider.PHONE_SIGN_IN_METHOD = \"phone\" /* SignInMethod.PHONE */;\n\n /**\r\n * @license\r\n * Copyright 2021 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n /**\r\n * Chooses a popup/redirect resolver to use. This prefers the override (which\r\n * is directly passed in), and falls back to the property set on the auth\r\n * object. 
If neither are available, this function errors w/ an argument error.\r\n */\n return PhoneAuthProvider;\n})();\nfunction _withDefaultResolver(auth, resolverOverride) {\n if (resolverOverride) {\n return _getInstance(resolverOverride);\n }\n _assert(auth._popupRedirectResolver, auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n return auth._popupRedirectResolver;\n}\n\n/**\r\n * @license\r\n * Copyright 2019 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass IdpCredential extends AuthCredential {\n constructor(params) {\n super(\"custom\" /* ProviderId.CUSTOM */, \"custom\" /* ProviderId.CUSTOM */);\n this.params = params;\n }\n _getIdTokenResponse(auth) {\n return signInWithIdp(auth, this._buildIdpRequest());\n }\n _linkToIdToken(auth, idToken) {\n return signInWithIdp(auth, this._buildIdpRequest(idToken));\n }\n _getReauthenticationResolver(auth) {\n return signInWithIdp(auth, this._buildIdpRequest());\n }\n _buildIdpRequest(idToken) {\n const request = {\n requestUri: this.params.requestUri,\n sessionId: this.params.sessionId,\n postBody: this.params.postBody,\n tenantId: this.params.tenantId,\n pendingToken: this.params.pendingToken,\n returnSecureToken: true,\n returnIdpCredential: true\n };\n if (idToken) {\n request.idToken = idToken;\n }\n return request;\n }\n}\nfunction _signIn(params) {\n return _signInWithCredential(params.auth, new IdpCredential(params), params.bypassAuthState);\n}\nfunction _reauth(params) {\n const {\n auth,\n user\n } = params;\n _assert(user, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return _reauthenticate(user, new IdpCredential(params), params.bypassAuthState);\n}\nasync function _link(params) {\n const {\n auth,\n user\n } = params;\n _assert(user, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return _link$1(user, new IdpCredential(params), params.bypassAuthState);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Popup event manager. Handles the popup's entire lifecycle; listens to auth\r\n * events\r\n */\nclass AbstractPopupRedirectOperation {\n constructor(auth, filter, resolver, user, bypassAuthState = false) {\n this.auth = auth;\n this.resolver = resolver;\n this.user = user;\n this.bypassAuthState = bypassAuthState;\n this.pendingPromise = null;\n this.eventManager = null;\n this.filter = Array.isArray(filter) ? 
filter : [filter];\n  }\n  execute() {\n    return new Promise(async (resolve, reject) => {\n      this.pendingPromise = {\n        resolve,\n        reject\n      };\n      try {\n        this.eventManager = await this.resolver._initialize(this.auth);\n        await this.onExecution();\n        this.eventManager.registerConsumer(this);\n      } catch (e) {\n        this.reject(e);\n      }\n    });\n  }\n  async onAuthEvent(event) {\n    const {\n      urlResponse,\n      sessionId,\n      postBody,\n      tenantId,\n      error,\n      type\n    } = event;\n    if (error) {\n      this.reject(error);\n      return;\n    }\n    const params = {\n      auth: this.auth,\n      requestUri: urlResponse,\n      sessionId: sessionId,\n      tenantId: tenantId || undefined,\n      postBody: postBody || undefined,\n      user: this.user,\n      bypassAuthState: this.bypassAuthState\n    };\n    try {\n      this.resolve(await this.getIdpTask(type)(params));\n    } catch (e) {\n      this.reject(e);\n    }\n  }\n  onError(error) {\n    this.reject(error);\n  }\n  getIdpTask(type) {\n    switch (type) {\n      case \"signInViaPopup\" /* AuthEventType.SIGN_IN_VIA_POPUP */:\n      case \"signInViaRedirect\" /* AuthEventType.SIGN_IN_VIA_REDIRECT */:\n        return _signIn;\n      case \"linkViaPopup\" /* AuthEventType.LINK_VIA_POPUP */:\n      case \"linkViaRedirect\" /* AuthEventType.LINK_VIA_REDIRECT */:\n        return _link;\n      case \"reauthViaPopup\" /* AuthEventType.REAUTH_VIA_POPUP */:\n      case \"reauthViaRedirect\" /* AuthEventType.REAUTH_VIA_REDIRECT */:\n        return _reauth;\n      default:\n        _fail(this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n    }\n  }\n  resolve(cred) {\n    debugAssert(this.pendingPromise, 'Pending promise was never set');\n    this.pendingPromise.resolve(cred);\n    this.unregisterAndCleanUp();\n  }\n  reject(error) {\n    debugAssert(this.pendingPromise, 'Pending promise was never set');\n    this.pendingPromise.reject(error);\n    this.unregisterAndCleanUp();\n  }\n  unregisterAndCleanUp() {\n    if (this.eventManager) {\n      this.eventManager.unregisterConsumer(this);\n    }\n    this.pendingPromise = null;\n    this.cleanUp();\n  }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst _POLL_WINDOW_CLOSE_TIMEOUT = new Delay(2000, 10000);\n/**\r\n * Authenticates a Firebase client using a popup-based OAuth authentication flow.\r\n *\r\n * @remarks\r\n * If it succeeds, returns the signed in user along with the provider's credential. 
If sign in was\r\n * unsuccessful, returns an error object containing additional information about the error.\r\n *\r\n * This method does not work in a Node.js environment or with {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a popup.\r\n * const provider = new FacebookAuthProvider();\r\n * const result = await signInWithPopup(auth, provider);\r\n *\r\n * // The signed-in user info.\r\n * const user = result.user;\r\n * // This gives you a Facebook Access Token.\r\n * const credential = provider.credentialFromResult(auth, result);\r\n * const token = credential.accessToken;\r\n * ```\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param provider - The provider to authenticate. The provider has to be an {@link OAuthProvider}.\r\n * Non-OAuth providers like {@link EmailAuthProvider} will throw an error.\r\n * @param resolver - An instance of {@link PopupRedirectResolver}, optional\r\n * if already supplied to {@link initializeAuth} or provided by {@link getAuth}.\r\n *\r\n * @public\r\n */\nasync function signInWithPopup(auth, provider, resolver) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_createError(auth, \"operation-not-supported-in-this-environment\" /* AuthErrorCode.OPERATION_NOT_SUPPORTED */));\n }\n const authInternal = _castAuth(auth);\n _assertInstanceOf(auth, provider, FederatedAuthProvider);\n const resolverInternal = _withDefaultResolver(authInternal, resolver);\n const action = new PopupOperation(authInternal, \"signInViaPopup\" /* AuthEventType.SIGN_IN_VIA_POPUP */, provider, resolverInternal);\n return action.executeNotNull();\n}\n/**\r\n * Reauthenticates the current user with the specified {@link OAuthProvider} using a pop-up based\r\n * OAuth flow.\r\n *\r\n * @remarks\r\n * If the reauthentication is successful, the returned result will contain the user and the\r\n * provider's credential.\r\n *\r\n * This method does not work in a Node.js environment or on any {@link User} signed in by\r\n * {@link Auth} instances created with a {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a popup.\r\n * const provider = new FacebookAuthProvider();\r\n * const result = await signInWithPopup(auth, provider);\r\n * // Reauthenticate using a popup.\r\n * await reauthenticateWithPopup(result.user, provider);\r\n * ```\r\n *\r\n * @param user - The user.\r\n * @param provider - The provider to authenticate. 
The provider has to be an {@link OAuthProvider}.\r\n * Non-OAuth providers like {@link EmailAuthProvider} will throw an error.\r\n * @param resolver - An instance of {@link PopupRedirectResolver}, optional\r\n * if already supplied to {@link initializeAuth} or provided by {@link getAuth}.\r\n *\r\n * @public\r\n */\nasync function reauthenticateWithPopup(user, provider, resolver) {\n const userInternal = getModularInstance(user);\n if (_isFirebaseServerApp(userInternal.auth.app)) {\n return Promise.reject(_createError(userInternal.auth, \"operation-not-supported-in-this-environment\" /* AuthErrorCode.OPERATION_NOT_SUPPORTED */));\n }\n _assertInstanceOf(userInternal.auth, provider, FederatedAuthProvider);\n const resolverInternal = _withDefaultResolver(userInternal.auth, resolver);\n const action = new PopupOperation(userInternal.auth, \"reauthViaPopup\" /* AuthEventType.REAUTH_VIA_POPUP */, provider, resolverInternal, userInternal);\n return action.executeNotNull();\n}\n/**\r\n * Links the authenticated provider to the user account using a pop-up based OAuth flow.\r\n *\r\n * @remarks\r\n * If the linking is successful, the returned result will contain the user and the provider's credential.\r\n *\r\n * This method does not work in a Node.js environment.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using some other provider.\r\n * const result = await signInWithEmailAndPassword(auth, email, password);\r\n * // Link using a popup.\r\n * const provider = new FacebookAuthProvider();\r\n * await linkWithPopup(result.user, provider);\r\n * ```\r\n *\r\n * @param user - The user.\r\n * @param provider - The provider to authenticate. The provider has to be an {@link OAuthProvider}.\r\n * Non-OAuth providers like {@link EmailAuthProvider} will throw an error.\r\n * @param resolver - An instance of {@link PopupRedirectResolver}, optional\r\n * if already supplied to {@link initializeAuth} or provided by {@link getAuth}.\r\n *\r\n * @public\r\n */\nasync function linkWithPopup(user, provider, resolver) {\n const userInternal = getModularInstance(user);\n _assertInstanceOf(userInternal.auth, provider, FederatedAuthProvider);\n const resolverInternal = _withDefaultResolver(userInternal.auth, resolver);\n const action = new PopupOperation(userInternal.auth, \"linkViaPopup\" /* AuthEventType.LINK_VIA_POPUP */, provider, resolverInternal, userInternal);\n return action.executeNotNull();\n}\n/**\r\n * Popup event manager. 
Handles the popup's entire lifecycle; listens to auth\r\n * events\r\n *\r\n */\nlet PopupOperation = /*#__PURE__*/(() => {\n class PopupOperation extends AbstractPopupRedirectOperation {\n constructor(auth, filter, provider, resolver, user) {\n super(auth, filter, resolver, user);\n this.provider = provider;\n this.authWindow = null;\n this.pollId = null;\n if (PopupOperation.currentPopupAction) {\n PopupOperation.currentPopupAction.cancel();\n }\n PopupOperation.currentPopupAction = this;\n }\n async executeNotNull() {\n const result = await this.execute();\n _assert(result, this.auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return result;\n }\n async onExecution() {\n debugAssert(this.filter.length === 1, 'Popup operations only handle one event');\n const eventId = _generateEventId();\n this.authWindow = await this.resolver._openPopup(this.auth, this.provider, this.filter[0],\n // There's always one, see constructor\n eventId);\n this.authWindow.associatedEvent = eventId;\n // Check for web storage support and origin validation _after_ the popup is\n // loaded. These operations are slow (~1 second or so) Rather than\n // waiting on them before opening the window, optimistically open the popup\n // and check for storage support at the same time. If storage support is\n // not available, this will cause the whole thing to reject properly. It\n // will also close the popup, but since the promise has already rejected,\n // the popup closed by user poll will reject into the void.\n this.resolver._originValidation(this.auth).catch(e => {\n this.reject(e);\n });\n this.resolver._isIframeWebStorageSupported(this.auth, isSupported => {\n if (!isSupported) {\n this.reject(_createError(this.auth, \"web-storage-unsupported\" /* AuthErrorCode.WEB_STORAGE_UNSUPPORTED */));\n }\n });\n // Handle user closure. Notice this does *not* use await\n this.pollUserCancellation();\n }\n get eventId() {\n var _a;\n return ((_a = this.authWindow) === null || _a === void 0 ? void 0 : _a.associatedEvent) || null;\n }\n cancel() {\n this.reject(_createError(this.auth, \"cancelled-popup-request\" /* AuthErrorCode.EXPIRED_POPUP_REQUEST */));\n }\n cleanUp() {\n if (this.authWindow) {\n this.authWindow.close();\n }\n if (this.pollId) {\n window.clearTimeout(this.pollId);\n }\n this.authWindow = null;\n this.pollId = null;\n PopupOperation.currentPopupAction = null;\n }\n pollUserCancellation() {\n const poll = () => {\n var _a, _b;\n if ((_b = (_a = this.authWindow) === null || _a === void 0 ? void 0 : _a.window) === null || _b === void 0 ? void 0 : _b.closed) {\n // Make sure that there is sufficient time for whatever action to\n // complete. The window could have closed but the sign in network\n // call could still be in flight. This is specifically true for\n // Firefox or if the opener is in an iframe, in which case the oauth\n // helper closes the popup.\n this.pollId = window.setTimeout(() => {\n this.pollId = null;\n this.reject(_createError(this.auth, \"popup-closed-by-user\" /* AuthErrorCode.POPUP_CLOSED_BY_USER */));\n }, 8000 /* _Timeout.AUTH_EVENT */);\n return;\n }\n this.pollId = window.setTimeout(poll, _POLL_WINDOW_CLOSE_TIMEOUT.get());\n };\n poll();\n }\n }\n // Only one popup is ever shown at once. 
The lifecycle of the current popup\n // can be managed / cancelled by the constructor.\n PopupOperation.currentPopupAction = null;\n\n /**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n return PopupOperation;\n})();\nconst PENDING_REDIRECT_KEY = 'pendingRedirect';\n// We only get one redirect outcome for any one auth, so just store it\n// in here.\nconst redirectOutcomeMap = new Map();\nclass RedirectAction extends AbstractPopupRedirectOperation {\n constructor(auth, resolver, bypassAuthState = false) {\n super(auth, [\"signInViaRedirect\" /* AuthEventType.SIGN_IN_VIA_REDIRECT */, \"linkViaRedirect\" /* AuthEventType.LINK_VIA_REDIRECT */, \"reauthViaRedirect\" /* AuthEventType.REAUTH_VIA_REDIRECT */, \"unknown\" /* AuthEventType.UNKNOWN */], resolver, undefined, bypassAuthState);\n this.eventId = null;\n }\n /**\r\n * Override the execute function; if we already have a redirect result, then\r\n * just return it.\r\n */\n async execute() {\n let readyOutcome = redirectOutcomeMap.get(this.auth._key());\n if (!readyOutcome) {\n try {\n const hasPendingRedirect = await _getAndClearPendingRedirectStatus(this.resolver, this.auth);\n const result = hasPendingRedirect ? 
await super.execute() : null;\n readyOutcome = () => Promise.resolve(result);\n } catch (e) {\n readyOutcome = () => Promise.reject(e);\n }\n redirectOutcomeMap.set(this.auth._key(), readyOutcome);\n }\n // If we're not bypassing auth state, the ready outcome should be set to\n // null.\n if (!this.bypassAuthState) {\n redirectOutcomeMap.set(this.auth._key(), () => Promise.resolve(null));\n }\n return readyOutcome();\n }\n async onAuthEvent(event) {\n if (event.type === \"signInViaRedirect\" /* AuthEventType.SIGN_IN_VIA_REDIRECT */) {\n return super.onAuthEvent(event);\n } else if (event.type === \"unknown\" /* AuthEventType.UNKNOWN */) {\n // This is a sentinel value indicating there's no pending redirect\n this.resolve(null);\n return;\n }\n if (event.eventId) {\n const user = await this.auth._redirectUserForId(event.eventId);\n if (user) {\n this.user = user;\n return super.onAuthEvent(event);\n } else {\n this.resolve(null);\n }\n }\n }\n async onExecution() {}\n cleanUp() {}\n}\nasync function _getAndClearPendingRedirectStatus(resolver, auth) {\n const key = pendingRedirectKey(auth);\n const persistence = resolverPersistence(resolver);\n if (!(await persistence._isAvailable())) {\n return false;\n }\n const hasPendingRedirect = (await persistence._get(key)) === 'true';\n await persistence._remove(key);\n return hasPendingRedirect;\n}\nasync function _setPendingRedirectStatus(resolver, auth) {\n return resolverPersistence(resolver)._set(pendingRedirectKey(auth), 'true');\n}\nfunction _clearRedirectOutcomes() {\n redirectOutcomeMap.clear();\n}\nfunction _overrideRedirectResult(auth, result) {\n redirectOutcomeMap.set(auth._key(), result);\n}\nfunction resolverPersistence(resolver) {\n return _getInstance(resolver._redirectPersistence);\n}\nfunction pendingRedirectKey(auth) {\n return _persistenceKeyName(PENDING_REDIRECT_KEY, auth.config.apiKey, auth.name);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * Authenticates a Firebase client using a full-page redirect flow.\r\n *\r\n * @remarks\r\n * To handle the results and errors for this operation, refer to {@link getRedirectResult}.\r\n * Follow the {@link https://firebase.google.com/docs/auth/web/redirect-best-practices\r\n * | best practices} when using {@link signInWithRedirect}.\r\n *\r\n * This method does not work in a Node.js environment or with {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a redirect.\r\n * const provider = new FacebookAuthProvider();\r\n * // You can add additional scopes to the provider:\r\n * provider.addScope('user_birthday');\r\n * // Start a sign in process for an unauthenticated user.\r\n * await signInWithRedirect(auth, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the 
result\r\n * const result = await getRedirectResult(auth);\r\n * if (result) {\r\n * // This is the signed-in user\r\n * const user = result.user;\r\n * // This gives you a Facebook Access Token.\r\n * const credential = provider.credentialFromResult(auth, result);\r\n * const token = credential.accessToken;\r\n * }\r\n * // As this API can be used for sign-in, linking and reauthentication,\r\n * // check the operationType to determine what triggered this redirect\r\n * // operation.\r\n * const operationType = result.operationType;\r\n * ```\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param provider - The provider to authenticate. The provider has to be an {@link OAuthProvider}.\r\n * Non-OAuth providers like {@link EmailAuthProvider} will throw an error.\r\n * @param resolver - An instance of {@link PopupRedirectResolver}, optional\r\n * if already supplied to {@link initializeAuth} or provided by {@link getAuth}.\r\n *\r\n * @public\r\n */\nfunction signInWithRedirect(auth, provider, resolver) {\n return _signInWithRedirect(auth, provider, resolver);\n}\nasync function _signInWithRedirect(auth, provider, resolver) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const authInternal = _castAuth(auth);\n _assertInstanceOf(auth, provider, FederatedAuthProvider);\n // Wait for auth initialization to complete, this will process pending redirects and clear the\n // PENDING_REDIRECT_KEY in persistence. This should be completed before starting a new\n // redirect and creating a PENDING_REDIRECT_KEY entry.\n await authInternal._initializationPromise;\n const resolverInternal = _withDefaultResolver(authInternal, resolver);\n await _setPendingRedirectStatus(resolverInternal, authInternal);\n return resolverInternal._openRedirect(authInternal, provider, \"signInViaRedirect\" /* AuthEventType.SIGN_IN_VIA_REDIRECT */);\n}\n/**\r\n * Reauthenticates the current user with the specified {@link OAuthProvider} using a full-page redirect flow.\r\n * @remarks\r\n * To handle the results and errors for this operation, refer to {@link getRedirectResult}.\r\n * Follow the {@link https://firebase.google.com/docs/auth/web/redirect-best-practices\r\n * | best practices} when using {@link reauthenticateWithRedirect}.\r\n *\r\n * This method does not work in a Node.js environment or with {@link Auth} instances\r\n * created with a {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a redirect.\r\n * const provider = new FacebookAuthProvider();\r\n * const result = await signInWithRedirect(auth, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * // Reauthenticate using a redirect.\r\n * await reauthenticateWithRedirect(result.user, provider);\r\n * // This will again trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * ```\r\n *\r\n * @param user - The user.\r\n * @param provider - The provider to authenticate. 
The provider has to be an {@link OAuthProvider}.\r\n * Non-OAuth providers like {@link EmailAuthProvider} will throw an error.\r\n * @param resolver - An instance of {@link PopupRedirectResolver}, optional\r\n * if already supplied to {@link initializeAuth} or provided by {@link getAuth}.\r\n *\r\n * @public\r\n */\nfunction reauthenticateWithRedirect(user, provider, resolver) {\n return _reauthenticateWithRedirect(user, provider, resolver);\n}\nasync function _reauthenticateWithRedirect(user, provider, resolver) {\n const userInternal = getModularInstance(user);\n _assertInstanceOf(userInternal.auth, provider, FederatedAuthProvider);\n if (_isFirebaseServerApp(userInternal.auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(userInternal.auth));\n }\n // Wait for auth initialization to complete, this will process pending redirects and clear the\n // PENDING_REDIRECT_KEY in persistence. This should be completed before starting a new\n // redirect and creating a PENDING_REDIRECT_KEY entry.\n await userInternal.auth._initializationPromise;\n // Allow the resolver to error before persisting the redirect user\n const resolverInternal = _withDefaultResolver(userInternal.auth, resolver);\n await _setPendingRedirectStatus(resolverInternal, userInternal.auth);\n const eventId = await prepareUserForRedirect(userInternal);\n return resolverInternal._openRedirect(userInternal.auth, provider, \"reauthViaRedirect\" /* AuthEventType.REAUTH_VIA_REDIRECT */, eventId);\n}\n/**\r\n * Links the {@link OAuthProvider} to the user account using a full-page redirect flow.\r\n * @remarks\r\n * To handle the results and errors for this operation, refer to {@link getRedirectResult}.\r\n * Follow the {@link https://firebase.google.com/docs/auth/web/redirect-best-practices\r\n * | best practices} when using {@link linkWithRedirect}.\r\n *\r\n * This method does not work in a Node.js environment or with {@link Auth} instances\r\n * created with a {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using some other provider.\r\n * const result = await signInWithEmailAndPassword(auth, email, password);\r\n * // Link using a redirect.\r\n * const provider = new FacebookAuthProvider();\r\n * await linkWithRedirect(result.user, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * ```\r\n *\r\n * @param user - The user.\r\n * @param provider - The provider to authenticate. The provider has to be an {@link OAuthProvider}.\r\n * Non-OAuth providers like {@link EmailAuthProvider} will throw an error.\r\n * @param resolver - An instance of {@link PopupRedirectResolver}, optional\r\n * if already supplied to {@link initializeAuth} or provided by {@link getAuth}.\r\n *\r\n * @public\r\n */\nfunction linkWithRedirect(user, provider, resolver) {\n return _linkWithRedirect(user, provider, resolver);\n}\nasync function _linkWithRedirect(user, provider, resolver) {\n const userInternal = getModularInstance(user);\n _assertInstanceOf(userInternal.auth, provider, FederatedAuthProvider);\n // Wait for auth initialization to complete, this will process pending redirects and clear the\n // PENDING_REDIRECT_KEY in persistence. 
This should be completed before starting a new\n // redirect and creating a PENDING_REDIRECT_KEY entry.\n await userInternal.auth._initializationPromise;\n // Allow the resolver to error before persisting the redirect user\n const resolverInternal = _withDefaultResolver(userInternal.auth, resolver);\n await _assertLinkedStatus(false, userInternal, provider.providerId);\n await _setPendingRedirectStatus(resolverInternal, userInternal.auth);\n const eventId = await prepareUserForRedirect(userInternal);\n return resolverInternal._openRedirect(userInternal.auth, provider, \"linkViaRedirect\" /* AuthEventType.LINK_VIA_REDIRECT */, eventId);\n}\n/**\r\n * Returns a {@link UserCredential} from the redirect-based sign-in flow.\r\n *\r\n * @remarks\r\n * If sign-in succeeded, returns the signed in user. If sign-in was unsuccessful, fails with an\r\n * error. If no redirect operation was called, returns `null`.\r\n *\r\n * This method does not work in a Node.js environment or with {@link Auth} instances created with a\r\n * {@link @firebase/app#FirebaseServerApp}.\r\n *\r\n * @example\r\n * ```javascript\r\n * // Sign in using a redirect.\r\n * const provider = new FacebookAuthProvider();\r\n * // You can add additional scopes to the provider:\r\n * provider.addScope('user_birthday');\r\n * // Start a sign in process for an unauthenticated user.\r\n * await signInWithRedirect(auth, provider);\r\n * // This will trigger a full page redirect away from your app\r\n *\r\n * // After returning from the redirect when your app initializes you can obtain the result\r\n * const result = await getRedirectResult(auth);\r\n * if (result) {\r\n * // This is the signed-in user\r\n * const user = result.user;\r\n * // This gives you a Facebook Access Token.\r\n * const credential = provider.credentialFromResult(auth, result);\r\n * const token = credential.accessToken;\r\n * }\r\n * // As this API can be used for sign-in, linking and reauthentication,\r\n * // check the operationType to determine what triggered this redirect\r\n * // operation.\r\n * const operationType = result.operationType;\r\n * ```\r\n *\r\n * @param auth - The {@link Auth} instance.\r\n * @param resolver - An instance of {@link PopupRedirectResolver}, optional\r\n * if already supplied to {@link initializeAuth} or provided by {@link getAuth}.\r\n *\r\n * @public\r\n */\nasync function getRedirectResult(auth, resolver) {\n await _castAuth(auth)._initializationPromise;\n return _getRedirectResult(auth, resolver, false);\n}\nasync function _getRedirectResult(auth, resolverExtern, bypassAuthState = false) {\n if (_isFirebaseServerApp(auth.app)) {\n return Promise.reject(_serverAppCurrentUserOperationNotSupportedError(auth));\n }\n const authInternal = _castAuth(auth);\n const resolver = _withDefaultResolver(authInternal, resolverExtern);\n const action = new RedirectAction(authInternal, resolver, bypassAuthState);\n const result = await action.execute();\n if (result && !bypassAuthState) {\n delete result.user._redirectEventId;\n await authInternal._persistUserIfCurrent(result.user);\n await authInternal._setRedirectUser(null, resolverExtern);\n }\n return result;\n}\nasync function prepareUserForRedirect(user) {\n const eventId = _generateEventId(`${user.uid}:::`);\n user._redirectEventId = eventId;\n await user.auth._setRedirectUser(user);\n await user.auth._persistUserIfCurrent(user);\n return eventId;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * 
you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n// The amount of time to store the UIDs of seen events; this is\n// set to 10 min by default\nconst EVENT_DUPLICATION_CACHE_DURATION_MS = 10 * 60 * 1000;\nclass AuthEventManager {\n constructor(auth) {\n this.auth = auth;\n this.cachedEventUids = new Set();\n this.consumers = new Set();\n this.queuedRedirectEvent = null;\n this.hasHandledPotentialRedirect = false;\n this.lastProcessedEventTime = Date.now();\n }\n registerConsumer(authEventConsumer) {\n this.consumers.add(authEventConsumer);\n if (this.queuedRedirectEvent && this.isEventForConsumer(this.queuedRedirectEvent, authEventConsumer)) {\n this.sendToConsumer(this.queuedRedirectEvent, authEventConsumer);\n this.saveEventToCache(this.queuedRedirectEvent);\n this.queuedRedirectEvent = null;\n }\n }\n unregisterConsumer(authEventConsumer) {\n this.consumers.delete(authEventConsumer);\n }\n onEvent(event) {\n // Check if the event has already been handled\n if (this.hasEventBeenHandled(event)) {\n return false;\n }\n let handled = false;\n this.consumers.forEach(consumer => {\n if (this.isEventForConsumer(event, consumer)) {\n handled = true;\n this.sendToConsumer(event, consumer);\n this.saveEventToCache(event);\n }\n });\n if (this.hasHandledPotentialRedirect || !isRedirectEvent(event)) {\n // If we've already seen a redirect before, or this is a popup event,\n // bail now\n return handled;\n }\n this.hasHandledPotentialRedirect = true;\n // If the redirect wasn't handled, hang on to it\n if (!handled) {\n this.queuedRedirectEvent = event;\n handled = true;\n }\n return handled;\n }\n sendToConsumer(event, consumer) {\n var _a;\n if (event.error && !isNullRedirectEvent(event)) {\n const code = ((_a = event.error.code) === null || _a === void 0 ? void 0 : _a.split('auth/')[1]) || \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */;\n consumer.onError(_createError(this.auth, code));\n } else {\n consumer.onAuthEvent(event);\n }\n }\n isEventForConsumer(event, consumer) {\n const eventIdMatches = consumer.eventId === null || !!event.eventId && event.eventId === consumer.eventId;\n return consumer.filter.includes(event.type) && eventIdMatches;\n }\n hasEventBeenHandled(event) {\n if (Date.now() - this.lastProcessedEventTime >= EVENT_DUPLICATION_CACHE_DURATION_MS) {\n this.cachedEventUids.clear();\n }\n return this.cachedEventUids.has(eventUid(event));\n }\n saveEventToCache(event) {\n this.cachedEventUids.add(eventUid(event));\n this.lastProcessedEventTime = Date.now();\n }\n}\nfunction eventUid(e) {\n return [e.type, e.eventId, e.sessionId, e.tenantId].filter(v => v).join('-');\n}\nfunction isNullRedirectEvent({\n type,\n error\n}) {\n return type === \"unknown\" /* AuthEventType.UNKNOWN */ && (error === null || error === void 0 ? 
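  // An UNKNOWN event carrying the 'auth/no-auth-event' error is the sentinel the
  // iframe posts when no redirect is pending; getRedirectResult() then resolves
  // to null instead of surfacing an error.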
void 0 : error.code) === `auth/${\"no-auth-event\" /* AuthErrorCode.NO_AUTH_EVENT */}`;\n}\nfunction isRedirectEvent(event) {\n switch (event.type) {\n case \"signInViaRedirect\" /* AuthEventType.SIGN_IN_VIA_REDIRECT */:\n case \"linkViaRedirect\" /* AuthEventType.LINK_VIA_REDIRECT */:\n case \"reauthViaRedirect\" /* AuthEventType.REAUTH_VIA_REDIRECT */:\n return true;\n case \"unknown\" /* AuthEventType.UNKNOWN */:\n return isNullRedirectEvent(event);\n default:\n return false;\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nasync function _getProjectConfig(auth, request = {}) {\n return _performApiRequest(auth, \"GET\" /* HttpMethod.GET */, \"/v1/projects\" /* Endpoint.GET_PROJECT_CONFIG */, request);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst IP_ADDRESS_REGEX = /^\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}$/;\nconst HTTP_REGEX = /^https?/;\nasync function _validateOrigin(auth) {\n // Skip origin validation if we are in an emulated environment\n if (auth.config.emulator) {\n return;\n }\n const {\n authorizedDomains\n } = await _getProjectConfig(auth);\n for (const domain of authorizedDomains) {\n try {\n if (matchDomain(domain)) {\n return;\n }\n } catch (_a) {\n // Do nothing if there's a URL error; just continue searching\n }\n }\n // In the old SDK, this error also provides helpful messages.\n _fail(auth, \"unauthorized-domain\" /* AuthErrorCode.INVALID_ORIGIN */);\n}\nfunction matchDomain(expected) {\n const currentUrl = _getCurrentUrl();\n const {\n protocol,\n hostname\n } = new URL(currentUrl);\n if (expected.startsWith('chrome-extension://')) {\n const ceUrl = new URL(expected);\n if (ceUrl.hostname === '' && hostname === '') {\n // For some reason we're not parsing chrome URLs properly\n return protocol === 'chrome-extension:' && expected.replace('chrome-extension://', '') === currentUrl.replace('chrome-extension://', '');\n }\n return protocol === 'chrome-extension:' && ceUrl.hostname === hostname;\n }\n if (!HTTP_REGEX.test(protocol)) {\n return false;\n }\n if (IP_ADDRESS_REGEX.test(expected)) {\n // The domain has to be exactly equal to the pattern, as an IP domain will\n // only contain the IP, no extra character.\n return hostname === expected;\n }\n // Dots in pattern should be escaped.\n const escapedDomainPattern = expected.replace(/\\./g, '\\\\.');\n // Non 
ip address domains.\n // domain.com = *.domain.com OR domain.com\n const re = new RegExp('^(.+\\\\.' + escapedDomainPattern + '|' + escapedDomainPattern + ')$', 'i');\n return re.test(hostname);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC.\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst NETWORK_TIMEOUT = new Delay(30000, 60000);\n/**\r\n * Reset unlaoded GApi modules. If gapi.load fails due to a network error,\r\n * it will stop working after a retrial. This is a hack to fix this issue.\r\n */\nfunction resetUnloadedGapiModules() {\n // Clear last failed gapi.load state to force next gapi.load to first\n // load the failed gapi.iframes module.\n // Get gapix.beacon context.\n const beacon = _window().___jsl;\n // Get current hint.\n if (beacon === null || beacon === void 0 ? void 0 : beacon.H) {\n // Get gapi hint.\n for (const hint of Object.keys(beacon.H)) {\n // Requested modules.\n beacon.H[hint].r = beacon.H[hint].r || [];\n // Loaded modules.\n beacon.H[hint].L = beacon.H[hint].L || [];\n // Set requested modules to a copy of the loaded modules.\n beacon.H[hint].r = [...beacon.H[hint].L];\n // Clear pending callbacks.\n if (beacon.CP) {\n for (let i = 0; i < beacon.CP.length; i++) {\n // Remove all failed pending callbacks.\n beacon.CP[i] = null;\n }\n }\n }\n }\n}\nfunction loadGapi(auth) {\n return new Promise((resolve, reject) => {\n var _a, _b, _c;\n // Function to run when gapi.load is ready.\n function loadGapiIframe() {\n // The developer may have tried to previously run gapi.load and failed.\n // Run this to fix that.\n resetUnloadedGapiModules();\n gapi.load('gapi.iframes', {\n callback: () => {\n resolve(gapi.iframes.getContext());\n },\n ontimeout: () => {\n // The above reset may be sufficient, but having this reset after\n // failure ensures that if the developer calls gapi.load after the\n // connection is re-established and before another attempt to embed\n // the iframe, it would work and would not be broken because of our\n // failed attempt.\n // Timeout when gapi.iframes.Iframe not loaded.\n resetUnloadedGapiModules();\n reject(_createError(auth, \"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */));\n },\n timeout: NETWORK_TIMEOUT.get()\n });\n }\n if ((_b = (_a = _window().gapi) === null || _a === void 0 ? void 0 : _a.iframes) === null || _b === void 0 ? void 0 : _b.Iframe) {\n // If gapi.iframes.Iframe available, resolve.\n resolve(gapi.iframes.getContext());\n } else if (!!((_c = _window().gapi) === null || _c === void 0 ? void 0 : _c.load)) {\n // Gapi loader ready, load gapi.iframes.\n loadGapiIframe();\n } else {\n // Create a new iframe callback when this is called so as not to overwrite\n // any previous defined callback. This happens if this method is called\n // multiple times in parallel and could result in the later callback\n // overwriting the previous one. 
This would end up with a iframe\n // timeout.\n const cbName = _generateCallbackName('iframefcb');\n // GApi loader not available, dynamically load platform.js.\n _window()[cbName] = () => {\n // GApi loader should be ready.\n if (!!gapi.load) {\n loadGapiIframe();\n } else {\n // Gapi loader failed, throw error.\n reject(_createError(auth, \"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */));\n }\n };\n // Load GApi loader.\n return _loadJS(`${_gapiScriptUrl()}?onload=${cbName}`).catch(e => reject(e));\n }\n }).catch(error => {\n // Reset cached promise to allow for retrial.\n cachedGApiLoader = null;\n throw error;\n });\n}\nlet cachedGApiLoader = null;\nfunction _loadGapi(auth) {\n cachedGApiLoader = cachedGApiLoader || loadGapi(auth);\n return cachedGApiLoader;\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC.\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst PING_TIMEOUT = new Delay(5000, 15000);\nconst IFRAME_PATH = '__/auth/iframe';\nconst EMULATED_IFRAME_PATH = 'emulator/auth/iframe';\nconst IFRAME_ATTRIBUTES = {\n style: {\n position: 'absolute',\n top: '-100px',\n width: '1px',\n height: '1px'\n },\n 'aria-hidden': 'true',\n tabindex: '-1'\n};\n// Map from apiHost to endpoint ID for passing into iframe. In current SDK, apiHost can be set to\n// anything (not from a list of endpoints with IDs as in legacy), so this is the closest we can get.\nconst EID_FROM_APIHOST = new Map([[\"identitytoolkit.googleapis.com\" /* DefaultConfig.API_HOST */, 'p'], ['staging-identitytoolkit.sandbox.googleapis.com', 's'], ['test-identitytoolkit.sandbox.googleapis.com', 't'] // test\n]);\nfunction getIframeUrl(auth) {\n const config = auth.config;\n _assert(config.authDomain, auth, \"auth-domain-config-required\" /* AuthErrorCode.MISSING_AUTH_DOMAIN */);\n const url = config.emulator ? 
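  // Point the hidden gapi iframe at the emulator when one is configured,
  // otherwise at the __/auth/iframe handler hosted on the authDomain.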
_emulatorUrl(config, EMULATED_IFRAME_PATH) : `https://${auth.config.authDomain}/${IFRAME_PATH}`;\n const params = {\n apiKey: config.apiKey,\n appName: auth.name,\n v: SDK_VERSION\n };\n const eid = EID_FROM_APIHOST.get(auth.config.apiHost);\n if (eid) {\n params.eid = eid;\n }\n const frameworks = auth._getFrameworks();\n if (frameworks.length) {\n params.fw = frameworks.join(',');\n }\n return `${url}?${querystring(params).slice(1)}`;\n}\nasync function _openIframe(auth) {\n const context = await _loadGapi(auth);\n const gapi = _window().gapi;\n _assert(gapi, auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n return context.open({\n where: document.body,\n url: getIframeUrl(auth),\n messageHandlersFilter: gapi.iframes.CROSS_ORIGIN_IFRAMES_FILTER,\n attributes: IFRAME_ATTRIBUTES,\n dontclear: true\n }, iframe => new Promise(async (resolve, reject) => {\n await iframe.restyle({\n // Prevent iframe from closing on mouse out.\n setHideOnLeave: false\n });\n const networkError = _createError(auth, \"network-request-failed\" /* AuthErrorCode.NETWORK_REQUEST_FAILED */);\n // Confirm iframe is correctly loaded.\n // To fallback on failure, set a timeout.\n const networkErrorTimer = _window().setTimeout(() => {\n reject(networkError);\n }, PING_TIMEOUT.get());\n // Clear timer and resolve pending iframe ready promise.\n function clearTimerAndResolve() {\n _window().clearTimeout(networkErrorTimer);\n resolve(iframe);\n }\n // This returns an IThenable. However the reject part does not call\n // when the iframe is not loaded.\n iframe.ping(clearTimerAndResolve).then(clearTimerAndResolve, () => {\n reject(networkError);\n });\n }));\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC.\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst BASE_POPUP_OPTIONS = {\n location: 'yes',\n resizable: 'yes',\n statusbar: 'yes',\n toolbar: 'no'\n};\nconst DEFAULT_WIDTH = 500;\nconst DEFAULT_HEIGHT = 600;\nconst TARGET_BLANK = '_blank';\nconst FIREFOX_EMPTY_URL = 'http://localhost';\nclass AuthPopup {\n constructor(window) {\n this.window = window;\n this.associatedEvent = null;\n }\n close() {\n if (this.window) {\n try {\n this.window.close();\n } catch (e) {}\n }\n }\n}\nfunction _open(auth, url, name, width = DEFAULT_WIDTH, height = DEFAULT_HEIGHT) {\n const top = Math.max((window.screen.availHeight - height) / 2, 0).toString();\n const left = Math.max((window.screen.availWidth - width) / 2, 0).toString();\n let target = '';\n const options = Object.assign(Object.assign({}, BASE_POPUP_OPTIONS), {\n width: width.toString(),\n height: height.toString(),\n top,\n left\n });\n // Chrome iOS 7 and 8 is returning an undefined popup win when target is\n // specified, even though the popup is not necessarily blocked.\n const ua = getUA().toLowerCase();\n if (name) {\n target = _isChromeIOS(ua) ? TARGET_BLANK : name;\n }\n if (_isFirefox(ua)) {\n // Firefox complains when invalid URLs are popped out. 
Hacky way to bypass.\n url = url || FIREFOX_EMPTY_URL;\n // Firefox disables by default scrolling on popup windows, which can create\n // issues when the user has many Google accounts, for instance.\n options.scrollbars = 'yes';\n }\n const optionsString = Object.entries(options).reduce((accum, [key, value]) => `${accum}${key}=${value},`, '');\n if (_isIOSStandalone(ua) && target !== '_self') {\n openAsNewWindowIOS(url || '', target);\n return new AuthPopup(null);\n }\n // about:blank getting sanitized causing browsers like IE/Edge to display\n // brief error message before redirecting to handler.\n const newWin = window.open(url || '', target, optionsString);\n _assert(newWin, auth, \"popup-blocked\" /* AuthErrorCode.POPUP_BLOCKED */);\n // Flaky on IE edge, encapsulate with a try and catch.\n try {\n newWin.focus();\n } catch (e) {}\n return new AuthPopup(newWin);\n}\nfunction openAsNewWindowIOS(url, target) {\n const el = document.createElement('a');\n el.href = url;\n el.target = target;\n const click = document.createEvent('MouseEvent');\n click.initMouseEvent('click', true, true, window, 1, 0, 0, 0, 0, false, false, false, false, 1, null);\n el.dispatchEvent(click);\n}\n\n/**\r\n * @license\r\n * Copyright 2021 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * URL for Authentication widget which will initiate the OAuth handshake\r\n *\r\n * @internal\r\n */\nconst WIDGET_PATH = '__/auth/handler';\n/**\r\n * URL for emulated environment\r\n *\r\n * @internal\r\n */\nconst EMULATOR_WIDGET_PATH = 'emulator/auth/handler';\n/**\r\n * Fragment name for the App Check token that gets passed to the widget\r\n *\r\n * @internal\r\n */\nconst FIREBASE_APP_CHECK_FRAGMENT_ID = encodeURIComponent('fac');\nasync function _getRedirectUrl(auth, provider, authType, redirectUrl, eventId, additionalParams) {\n _assert(auth.config.authDomain, auth, \"auth-domain-config-required\" /* AuthErrorCode.MISSING_AUTH_DOMAIN */);\n _assert(auth.config.apiKey, auth, \"invalid-api-key\" /* AuthErrorCode.INVALID_API_KEY */);\n const params = {\n apiKey: auth.config.apiKey,\n appName: auth.name,\n authType,\n redirectUrl,\n v: SDK_VERSION,\n eventId\n };\n if (provider instanceof FederatedAuthProvider) {\n provider.setDefaultLanguage(auth.languageCode);\n params.providerId = provider.providerId || '';\n if (!isEmpty(provider.getCustomParameters())) {\n params.customParameters = JSON.stringify(provider.getCustomParameters());\n }\n // TODO set additionalParams from the provider as well?\n for (const [key, value] of Object.entries(additionalParams || {})) {\n params[key] = value;\n }\n }\n if (provider instanceof BaseOAuthProvider) {\n const scopes = provider.getScopes().filter(scope => scope !== '');\n if (scopes.length > 0) {\n params.scopes = scopes.join(',');\n }\n }\n if (auth.tenantId) {\n params.tid = auth.tenantId;\n }\n // TODO: maybe set eid as endipointId\n // TODO: maybe set fw as Frameworks.join(\",\")\n const paramsDict = params;\n for (const 
key of Object.keys(paramsDict)) {\n if (paramsDict[key] === undefined) {\n delete paramsDict[key];\n }\n }\n // Sets the App Check token to pass to the widget\n const appCheckToken = await auth._getAppCheckToken();\n const appCheckTokenFragment = appCheckToken ? `#${FIREBASE_APP_CHECK_FRAGMENT_ID}=${encodeURIComponent(appCheckToken)}` : '';\n // Start at index 1 to skip the leading '&' in the query string\n return `${getHandlerBase(auth)}?${querystring(paramsDict).slice(1)}${appCheckTokenFragment}`;\n}\nfunction getHandlerBase({\n config\n}) {\n if (!config.emulator) {\n return `https://${config.authDomain}/${WIDGET_PATH}`;\n }\n return _emulatorUrl(config, EMULATOR_WIDGET_PATH);\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\n/**\r\n * The special web storage event\r\n *\r\n */\nconst WEB_STORAGE_SUPPORT_KEY = 'webStorageSupport';\nclass BrowserPopupRedirectResolver {\n constructor() {\n this.eventManagers = {};\n this.iframes = {};\n this.originValidationPromises = {};\n this._redirectPersistence = browserSessionPersistence;\n this._completeRedirectFn = _getRedirectResult;\n this._overrideRedirectResult = _overrideRedirectResult;\n }\n // Wrapping in async even though we don't await anywhere in order\n // to make sure errors are raised as promise rejections\n async _openPopup(auth, provider, authType, eventId) {\n var _a;\n debugAssert((_a = this.eventManagers[auth._key()]) === null || _a === void 0 ? void 0 : _a.manager, '_initialize() not called before _openPopup()');\n const url = await _getRedirectUrl(auth, provider, authType, _getCurrentUrl(), eventId);\n return _open(auth, url, _generateEventId());\n }\n async _openRedirect(auth, provider, authType, eventId) {\n await this._originValidation(auth);\n const url = await _getRedirectUrl(auth, provider, authType, _getCurrentUrl(), eventId);\n _setWindowLocation(url);\n return new Promise(() => {});\n }\n _initialize(auth) {\n const key = auth._key();\n if (this.eventManagers[key]) {\n const {\n manager,\n promise\n } = this.eventManagers[key];\n if (manager) {\n return Promise.resolve(manager);\n } else {\n debugAssert(promise, 'If manager is not set, promise should be');\n return promise;\n }\n }\n const promise = this.initAndGetManager(auth);\n this.eventManagers[key] = {\n promise\n };\n // If the promise is rejected, the key should be removed so that the\n // operation can be retried later.\n promise.catch(() => {\n delete this.eventManagers[key];\n });\n return promise;\n }\n async initAndGetManager(auth) {\n const iframe = await _openIframe(auth);\n const manager = new AuthEventManager(auth);\n iframe.register('authEvent', iframeEvent => {\n _assert(iframeEvent === null || iframeEvent === void 0 ? 
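    // Every message on the 'authEvent' channel must carry an authEvent payload;
    // it is handed to the AuthEventManager and the iframe gets an ACK/ERROR reply.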
void 0 : iframeEvent.authEvent, auth, \"invalid-auth-event\" /* AuthErrorCode.INVALID_AUTH_EVENT */);\n // TODO: Consider splitting redirect and popup events earlier on\n const handled = manager.onEvent(iframeEvent.authEvent);\n return {\n status: handled ? \"ACK\" /* GapiOutcome.ACK */ : \"ERROR\" /* GapiOutcome.ERROR */\n };\n }, gapi.iframes.CROSS_ORIGIN_IFRAMES_FILTER);\n this.eventManagers[auth._key()] = {\n manager\n };\n this.iframes[auth._key()] = iframe;\n return manager;\n }\n _isIframeWebStorageSupported(auth, cb) {\n const iframe = this.iframes[auth._key()];\n iframe.send(WEB_STORAGE_SUPPORT_KEY, {\n type: WEB_STORAGE_SUPPORT_KEY\n }, result => {\n var _a;\n const isSupported = (_a = result === null || result === void 0 ? void 0 : result[0]) === null || _a === void 0 ? void 0 : _a[WEB_STORAGE_SUPPORT_KEY];\n if (isSupported !== undefined) {\n cb(!!isSupported);\n }\n _fail(auth, \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n }, gapi.iframes.CROSS_ORIGIN_IFRAMES_FILTER);\n }\n _originValidation(auth) {\n const key = auth._key();\n if (!this.originValidationPromises[key]) {\n this.originValidationPromises[key] = _validateOrigin(auth);\n }\n return this.originValidationPromises[key];\n }\n get _shouldInitProactively() {\n // Mobile browsers and Safari need to optimistically initialize\n return _isMobileBrowser() || _isSafari() || _isIOS();\n }\n}\n/**\r\n * An implementation of {@link PopupRedirectResolver} suitable for browser\r\n * based applications.\r\n *\r\n * @remarks\r\n * This method does not work in a Node.js environment.\r\n *\r\n * @public\r\n */\nconst browserPopupRedirectResolver = BrowserPopupRedirectResolver;\nclass MultiFactorAssertionImpl {\n constructor(factorId) {\n this.factorId = factorId;\n }\n _process(auth, session, displayName) {\n switch (session.type) {\n case \"enroll\" /* MultiFactorSessionType.ENROLL */:\n return this._finalizeEnroll(auth, session.credential, displayName);\n case \"signin\" /* MultiFactorSessionType.SIGN_IN */:\n return this._finalizeSignIn(auth, session.credential);\n default:\n return debugFail('unexpected MultiFactorSessionType');\n }\n }\n}\n\n/**\r\n * {@inheritdoc PhoneMultiFactorAssertion}\r\n *\r\n * @public\r\n */\nclass PhoneMultiFactorAssertionImpl extends MultiFactorAssertionImpl {\n constructor(credential) {\n super(\"phone\" /* FactorId.PHONE */);\n this.credential = credential;\n }\n /** @internal */\n static _fromCredential(credential) {\n return new PhoneMultiFactorAssertionImpl(credential);\n }\n /** @internal */\n _finalizeEnroll(auth, idToken, displayName) {\n return finalizeEnrollPhoneMfa(auth, {\n idToken,\n displayName,\n phoneVerificationInfo: this.credential._makeVerificationRequest()\n });\n }\n /** @internal */\n _finalizeSignIn(auth, mfaPendingCredential) {\n return finalizeSignInPhoneMfa(auth, {\n mfaPendingCredential,\n phoneVerificationInfo: this.credential._makeVerificationRequest()\n });\n }\n}\n/**\r\n * Provider for generating a {@link PhoneMultiFactorAssertion}.\r\n *\r\n * @public\r\n */\nlet PhoneMultiFactorGenerator = /*#__PURE__*/(() => {\n class PhoneMultiFactorGenerator {\n constructor() {}\n /**\r\n * Provides a {@link PhoneMultiFactorAssertion} to confirm ownership of the phone second factor.\r\n *\r\n * @remarks\r\n * This method does not work in a Node.js environment.\r\n *\r\n * @param phoneAuthCredential - A credential provided by {@link PhoneAuthProvider.credential}.\r\n * @returns A {@link PhoneMultiFactorAssertion} which can be used with\r\n * {@link 
MultiFactorResolver.resolveSignIn}\r\n */\n static assertion(credential) {\n return PhoneMultiFactorAssertionImpl._fromCredential(credential);\n }\n }\n /**\r\n * The identifier of the phone second factor: `phone`.\r\n */\n PhoneMultiFactorGenerator.FACTOR_ID = 'phone';\n\n /**\r\n * Provider for generating a {@link TotpMultiFactorAssertion}.\r\n *\r\n * @public\r\n */\n return PhoneMultiFactorGenerator;\n})();\nlet TotpMultiFactorGenerator = /*#__PURE__*/(() => {\n class TotpMultiFactorGenerator {\n /**\r\n * Provides a {@link TotpMultiFactorAssertion} to confirm ownership of\r\n * the TOTP (time-based one-time password) second factor.\r\n * This assertion is used to complete enrollment in TOTP second factor.\r\n *\r\n * @param secret A {@link TotpSecret} containing the shared secret key and other TOTP parameters.\r\n * @param oneTimePassword One-time password from TOTP App.\r\n * @returns A {@link TotpMultiFactorAssertion} which can be used with\r\n * {@link MultiFactorUser.enroll}.\r\n */\n static assertionForEnrollment(secret, oneTimePassword) {\n return TotpMultiFactorAssertionImpl._fromSecret(secret, oneTimePassword);\n }\n /**\r\n * Provides a {@link TotpMultiFactorAssertion} to confirm ownership of the TOTP second factor.\r\n * This assertion is used to complete signIn with TOTP as the second factor.\r\n *\r\n * @param enrollmentId identifies the enrolled TOTP second factor.\r\n * @param oneTimePassword One-time password from TOTP App.\r\n * @returns A {@link TotpMultiFactorAssertion} which can be used with\r\n * {@link MultiFactorResolver.resolveSignIn}.\r\n */\n static assertionForSignIn(enrollmentId, oneTimePassword) {\n return TotpMultiFactorAssertionImpl._fromEnrollmentId(enrollmentId, oneTimePassword);\n }\n /**\r\n * Returns a promise to {@link TotpSecret} which contains the TOTP shared secret key and other parameters.\r\n * Creates a TOTP secret as part of enrolling a TOTP second factor.\r\n * Used for generating a QR code URL or inputting into a TOTP app.\r\n * This method uses the auth instance corresponding to the user in the multiFactorSession.\r\n *\r\n * @param session The {@link MultiFactorSession} that the user is part of.\r\n * @returns A promise to {@link TotpSecret}.\r\n */\n static async generateSecret(session) {\n var _a;\n const mfaSession = session;\n _assert(typeof ((_a = mfaSession.user) === null || _a === void 0 ? 
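      // Generating a TOTP secret is only possible for an enrollment session,
      // which carries the user (and therefore the Auth instance) it was created from.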
void 0 : _a.auth) !== 'undefined', \"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n const response = await startEnrollTotpMfa(mfaSession.user.auth, {\n idToken: mfaSession.credential,\n totpEnrollmentInfo: {}\n });\n return TotpSecret._fromStartTotpMfaEnrollmentResponse(response, mfaSession.user.auth);\n }\n }\n /**\r\n * The identifier of the TOTP second factor: `totp`.\r\n */\n TotpMultiFactorGenerator.FACTOR_ID = \"totp\" /* FactorId.TOTP */;\n return TotpMultiFactorGenerator;\n})();\nclass TotpMultiFactorAssertionImpl extends MultiFactorAssertionImpl {\n constructor(otp, enrollmentId, secret) {\n super(\"totp\" /* FactorId.TOTP */);\n this.otp = otp;\n this.enrollmentId = enrollmentId;\n this.secret = secret;\n }\n /** @internal */\n static _fromSecret(secret, otp) {\n return new TotpMultiFactorAssertionImpl(otp, undefined, secret);\n }\n /** @internal */\n static _fromEnrollmentId(enrollmentId, otp) {\n return new TotpMultiFactorAssertionImpl(otp, enrollmentId);\n }\n /** @internal */\n async _finalizeEnroll(auth, idToken, displayName) {\n _assert(typeof this.secret !== 'undefined', auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n return finalizeEnrollTotpMfa(auth, {\n idToken,\n displayName,\n totpVerificationInfo: this.secret._makeTotpVerificationInfo(this.otp)\n });\n }\n /** @internal */\n async _finalizeSignIn(auth, mfaPendingCredential) {\n _assert(this.enrollmentId !== undefined && this.otp !== undefined, auth, \"argument-error\" /* AuthErrorCode.ARGUMENT_ERROR */);\n const totpVerificationInfo = {\n verificationCode: this.otp\n };\n return finalizeSignInTotpMfa(auth, {\n mfaPendingCredential,\n mfaEnrollmentId: this.enrollmentId,\n totpVerificationInfo\n });\n }\n}\n/**\r\n * Provider for generating a {@link TotpMultiFactorAssertion}.\r\n *\r\n * Stores the shared secret key and other parameters to generate time-based OTPs.\r\n * Implements methods to retrieve the shared secret key and generate a QR code URL.\r\n * @public\r\n */\nclass TotpSecret {\n // The public members are declared outside the constructor so the docs can be generated.\n constructor(secretKey, hashingAlgorithm, codeLength, codeIntervalSeconds, enrollmentCompletionDeadline, sessionInfo, auth) {\n this.sessionInfo = sessionInfo;\n this.auth = auth;\n this.secretKey = secretKey;\n this.hashingAlgorithm = hashingAlgorithm;\n this.codeLength = codeLength;\n this.codeIntervalSeconds = codeIntervalSeconds;\n this.enrollmentCompletionDeadline = enrollmentCompletionDeadline;\n }\n /** @internal */\n static _fromStartTotpMfaEnrollmentResponse(response, auth) {\n return new TotpSecret(response.totpSessionInfo.sharedSecretKey, response.totpSessionInfo.hashingAlgorithm, response.totpSessionInfo.verificationCodeLength, response.totpSessionInfo.periodSec, new Date(response.totpSessionInfo.finalizeEnrollmentTime).toUTCString(), response.totpSessionInfo.sessionInfo, auth);\n }\n /** @internal */\n _makeTotpVerificationInfo(otp) {\n return {\n sessionInfo: this.sessionInfo,\n verificationCode: otp\n };\n }\n /**\r\n * Returns a QR code URL as described in\r\n * https://github.com/google/google-authenticator/wiki/Key-Uri-Format\r\n * This can be displayed to the user as a QR code to be scanned into a TOTP app like Google Authenticator.\r\n * If the optional parameters are unspecified, an accountName of and issuer of are used.\r\n *\r\n * @param accountName the name of the account/app along with a user identifier.\r\n * @param issuer issuer of the TOTP (likely the app name).\r\n * @returns A QR code URL 
string.\r\n */\n generateQrCodeUrl(accountName, issuer) {\n var _a;\n let useDefaults = false;\n if (_isEmptyString(accountName) || _isEmptyString(issuer)) {\n useDefaults = true;\n }\n if (useDefaults) {\n if (_isEmptyString(accountName)) {\n accountName = ((_a = this.auth.currentUser) === null || _a === void 0 ? void 0 : _a.email) || 'unknownuser';\n }\n if (_isEmptyString(issuer)) {\n issuer = this.auth.name;\n }\n }\n return `otpauth://totp/${issuer}:${accountName}?secret=${this.secretKey}&issuer=${issuer}&algorithm=${this.hashingAlgorithm}&digits=${this.codeLength}`;\n }\n}\n/** @internal */\nfunction _isEmptyString(input) {\n return typeof input === 'undefined' || (input === null || input === void 0 ? void 0 : input.length) === 0;\n}\nvar name = \"@firebase/auth\";\nvar version = \"1.7.4\";\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nclass AuthInterop {\n constructor(auth) {\n this.auth = auth;\n this.internalListeners = new Map();\n }\n getUid() {\n var _a;\n this.assertAuthConfigured();\n return ((_a = this.auth.currentUser) === null || _a === void 0 ? void 0 : _a.uid) || null;\n }\n async getToken(forceRefresh) {\n this.assertAuthConfigured();\n await this.auth._initializationPromise;\n if (!this.auth.currentUser) {\n return null;\n }\n const accessToken = await this.auth.currentUser.getIdToken(forceRefresh);\n return {\n accessToken\n };\n }\n addAuthTokenListener(listener) {\n this.assertAuthConfigured();\n if (this.internalListeners.has(listener)) {\n return;\n }\n const unsubscribe = this.auth.onIdTokenChanged(user => {\n listener((user === null || user === void 0 ? 
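      // Dependent Firebase SDKs receive the raw STS access token through this
      // listener, or null once the user signs out.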
void 0 : user.stsTokenManager.accessToken) || null);\n });\n this.internalListeners.set(listener, unsubscribe);\n this.updateProactiveRefresh();\n }\n removeAuthTokenListener(listener) {\n this.assertAuthConfigured();\n const unsubscribe = this.internalListeners.get(listener);\n if (!unsubscribe) {\n return;\n }\n this.internalListeners.delete(listener);\n unsubscribe();\n this.updateProactiveRefresh();\n }\n assertAuthConfigured() {\n _assert(this.auth._initializationPromise, \"dependent-sdk-initialized-before-auth\" /* AuthErrorCode.DEPENDENT_SDK_INIT_BEFORE_AUTH */);\n }\n updateProactiveRefresh() {\n if (this.internalListeners.size > 0) {\n this.auth._startProactiveRefresh();\n } else {\n this.auth._stopProactiveRefresh();\n }\n }\n}\n\n/**\r\n * @license\r\n * Copyright 2020 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nfunction getVersionForPlatform(clientPlatform) {\n switch (clientPlatform) {\n case \"Node\" /* ClientPlatform.NODE */:\n return 'node';\n case \"ReactNative\" /* ClientPlatform.REACT_NATIVE */:\n return 'rn';\n case \"Worker\" /* ClientPlatform.WORKER */:\n return 'webworker';\n case \"Cordova\" /* ClientPlatform.CORDOVA */:\n return 'cordova';\n case \"WebExtension\" /* ClientPlatform.WEB_EXTENSION */:\n return 'web-extension';\n default:\n return undefined;\n }\n}\n/** @internal */\nfunction registerAuth(clientPlatform) {\n _registerComponent(new Component(\"auth\" /* _ComponentName.AUTH */, (container, {\n options: deps\n }) => {\n const app = container.getProvider('app').getImmediate();\n const heartbeatServiceProvider = container.getProvider('heartbeat');\n const appCheckServiceProvider = container.getProvider('app-check-internal');\n const {\n apiKey,\n authDomain\n } = app.options;\n _assert(apiKey && !apiKey.includes(':'), \"invalid-api-key\" /* AuthErrorCode.INVALID_API_KEY */, {\n appName: app.name\n });\n const config = {\n apiKey,\n authDomain,\n clientPlatform,\n apiHost: \"identitytoolkit.googleapis.com\" /* DefaultConfig.API_HOST */,\n tokenApiHost: \"securetoken.googleapis.com\" /* DefaultConfig.TOKEN_API_HOST */,\n apiScheme: \"https\" /* DefaultConfig.API_SCHEME */,\n sdkClientVersion: _getClientVersion(clientPlatform)\n };\n const authInstance = new AuthImpl(app, heartbeatServiceProvider, appCheckServiceProvider, config);\n _initializeAuthInstance(authInstance, deps);\n return authInstance;\n }, \"PUBLIC\" /* ComponentType.PUBLIC */)\n /**\r\n * Auth can only be initialized by explicitly calling getAuth() or initializeAuth()\r\n * For why we do this, See go/firebase-next-auth-init\r\n */.setInstantiationMode(\"EXPLICIT\" /* InstantiationMode.EXPLICIT */)\n /**\r\n * Because all firebase products that depend on auth depend on auth-internal directly,\r\n * we need to initialize auth-internal after auth is initialized to make it available to other firebase products.\r\n */.setInstanceCreatedCallback((container, _instanceIdentifier, _instance) => {\n const authInternalProvider = 
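    // Eagerly initialize the auth-internal component as soon as the public Auth
    // instance exists, so products that depend on it can obtain tokens.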
container.getProvider(\"auth-internal\" /* _ComponentName.AUTH_INTERNAL */);\n authInternalProvider.initialize();\n }));\n _registerComponent(new Component(\"auth-internal\" /* _ComponentName.AUTH_INTERNAL */, container => {\n const auth = _castAuth(container.getProvider(\"auth\" /* _ComponentName.AUTH */).getImmediate());\n return (auth => new AuthInterop(auth))(auth);\n }, \"PRIVATE\" /* ComponentType.PRIVATE */).setInstantiationMode(\"EXPLICIT\" /* InstantiationMode.EXPLICIT */));\n registerVersion(name, version, getVersionForPlatform(clientPlatform));\n // BUILD_TARGET will be replaced by values like esm5, esm2017, cjs5, etc during the compilation\n registerVersion(name, version, 'esm2017');\n}\n\n/**\r\n * @license\r\n * Copyright 2021 Google LLC\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\nconst DEFAULT_ID_TOKEN_MAX_AGE = 5 * 60;\nconst authIdTokenMaxAge = getExperimentalSetting('authIdTokenMaxAge') || DEFAULT_ID_TOKEN_MAX_AGE;\nlet lastPostedIdToken = null;\nconst mintCookieFactory = url => async user => {\n const idTokenResult = user && (await user.getIdTokenResult());\n const idTokenAge = idTokenResult && (new Date().getTime() - Date.parse(idTokenResult.issuedAtTime)) / 1000;\n if (idTokenAge && idTokenAge > authIdTokenMaxAge) {\n return;\n }\n // Specifically trip null => undefined when logged out, to delete any existing cookie\n const idToken = idTokenResult === null || idTokenResult === void 0 ? void 0 : idTokenResult.token;\n if (lastPostedIdToken === idToken) {\n return;\n }\n lastPostedIdToken = idToken;\n await fetch(url, {\n method: idToken ? 'POST' : 'DELETE',\n headers: idToken ? 
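    // POST carries the fresh ID token as a Bearer header so the sync endpoint can
    // mint its cookie; DELETE (sent on sign-out) carries no Authorization header
    // so any existing cookie can be cleared.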
{\n 'Authorization': `Bearer ${idToken}`\n } : {}\n });\n};\n/**\r\n * Returns the Auth instance associated with the provided {@link @firebase/app#FirebaseApp}.\r\n * If no instance exists, initializes an Auth instance with platform-specific default dependencies.\r\n *\r\n * @param app - The Firebase App.\r\n *\r\n * @public\r\n */\nfunction getAuth(app = getApp()) {\n const provider = _getProvider(app, 'auth');\n if (provider.isInitialized()) {\n return provider.getImmediate();\n }\n const auth = initializeAuth(app, {\n popupRedirectResolver: browserPopupRedirectResolver,\n persistence: [indexedDBLocalPersistence, browserLocalPersistence, browserSessionPersistence]\n });\n const authTokenSyncPath = getExperimentalSetting('authTokenSyncURL');\n // Only do the Cookie exchange in a secure context\n if (authTokenSyncPath && typeof isSecureContext === 'boolean' && isSecureContext) {\n // Don't allow urls (XSS possibility), only paths on the same domain\n const authTokenSyncUrl = new URL(authTokenSyncPath, location.origin);\n if (location.origin === authTokenSyncUrl.origin) {\n const mintCookie = mintCookieFactory(authTokenSyncUrl.toString());\n beforeAuthStateChanged(auth, mintCookie, () => mintCookie(auth.currentUser));\n onIdTokenChanged(auth, user => mintCookie(user));\n }\n }\n const authEmulatorHost = getDefaultEmulatorHost('auth');\n if (authEmulatorHost) {\n connectAuthEmulator(auth, `http://${authEmulatorHost}`);\n }\n return auth;\n}\nfunction getScriptParentElement() {\n var _a, _b;\n return (_b = (_a = document.getElementsByTagName('head')) === null || _a === void 0 ? void 0 : _a[0]) !== null && _b !== void 0 ? _b : document;\n}\n_setExternalJSProvider({\n loadJS(url) {\n // TODO: consider adding timeout support & cancellation\n return new Promise((resolve, reject) => {\n const el = document.createElement('script');\n el.setAttribute('src', url);\n el.onload = resolve;\n el.onerror = e => {\n const error = _createError(\"internal-error\" /* AuthErrorCode.INTERNAL_ERROR */);\n error.customData = e;\n reject(error);\n };\n el.type = 'text/javascript';\n el.charset = 'UTF-8';\n getScriptParentElement().appendChild(el);\n });\n },\n gapiScript: 'https://apis.google.com/js/api.js',\n recaptchaV2Script: 'https://www.google.com/recaptcha/api.js',\n recaptchaEnterpriseScript: 'https://www.google.com/recaptcha/enterprise.js?render='\n});\nregisterAuth(\"Browser\" /* ClientPlatform.BROWSER */);\nexport { TwitterAuthProvider as $, ActionCodeOperation as A, updateCurrentUser as B, signOut as C, revokeAccessToken as D, deleteUser as E, FactorId as F, debugErrorMap as G, prodErrorMap as H, AUTH_ERROR_CODES_MAP_DO_NOT_USE_INTERNALLY as I, initializeAuth as J, connectAuthEmulator as K, AuthCredential as L, EmailAuthCredential as M, OAuthCredential as N, OperationType as O, PhoneAuthProvider as P, PhoneAuthCredential as Q, RecaptchaVerifier as R, SignInMethod as S, TotpMultiFactorGenerator as T, inMemoryPersistence as U, EmailAuthProvider as V, FacebookAuthProvider as W, GoogleAuthProvider as X, GithubAuthProvider as Y, OAuthProvider as Z, SAMLAuthProvider as _, browserSessionPersistence as a, signInAnonymously as a0, signInWithCredential as a1, linkWithCredential as a2, reauthenticateWithCredential as a3, signInWithCustomToken as a4, sendPasswordResetEmail as a5, confirmPasswordReset as a6, applyActionCode as a7, checkActionCode as a8, verifyPasswordResetCode as a9, _isIOS7Or8 as aA, _createError as aB, _assert as aC, AuthEventManager as aD, _getInstance as aE, _persistenceKeyName as aF, 
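// The single-letter aliases below are the bundler's mangled re-export map; the
// wrapper module that follows maps them back to their public firebase/auth names.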
_getRedirectResult as aG, _overrideRedirectResult as aH, _clearRedirectOutcomes as aI, _castAuth as aJ, UserImpl as aK, AuthImpl as aL, _getClientVersion as aM, _generateEventId as aN, AuthPopup as aO, FetchProvider as aP, SAMLAuthCredential as aQ, createUserWithEmailAndPassword as aa, signInWithEmailAndPassword as ab, sendSignInLinkToEmail as ac, isSignInWithEmailLink as ad, signInWithEmailLink as ae, fetchSignInMethodsForEmail as af, sendEmailVerification as ag, verifyBeforeUpdateEmail as ah, ActionCodeURL as ai, parseActionCodeURL as aj, updateProfile as ak, updateEmail as al, updatePassword as am, getIdToken as an, getIdTokenResult as ao, unlink as ap, getAdditionalUserInfo as aq, reload as ar, getMultiFactorResolver as as, multiFactor as at, debugAssert as au, _isIOS as av, _isAndroid as aw, _fail as ax, _getRedirectUrl as ay, _getProjectConfig as az, browserLocalPersistence as b, signInWithPopup as c, linkWithPopup as d, reauthenticateWithPopup as e, signInWithRedirect as f, linkWithRedirect as g, reauthenticateWithRedirect as h, indexedDBLocalPersistence as i, getRedirectResult as j, browserPopupRedirectResolver as k, linkWithPhoneNumber as l, PhoneMultiFactorGenerator as m, TotpSecret as n, getAuth as o, ProviderId as p, setPersistence as q, reauthenticateWithPhoneNumber as r, signInWithPhoneNumber as s, initializeRecaptchaConfig as t, updatePhoneNumber as u, validatePassword as v, onIdTokenChanged as w, beforeAuthStateChanged as x, onAuthStateChanged as y, useDeviceLanguage as z };\n","export { A as ActionCodeOperation, ai as ActionCodeURL, L as AuthCredential, I as AuthErrorCodes, M as EmailAuthCredential, V as EmailAuthProvider, W as FacebookAuthProvider, F as FactorId, Y as GithubAuthProvider, X as GoogleAuthProvider, N as OAuthCredential, Z as OAuthProvider, O as OperationType, Q as PhoneAuthCredential, P as PhoneAuthProvider, m as PhoneMultiFactorGenerator, p as ProviderId, R as RecaptchaVerifier, _ as SAMLAuthProvider, S as SignInMethod, T as TotpMultiFactorGenerator, n as TotpSecret, $ as TwitterAuthProvider, a7 as applyActionCode, x as beforeAuthStateChanged, b as browserLocalPersistence, k as browserPopupRedirectResolver, a as browserSessionPersistence, a8 as checkActionCode, a6 as confirmPasswordReset, K as connectAuthEmulator, aa as createUserWithEmailAndPassword, G as debugErrorMap, E as deleteUser, af as fetchSignInMethodsForEmail, aq as getAdditionalUserInfo, o as getAuth, an as getIdToken, ao as getIdTokenResult, as as getMultiFactorResolver, j as getRedirectResult, U as inMemoryPersistence, i as indexedDBLocalPersistence, J as initializeAuth, t as initializeRecaptchaConfig, ad as isSignInWithEmailLink, a2 as linkWithCredential, l as linkWithPhoneNumber, d as linkWithPopup, g as linkWithRedirect, at as multiFactor, y as onAuthStateChanged, w as onIdTokenChanged, aj as parseActionCodeURL, H as prodErrorMap, a3 as reauthenticateWithCredential, r as reauthenticateWithPhoneNumber, e as reauthenticateWithPopup, h as reauthenticateWithRedirect, ar as reload, D as revokeAccessToken, ag as sendEmailVerification, a5 as sendPasswordResetEmail, ac as sendSignInLinkToEmail, q as setPersistence, a0 as signInAnonymously, a1 as signInWithCredential, a4 as signInWithCustomToken, ab as signInWithEmailAndPassword, ae as signInWithEmailLink, s as signInWithPhoneNumber, c as signInWithPopup, f as signInWithRedirect, C as signOut, ap as unlink, B as updateCurrentUser, al as updateEmail, am as updatePassword, u as updatePhoneNumber, ak as updateProfile, z as useDeviceLanguage, v as 
validatePassword, ah as verifyBeforeUpdateEmail, a9 as verifyPasswordResetCode } from './index-454a0f5f.js';\nimport '@firebase/app';\nimport '@firebase/util';\nimport '@firebase/logger';\nimport 'tslib';\nimport '@firebase/component';\n","export * from '@firebase/auth';\n","import { ɵgetAllInstancesOf as _getAllInstancesOf, ɵgetDefaultInstanceOf as _getDefaultInstanceOf, VERSION, ɵAngularFireSchedulers as _AngularFireSchedulers, ɵAppCheckInstances as _AppCheckInstances, ɵzoneWrap as _zoneWrap } from '@angular/fire';\nimport { timer, from } from 'rxjs';\nimport { concatMap, distinct } from 'rxjs/operators';\nimport * as i0 from '@angular/core';\nimport { InjectionToken, Optional, NgModule, makeEnvironmentProviders, NgZone, Injector } from '@angular/core';\nimport { FirebaseApp, FirebaseApps } from '@angular/fire/app';\nimport { registerVersion } from 'firebase/app';\nimport { authState as authState$1, user as user$1, idToken as idToken$1 } from 'rxfire/auth';\nimport { applyActionCode as applyActionCode$1, beforeAuthStateChanged as beforeAuthStateChanged$1, checkActionCode as checkActionCode$1, confirmPasswordReset as confirmPasswordReset$1, connectAuthEmulator as connectAuthEmulator$1, createUserWithEmailAndPassword as createUserWithEmailAndPassword$1, deleteUser as deleteUser$1, fetchSignInMethodsForEmail as fetchSignInMethodsForEmail$1, getAdditionalUserInfo as getAdditionalUserInfo$1, getAuth as getAuth$1, getIdToken as getIdToken$1, getIdTokenResult as getIdTokenResult$1, getMultiFactorResolver as getMultiFactorResolver$1, getRedirectResult as getRedirectResult$1, initializeAuth as initializeAuth$1, initializeRecaptchaConfig as initializeRecaptchaConfig$1, isSignInWithEmailLink as isSignInWithEmailLink$1, linkWithCredential as linkWithCredential$1, linkWithPhoneNumber as linkWithPhoneNumber$1, linkWithPopup as linkWithPopup$1, linkWithRedirect as linkWithRedirect$1, multiFactor as multiFactor$1, onAuthStateChanged as onAuthStateChanged$1, onIdTokenChanged as onIdTokenChanged$1, parseActionCodeURL as parseActionCodeURL$1, reauthenticateWithCredential as reauthenticateWithCredential$1, reauthenticateWithPhoneNumber as reauthenticateWithPhoneNumber$1, reauthenticateWithPopup as reauthenticateWithPopup$1, reauthenticateWithRedirect as reauthenticateWithRedirect$1, reload as reload$1, revokeAccessToken as revokeAccessToken$1, sendEmailVerification as sendEmailVerification$1, sendPasswordResetEmail as sendPasswordResetEmail$1, sendSignInLinkToEmail as sendSignInLinkToEmail$1, setPersistence as setPersistence$1, signInAnonymously as signInAnonymously$1, signInWithCredential as signInWithCredential$1, signInWithCustomToken as signInWithCustomToken$1, signInWithEmailAndPassword as signInWithEmailAndPassword$1, signInWithEmailLink as signInWithEmailLink$1, signInWithPhoneNumber as signInWithPhoneNumber$1, signInWithPopup as signInWithPopup$1, signInWithRedirect as signInWithRedirect$1, signOut as signOut$1, unlink as unlink$1, updateCurrentUser as updateCurrentUser$1, updateEmail as updateEmail$1, updatePassword as updatePassword$1, updatePhoneNumber as updatePhoneNumber$1, updateProfile as updateProfile$1, useDeviceLanguage as useDeviceLanguage$1, validatePassword as validatePassword$1, verifyBeforeUpdateEmail as verifyBeforeUpdateEmail$1, verifyPasswordResetCode as verifyPasswordResetCode$1 } from 'firebase/auth';\nexport * from 'firebase/auth';\nconst AUTH_PROVIDER_NAME = 'auth';\nclass Auth {\n constructor(auth) {\n return auth;\n }\n}\nclass AuthInstances {\n constructor() {\n return 
_getAllInstancesOf(AUTH_PROVIDER_NAME);\n }\n}\nconst authInstance$ = /*#__PURE__*/ /*#__PURE__*/timer(0, 300).pipe(/*#__PURE__*/concatMap(() => from(_getAllInstancesOf(AUTH_PROVIDER_NAME))), /*#__PURE__*/distinct());\nconst PROVIDED_AUTH_INSTANCES = /*#__PURE__*/new InjectionToken('angularfire2.auth-instances');\nfunction defaultAuthInstanceFactory(provided, defaultApp) {\n const defaultAuth = _getDefaultInstanceOf(AUTH_PROVIDER_NAME, provided, defaultApp);\n return defaultAuth && new Auth(defaultAuth);\n}\nfunction authInstanceFactory(fn) {\n return (zone, injector) => {\n const auth = zone.runOutsideAngular(() => fn(injector));\n return new Auth(auth);\n };\n}\nconst AUTH_INSTANCES_PROVIDER = {\n provide: AuthInstances,\n deps: [[/*#__PURE__*/new Optional(), PROVIDED_AUTH_INSTANCES]]\n};\nconst DEFAULT_AUTH_INSTANCE_PROVIDER = {\n provide: Auth,\n useFactory: defaultAuthInstanceFactory,\n deps: [[/*#__PURE__*/new Optional(), PROVIDED_AUTH_INSTANCES], FirebaseApp]\n};\nlet AuthModule = /*#__PURE__*/(() => {\n class AuthModule {\n constructor() {\n registerVersion('angularfire', VERSION.full, 'auth');\n }\n static ɵfac = function AuthModule_Factory(__ngFactoryType__) {\n return new (__ngFactoryType__ || AuthModule)();\n };\n static ɵmod = /* @__PURE__ */i0.ɵɵdefineNgModule({\n type: AuthModule\n });\n static ɵinj = /* @__PURE__ */i0.ɵɵdefineInjector({\n providers: [DEFAULT_AUTH_INSTANCE_PROVIDER, AUTH_INSTANCES_PROVIDER]\n });\n }\n return AuthModule;\n})();\n/*#__PURE__*/(() => {\n (typeof ngDevMode === \"undefined\" || ngDevMode) && void 0;\n})();\nfunction provideAuth(fn, ...deps) {\n registerVersion('angularfire', VERSION.full, 'auth');\n return makeEnvironmentProviders([DEFAULT_AUTH_INSTANCE_PROVIDER, AUTH_INSTANCES_PROVIDER, {\n provide: PROVIDED_AUTH_INSTANCES,\n useFactory: authInstanceFactory(fn),\n multi: true,\n deps: [NgZone, Injector, _AngularFireSchedulers, FirebaseApps, [new Optional(), _AppCheckInstances], ...deps]\n }]);\n}\n\n// DO NOT MODIFY, this file is autogenerated by tools/build.ts\nconst authState = /*#__PURE__*/_zoneWrap(authState$1, true);\nconst user = /*#__PURE__*/_zoneWrap(user$1, true);\nconst idToken = /*#__PURE__*/_zoneWrap(idToken$1, true);\n\n// DO NOT MODIFY, this file is autogenerated by tools/build.ts\nconst applyActionCode = /*#__PURE__*/_zoneWrap(applyActionCode$1, true);\nconst beforeAuthStateChanged = /*#__PURE__*/_zoneWrap(beforeAuthStateChanged$1, true);\nconst checkActionCode = /*#__PURE__*/_zoneWrap(checkActionCode$1, true);\nconst confirmPasswordReset = /*#__PURE__*/_zoneWrap(confirmPasswordReset$1, true);\nconst connectAuthEmulator = /*#__PURE__*/_zoneWrap(connectAuthEmulator$1, true);\nconst createUserWithEmailAndPassword = /*#__PURE__*/_zoneWrap(createUserWithEmailAndPassword$1, true);\nconst deleteUser = /*#__PURE__*/_zoneWrap(deleteUser$1, true);\nconst fetchSignInMethodsForEmail = /*#__PURE__*/_zoneWrap(fetchSignInMethodsForEmail$1, true);\nconst getAdditionalUserInfo = /*#__PURE__*/_zoneWrap(getAdditionalUserInfo$1, true);\nconst getAuth = /*#__PURE__*/_zoneWrap(getAuth$1, true);\nconst getIdToken = /*#__PURE__*/_zoneWrap(getIdToken$1, true);\nconst getIdTokenResult = /*#__PURE__*/_zoneWrap(getIdTokenResult$1, true);\nconst getMultiFactorResolver = /*#__PURE__*/_zoneWrap(getMultiFactorResolver$1, true);\nconst getRedirectResult = /*#__PURE__*/_zoneWrap(getRedirectResult$1, true);\nconst initializeAuth = /*#__PURE__*/_zoneWrap(initializeAuth$1, true);\nconst initializeRecaptchaConfig = /*#__PURE__*/_zoneWrap(initializeRecaptchaConfig$1, 
true);\nconst isSignInWithEmailLink = /*#__PURE__*/_zoneWrap(isSignInWithEmailLink$1, true);\nconst linkWithCredential = /*#__PURE__*/_zoneWrap(linkWithCredential$1, true);\nconst linkWithPhoneNumber = /*#__PURE__*/_zoneWrap(linkWithPhoneNumber$1, true);\nconst linkWithPopup = /*#__PURE__*/_zoneWrap(linkWithPopup$1, true);\nconst linkWithRedirect = /*#__PURE__*/_zoneWrap(linkWithRedirect$1, true);\nconst multiFactor = /*#__PURE__*/_zoneWrap(multiFactor$1, true);\nconst onAuthStateChanged = /*#__PURE__*/_zoneWrap(onAuthStateChanged$1, true);\nconst onIdTokenChanged = /*#__PURE__*/_zoneWrap(onIdTokenChanged$1, true);\nconst parseActionCodeURL = /*#__PURE__*/_zoneWrap(parseActionCodeURL$1, true);\nconst reauthenticateWithCredential = /*#__PURE__*/_zoneWrap(reauthenticateWithCredential$1, true);\nconst reauthenticateWithPhoneNumber = /*#__PURE__*/_zoneWrap(reauthenticateWithPhoneNumber$1, true);\nconst reauthenticateWithPopup = /*#__PURE__*/_zoneWrap(reauthenticateWithPopup$1, true);\nconst reauthenticateWithRedirect = /*#__PURE__*/_zoneWrap(reauthenticateWithRedirect$1, true);\nconst reload = /*#__PURE__*/_zoneWrap(reload$1, true);\nconst revokeAccessToken = /*#__PURE__*/_zoneWrap(revokeAccessToken$1, true);\nconst sendEmailVerification = /*#__PURE__*/_zoneWrap(sendEmailVerification$1, true);\nconst sendPasswordResetEmail = /*#__PURE__*/_zoneWrap(sendPasswordResetEmail$1, true);\nconst sendSignInLinkToEmail = /*#__PURE__*/_zoneWrap(sendSignInLinkToEmail$1, true);\nconst setPersistence = /*#__PURE__*/_zoneWrap(setPersistence$1, true);\nconst signInAnonymously = /*#__PURE__*/_zoneWrap(signInAnonymously$1, true);\nconst signInWithCredential = /*#__PURE__*/_zoneWrap(signInWithCredential$1, true);\nconst signInWithCustomToken = /*#__PURE__*/_zoneWrap(signInWithCustomToken$1, true);\nconst signInWithEmailAndPassword = /*#__PURE__*/_zoneWrap(signInWithEmailAndPassword$1, true);\nconst signInWithEmailLink = /*#__PURE__*/_zoneWrap(signInWithEmailLink$1, true);\nconst signInWithPhoneNumber = /*#__PURE__*/_zoneWrap(signInWithPhoneNumber$1, true);\nconst signInWithPopup = /*#__PURE__*/_zoneWrap(signInWithPopup$1, true);\nconst signInWithRedirect = /*#__PURE__*/_zoneWrap(signInWithRedirect$1, true);\nconst signOut = /*#__PURE__*/_zoneWrap(signOut$1, true);\nconst unlink = /*#__PURE__*/_zoneWrap(unlink$1, true);\nconst updateCurrentUser = /*#__PURE__*/_zoneWrap(updateCurrentUser$1, true);\nconst updateEmail = /*#__PURE__*/_zoneWrap(updateEmail$1, true);\nconst updatePassword = /*#__PURE__*/_zoneWrap(updatePassword$1, true);\nconst updatePhoneNumber = /*#__PURE__*/_zoneWrap(updatePhoneNumber$1, true);\nconst updateProfile = /*#__PURE__*/_zoneWrap(updateProfile$1, true);\nconst useDeviceLanguage = /*#__PURE__*/_zoneWrap(useDeviceLanguage$1, true);\nconst validatePassword = /*#__PURE__*/_zoneWrap(validatePassword$1, true);\nconst verifyBeforeUpdateEmail = /*#__PURE__*/_zoneWrap(verifyBeforeUpdateEmail$1, true);\nconst verifyPasswordResetCode = /*#__PURE__*/_zoneWrap(verifyPasswordResetCode$1, true);\n\n/**\n * Generated bundle index. 
Do not edit.\n */\n\nexport { Auth, AuthInstances, AuthModule, applyActionCode, authInstance$, authState, beforeAuthStateChanged, checkActionCode, confirmPasswordReset, connectAuthEmulator, createUserWithEmailAndPassword, deleteUser, fetchSignInMethodsForEmail, getAdditionalUserInfo, getAuth, getIdToken, getIdTokenResult, getMultiFactorResolver, getRedirectResult, idToken, initializeAuth, initializeRecaptchaConfig, isSignInWithEmailLink, linkWithCredential, linkWithPhoneNumber, linkWithPopup, linkWithRedirect, multiFactor, onAuthStateChanged, onIdTokenChanged, parseActionCodeURL, provideAuth, reauthenticateWithCredential, reauthenticateWithPhoneNumber, reauthenticateWithPopup, reauthenticateWithRedirect, reload, revokeAccessToken, sendEmailVerification, sendPasswordResetEmail, sendSignInLinkToEmail, setPersistence, signInAnonymously, signInWithCredential, signInWithCustomToken, signInWithEmailAndPassword, signInWithEmailLink, signInWithPhoneNumber, signInWithPopup, signInWithRedirect, signOut, unlink, updateCurrentUser, updateEmail, updatePassword, updatePhoneNumber, updateProfile, useDeviceLanguage, user, validatePassword, verifyBeforeUpdateEmail, verifyPasswordResetCode };\n","export * from '@firebase/firestore';\n","import { onSnapshot, refEqual, getCountFromServer } from 'firebase/firestore';\nimport { Observable, from, pipe } from 'rxjs';\nimport { map, scan, distinctUntilChanged, filter, startWith, pairwise } from 'rxjs/operators';\n\n/******************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\n\nvar __assign = function () {\n __assign = Object.assign || function __assign(t) {\n for (var s, i = 1, n = arguments.length; i < n; i++) {\n s = arguments[i];\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\n }\n return t;\n };\n return __assign.apply(this, arguments);\n};\nfunction __spreadArray(to, from, pack) {\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\n if (ar || !(i in from)) {\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\n ar[i] = from[i];\n }\n }\n return to.concat(ar || Array.prototype.slice.call(from));\n}\ntypeof SuppressedError === \"function\" ? 
SuppressedError : function (error, suppressed, message) {\n var e = new Error(message);\n return e.name = \"SuppressedError\", e.error = error, e.suppressed = suppressed, e;\n};\n\n/**\n * @license\n * Copyright 2018 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar DEFAULT_OPTIONS = {\n includeMetadataChanges: false\n};\nfunction fromRef(ref, options) {\n if (options === void 0) {\n options = DEFAULT_OPTIONS;\n }\n /* eslint-enable @typescript-eslint/no-explicit-any */\n return new Observable(function (subscriber) {\n var unsubscribe = onSnapshot(ref, options, {\n next: subscriber.next.bind(subscriber),\n error: subscriber.error.bind(subscriber),\n complete: subscriber.complete.bind(subscriber)\n });\n return {\n unsubscribe: unsubscribe\n };\n });\n}\n\n/**\n * @license\n * Copyright 2023 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nfunction doc(ref) {\n return fromRef(ref, {\n includeMetadataChanges: true\n });\n}\n/**\n * Returns a stream of a document, mapped to its data payload and optionally the document ID\n * @param query\n * @param options\n */\nfunction docData(ref, options) {\n if (options === void 0) {\n options = {};\n }\n return doc(ref).pipe(map(function (snap) {\n return snapToData(snap, options);\n }));\n}\nfunction snapToData(snapshot, options) {\n var _a;\n if (options === void 0) {\n options = {};\n }\n var data = snapshot.data(options);\n // match the behavior of the JS SDK when the snapshot doesn't exist\n // it's possible with data converters too that the user didn't return an object\n if (!snapshot.exists() || typeof data !== 'object' || data === null || !options.idField) {\n return data;\n }\n return __assign(__assign({}, data), (_a = {}, _a[options.idField] = snapshot.id, _a));\n}\n\n/**\n * @license\n * Copyright 2023 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nvar ALL_EVENTS = ['added', 'modified', 'removed'];\n/**\n * Create an operator that determines if a the 
stream of document changes\n * are specified by the event filter. If the document change type is not\n * in specified events array, it will not be emitted.\n */\nvar filterEvents = function (events) {\n return filter(function (changes) {\n var hasChange = false;\n for (var i = 0; i < changes.length; i++) {\n var change = changes[i];\n if (events && events.indexOf(change.type) >= 0) {\n hasChange = true;\n break;\n }\n }\n return hasChange;\n });\n};\n/**\n * Splice arguments on top of a sliced array, to break top-level ===\n * this is useful for change-detection\n */\nfunction sliceAndSplice(original, start, deleteCount) {\n var args = [];\n for (var _i = 3; _i < arguments.length; _i++) {\n args[_i - 3] = arguments[_i];\n }\n var returnArray = original.slice();\n returnArray.splice.apply(returnArray, __spreadArray([start, deleteCount], args, false));\n return returnArray;\n}\n/**\n * Creates a new sorted array from a new change.\n * @param combined\n * @param change\n */\nfunction processIndividualChange(combined, change) {\n switch (change.type) {\n case 'added':\n if (combined[change.newIndex] && refEqual(combined[change.newIndex].doc.ref, change.doc.ref)) ;else {\n return sliceAndSplice(combined, change.newIndex, 0, change);\n }\n break;\n case 'modified':\n if (combined[change.oldIndex] == null || refEqual(combined[change.oldIndex].doc.ref, change.doc.ref)) {\n // When an item changes position we first remove it\n // and then add it's new position\n if (change.oldIndex !== change.newIndex) {\n var copiedArray = combined.slice();\n copiedArray.splice(change.oldIndex, 1);\n copiedArray.splice(change.newIndex, 0, change);\n return copiedArray;\n } else {\n return sliceAndSplice(combined, change.newIndex, 1, change);\n }\n }\n break;\n case 'removed':\n if (combined[change.oldIndex] && refEqual(combined[change.oldIndex].doc.ref, change.doc.ref)) {\n return sliceAndSplice(combined, change.oldIndex, 1);\n }\n break;\n }\n return combined;\n}\n/**\n * Combines the total result set from the current set of changes from an incoming set\n * of changes.\n * @param current\n * @param changes\n * @param events\n */\nfunction processDocumentChanges(current, changes, events) {\n if (events === void 0) {\n events = ALL_EVENTS;\n }\n changes.forEach(function (change) {\n // skip unwanted change types\n if (events.indexOf(change.type) > -1) {\n current = processIndividualChange(current, change);\n }\n });\n return current;\n}\n/**\n * Create an operator that allows you to compare the current emission with\n * the prior, even on first emission (where prior is undefined).\n */\nvar windowwise = function () {\n return pipe(startWith(undefined), pairwise());\n};\n/**\n * Given two snapshots does their metadata match?\n * @param a\n * @param b\n */\nvar metaDataEquals = function (a, b) {\n return JSON.stringify(a.metadata) === JSON.stringify(b.metadata);\n};\n/**\n * Create an operator that filters out empty changes. We provide the\n * ability to filter on events, which means all changes can be filtered out.\n * This creates an empty array and would be incorrect to emit.\n */\nvar filterEmptyUnlessFirst = function () {\n return pipe(windowwise(), filter(function (_a) {\n var prior = _a[0],\n current = _a[1];\n return current.length > 0 || prior === undefined;\n }), map(function (_a) {\n var current = _a[1];\n return current;\n }));\n};\n/**\n * Return a stream of document changes on a query. 
These results are not in sort order but in\n * order of occurence.\n * @param query\n */\nfunction collectionChanges(query, options) {\n if (options === void 0) {\n options = {};\n }\n return fromRef(query, {\n includeMetadataChanges: true\n }).pipe(windowwise(), map(function (_a) {\n var priorSnapshot = _a[0],\n currentSnapshot = _a[1];\n var docChanges = currentSnapshot.docChanges();\n if (priorSnapshot && !metaDataEquals(priorSnapshot, currentSnapshot)) {\n // the metadata has changed, docChanges() doesn't return metadata events, so let's\n // do it ourselves by scanning over all the docs and seeing if the metadata has changed\n // since either this docChanges() emission or the prior snapshot\n currentSnapshot.docs.forEach(function (currentDocSnapshot, currentIndex) {\n var currentDocChange = docChanges.find(function (c) {\n return refEqual(c.doc.ref, currentDocSnapshot.ref);\n });\n if (currentDocChange) {\n // if the doc is in the current changes and the metadata hasn't changed this doc\n if (metaDataEquals(currentDocChange.doc, currentDocSnapshot)) {\n return;\n }\n } else {\n // if there is a prior doc and the metadata hasn't changed skip this doc\n var priorDocSnapshot = priorSnapshot === null || priorSnapshot === void 0 ? void 0 : priorSnapshot.docs.find(function (d) {\n return refEqual(d.ref, currentDocSnapshot.ref);\n });\n if (priorDocSnapshot && metaDataEquals(priorDocSnapshot, currentDocSnapshot)) {\n return;\n }\n }\n docChanges.push({\n oldIndex: currentIndex,\n newIndex: currentIndex,\n type: 'modified',\n doc: currentDocSnapshot\n });\n });\n }\n return docChanges;\n }), filterEvents(options.events || ALL_EVENTS), filterEmptyUnlessFirst());\n}\n/**\n * Return a stream of document snapshots on a query. These results are in sort order.\n * @param query\n */\nfunction collection(query) {\n return fromRef(query, {\n includeMetadataChanges: true\n }).pipe(map(function (changes) {\n return changes.docs;\n }));\n}\n/**\n * Return a stream of document changes on a query. These results are in sort order.\n * @param query\n */\nfunction sortedChanges(query, options) {\n if (options === void 0) {\n options = {};\n }\n return collectionChanges(query, options).pipe(scan(function (current, changes) {\n return processDocumentChanges(current, changes, options.events);\n }, []), distinctUntilChanged());\n}\n/**\n * Create a stream of changes as they occur it time. 
This method is similar\n * to docChanges() but it collects each event in an array over time.\n */\nfunction auditTrail(query, options) {\n if (options === void 0) {\n options = {};\n }\n return collectionChanges(query, options).pipe(scan(function (current, action) {\n return __spreadArray(__spreadArray([], current, true), action, true);\n }, []));\n}\n/**\n * Returns a stream of documents mapped to their data payload, and optionally the document ID\n * @param query\n * @param options\n */\nfunction collectionData(query, options) {\n if (options === void 0) {\n options = {};\n }\n return collection(query).pipe(map(function (arr) {\n return arr.map(function (snap) {\n return snapToData(snap, options);\n });\n }));\n}\nfunction collectionCountSnap(query) {\n return from(getCountFromServer(query));\n}\nfunction collectionCount(query) {\n return collectionCountSnap(query).pipe(map(function (snap) {\n return snap.data().count;\n }));\n}\nexport { auditTrail, collection, collectionChanges, collectionCount, collectionCountSnap, collectionData, doc, docData, fromRef, snapToData, sortedChanges };\n","import { ɵgetAllInstancesOf as _getAllInstancesOf, ɵgetDefaultInstanceOf as _getDefaultInstanceOf, VERSION, ɵAngularFireSchedulers as _AngularFireSchedulers, ɵAppCheckInstances as _AppCheckInstances, ɵzoneWrap as _zoneWrap } from '@angular/fire';\nimport { timer, from } from 'rxjs';\nimport { concatMap, distinct } from 'rxjs/operators';\nimport * as i0 from '@angular/core';\nimport { InjectionToken, Optional, NgModule, makeEnvironmentProviders, NgZone, Injector } from '@angular/core';\nimport { FirebaseApp, FirebaseApps } from '@angular/fire/app';\nimport { AuthInstances } from '@angular/fire/auth';\nimport { registerVersion } from 'firebase/app';\nimport { collectionChanges as collectionChanges$1, collection as collection$1, sortedChanges as sortedChanges$1, auditTrail as auditTrail$1, collectionData as collectionData$1, collectionCountSnap as collectionCountSnap$1, collectionCount as collectionCount$1, doc as doc$1, docData as docData$1, snapToData as snapToData$1, fromRef as fromRef$1 } from 'rxfire/firestore';\nimport { addDoc as addDoc$1, aggregateFieldEqual as aggregateFieldEqual$1, aggregateQuerySnapshotEqual as aggregateQuerySnapshotEqual$1, and as and$1, arrayRemove as arrayRemove$1, arrayUnion as arrayUnion$1, average as average$1, clearIndexedDbPersistence as clearIndexedDbPersistence$1, collection as collection$2, collectionGroup as collectionGroup$1, connectFirestoreEmulator as connectFirestoreEmulator$1, count as count$1, deleteAllPersistentCacheIndexes as deleteAllPersistentCacheIndexes$1, deleteDoc as deleteDoc$1, deleteField as deleteField$1, disableNetwork as disableNetwork$1, disablePersistentCacheIndexAutoCreation as disablePersistentCacheIndexAutoCreation$1, doc as doc$2, documentId as documentId$1, enableIndexedDbPersistence as enableIndexedDbPersistence$1, enableMultiTabIndexedDbPersistence as enableMultiTabIndexedDbPersistence$1, enableNetwork as enableNetwork$1, enablePersistentCacheIndexAutoCreation as enablePersistentCacheIndexAutoCreation$1, endAt as endAt$1, endBefore as endBefore$1, getAggregateFromServer as getAggregateFromServer$1, getCountFromServer as getCountFromServer$1, getDoc as getDoc$1, getDocFromCache as getDocFromCache$1, getDocFromServer as getDocFromServer$1, getDocs as getDocs$1, getDocsFromCache as getDocsFromCache$1, getDocsFromServer as getDocsFromServer$1, getFirestore as getFirestore$1, getPersistentCacheIndexManager as getPersistentCacheIndexManager$1, 
increment as increment$1, initializeFirestore as initializeFirestore$1, limit as limit$1, limitToLast as limitToLast$1, loadBundle as loadBundle$1, memoryEagerGarbageCollector as memoryEagerGarbageCollector$1, memoryLocalCache as memoryLocalCache$1, memoryLruGarbageCollector as memoryLruGarbageCollector$1, namedQuery as namedQuery$1, onSnapshot as onSnapshot$1, onSnapshotsInSync as onSnapshotsInSync$1, or as or$1, orderBy as orderBy$1, persistentLocalCache as persistentLocalCache$1, persistentMultipleTabManager as persistentMultipleTabManager$1, persistentSingleTabManager as persistentSingleTabManager$1, query as query$1, queryEqual as queryEqual$1, refEqual as refEqual$1, runTransaction as runTransaction$1, serverTimestamp as serverTimestamp$1, setDoc as setDoc$1, setIndexConfiguration as setIndexConfiguration$1, setLogLevel as setLogLevel$1, snapshotEqual as snapshotEqual$1, startAfter as startAfter$1, startAt as startAt$1, sum as sum$1, terminate as terminate$1, updateDoc as updateDoc$1, waitForPendingWrites as waitForPendingWrites$1, where as where$1, writeBatch as writeBatch$1 } from 'firebase/firestore';\nexport * from 'firebase/firestore';\nclass Firestore {\n constructor(firestore) {\n return firestore;\n }\n}\nconst FIRESTORE_PROVIDER_NAME = 'firestore';\nclass FirestoreInstances {\n constructor() {\n return _getAllInstancesOf(FIRESTORE_PROVIDER_NAME);\n }\n}\nconst firestoreInstance$ = /*#__PURE__*/ /*#__PURE__*/timer(0, 300).pipe(/*#__PURE__*/concatMap(() => from(_getAllInstancesOf(FIRESTORE_PROVIDER_NAME))), /*#__PURE__*/distinct());\nconst PROVIDED_FIRESTORE_INSTANCES = /*#__PURE__*/new InjectionToken('angularfire2.firestore-instances');\nfunction defaultFirestoreInstanceFactory(provided, defaultApp) {\n const defaultFirestore = _getDefaultInstanceOf(FIRESTORE_PROVIDER_NAME, provided, defaultApp);\n return defaultFirestore && new Firestore(defaultFirestore);\n}\nfunction firestoreInstanceFactory(fn) {\n return (zone, injector) => {\n const firestore = zone.runOutsideAngular(() => fn(injector));\n return new Firestore(firestore);\n };\n}\nconst FIRESTORE_INSTANCES_PROVIDER = {\n provide: FirestoreInstances,\n deps: [[/*#__PURE__*/new Optional(), PROVIDED_FIRESTORE_INSTANCES]]\n};\nconst DEFAULT_FIRESTORE_INSTANCE_PROVIDER = {\n provide: Firestore,\n useFactory: defaultFirestoreInstanceFactory,\n deps: [[/*#__PURE__*/new Optional(), PROVIDED_FIRESTORE_INSTANCES], FirebaseApp]\n};\nlet FirestoreModule = /*#__PURE__*/(() => {\n class FirestoreModule {\n constructor() {\n registerVersion('angularfire', VERSION.full, 'fst');\n }\n static ɵfac = function FirestoreModule_Factory(__ngFactoryType__) {\n return new (__ngFactoryType__ || FirestoreModule)();\n };\n static ɵmod = /* @__PURE__ */i0.ɵɵdefineNgModule({\n type: FirestoreModule\n });\n static ɵinj = /* @__PURE__ */i0.ɵɵdefineInjector({\n providers: [DEFAULT_FIRESTORE_INSTANCE_PROVIDER, FIRESTORE_INSTANCES_PROVIDER]\n });\n }\n return FirestoreModule;\n})();\n/*#__PURE__*/(() => {\n (typeof ngDevMode === \"undefined\" || ngDevMode) && void 0;\n})();\nfunction provideFirestore(fn, ...deps) {\n registerVersion('angularfire', VERSION.full, 'fst');\n return makeEnvironmentProviders([DEFAULT_FIRESTORE_INSTANCE_PROVIDER, FIRESTORE_INSTANCES_PROVIDER, {\n provide: PROVIDED_FIRESTORE_INSTANCES,\n useFactory: firestoreInstanceFactory(fn),\n multi: true,\n deps: [NgZone, Injector, _AngularFireSchedulers, FirebaseApps,\n // Firestore+Auth work better if Auth is loaded first\n [new Optional(), AuthInstances], [new Optional(), 
_AppCheckInstances], ...deps]\n }]);\n}\n\n// DO NOT MODIFY, this file is autogenerated by tools/build.ts\nconst collectionChanges = /*#__PURE__*/_zoneWrap(collectionChanges$1, true);\nconst collectionSnapshots = /*#__PURE__*/_zoneWrap(collection$1, true);\nconst sortedChanges = /*#__PURE__*/_zoneWrap(sortedChanges$1, true);\nconst auditTrail = /*#__PURE__*/_zoneWrap(auditTrail$1, true);\nconst collectionData = /*#__PURE__*/_zoneWrap(collectionData$1, true);\nconst collectionCountSnap = /*#__PURE__*/_zoneWrap(collectionCountSnap$1, true);\nconst collectionCount = /*#__PURE__*/_zoneWrap(collectionCount$1, true);\nconst docSnapshots = /*#__PURE__*/_zoneWrap(doc$1, true);\nconst docData = /*#__PURE__*/_zoneWrap(docData$1, true);\nconst snapToData = /*#__PURE__*/_zoneWrap(snapToData$1, true);\nconst fromRef = /*#__PURE__*/_zoneWrap(fromRef$1, true);\n\n// DO NOT MODIFY, this file is autogenerated by tools/build.ts\nconst addDoc = /*#__PURE__*/_zoneWrap(addDoc$1, true);\nconst aggregateFieldEqual = /*#__PURE__*/_zoneWrap(aggregateFieldEqual$1, true);\nconst aggregateQuerySnapshotEqual = /*#__PURE__*/_zoneWrap(aggregateQuerySnapshotEqual$1, true);\nconst and = /*#__PURE__*/_zoneWrap(and$1, true);\nconst arrayRemove = /*#__PURE__*/_zoneWrap(arrayRemove$1, true);\nconst arrayUnion = /*#__PURE__*/_zoneWrap(arrayUnion$1, true);\nconst average = /*#__PURE__*/_zoneWrap(average$1, true);\nconst clearIndexedDbPersistence = /*#__PURE__*/_zoneWrap(clearIndexedDbPersistence$1, true);\nconst collection = /*#__PURE__*/_zoneWrap(collection$2, true);\nconst collectionGroup = /*#__PURE__*/_zoneWrap(collectionGroup$1, true);\nconst connectFirestoreEmulator = /*#__PURE__*/_zoneWrap(connectFirestoreEmulator$1, true);\nconst count = /*#__PURE__*/_zoneWrap(count$1, true);\nconst deleteAllPersistentCacheIndexes = /*#__PURE__*/_zoneWrap(deleteAllPersistentCacheIndexes$1, true);\nconst deleteDoc = /*#__PURE__*/_zoneWrap(deleteDoc$1, true);\nconst deleteField = /*#__PURE__*/_zoneWrap(deleteField$1, true);\nconst disableNetwork = /*#__PURE__*/_zoneWrap(disableNetwork$1, true);\nconst disablePersistentCacheIndexAutoCreation = /*#__PURE__*/_zoneWrap(disablePersistentCacheIndexAutoCreation$1, true);\nconst doc = /*#__PURE__*/_zoneWrap(doc$2, true);\nconst documentId = /*#__PURE__*/_zoneWrap(documentId$1, true);\nconst enableIndexedDbPersistence = /*#__PURE__*/_zoneWrap(enableIndexedDbPersistence$1, true);\nconst enableMultiTabIndexedDbPersistence = /*#__PURE__*/_zoneWrap(enableMultiTabIndexedDbPersistence$1, true);\nconst enableNetwork = /*#__PURE__*/_zoneWrap(enableNetwork$1, true);\nconst enablePersistentCacheIndexAutoCreation = /*#__PURE__*/_zoneWrap(enablePersistentCacheIndexAutoCreation$1, true);\nconst endAt = /*#__PURE__*/_zoneWrap(endAt$1, true);\nconst endBefore = /*#__PURE__*/_zoneWrap(endBefore$1, true);\nconst getAggregateFromServer = /*#__PURE__*/_zoneWrap(getAggregateFromServer$1, true);\nconst getCountFromServer = /*#__PURE__*/_zoneWrap(getCountFromServer$1, true);\nconst getDoc = /*#__PURE__*/_zoneWrap(getDoc$1, true);\nconst getDocFromCache = /*#__PURE__*/_zoneWrap(getDocFromCache$1, true);\nconst getDocFromServer = /*#__PURE__*/_zoneWrap(getDocFromServer$1, true);\nconst getDocs = /*#__PURE__*/_zoneWrap(getDocs$1, true);\nconst getDocsFromCache = /*#__PURE__*/_zoneWrap(getDocsFromCache$1, true);\nconst getDocsFromServer = /*#__PURE__*/_zoneWrap(getDocsFromServer$1, true);\nconst getFirestore = /*#__PURE__*/_zoneWrap(getFirestore$1, true);\nconst getPersistentCacheIndexManager = 
/*#__PURE__*/_zoneWrap(getPersistentCacheIndexManager$1, true);\nconst increment = /*#__PURE__*/_zoneWrap(increment$1, true);\nconst initializeFirestore = /*#__PURE__*/_zoneWrap(initializeFirestore$1, true);\nconst limit = /*#__PURE__*/_zoneWrap(limit$1, true);\nconst limitToLast = /*#__PURE__*/_zoneWrap(limitToLast$1, true);\nconst loadBundle = /*#__PURE__*/_zoneWrap(loadBundle$1, true);\nconst memoryEagerGarbageCollector = /*#__PURE__*/_zoneWrap(memoryEagerGarbageCollector$1, true);\nconst memoryLocalCache = /*#__PURE__*/_zoneWrap(memoryLocalCache$1, true);\nconst memoryLruGarbageCollector = /*#__PURE__*/_zoneWrap(memoryLruGarbageCollector$1, true);\nconst namedQuery = /*#__PURE__*/_zoneWrap(namedQuery$1, true);\nconst onSnapshot = /*#__PURE__*/_zoneWrap(onSnapshot$1, true);\nconst onSnapshotsInSync = /*#__PURE__*/_zoneWrap(onSnapshotsInSync$1, true);\nconst or = /*#__PURE__*/_zoneWrap(or$1, true);\nconst orderBy = /*#__PURE__*/_zoneWrap(orderBy$1, true);\nconst persistentLocalCache = /*#__PURE__*/_zoneWrap(persistentLocalCache$1, true);\nconst persistentMultipleTabManager = /*#__PURE__*/_zoneWrap(persistentMultipleTabManager$1, true);\nconst persistentSingleTabManager = /*#__PURE__*/_zoneWrap(persistentSingleTabManager$1, true);\nconst query = /*#__PURE__*/_zoneWrap(query$1, true);\nconst queryEqual = /*#__PURE__*/_zoneWrap(queryEqual$1, true);\nconst refEqual = /*#__PURE__*/_zoneWrap(refEqual$1, true);\nconst runTransaction = /*#__PURE__*/_zoneWrap(runTransaction$1, true);\nconst serverTimestamp = /*#__PURE__*/_zoneWrap(serverTimestamp$1, true);\nconst setDoc = /*#__PURE__*/_zoneWrap(setDoc$1, true);\nconst setIndexConfiguration = /*#__PURE__*/_zoneWrap(setIndexConfiguration$1, true);\nconst setLogLevel = /*#__PURE__*/_zoneWrap(setLogLevel$1, true);\nconst snapshotEqual = /*#__PURE__*/_zoneWrap(snapshotEqual$1, true);\nconst startAfter = /*#__PURE__*/_zoneWrap(startAfter$1, true);\nconst startAt = /*#__PURE__*/_zoneWrap(startAt$1, true);\nconst sum = /*#__PURE__*/_zoneWrap(sum$1, true);\nconst terminate = /*#__PURE__*/_zoneWrap(terminate$1, true);\nconst updateDoc = /*#__PURE__*/_zoneWrap(updateDoc$1, true);\nconst waitForPendingWrites = /*#__PURE__*/_zoneWrap(waitForPendingWrites$1, true);\nconst where = /*#__PURE__*/_zoneWrap(where$1, true);\nconst writeBatch = /*#__PURE__*/_zoneWrap(writeBatch$1, true);\n\n/**\n * Generated bundle index. 
Do not edit.\n */\n\nexport { Firestore, FirestoreInstances, FirestoreModule, addDoc, aggregateFieldEqual, aggregateQuerySnapshotEqual, and, arrayRemove, arrayUnion, auditTrail, average, clearIndexedDbPersistence, collection, collectionChanges, collectionCount, collectionCountSnap, collectionData, collectionGroup, collectionSnapshots, connectFirestoreEmulator, count, deleteAllPersistentCacheIndexes, deleteDoc, deleteField, disableNetwork, disablePersistentCacheIndexAutoCreation, doc, docData, docSnapshots, documentId, enableIndexedDbPersistence, enableMultiTabIndexedDbPersistence, enableNetwork, enablePersistentCacheIndexAutoCreation, endAt, endBefore, firestoreInstance$, fromRef, getAggregateFromServer, getCountFromServer, getDoc, getDocFromCache, getDocFromServer, getDocs, getDocsFromCache, getDocsFromServer, getFirestore, getPersistentCacheIndexManager, increment, initializeFirestore, limit, limitToLast, loadBundle, memoryEagerGarbageCollector, memoryLocalCache, memoryLruGarbageCollector, namedQuery, onSnapshot, onSnapshotsInSync, or, orderBy, persistentLocalCache, persistentMultipleTabManager, persistentSingleTabManager, provideFirestore, query, queryEqual, refEqual, runTransaction, serverTimestamp, setDoc, setIndexConfiguration, setLogLevel, snapToData, snapshotEqual, sortedChanges, startAfter, startAt, sum, terminate, updateDoc, waitForPendingWrites, where, writeBatch };\n","import { Injectable } from '@angular/core';\nimport { Subject } from 'rxjs';\n\n@Injectable({ providedIn: 'root' })\nexport class AtlasMenuService {\n private closeMenuEventSource = new Subject();\n\n closeMenuEvent$ = this.closeMenuEventSource.asObservable();\n\n closeMenu(id: string): void {\n this.closeMenuEventSource.next(id);\n }\n}\n","import { Component, Input } from '@angular/core';\nimport { AtlasMenuService } from '../menu/menu.service';\n\n@Component({\n selector: 'atlas-menu-item',\n templateUrl: './menu-item.component.html',\n styleUrls: ['./menu-item.component.scss', '../common.scss'],\n standalone: false,\n})\nexport class MenuItemComponent {\n @Input() url: string;\n @Input() route: string;\n @Input() label: string;\n @Input() icon: string;\n @Input() closeOnClick = true;\n @Input() disabled: boolean;\n public parentMenuId: string;\n\n constructor(private menuService: AtlasMenuService) {}\n\n clicked(): void {\n if (this.closeOnClick) {\n this.menuService.closeMenu(this.parentMenuId);\n }\n }\n\n // This is a temporary solution for allowing the Academy/Community Conquer Local links to be opened in a new tab when click.\n isConquerLocalLink(url: string): boolean {\n const regEx = new RegExp('(academy|community).conquerlocal.com');\n return regEx.test(url);\n }\n}\n","\n","import {\n AfterContentChecked,\n AfterViewInit,\n Component,\n ContentChildren,\n effect,\n ElementRef,\n EventEmitter,\n Injector,\n Input,\n OnDestroy,\n OnInit,\n Output,\n signal,\n ViewChild,\n} from '@angular/core';\nimport { fromEvent, Subject } from 'rxjs';\nimport { filter, takeUntil } from 'rxjs/operators';\nimport { MenuItemComponent } from '../menu-item/menu-item.component';\n\nimport { AtlasMenuService } from './menu.service';\n\nclass Guid {\n static newGuid(): string {\n return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c: string) => {\n // tslint:disable-next-line:no-bitwise\n const r = (Math.random() * 16) | 0,\n // tslint:disable-next-line: no-bitwise\n v = c === 'x' ? 
r : (r & 0x3) | 0x8;\n return v.toString(16);\n });\n }\n}\n\n@Component({\n selector: 'atlas-menu',\n templateUrl: './menu.component.html',\n styleUrls: ['./menu.component.scss', '../common.scss'],\n standalone: false,\n})\nexport class MenuComponent implements OnInit, AfterViewInit, OnDestroy, AfterContentChecked {\n @ViewChild('menu', { static: true }) menu: ElementRef;\n @ContentChildren(MenuItemComponent) menuItems: MenuItemComponent[];\n @Input() relative: boolean;\n @Input() anchorRight: boolean;\n @Input() anchorBottom: boolean;\n\n // eslint-disable-next-line @angular-eslint/no-output-on-prefix\n @Output() onClose: EventEmitter = new EventEmitter();\n // eslint-disable-next-line @angular-eslint/no-output-on-prefix\n @Output() onOpen: EventEmitter = new EventEmitter();\n private anchor: HTMLElement;\n\n public id: string;\n\n _destroyOpenSubscriptions$$: Subject = new Subject();\n _destroyed$$: Subject = new Subject();\n isOpen = signal(false);\n\n constructor(\n private elRef: ElementRef,\n private menuService: AtlasMenuService,\n private injector: Injector,\n ) {\n this.id = Guid.newGuid();\n }\n\n ngOnInit(): void {\n if (this.relative) {\n this.elRef.nativeElement.parentElement.style.setProperty('position', 'relative');\n this.elRef.nativeElement.style.setProperty('position', 'absolute');\n this.elRef.nativeElement.style.setProperty('left', '0');\n this.elRef.nativeElement.style.setProperty('bottom', '0');\n this.elRef.nativeElement.style.setProperty('min-width', '100%');\n this.menu.nativeElement.style.setProperty('min-width', '100%');\n }\n }\n\n ngAfterContentChecked(): void {\n this.menuItems.map((tab: MenuItemComponent) => {\n tab.parentMenuId = this.id;\n });\n }\n\n ngAfterViewInit(): void {\n this.anchor = this.elRef.nativeElement.parentElement;\n\n effect(\n () => {\n if (this.isOpen()) {\n this.elRef.nativeElement.style.setProperty('display', 'flex');\n fromEvent(document, 'click')\n .pipe(takeUntil(this._destroyOpenSubscriptions$$))\n .subscribe((event: MouseEvent) => {\n if (\n !(\n this.pointInBounds(event.clientX, event.clientY, this.menuRect) ||\n this.pointInBounds(event.clientX, event.clientY, this.anchorRect)\n )\n ) {\n this.isOpen.set(false);\n }\n });\n\n fromEvent(window, 'resize')\n .pipe(takeUntil(this._destroyOpenSubscriptions$$))\n .subscribe(() => {\n this.repositionMenu();\n });\n\n this.menuService.closeMenuEvent$.pipe(takeUntil(this._destroyOpenSubscriptions$$)).subscribe((id: string) => {\n if (this.isOpen() && (!id || id === this.id)) {\n this.isOpen.set(false);\n }\n });\n this.onOpen.emit();\n } else {\n this._destroyOpenSubscriptions$$.next();\n this.onClose.emit();\n }\n },\n {\n injector: this.injector,\n },\n );\n\n fromEvent(this.anchor, 'click')\n .pipe(\n takeUntil(this._destroyed$$),\n filter((event: MouseEvent) => this.pointInBounds(event.clientX, event.clientY, this.anchorRect)),\n )\n .subscribe(() => {\n this.toggleMenu();\n });\n }\n\n private get anchorRect(): DOMRect {\n return this.anchor.getBoundingClientRect() as DOMRect;\n }\n\n private get menuRect(): DOMRect {\n return this.menu.nativeElement.getBoundingClientRect() as DOMRect;\n }\n\n private pointInBounds(pointX: number, pointY: number, rect: DOMRect): boolean {\n return pointX >= rect.left && pointX <= rect.right && pointY >= rect.top && pointY <= rect.bottom;\n }\n\n private toggleMenu(): void {\n this.isOpen.set(!this.isOpen());\n this.repositionMenu();\n }\n\n private repositionMenu(): void {\n if (this.relative) {\n // tslint:disable-next-line: no-shadowed-variable\n 
let menuStyles = ``;\n if (this.anchorRight) {\n menuStyles += `right: 30px;`;\n }\n if (this.anchorBottom) {\n menuStyles += ` bottom: 0;`;\n } else {\n menuStyles += ` top: -30px;`;\n }\n this.menu.nativeElement.setAttribute('style', menuStyles);\n return;\n }\n if (!this.isOpen()) {\n return;\n }\n let menuStyles = `top: ${this.anchorRect.bottom}px;`;\n if (window.innerWidth <= 600) {\n menuStyles += ` left: 0; width: 100%;`;\n this.menu.nativeElement.setAttribute('style', menuStyles);\n return;\n }\n if (window.innerWidth / 2 < this.anchorRect.left) {\n menuStyles += ` left: ${this.anchorRect.right}px;`;\n menuStyles += ` transform: translate(-100%, 0%);`;\n } else {\n menuStyles += ` left: ${this.anchorRect.left}px;`;\n }\n menuStyles += ` min-width: ${this.anchorRect.width}px`;\n this.menu.nativeElement.setAttribute('style', menuStyles);\n }\n\n ngOnDestroy(): void {\n this._destroyed$$.next();\n this._destroyed$$.complete();\n }\n}\n","\n \n
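/*
 * Illustrative sketch, not part of the bundled sources above: MenuComponent keeps the
 * menu open only while clicks land inside either the menu surface or its anchor element,
 * using getBoundingClientRect() and a point-in-rectangle test on the click coordinates.
 * The standalone helpers below restate that hit test; the names pointInRect and
 * shouldCloseOnClick are hypothetical and exist only for this example.
 */
function pointInRect(x: number, y: number, rect: DOMRect): boolean {
  // "Inside" means within the rectangle on both axes, edges included.
  return x >= rect.left && x <= rect.right && y >= rect.top && y <= rect.bottom;
}

function shouldCloseOnClick(event: MouseEvent, menuRect: DOMRect, anchorRect: DOMRect): boolean {
  // Close the menu only when the click is outside both the menu and its anchor.
  return !(
    pointInRect(event.clientX, event.clientY, menuRect) ||
    pointInRect(event.clientX, event.clientY, anchorRect)
  );
}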
\n","import { Component } from '@angular/core';\n\n@Component({\n selector: 'atlas-chip',\n templateUrl: './chip.component.html',\n styleUrls: ['./chip.component.scss'],\n standalone: false,\n})\nexport class ChipComponent {}\n"," \n","import { Component, Input } from '@angular/core';\n\n@Component({\n selector: 'atlas-item',\n templateUrl: './item.component.html',\n styleUrls: ['./item.component.scss', '../common.scss'],\n standalone: false,\n})\nexport class ItemComponent {\n isInteractable: boolean;\n @Input() icon: string;\n @Input() set interactable(val: string) {\n this.isInteractable = val !== 'false';\n }\n @Input() badgeContent: string | number | null;\n @Input() iconText: string;\n @Input() customClass = '';\n}\n","\n
{{ icon }} \n {{ iconText }}\n {{ badgeContent }}\n
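/*
 * Illustrative sketch, not part of the bundled sources above: ItemComponent declares
 * `interactable` as a string setter, so the attribute form interactable="false" disables
 * interaction while any other value (including a bare attribute) enables it. The helper
 * below restates that coercion; coerceInteractable is a hypothetical name used only here.
 */
function coerceInteractable(val: string): boolean {
  // Only the literal string 'false' switches interaction off; everything else counts as true.
  return val !== 'false';
}

// For example: coerceInteractable('') === true, coerceInteractable('false') === false.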
\n","import { Component, input, Input, OnChanges, OnInit } from '@angular/core';\nimport { CenterNavigationItem } from '@vendasta/atlas';\n\n@Component({\n selector: 'atlas-center-selector',\n templateUrl: './center-selector.component.html',\n styleUrls: ['./center-selector.component.scss'],\n standalone: false,\n})\nexport class CenterSelectorComponent implements OnInit, OnChanges {\n @Input() centerData: CenterNavigationItem[] = [];\n @Input() serviceProviderId: string;\n readonly showNewMenu = input(false);\n\n showSelector: boolean;\n\n ngOnInit(): void {\n this.calculateShowSelector();\n }\n\n ngOnChanges(): void {\n this.calculateShowSelector();\n }\n\n private calculateShowSelector(): void {\n if (!this.centerData) {\n return;\n }\n this.showSelector =\n this.centerData.filter((item: CenterNavigationItem) => {\n return item.centerId !== this.serviceProviderId;\n }).length > 0;\n }\n}\n","@if (showSelector) {\n @if (showNewMenu()) {\n \n apps \n \n \n @for (navItem of centerData; track $index) {\n @if (navItem.centerId !== serviceProviderId) {\n \n {{ navItem.name }}\n \n }\n }\n \n } @else {\n \n \n \n \n \n }\n}\n","import { Component, HostBinding } from '@angular/core';\nimport { BehaviorSubject } from 'rxjs';\n\n@Component({\n selector: 'atlas-panel-content',\n templateUrl: './panel-content.component.html',\n styleUrls: ['./panel-content.component.scss'],\n standalone: false,\n})\nexport class PanelContentComponent {\n set collapsed(collapsed: boolean) {\n this.collapsed$$.next(collapsed);\n }\n\n private collapsed$$ = new BehaviorSubject(null);\n\n @HostBinding('class.atlas-panel-content__hidden')\n get isHidden(): boolean {\n return this.collapsed$$.getValue();\n }\n}\n"," \n","import { Directive, ElementRef, HostListener, Renderer2 } from '@angular/core';\n\n@Directive({\n selector: '[atlasIconButton]',\n standalone: false,\n})\nexport class IconButtonDirective {\n constructor(\n private el: ElementRef,\n private renderer: Renderer2,\n ) {\n renderer.setStyle(el.nativeElement, 'padding', '0');\n renderer.setStyle(el.nativeElement, 'min-width', '0');\n renderer.setStyle(el.nativeElement, 'width', '40px');\n renderer.setStyle(el.nativeElement, 'height', '40px');\n renderer.setStyle(el.nativeElement, 'flex-shrink', '0');\n renderer.setStyle(el.nativeElement, 'line-height', '40px');\n renderer.setStyle(el.nativeElement, 'border-radius', '50%');\n renderer.setStyle(el.nativeElement, 'box-sizing', 'border-box');\n renderer.setStyle(el.nativeElement, 'position', 'relative');\n renderer.setStyle(el.nativeElement, 'user-select', 'none');\n renderer.setStyle(el.nativeElement, 'cursor', 'pointer');\n renderer.setStyle(el.nativeElement, 'outline', '0');\n renderer.setStyle(el.nativeElement, 'border', 'none');\n renderer.setStyle(el.nativeElement, 'display', 'flex');\n renderer.setStyle(el.nativeElement, 'justify-content', 'center');\n renderer.setStyle(el.nativeElement, 'align-items', 'center');\n renderer.setStyle(el.nativeElement, 'white-space', 'nowrap');\n renderer.setStyle(el.nativeElement, 'text-decoration', 'none');\n renderer.setStyle(el.nativeElement, 'vertical-align', 'baseline');\n renderer.setStyle(el.nativeElement, 'text-align', 'center');\n renderer.setStyle(el.nativeElement, 'margin', '0');\n renderer.setStyle(el.nativeElement, 'overflow', 'visible');\n renderer.setStyle(el.nativeElement, 'color', 'inherit');\n renderer.setStyle(el.nativeElement, 'background', 'transparent');\n renderer.setStyle(\n el.nativeElement,\n 'transition',\n 'background-color 0.2s cubic-bezier(0.4, 
0, 1, 1), color 0.15s ease-in-out',\n );\n }\n\n @HostListener('mouseover')\n @HostListener('mouseup')\n onMouseOver(): void {\n this.renderer.setStyle(this.el.nativeElement, 'background', 'var(--themingPrimaryHoverColor)');\n }\n\n @HostListener('mouseout')\n onMouseOut(): void {\n this.renderer.setStyle(this.el.nativeElement, 'background', 'transparent');\n }\n\n @HostListener('mousedown')\n onClick(): void {\n this.renderer.setStyle(this.el.nativeElement, 'background', 'var(--themingPrimaryActiveColor)');\n }\n}\n","import { Component, EventEmitter } from '@angular/core';\n\n@Component({\n selector: 'atlas-panel-header',\n templateUrl: './panel-header.component.html',\n styleUrls: ['./panel-header.component.scss'],\n standalone: false,\n})\nexport class PanelHeaderComponent {\n public collapsible: boolean;\n public collapsed: boolean;\n public collapsedChange: EventEmitter = new EventEmitter();\n\n collapseClicked(event: Event): void {\n event.stopPropagation();\n this.collapsed = !this.collapsed;\n this.collapsedChange.emit(this.collapsed);\n }\n}\n"," \n\n","import {\n AfterContentInit,\n Component,\n ContentChild,\n HostBinding,\n Input,\n OnChanges,\n OnDestroy,\n SimpleChanges,\n} from '@angular/core';\nimport { BehaviorSubject, Observable, Subject } from 'rxjs';\nimport { distinctUntilChanged, takeUntil } from 'rxjs/operators';\nimport { PanelContentComponent } from './panel-content.component';\nimport { PanelHeaderComponent } from './panel-header.component';\n\n@Component({\n selector: 'atlas-panel',\n templateUrl: './panel.component.html',\n styleUrls: ['./panel.component.scss'],\n standalone: false,\n})\nexport class PanelComponent implements AfterContentInit, OnChanges, OnDestroy {\n @ContentChild(PanelHeaderComponent, { static: true }) header: PanelHeaderComponent;\n @ContentChild(PanelContentComponent, { static: true }) content: PanelContentComponent;\n\n private collapsed$$ = new BehaviorSubject(null);\n\n // Publicly observable state\n public collapsed$: Observable = this.collapsed$$;\n\n @Input() collapsible = false;\n get collapsed(): boolean {\n return this.collapsed$$.getValue();\n }\n\n @Input() set collapsed(collapsed: boolean) {\n this.collapsed$$.next(collapsed);\n }\n\n @HostBinding('class.atlas-panel__collapsed')\n get collapsedClass(): boolean {\n return this.collapsed$$.getValue();\n }\n\n private _destroyed$$: Subject = new Subject();\n\n updateChildrenCollapsed(collapsed: boolean): void {\n if (this.header) {\n this.header.collapsed = collapsed;\n }\n\n if (this.content) {\n this.content.collapsed = collapsed;\n }\n }\n\n ngAfterContentInit(): void {\n // Update header and content on collapse change\n this.collapsed$$.pipe(distinctUntilChanged(), takeUntil(this._destroyed$$)).subscribe((isCollapsed: boolean) => {\n this.updateChildrenCollapsed(isCollapsed);\n });\n\n this.updateChildrenCollapsed(this.collapsed);\n if (this.header) {\n this.header.collapsible = this.collapsible;\n // Allow header to update collapsed status\n this.header.collapsedChange\n .pipe(takeUntil(this._destroyed$$))\n .subscribe((isCollapsed) => this.collapsed$$.next(isCollapsed));\n }\n }\n\n ngOnChanges(changes: SimpleChanges): void {\n if (changes.collapsible) {\n if (this.header) {\n this.header.collapsible = this.collapsible;\n }\n }\n }\n\n ngOnDestroy(): void {\n this._destroyed$$.next();\n this._destroyed$$.complete();\n }\n}\n"," \n","import { Component, Input } from '@angular/core';\n\n@Component({\n selector: 'atlas-loader',\n templateUrl: './loader.component.html',\n 
styleUrls: ['./loader.component.scss'],\n standalone: false,\n})\nexport class LoaderComponent {\n @Input() size: number;\n @Input() thickness = 3;\n}\n","\n","import { Component, EventEmitter, Input, Output } from '@angular/core';\n\n@Component({\n selector: 'atlas-button',\n templateUrl: './button.component.html',\n styleUrls: ['./button.component.scss'],\n standalone: false,\n})\nexport class ButtonComponent {\n @Input() loading: boolean;\n @Input() disabled: boolean;\n @Input() raised: boolean;\n @Input() color: 'primary' | 'secondary' = 'secondary';\n\n // eslint-disable-next-line @angular-eslint/no-output-on-prefix\n @Output() onClick: EventEmitter = new EventEmitter();\n\n onButtonClick(event: Event): void {\n this.onClick.emit();\n event.stopPropagation();\n event.stopImmediatePropagation();\n }\n}\n","\n","import { Component, Input } from '@angular/core';\nimport { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';\n\n@Component({\n selector: 'atlas-checkbox',\n templateUrl: './checkbox.component.html',\n styleUrls: ['./checkbox.component.scss'],\n providers: [{ provide: NG_VALUE_ACCESSOR, useExisting: CheckboxComponent, multi: true }],\n standalone: false,\n})\nexport class CheckboxComponent implements ControlValueAccessor {\n @Input() disabled: boolean;\n @Input() color: 'default' | 'primary' | 'secondary' = 'default';\n private innerValue: boolean;\n\n private changed = new Array<(value: boolean) => void>();\n private touched = new Array<() => void>();\n\n get value(): boolean {\n return this.innerValue;\n }\n\n set value(value: boolean) {\n if (this.innerValue !== value) {\n this.innerValue = value;\n this.changed.forEach((f) => f(value));\n }\n }\n\n touch(): void {\n this.touched.forEach((f) => f());\n }\n\n writeValue(value: boolean): void {\n this.innerValue = value;\n }\n\n registerOnChange(fn: (value: boolean) => void): void {\n this.changed.push(fn);\n }\n\n registerOnTouched(fn: () => void): void {\n this.touched.push(fn);\n }\n}\n","\n \n \n \n \n \n","import { Component } from '@angular/core';\n\n@Component({\n selector: 'atlas-ink-bar',\n templateUrl: './ink-bar.component.html',\n styleUrls: ['./ink-bar.component.scss'],\n standalone: false,\n})\nexport class InkBarComponent {}\n","import { Component } from '@angular/core';\n\n@Component({\n selector: 'atlas-modal-content',\n templateUrl: './modal-content.component.html',\n styleUrls: ['./modal-content.component.scss'],\n standalone: false,\n})\nexport class ModalContentComponent {}\n"," \n","import { Component } from '@angular/core';\n\n@Component({\n selector: 'atlas-modal-footer',\n templateUrl: './modal-footer.component.html',\n styleUrls: ['./modal-footer.component.scss'],\n standalone: false,\n})\nexport class ModalFooterComponent {}\n"," \n","import { Component, inject } from '@angular/core';\nimport { MatDialogRef } from '@angular/material/dialog';\n\n@Component({\n selector: 'atlas-modal-header',\n templateUrl: './modal-header.component.html',\n styleUrls: ['./modal-header.component.scss'],\n standalone: false,\n})\nexport class ModalHeaderComponent {\n dialog = inject(MatDialogRef);\n\n close(): void {\n this.dialog.close();\n }\n}\n"," \n\n close \n \n","import { DialogRef } from '@angular/cdk/dialog';\nimport { BreakpointObserver } from '@angular/cdk/layout';\nimport { Component, inject, OnInit } from '@angular/core';\nimport { takeUntil } from 'rxjs/operators';\n\n@Component({\n selector: 'atlas-modal',\n templateUrl: './modal.component.html',\n styleUrls: ['./modal.component.scss'],\n 
standalone: false,\n})\nexport class ModalComponent implements OnInit {\n dialogRef = inject(DialogRef);\n observer = inject(BreakpointObserver);\n\n ngOnInit() {\n this.observer\n .observe('(max-width: 768px)')\n .pipe(takeUntil(this.dialogRef.closed))\n .subscribe((result) => {\n if (result.matches) {\n this.dialogRef.updateSize('100vw', '100vh');\n const surface = this.dialogRef.overlayRef.overlayElement.querySelector('.mdc-dialog__surface');\n surface.style.borderRadius = '0';\n }\n });\n }\n}\n"," \n","import { Component } from '@angular/core';\n\n@Component({\n selector: 'atlas-panel-description',\n templateUrl: './panel-description.component.html',\n styleUrls: ['./panel-description.component.scss'],\n standalone: false,\n})\nexport class PanelDescriptionComponent {}\n"," \n","import { Component } from '@angular/core';\n\n@Component({\n selector: 'atlas-panel-title',\n templateUrl: './panel-title.component.html',\n styleUrls: ['./panel-title.component.scss'],\n standalone: false,\n})\nexport class PanelTitleComponent {}\n"," \n","import { Injectable } from '@angular/core';\nimport { BehaviorSubject, Observable } from 'rxjs';\nimport { map, shareReplay } from 'rxjs/operators';\n\nexport interface SnackbarOptions {\n duration: number;\n}\n\nexport interface SnackbarConfig {\n text: string;\n actionText: string;\n}\n\n@Injectable({ providedIn: 'root' })\nexport class SnackbarService {\n private visible$$: BehaviorSubject = new BehaviorSubject(false);\n public visible$: Observable = this.visible$$.asObservable();\n private config$$: BehaviorSubject = new BehaviorSubject({ text: '', actionText: '' });\n private config$: Observable = this.config$$.asObservable();\n public text$: Observable;\n public actionText$: Observable;\n\n private timeoutHandle: number;\n\n constructor() {\n this.text$ = this.config$.pipe(\n map((config: SnackbarConfig) => config.text),\n shareReplay({ bufferSize: 1, refCount: true }),\n );\n this.actionText$ = this.config$.pipe(\n map((config: SnackbarConfig) => config.actionText),\n shareReplay({ bufferSize: 1, refCount: true }),\n );\n }\n\n show(text: string, actionText: string, config?: SnackbarOptions): void {\n if (this.visible$$.getValue()) {\n return;\n }\n this.config$$.next({ text: text, actionText: actionText });\n this.visible$$.next(true);\n this.timeoutHandle = window.setTimeout(this.close.bind(this), config && config.duration ? 
config.duration : 2000);\n }\n\n close(): void {\n if (!this.visible$$.getValue()) {\n return;\n }\n window.clearTimeout(this.timeoutHandle);\n this.visible$$.next(false);\n }\n}\n","import { Component, Input } from '@angular/core';\n\n@Component({\n selector: 'atlas-tab',\n templateUrl: './tab.component.html',\n styleUrls: ['./tab.component.scss'],\n standalone: false,\n})\nexport class TabComponent {\n @Input() label: string;\n}\n"," \n","import {\n AfterViewInit,\n Component,\n ElementRef,\n EventEmitter,\n Input,\n OnChanges,\n OnDestroy,\n Output,\n Renderer2,\n ViewChild,\n} from '@angular/core';\nimport { ReplaySubject, Subject } from 'rxjs';\nimport { filter, takeUntil } from 'rxjs/operators';\nimport { InkBarComponent } from '../ink-bar/ink-bar.component';\n\n@Component({\n selector: 'atlas-tab-header',\n templateUrl: './tab-header.component.html',\n styleUrls: ['./tab-header.component.scss'],\n standalone: false,\n})\nexport class TabHeaderComponent implements OnChanges, OnDestroy, AfterViewInit {\n @ViewChild(InkBarComponent, { read: ElementRef }) _inkBar: ElementRef;\n @Input() labels: string[];\n @Output() selected: EventEmitter<string> = new EventEmitter();\n public activeLabel$$: ReplaySubject<string> = new ReplaySubject(1);\n private _destroyed$$: Subject<void> = new Subject();\n\n constructor(private renderer: Renderer2) {}\n\n ngAfterViewInit(): void {\n this.activeLabel$$\n .pipe(\n takeUntil(this._destroyed$$),\n filter((label: string) => !!label),\n )\n .subscribe((label: string) => {\n this.selected.emit(label);\n if (this._inkBar) {\n const activeTab: HTMLElement = document.getElementById(`atlas-navbar__tab__${label}`);\n this.renderer.setStyle(this._inkBar.nativeElement, 'visibility', 'visible');\n this.renderer.setStyle(this._inkBar.nativeElement, 'width', activeTab.clientWidth + 'px');\n this.renderer.setStyle(this._inkBar.nativeElement, 'left', activeTab.offsetLeft + 'px');\n }\n });\n }\n\n ngOnChanges(): void {\n if (this.labels && this.labels.length > 0) {\n this.activeLabel$$.next(this.labels[0]);\n }\n }\n\n setActive(label: string): void {\n this.activeLabel$$.next(label);\n }\n\n ngOnDestroy(): void {\n this._destroyed$$.next();\n this._destroyed$$.complete();\n }\n}\n","\n","import {\n AfterContentInit,\n Component,\n ContentChildren,\n ElementRef,\n EventEmitter,\n OnDestroy,\n Output,\n QueryList,\n Renderer2,\n} from '@angular/core';\nimport { Subject } from 'rxjs';\nimport { takeUntil } from 'rxjs/operators';\nimport { TabComponent } from './tab.component';\n\n@Component({\n selector: 'atlas-tab-group',\n templateUrl: './tab-group.component.html',\n styleUrls: ['./tab-group.component.scss'],\n standalone: false,\n})\nexport class TabGroupComponent implements AfterContentInit, OnDestroy {\n @Output() selectionChange: EventEmitter<string> = new EventEmitter();\n @ContentChildren(TabComponent, { read: ElementRef }) _tabs: QueryList<ElementRef>;\n public tabLabels: string[] = [];\n private _destroyed$$: Subject<void> = new Subject();\n\n constructor(private renderer: Renderer2) {}\n\n ngAfterContentInit(): void {\n this.tabLabels = this._tabs.map((tab: ElementRef) => tab.nativeElement.attributes.getNamedItem('label').value);\n this._tabs.changes.pipe(takeUntil(this._destroyed$$)).subscribe((tabs: ElementRef[]) => {\n this.tabLabels = tabs.map((tab: ElementRef) => tab.nativeElement.attributes.getNamedItem('label').value);\n });\n }\n\n setSelectedTab(selectedTab: string): void {\n this._tabs.forEach((tab: ElementRef) => {\n this.renderer.setStyle(\n tab.nativeElement,\n 'display',\n 
tab.nativeElement.attributes.getNamedItem('label').value === selectedTab ? 'inherit' : 'none',\n );\n });\n this.selectionChange.emit(selectedTab);\n }\n\n ngOnDestroy(): void {\n this._destroyed$$.next();\n this._destroyed$$.complete();\n }\n}\n"," \n \n","/**\n * @license\n * Copyright Google LLC All Rights Reserved.\n *\n * Use of this source code is governed by an MIT-style license that can be\n * found in the LICENSE file at https://angular.io/license\n */\n/**\n * Forked code, allows for usage within our Angular elements without\n * dependency on @angular/material\n */\nimport { animate, AnimationTriggerMetadata, keyframes, state, style, transition, trigger } from '@angular/animations';\n\n/**\n * Animations used by MatTooltipDirective.\n * @docs-private\n */\nexport const matTooltipAnimations: {\n readonly tooltipState: AnimationTriggerMetadata;\n} = {\n /** Animation that transitions a tooltip in and out. */\n tooltipState: trigger('state', [\n state('initial, void, hidden', style({ opacity: 0, transform: 'scale(0)' })),\n state('visible', style({ transform: 'scale(1)' })),\n transition(\n '* => visible',\n animate(\n '200ms cubic-bezier(0, 0, 0.2, 1)',\n keyframes([\n style({ opacity: 0, transform: 'scale(0)', offset: 0 }),\n style({ opacity: 0.5, transform: 'scale(0.99)', offset: 0.5 }),\n style({ opacity: 1, transform: 'scale(1)', offset: 1 }),\n ]),\n ),\n ),\n transition('* => hidden', animate('100ms cubic-bezier(0, 0, 0.2, 1)', style({ opacity: 0 }))),\n ]),\n};\n","/**\n * @license\n * Copyright Google LLC All Rights Reserved.\n *\n * Use of this source code is governed by an MIT-style license that can be\n * found in the LICENSE file at https://angular.io/license\n */\n/**\n * Forked code, allows for usage within our Angular elements without\n * dependency on @angular/material\n */\nimport { AnimationEvent } from '@angular/animations';\nimport { AriaDescriber, FocusMonitor } from '@angular/cdk/a11y';\nimport { Directionality } from '@angular/cdk/bidi';\nimport { coerceBooleanProperty } from '@angular/cdk/coercion';\nimport { ESCAPE, hasModifierKey } from '@angular/cdk/keycodes';\nimport { BreakpointObserver, BreakpointState, Breakpoints } from '@angular/cdk/layout';\nimport {\n FlexibleConnectedPositionStrategy,\n HorizontalConnectionPos,\n OriginConnectionPosition,\n Overlay,\n OverlayConnectionPosition,\n OverlayRef,\n ScrollStrategy,\n VerticalConnectionPos,\n} from '@angular/cdk/overlay';\nimport { Platform, normalizePassiveListenerOptions } from '@angular/cdk/platform';\nimport { ComponentPortal } from '@angular/cdk/portal';\nimport { ScrollDispatcher } from '@angular/cdk/scrolling';\nimport {\n ChangeDetectionStrategy,\n ChangeDetectorRef,\n Component,\n Directive,\n ElementRef,\n HostBinding,\n Inject,\n InjectionToken,\n Input,\n NgZone,\n OnDestroy,\n OnInit,\n Optional,\n ViewContainerRef,\n ViewEncapsulation,\n} from '@angular/core';\nimport { Observable, Subject } from 'rxjs';\nimport { take, takeUntil } from 'rxjs/operators';\n\nimport { matTooltipAnimations } from './tooltip-animations';\n\n/** Possible positions for a tooltip. */\nexport type TooltipPosition = 'left' | 'right' | 'above' | 'below' | 'before' | 'after';\n\n/**\n * Options for how the tooltip trigger should handle touch gestures.\n * See `MatTooltipDirective.touchGestures` for more information.\n */\nexport type TooltipTouchGestures = 'auto' | 'on' | 'off';\n\n/** Possible visibility states of a tooltip. 
*/\nexport type TooltipVisibility = 'initial' | 'visible' | 'hidden';\n\n/** Time in ms to throttle repositioning after scroll events. */\nexport const SCROLL_THROTTLE_MS = 20;\n\n/** CSS class that will be attached to the overlay panel. */\nexport const TOOLTIP_PANEL_CLASS = 'atlas_navbar--tooltip-panel';\n\n/** Options used to bind passive event listeners. */\nconst passiveListenerOptions = normalizePassiveListenerOptions({ passive: true });\n\n/**\n * Time between the user putting the pointer on a tooltip\n * trigger and the long press event being fired.\n */\nconst LONGPRESS_DELAY = 500;\n\n/**\n * Creates an error to be thrown if the user supplied an invalid tooltip position.\n * @docs-private\n */\nexport function getMatTooltipInvalidPositionError(position: string): Error {\n return Error(`Tooltip position \"${position}\" is invalid.`);\n}\n\n/** Injection token that determines the scroll handling while a tooltip is visible. */\nexport const MAT_TOOLTIP_SCROLL_STRATEGY = new InjectionToken<() => ScrollStrategy>(\n 'atlas_navbar--tooltip-scroll-strategy',\n);\n\n/** @docs-private */\nexport function MAT_TOOLTIP_SCROLL_STRATEGY_FACTORY(overlay: Overlay): () => ScrollStrategy {\n return () => overlay.scrollStrategies.reposition({ scrollThrottle: SCROLL_THROTTLE_MS });\n}\n\n/** @docs-private */\nexport const MAT_TOOLTIP_SCROLL_STRATEGY_FACTORY_PROVIDER = {\n provide: MAT_TOOLTIP_SCROLL_STRATEGY,\n deps: [Overlay],\n useFactory: MAT_TOOLTIP_SCROLL_STRATEGY_FACTORY,\n};\n\n/** Default `atlasTooltip` options that can be overridden. */\nexport interface MatTooltipDefaultOptions {\n showDelay: number;\n hideDelay: number;\n touchendHideDelay: number;\n touchGestures?: TooltipTouchGestures;\n position?: TooltipPosition;\n}\n\n/** Injection token to be used to override the default options for `atlasTooltip`. */\nexport const MAT_TOOLTIP_DEFAULT_OPTIONS = new InjectionToken(\n 'atlas_navbar--tooltip-default-options',\n {\n providedIn: 'root',\n factory: MAT_TOOLTIP_DEFAULT_OPTIONS_FACTORY,\n },\n);\n\n/** @docs-private */\nexport function MAT_TOOLTIP_DEFAULT_OPTIONS_FACTORY(): MatTooltipDefaultOptions {\n return {\n showDelay: 0,\n hideDelay: 0,\n touchendHideDelay: 1500,\n };\n}\n\n/**\n * Directive that attaches a material design tooltip to the host element. Animates the showing and\n * hiding of a tooltip provided position (defaults to below the element).\n *\n * https://material.io/design/components/tooltips.html\n */\n@Directive({\n selector: '[atlasTooltip]',\n exportAs: 'atlasTooltip',\n standalone: false,\n})\nexport class MatTooltipDirective implements OnDestroy, OnInit {\n _overlayRef: OverlayRef | null;\n _tooltipInstance: TooltipComponent | null;\n\n private _portal: ComponentPortal;\n private _position: TooltipPosition = 'below';\n private _disabled = false;\n private _tooltipClass: string | string[] | Set | { [key: string]: any };\n private _scrollStrategy: () => ScrollStrategy;\n\n /** Allows the user to define the position of the tooltip relative to the parent element */\n @Input('atlasTooltipPosition')\n get position(): TooltipPosition {\n return this._position;\n }\n set position(value: TooltipPosition) {\n if (value !== this._position) {\n this._position = value;\n\n if (this._overlayRef) {\n this._updatePosition();\n\n if (this._tooltipInstance) {\n this._tooltipInstance.show(0);\n }\n\n this._overlayRef.updatePosition();\n }\n }\n }\n\n /** Disables the display of the tooltip. 
*/\n @Input('atlasTooltipDisabled')\n get disabled(): boolean {\n return this._disabled;\n }\n // tslint:disable-next-line: typedef\n set disabled(value) {\n this._disabled = coerceBooleanProperty(value);\n\n // If tooltip is disabled, hide immediately.\n if (this._disabled) {\n this.hide(0);\n }\n }\n\n /** The default delay in ms before showing the tooltip after show is called */\n // tslint:disable-next-line:no-input-rename\n @Input('atlasTooltipShowDelay') showDelay = this._defaultOptions.showDelay;\n\n /** The default delay in ms before hiding the tooltip after hide is called */\n //tslint:disable-next-line:no-input-rename\n @Input('atlasTooltipHideDelay') hideDelay = this._defaultOptions.hideDelay;\n\n /**\n * How touch gestures should be handled by the tooltip. On touch devices the tooltip directive\n * uses a long press gesture to show and hide, however it can conflict with the native browser\n * gestures. To work around the conflict, Angular Material disables native gestures on the\n * trigger, but that might not be desirable on particular elements (e.g. inputs and draggable\n * elements). The different values for this option configure the touch event handling as follows:\n * - `auto` - Enables touch gestures for all elements, but tries to avoid conflicts with native\n * browser gestures on particular elements. In particular, it allows text selection on inputs\n * and textareas, and preserves the native browser dragging on elements marked as `draggable`.\n * - `on` - Enables touch gestures for all elements and disables native\n * browser gestures with no exceptions.\n * - `off` - Disables touch gestures. Note that this will prevent the tooltip from\n * showing on touch devices.\n */\n //tslint:disable-next-line:no-input-rename\n @Input('atlasTooltipTouchGestures') touchGestures: TooltipTouchGestures = 'auto';\n\n /** The message to be displayed in the tooltip */\n @Input('atlasTooltip')\n get message(): string {\n return this._message;\n }\n set message(value: string) {\n this._ariaDescriber.removeDescription(this._elementRef.nativeElement, this._message);\n\n // If the message is not a string (e.g. number), convert it to a string and trim it.\n this._message = value != null ? `${value}`.trim() : '';\n\n if (!this._message && this._isTooltipVisible()) {\n this.hide(0);\n } else {\n this._updateTooltipMessage();\n this._ngZone.runOutsideAngular(() => {\n // The `AriaDescriber` has some functionality that avoids adding a description if it's the\n // same as the `aria-label` of an element, however we can't know whether the tooltip trigger\n // has a data-bound `aria-label` or when it'll be set for the first time. We can avoid the\n // issue by deferring the description by a tick so Angular has time to set the `aria-label`.\n Promise.resolve().then(() => {\n this._ariaDescriber.describe(this._elementRef.nativeElement, this.message);\n });\n });\n }\n }\n private _message = '';\n\n /** Classes to be passed to the tooltip. Supports the same syntax as `ngClass`. */\n @Input('atlasTooltipClass')\n get tooltipClass(): string | string[] | Set | { [key: string]: any } {\n return this._tooltipClass;\n }\n set tooltipClass(value: string | string[] | Set | { [key: string]: any }) {\n this._tooltipClass = value;\n if (this._tooltipInstance) {\n this._setTooltipClass(this._tooltipClass);\n }\n }\n\n /** Manually-bound passive event listeners. */\n private _passiveListeners = new Map();\n\n /** Timer started at the last `touchstart` event. 
*/\n private _touchstartTimeout = 0;\n\n /** Emits when the component is destroyed. */\n private readonly _destroyed = new Subject();\n\n constructor(\n private _overlay: Overlay,\n private _elementRef: ElementRef,\n private _scrollDispatcher: ScrollDispatcher,\n private _viewContainerRef: ViewContainerRef,\n private _ngZone: NgZone,\n private _platform: Platform,\n private _ariaDescriber: AriaDescriber,\n private _focusMonitor: FocusMonitor,\n @Inject(MAT_TOOLTIP_SCROLL_STRATEGY) scrollStrategy: any,\n @Optional() private _dir: Directionality,\n @Optional()\n @Inject(MAT_TOOLTIP_DEFAULT_OPTIONS)\n private _defaultOptions: MatTooltipDefaultOptions,\n /**\n * @deprecated _hammerLoader parameter to be removed.\n * @breaking-change 9.0.0\n */\n // Note that we need to give Angular something to inject here so it doesn't throw.\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n @Inject(ElementRef) _hammerLoader?: any,\n ) {\n this._scrollStrategy = scrollStrategy;\n\n if (_defaultOptions) {\n if (_defaultOptions.position) {\n this.position = _defaultOptions.position;\n }\n\n if (_defaultOptions.touchGestures) {\n this.touchGestures = _defaultOptions.touchGestures;\n }\n }\n\n _focusMonitor\n .monitor(_elementRef)\n .pipe(takeUntil(this._destroyed))\n .subscribe((origin) => {\n // Note that the focus monitor runs outside the Angular zone.\n if (!origin) {\n _ngZone.run(() => this.hide(0));\n } else if (origin === 'keyboard') {\n _ngZone.run(() => this.show());\n }\n });\n\n _ngZone.runOutsideAngular(() => {\n _elementRef.nativeElement.addEventListener('keydown', this._handleKeydown);\n });\n }\n\n /**\n * Setup styling-specific things\n */\n ngOnInit(): void {\n // This needs to happen in `ngOnInit` so the initial values for all inputs have been set.\n this._setupPointerEvents();\n }\n\n /**\n * Dispose the tooltip when destroyed.\n */\n ngOnDestroy(): void {\n const nativeElement = this._elementRef.nativeElement;\n\n window.clearTimeout(this._touchstartTimeout);\n\n if (this._overlayRef) {\n this._overlayRef.dispose();\n this._tooltipInstance = null;\n }\n\n // Clean up the event listeners set in the constructor\n nativeElement.removeEventListener('keydown', this._handleKeydown);\n this._passiveListeners.forEach((listener, event) => {\n nativeElement.removeEventListener(event, listener, passiveListenerOptions);\n });\n this._passiveListeners.clear();\n\n this._destroyed.next();\n this._destroyed.complete();\n\n this._ariaDescriber.removeDescription(nativeElement, this.message);\n this._focusMonitor.stopMonitoring(nativeElement);\n }\n\n /** Shows the tooltip after the delay in ms, defaults to tooltip-delay-show or 0ms if no input */\n show(delay: number = this.showDelay): void {\n if (\n this.disabled ||\n !this.message ||\n (this._isTooltipVisible() &&\n !!this._tooltipInstance &&\n !this._tooltipInstance._showTimeoutId &&\n !this._tooltipInstance._hideTimeoutId)\n ) {\n return;\n }\n\n const overlayRef = this._createOverlay();\n\n this._detach();\n this._portal = this._portal || new ComponentPortal(TooltipComponent, this._viewContainerRef);\n this._tooltipInstance = overlayRef.attach(this._portal).instance;\n this._tooltipInstance\n .afterHidden()\n .pipe(takeUntil(this._destroyed))\n .subscribe(() => this._detach());\n this._setTooltipClass(this._tooltipClass);\n this._updateTooltipMessage();\n this._tooltipInstance.show(delay);\n }\n\n /** Hides the tooltip after the delay in ms, defaults to tooltip-delay-hide or 0ms if no input */\n hide(delay: number = this.hideDelay): void 
{\n if (this._tooltipInstance) {\n this._tooltipInstance.hide(delay);\n }\n }\n\n /** Shows/hides the tooltip */\n toggle(): void {\n this._isTooltipVisible() ? this.hide() : this.show();\n }\n\n /** Returns true if the tooltip is currently visible to the user */\n _isTooltipVisible(): boolean {\n return !!this._tooltipInstance && this._tooltipInstance.isVisible();\n }\n\n /** Handles the keydown events on the host element. */\n private _handleKeydown = (e: KeyboardEvent) => {\n if (this._isTooltipVisible() && e.keyCode === ESCAPE && !hasModifierKey(e)) {\n e.preventDefault();\n e.stopPropagation();\n this._ngZone.run(() => this.hide(0));\n }\n };\n\n /** Create the overlay config and position strategy */\n private _createOverlay(): OverlayRef {\n if (this._overlayRef) {\n return this._overlayRef;\n }\n\n const scrollableAncestors = this._scrollDispatcher.getAncestorScrollContainers(this._elementRef);\n\n // Create connected position strategy that listens for scroll events to reposition.\n const strategy = this._overlay\n .position()\n .flexibleConnectedTo(this._elementRef)\n .withTransformOriginOn('.atlas_navbar--tooltip')\n .withFlexibleDimensions(false)\n .withViewportMargin(8)\n .withScrollableContainers(scrollableAncestors);\n\n strategy.positionChanges.pipe(takeUntil(this._destroyed)).subscribe((change) => {\n if (this._tooltipInstance) {\n if (change.scrollableViewProperties.isOverlayClipped && this._tooltipInstance.isVisible()) {\n // After position changes occur and the overlay is clipped by\n // a parent scrollable then close the tooltip.\n this._ngZone.run(() => this.hide(0));\n }\n }\n });\n\n this._overlayRef = this._overlay.create({\n direction: this._dir,\n positionStrategy: strategy,\n panelClass: TOOLTIP_PANEL_CLASS,\n scrollStrategy: this._scrollStrategy(),\n });\n\n this._updatePosition();\n\n this._overlayRef\n .detachments()\n .pipe(takeUntil(this._destroyed))\n .subscribe(() => this._detach());\n\n return this._overlayRef;\n }\n\n /** Detaches the currently-attached tooltip. */\n private _detach(): void {\n if (this._overlayRef && this._overlayRef.hasAttached()) {\n this._overlayRef.detach();\n }\n\n this._tooltipInstance = null;\n }\n\n /** Updates the position of the current tooltip. */\n private _updatePosition(): void {\n if (!this._overlayRef) {\n return;\n }\n const position = this._overlayRef.getConfig().positionStrategy as FlexibleConnectedPositionStrategy;\n const origin = this._getOrigin();\n const overlay = this._getOverlayPosition();\n\n position.withPositions([\n { ...origin.main, ...overlay.main },\n { ...origin.fallback, ...overlay.fallback },\n ]);\n }\n\n /**\n * Returns the origin position and a fallback position based on the user's position preference.\n * The fallback position is the inverse of the origin (e.g. `'below' -> 'above'`).\n */\n _getOrigin(): { main: OriginConnectionPosition; fallback: OriginConnectionPosition } {\n const isLtr = !this._dir || this._dir.value === 'ltr';\n const position = this.position;\n let originPosition: OriginConnectionPosition;\n\n if (position === 'above' || position === 'below') {\n originPosition = { originX: 'center', originY: position === 'above' ? 
'top' : 'bottom' };\n } else if (position === 'before' || (position === 'left' && isLtr) || (position === 'right' && !isLtr)) {\n originPosition = { originX: 'start', originY: 'center' };\n } else if (position === 'after' || (position === 'right' && isLtr) || (position === 'left' && !isLtr)) {\n originPosition = { originX: 'end', originY: 'center' };\n } else {\n throw getMatTooltipInvalidPositionError(position);\n }\n\n const { x, y } = this._invertPosition(originPosition.originX, originPosition.originY);\n\n return {\n main: originPosition,\n fallback: { originX: x, originY: y },\n };\n }\n\n /** Returns the overlay position and a fallback position based on the user's preference */\n _getOverlayPosition(): { main: OverlayConnectionPosition; fallback: OverlayConnectionPosition } {\n const isLtr = !this._dir || this._dir.value === 'ltr';\n const position = this.position;\n let overlayPosition: OverlayConnectionPosition;\n\n if (position === 'above') {\n overlayPosition = { overlayX: 'center', overlayY: 'bottom' };\n } else if (position === 'below') {\n overlayPosition = { overlayX: 'center', overlayY: 'top' };\n } else if (position === 'before' || (position === 'left' && isLtr) || (position === 'right' && !isLtr)) {\n overlayPosition = { overlayX: 'end', overlayY: 'center' };\n } else if (position === 'after' || (position === 'right' && isLtr) || (position === 'left' && !isLtr)) {\n overlayPosition = { overlayX: 'start', overlayY: 'center' };\n } else {\n throw getMatTooltipInvalidPositionError(position);\n }\n\n const { x, y } = this._invertPosition(overlayPosition.overlayX, overlayPosition.overlayY);\n\n return {\n main: overlayPosition,\n fallback: { overlayX: x, overlayY: y },\n };\n }\n\n /** Updates the tooltip message and repositions the overlay according to the new message length */\n private _updateTooltipMessage(): void {\n // Must wait for the message to be painted to the tooltip so that the overlay can properly\n // calculate the correct positioning based on the size of the text.\n if (this._tooltipInstance) {\n this._tooltipInstance.message = this.message;\n this._tooltipInstance._markForCheck();\n\n this._ngZone.onMicrotaskEmpty\n .asObservable()\n .pipe(take(1), takeUntil(this._destroyed))\n .subscribe(() => {\n if (this._tooltipInstance) {\n if (this._overlayRef) {\n this._overlayRef.updatePosition();\n }\n }\n });\n }\n }\n\n /** Updates the tooltip class */\n private _setTooltipClass(tooltipClass: string | string[] | Set | { [key: string]: any }): void {\n if (this._tooltipInstance) {\n this._tooltipInstance.tooltipClass = tooltipClass;\n this._tooltipInstance._markForCheck();\n }\n }\n\n /** Inverts an overlay position. */\n private _invertPosition(x: HorizontalConnectionPos, y: VerticalConnectionPos): any {\n if (this.position === 'above' || this.position === 'below') {\n if (y === 'top') {\n y = 'bottom';\n } else if (y === 'bottom') {\n y = 'top';\n }\n } else {\n if (x === 'end') {\n x = 'start';\n } else if (x === 'start') {\n x = 'end';\n }\n }\n\n return { x, y };\n }\n\n /** Binds the pointer events to the tooltip trigger. 
*/\n private _setupPointerEvents(): void {\n // The mouse events shouldn't be bound on mobile devices, because they can prevent the\n // first tap from firing its click event or can cause the tooltip to open for clicks.\n if (!this._platform.IOS && !this._platform.ANDROID) {\n this._passiveListeners.set('mouseenter', () => this.show()).set('mouseleave', () => this.hide());\n } else if (this.touchGestures !== 'off') {\n this._disableNativeGesturesIfNecessary();\n const touchendListener = () => {\n window.clearTimeout(this._touchstartTimeout);\n this.hide(this._defaultOptions.touchendHideDelay);\n };\n\n this._passiveListeners\n .set('touchend', touchendListener)\n .set('touchcancel', touchendListener)\n .set('touchstart', () => {\n // Note that it's important that we don't `preventDefault` here,\n // because it can prevent click events from firing on the element.\n window.clearTimeout(this._touchstartTimeout);\n this._touchstartTimeout = window.setTimeout(() => this.show(), LONGPRESS_DELAY);\n });\n }\n\n this._passiveListeners.forEach((listener, event) => {\n this._elementRef.nativeElement.addEventListener(event, listener, passiveListenerOptions);\n });\n }\n\n /** Disables the native browser gestures, based on how the tooltip has been configured. */\n private _disableNativeGesturesIfNecessary(): void {\n const element = this._elementRef.nativeElement;\n const style = element.style;\n const gestures = this.touchGestures;\n\n if (gestures !== 'off') {\n // If gestures are set to `auto`, we don't disable text selection on inputs and\n // textareas, because it prevents the user from typing into them on iOS Safari.\n if (gestures === 'on' || (element.nodeName !== 'INPUT' && element.nodeName !== 'TEXTAREA')) {\n style.userSelect = style.webkitUserSelect = (style as any).MozUserSelect = 'none';\n }\n\n // If we have `auto` gestures and the element uses native HTML dragging,\n // we don't set `-webkit-user-drag` because it prevents the native behavior.\n if (gestures === 'on' || !element.draggable) {\n (style as any).webkitUserDrag = 'none';\n }\n\n style.touchAction = 'none';\n }\n }\n}\n\n/**\n * Internal component that wraps the tooltip's content.\n * @docs-private\n */\n@Component({\n selector: 'atlas-tooltip-component',\n templateUrl: 'tooltip.html',\n styleUrls: ['tooltip.scss'],\n encapsulation: ViewEncapsulation.None,\n changeDetection: ChangeDetectionStrategy.OnPush,\n animations: [matTooltipAnimations.tooltipState],\n standalone: false,\n})\nexport class TooltipComponent implements OnDestroy {\n @HostBinding('[style.zoom]') _zoom: number | null = null;\n @HostBinding('(body:click)') _bodyClickEvent;\n @HostBinding('aria-hidden') _ariaHidden = 'true';\n /** Message to display in the tooltip */\n message: string;\n\n /** Classes to be added to the tooltip. Supports the same syntax as `ngClass`. */\n tooltipClass: string | string[] | Set | { [key: string]: any };\n\n /** The timeout ID of any current timer set to show the tooltip */\n _showTimeoutId = 0;\n\n /** The timeout ID of any current timer set to hide the tooltip */\n _hideTimeoutId = 0;\n\n /** Property watched by the animation framework to show or hide the tooltip */\n _visibility: TooltipVisibility = 'initial';\n\n /** Whether interactions on the page should close the tooltip */\n private _closeOnInteraction = false;\n\n /** Subject for notifying that the tooltip has been hidden from the view */\n private readonly _onHide: Subject = new Subject();\n\n /** Stream that emits whether the user has a handset-sized display. 
*/\n _isHandset: Observable