From 195b7224a44456b654ae4c6468913b19c10bb321 Mon Sep 17 00:00:00 2001 From: Abram Sanderson Date: Thu, 12 Dec 2024 15:31:48 -0800 Subject: [PATCH 1/6] Rewrite ViewSchema.checkCompatibility to leverage discrepancies (#23192) ## Description Updates `ViewSchema.checkCompatibility` to leverage the `getFieldDiscrepancies` codepath. This also puts in place infrastructure to allow viewing the document when the only differences between view and stored schema are extra optional fields in the stored schema on nodes that the view schema author has declared are OK. --------- Co-authored-by: Abram Sanderson --- .../tree-cli-app/src/test/legacy/index.ts | 9 + .../apps/tree-cli-app/src/test/legacy/v1.ts | 13 + .../apps/tree-cli-app/src/test/schema.spec.ts | 98 +++-- .../dds/tree/api-report/tree.alpha.api.md | 2 +- packages/dds/tree/src/core/index.ts | 1 - .../dds/tree/src/core/schema-stored/schema.ts | 10 + .../dds/tree/src/core/schema-view/index.ts | 1 - .../dds/tree/src/core/schema-view/view.ts | 11 - .../default-schema/defaultSchema.ts | 1 + .../default-schema/schemaChecker.ts | 5 +- .../dds/tree/src/feature-libraries/index.ts | 13 + .../modular-schema/discrepancies.ts | 114 ++++-- .../feature-libraries/modular-schema/index.ts | 13 + .../tree/src/shared-tree/schematizeTree.ts | 16 +- .../src/shared-tree/schematizingTreeView.ts | 32 +- .../dds/tree/src/simple-tree/api/create.ts | 6 +- .../dds/tree/src/simple-tree/api/index.ts | 6 +- .../tree/src/simple-tree/api/schemaFactory.ts | 63 ++- .../tree/src/simple-tree/api/storedSchema.ts | 29 +- packages/dds/tree/src/simple-tree/api/tree.ts | 5 + packages/dds/tree/src/simple-tree/api/view.ts | 248 +++++++++--- packages/dds/tree/src/simple-tree/index.ts | 2 + .../dds/tree/src/simple-tree/objectNode.ts | 28 +- .../tree/src/simple-tree/objectNodeTypes.ts | 5 + .../tree/src/simple-tree/toStoredSchema.ts | 44 ++- .../default-schema/schemaChecker.spec.ts | 2 + .../modular-schema/discrepancies.spec.ts | 26 +- .../schemaEvolutionExamples.spec.ts | 59 +-- .../dds/tree/src/test/schemaFactoryAlpha.ts | 90 +++++ .../test/shared-tree/schematizeTree.spec.ts | 66 ++-- .../shared-tree/schematizingTreeView.spec.ts | 137 ++++++- .../test/simple-tree/api/storedSchema.spec.ts | 3 +- .../src/test/simple-tree/api/treeApi.spec.ts | 84 ++++ .../src/test/simple-tree/schemaTypes.spec.ts | 2 +- .../src/test/simple-tree/toMapTree.spec.ts | 17 + .../src/test/simple-tree/viewSchema.spec.ts | 369 ++++++++++++++++++ .../api-report/fluid-framework.alpha.api.md | 2 +- 37 files changed, 1321 insertions(+), 311 deletions(-) create mode 100644 examples/apps/tree-cli-app/src/test/legacy/index.ts create mode 100644 examples/apps/tree-cli-app/src/test/legacy/v1.ts create mode 100644 packages/dds/tree/src/test/schemaFactoryAlpha.ts create mode 100644 packages/dds/tree/src/test/simple-tree/viewSchema.spec.ts diff --git a/examples/apps/tree-cli-app/src/test/legacy/index.ts b/examples/apps/tree-cli-app/src/test/legacy/index.ts new file mode 100644 index 000000000000..32fc95b7ee8b --- /dev/null +++ b/examples/apps/tree-cli-app/src/test/legacy/index.ts @@ -0,0 +1,9 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +// Using 'export *' in this file increases readability of relevant tests by grouping schema according to their versions. +// The usual concerns of export * around bundling do not apply. 
+/* eslint-disable no-restricted-syntax */ +export * as v1 from "./v1.js"; diff --git a/examples/apps/tree-cli-app/src/test/legacy/v1.ts b/examples/apps/tree-cli-app/src/test/legacy/v1.ts new file mode 100644 index 000000000000..a737f5303a87 --- /dev/null +++ b/examples/apps/tree-cli-app/src/test/legacy/v1.ts @@ -0,0 +1,13 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { SchemaFactory } from "@fluidframework/tree"; + +const schemaBuilder = new SchemaFactory("com.fluidframework.example.cli"); + +/** + * List node. + */ +export class List extends schemaBuilder.array("List", [schemaBuilder.string]) {} diff --git a/examples/apps/tree-cli-app/src/test/schema.spec.ts b/examples/apps/tree-cli-app/src/test/schema.spec.ts index c31245d6b653..22c63761a347 100644 --- a/examples/apps/tree-cli-app/src/test/schema.spec.ts +++ b/examples/apps/tree-cli-app/src/test/schema.spec.ts @@ -11,55 +11,16 @@ import { typeboxValidator, type ForestOptions, type ICodecOptions, + type ImplicitFieldSchema, type JsonCompatible, // eslint-disable-next-line import/no-internal-modules } from "@fluidframework/tree/alpha"; import { List } from "../schema.js"; -// This file demonstrates how applications can write tests which ensure they maintain compatibility with the schema from previously released versions. - -describe("schema", () => { - it("current schema matches latest historical schema", () => { - const current = extractPersistedSchema(List); - - // For compatibility with deep equality and simple objects, round trip via JSON to erase prototypes. - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - const currentRoundTripped: JsonCompatible = JSON.parse(JSON.stringify(current)); - - const previous = historicalSchema.at(-1); - assert(previous !== undefined); - // This ensures that historicalSchema's last entry is up to date with the current application code. - // This can catch: - // 1. Forgetting to update historicalSchema when intentionally making schema changes. - // 2. Accidentally changing schema in a way that impacts document compatibility. - assert.deepEqual(currentRoundTripped, previous.schema); - }); - - it("historical schema can be upgraded to current schema", () => { - const options: ForestOptions & ICodecOptions = { jsonValidator: typeboxValidator }; - - for (let documentIndex = 0; documentIndex < historicalSchema.length; documentIndex++) { - for (let viewIndex = 0; viewIndex < historicalSchema.length; viewIndex++) { - const compat = comparePersistedSchema( - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - historicalSchema[documentIndex]!.schema, - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - historicalSchema[viewIndex]!.schema, - options, - false, - ); +import { v1 } from "./legacy/index.js"; - // We do not expect duplicates in historicalSchema. - assert.equal(compat.isEquivalent, documentIndex === viewIndex); - // Currently collaboration is only allowed between identical versions - assert.equal(compat.canView, documentIndex === viewIndex); - // Older versions should be upgradable to newer versions, but not the reverse. - assert.equal(compat.canUpgrade, documentIndex <= viewIndex); - } - } - }); -}); +// This file demonstrates how applications can write tests which ensure they maintain compatibility with the schema from previously released versions. /** * List of schema from previous versions of this application. 
@@ -67,7 +28,11 @@ describe("schema", () => { * * The `schema` field is generated by passing the schema to `extractPersistedSchema`. */ -const historicalSchema: { version: string; schema: JsonCompatible }[] = [ +const historicalSchema: { + version: string; + schema: JsonCompatible; + viewSchema: ImplicitFieldSchema; +}[] = [ { version: "1.0", schema: { @@ -90,6 +55,7 @@ const historicalSchema: { version: string; schema: JsonCompatible }[] = [ types: ["com.fluidframework.example.cli.List"], }, }, + viewSchema: v1.List, }, { version: "2.0", @@ -140,5 +106,51 @@ const historicalSchema: { version: string; schema: JsonCompatible }[] = [ types: ["com.fluidframework.example.cli.List"], }, }, + viewSchema: List, }, ]; + +describe("schema", () => { + it("current schema matches latest historical schema", () => { + const current = extractPersistedSchema(List); + + // For compatibility with deep equality and simple objects, round trip via JSON to erase prototypes. + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + const currentRoundTripped: JsonCompatible = JSON.parse(JSON.stringify(current)); + + const previous = historicalSchema.at(-1); + assert(previous !== undefined); + // This ensures that historicalSchema's last entry is up to date with the current application code. + // This can catch: + // 1. Forgetting to update historicalSchema when intentionally making schema changes. + // 2. Accidentally changing schema in a way that impacts document compatibility. + assert.deepEqual(currentRoundTripped, previous.schema); + }); + + describe("historical schema can be upgraded to current schema", () => { + const options: ForestOptions & ICodecOptions = { jsonValidator: typeboxValidator }; + + for (let documentIndex = 0; documentIndex < historicalSchema.length; documentIndex++) { + for (let viewIndex = 0; viewIndex < historicalSchema.length; viewIndex++) { + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + it(`document ${historicalSchema[documentIndex]!.version} vs view version ${historicalSchema[viewIndex]!.version}`, () => { + const compat = comparePersistedSchema( + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + historicalSchema[documentIndex]!.schema, + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + historicalSchema[viewIndex]!.viewSchema, + options, + false, + ); + + // We do not expect duplicates in historicalSchema. + assert.equal(compat.isEquivalent, documentIndex === viewIndex); + // Currently collaboration is only allowed between identical versions + assert.equal(compat.canView, documentIndex === viewIndex); + // Older versions should be upgradable to newer versions, but not the reverse. 
+ assert.equal(compat.canUpgrade, documentIndex <= viewIndex); + }); + } + } + }); +}); diff --git a/packages/dds/tree/api-report/tree.alpha.api.md b/packages/dds/tree/api-report/tree.alpha.api.md index 128b3b3bb851..21483a31fa1c 100644 --- a/packages/dds/tree/api-report/tree.alpha.api.md +++ b/packages/dds/tree/api-report/tree.alpha.api.md @@ -58,7 +58,7 @@ export interface CommitMetadata { } // @alpha -export function comparePersistedSchema(persisted: JsonCompatible, view: JsonCompatible, options: ICodecOptions, canInitialize: boolean): SchemaCompatibilityStatus; +export function comparePersistedSchema(persisted: JsonCompatible, view: ImplicitFieldSchema, options: ICodecOptions, canInitialize: boolean): SchemaCompatibilityStatus; // @alpha export type ConciseTree = Exclude | THandle | ConciseTree[] | { diff --git a/packages/dds/tree/src/core/index.ts b/packages/dds/tree/src/core/index.ts index a14043ce84d0..496e19aed616 100644 --- a/packages/dds/tree/src/core/index.ts +++ b/packages/dds/tree/src/core/index.ts @@ -207,7 +207,6 @@ export { export { type Adapters, AdaptedViewSchema, - Compatibility, type TreeAdapter, AllowedUpdateType, } from "./schema-view/index.js"; diff --git a/packages/dds/tree/src/core/schema-stored/schema.ts b/packages/dds/tree/src/core/schema-stored/schema.ts index 4e5acb0c565c..b81df693af59 100644 --- a/packages/dds/tree/src/core/schema-stored/schema.ts +++ b/packages/dds/tree/src/core/schema-stored/schema.ts @@ -92,6 +92,16 @@ export interface SchemaPolicy { * If true, new content inserted into the tree should be validated against the stored schema. */ readonly validateSchema: boolean; + + /** + * Whether to allow a document to be opened when a particular stored schema (identified by `identifier`) + * contains optional fields that are not known to the view schema. + * + * @privateRemarks + * Plumbing this in via `SchemaPolicy` avoids needing to walk the view schema representation repeatedly in places + * that need it (schema validation, view vs stored compatibility checks). + */ + allowUnknownOptionalFields(identifier: TreeNodeSchemaIdentifier): boolean; } /** diff --git a/packages/dds/tree/src/core/schema-view/index.ts b/packages/dds/tree/src/core/schema-view/index.ts index bab4be023d2a..22092eef4b96 100644 --- a/packages/dds/tree/src/core/schema-view/index.ts +++ b/packages/dds/tree/src/core/schema-view/index.ts @@ -5,7 +5,6 @@ export { type Adapters, - Compatibility, type TreeAdapter, AdaptedViewSchema, AllowedUpdateType, diff --git a/packages/dds/tree/src/core/schema-view/view.ts b/packages/dds/tree/src/core/schema-view/view.ts index f15dc2b3778e..7bc56008cb49 100644 --- a/packages/dds/tree/src/core/schema-view/view.ts +++ b/packages/dds/tree/src/core/schema-view/view.ts @@ -9,17 +9,6 @@ import type { TreeNodeSchemaIdentifier, TreeStoredSchema } from "../schema-store * APIs for applying `view schema` to documents. */ -/** - * How compatible a particular view schema is for some operation on some specific document. - */ -export enum Compatibility { - Incompatible, - // For write compatibility this can include compatible schema updates to stored schema. - // TODO: separate schema updates from adapters. - // RequiresAdapters, - Compatible, -} - /** * What kinds of updates to stored schema to permit. 
* diff --git a/packages/dds/tree/src/feature-libraries/default-schema/defaultSchema.ts b/packages/dds/tree/src/feature-libraries/default-schema/defaultSchema.ts index fb849d40ca6e..8da2bbd8a8d2 100644 --- a/packages/dds/tree/src/feature-libraries/default-schema/defaultSchema.ts +++ b/packages/dds/tree/src/feature-libraries/default-schema/defaultSchema.ts @@ -13,4 +13,5 @@ import { fieldKinds } from "./defaultFieldKinds.js"; export const defaultSchemaPolicy: FullSchemaPolicy = { fieldKinds, validateSchema: false, + allowUnknownOptionalFields: () => false, }; diff --git a/packages/dds/tree/src/feature-libraries/default-schema/schemaChecker.ts b/packages/dds/tree/src/feature-libraries/default-schema/schemaChecker.ts index 6760c112b8ba..a47b18fa6e6a 100644 --- a/packages/dds/tree/src/feature-libraries/default-schema/schemaChecker.ts +++ b/packages/dds/tree/src/feature-libraries/default-schema/schemaChecker.ts @@ -73,7 +73,10 @@ export function isNodeInSchema( uncheckedFieldsFromNode.delete(fieldKey); } // The node has fields that we did not check as part of looking at every field defined in the node's schema - if (uncheckedFieldsFromNode.size !== 0) { + if ( + uncheckedFieldsFromNode.size !== 0 && + !schemaAndPolicy.policy.allowUnknownOptionalFields(node.type) + ) { return SchemaValidationErrors.ObjectNode_FieldNotInSchema; } } else if (schema instanceof MapNodeStoredSchema) { diff --git a/packages/dds/tree/src/feature-libraries/index.ts b/packages/dds/tree/src/feature-libraries/index.ts index cc3f3a0ae708..d3055fb3214d 100644 --- a/packages/dds/tree/src/feature-libraries/index.ts +++ b/packages/dds/tree/src/feature-libraries/index.ts @@ -76,7 +76,20 @@ export { type FieldKindConfigurationEntry, getAllowedContentDiscrepancies, isRepoSuperset, + type AllowedTypeDiscrepancy, + type FieldKindDiscrepancy, + type ValueSchemaDiscrepancy, + type FieldDiscrepancy, + type NodeDiscrepancy, + type NodeKindDiscrepancy, + type NodeFieldsDiscrepancy, isNeverTree, + type LinearExtension, + type Realizer, + fieldRealizer, + PosetComparisonResult, + comparePosetElements, + posetLte, } from "./modular-schema/index.js"; export { mapRootChanges } from "./deltaUtils.js"; diff --git a/packages/dds/tree/src/feature-libraries/modular-schema/discrepancies.ts b/packages/dds/tree/src/feature-libraries/modular-schema/discrepancies.ts index 43213c9193f9..ccbe4dc86d83 100644 --- a/packages/dds/tree/src/feature-libraries/modular-schema/discrepancies.ts +++ b/packages/dds/tree/src/feature-libraries/modular-schema/discrepancies.ts @@ -6,6 +6,7 @@ import { assert } from "@fluidframework/core-utils/internal"; import { + type FieldKey, type FieldKindIdentifier, LeafNodeStoredSchema, MapNodeStoredSchema, @@ -25,6 +26,7 @@ import { brand } from "../../util/index.js"; // Rather than both existing, one of which just returns boolean and the other which returns additional details, a simple comparison which returns everything needed should be used. /** + * Discriminated union (keyed on `mismatch`) of discrepancies between a view and stored schema. * @remarks * * 1. FieldDiscrepancy @@ -60,47 +62,73 @@ export type Discrepancy = FieldDiscrepancy | NodeDiscrepancy; export type NodeDiscrepancy = NodeKindDiscrepancy | NodeFieldsDiscrepancy; +/** + * A discrepancy in the declaration of a field. 
+ */ export type FieldDiscrepancy = | AllowedTypeDiscrepancy | FieldKindDiscrepancy | ValueSchemaDiscrepancy; -export interface AllowedTypeDiscrepancy { - identifier: string | undefined; // undefined indicates root field schema +/** + * Information about where a field discrepancy is located within a collection of schema. + */ +export interface FieldDiscrepancyLocation { + /** + * The {@link TreeNodeSchemaIdentifier} that contains the discrepancy. + * + * Undefined iff the discrepancy is part of the root field schema. + */ + identifier: TreeNodeSchemaIdentifier | undefined; + /** + * The {@link FieldKey} for the field that contains the discrepancy. + * Undefined when: + * - the discrepancy is part of the root field schema + * - the discrepancy is for 'all fields' of a map node + */ + fieldKey: FieldKey | undefined; +} + +/** + * A discrepancy in the allowed types of a field. + * + * @remarks + * This reports the symmetric difference of allowed types in view/stored to enable more efficient checks for compatibility + */ +export interface AllowedTypeDiscrepancy extends FieldDiscrepancyLocation { mismatch: "allowedTypes"; /** - * List of allowed type identifiers in viewed schema + * List of allowed type identifiers in viewed schema which are not allowed in stored schema */ - view: string[]; + view: TreeNodeSchemaIdentifier[]; /** - * List of allowed type identifiers in stored schema + * List of allowed type identifiers in stored schema which are not allowed in view schema */ - stored: string[]; + stored: TreeNodeSchemaIdentifier[]; } -export interface FieldKindDiscrepancy { - identifier: string | undefined; // undefined indicates root field schema +export interface FieldKindDiscrepancy extends FieldDiscrepancyLocation { mismatch: "fieldKind"; view: FieldKindIdentifier; stored: FieldKindIdentifier; } export interface ValueSchemaDiscrepancy { - identifier: string; + identifier: TreeNodeSchemaIdentifier; mismatch: "valueSchema"; view: ValueSchema | undefined; stored: ValueSchema | undefined; } export interface NodeKindDiscrepancy { - identifier: string; + identifier: TreeNodeSchemaIdentifier; mismatch: "nodeKind"; view: SchemaFactoryNodeKind | undefined; stored: SchemaFactoryNodeKind | undefined; } export interface NodeFieldsDiscrepancy { - identifier: string; + identifier: TreeNodeSchemaIdentifier; mismatch: "fields"; differences: FieldDiscrepancy[]; } @@ -121,26 +149,25 @@ function getNodeSchemaType(nodeSchema: TreeNodeStoredSchema): SchemaFactoryNodeK /** * Finds and reports discrepancies between a view schema and a stored schema. * - * The workflow for finding schema incompatibilities: - * 1. Compare the two root schemas to identify any `FieldDiscrepancy`. - * - * 2. For each node schema in the `view`: - * - Verify if the node schema exists in the stored. If it does, ensure that the `SchemaFactoryNodeKind` are - * consistent. Otherwise this difference is treated as `NodeKindDiscrepancy` - * - If a node schema with the same identifier exists in both view and stored, and their `SchemaFactoryNodeKind` - * are consistent, perform a exhaustive validation to identify all `FieldDiscrepancy`. - * - * 3. For each node schema in the stored, verify if it exists in the view. The overlapping parts were already - * addressed in the previous step. + * See documentation on {@link Discrepancy} for details of possible discrepancies. + * @remarks + * This function does not attempt to distinguish between equivalent representations of a node/field involving extraneous never trees. 
+ * For example, a Forbidden field with allowed type set `[]` is equivalent to an optional field with allowed type set `[]`, + * as well as an optional field with an allowed type set containing only unconstructable types. * - * @returns the discrepancies between two TreeStoredSchema objects + * It is up to the caller to determine whether such discrepancies matter. */ export function* getAllowedContentDiscrepancies( view: TreeStoredSchema, stored: TreeStoredSchema, ): Iterable { // check root schema discrepancies - yield* getFieldDiscrepancies(view.rootFieldSchema, stored.rootFieldSchema); + yield* getFieldDiscrepancies( + view.rootFieldSchema, + stored.rootFieldSchema, + undefined, + undefined, + ); for (const result of compareMaps(view.nodeSchema, stored.nodeSchema)) { switch (result.type) { @@ -195,6 +222,7 @@ function* getNodeDiscrepancies( case "object": { const differences = Array.from( trackObjectNodeDiscrepancies( + identifier, view as ObjectNodeStoredSchema, stored as ObjectNodeStoredSchema, ), @@ -213,6 +241,7 @@ function* getNodeDiscrepancies( (view as MapNodeStoredSchema).mapFields, (stored as MapNodeStoredSchema).mapFields, identifier, + undefined, ); break; case "leaf": { @@ -241,7 +270,8 @@ function* getNodeDiscrepancies( function* getFieldDiscrepancies( view: TreeFieldStoredSchema, stored: TreeFieldStoredSchema, - keyOrRoot?: string, + identifier: TreeNodeSchemaIdentifier | undefined, + fieldKey: FieldKey | undefined, ): Iterable { // Only track the symmetric differences of two sets. const findSetDiscrepancies = ( @@ -256,7 +286,8 @@ function* getFieldDiscrepancies( const [viewExtra, storedExtra] = findSetDiscrepancies(view.types, stored.types); if (viewExtra.length > 0 || storedExtra.length > 0) { yield { - identifier: keyOrRoot, + identifier, + fieldKey, mismatch: "allowedTypes", view: viewExtra, stored: storedExtra, @@ -265,7 +296,8 @@ function* getFieldDiscrepancies( if (view.kind !== stored.kind) { yield { - identifier: keyOrRoot, + identifier, + fieldKey, mismatch: "fieldKind", view: view.kind, stored: stored.kind, @@ -274,6 +306,7 @@ function* getFieldDiscrepancies( } function* trackObjectNodeDiscrepancies( + identifier: TreeNodeSchemaIdentifier, view: ObjectNodeStoredSchema, stored: ObjectNodeStoredSchema, ): Iterable { @@ -298,7 +331,8 @@ function* trackObjectNodeDiscrepancies( break; } yield { - identifier: fieldKey, + identifier, + fieldKey, mismatch: "fieldKind", view: result.value.kind, stored: storedEmptyFieldSchema.kind, @@ -312,7 +346,8 @@ function* trackObjectNodeDiscrepancies( break; } yield { - identifier: fieldKey, + identifier, + fieldKey, mismatch: "fieldKind", view: storedEmptyFieldSchema.kind, stored: result.value.kind, @@ -320,12 +355,11 @@ function* trackObjectNodeDiscrepancies( break; } case "both": { - yield* getFieldDiscrepancies(result.valueA, result.valueB, fieldKey); + yield* getFieldDiscrepancies(result.valueA, result.valueB, identifier, fieldKey); break; } - default: { + default: break; - } } } } @@ -428,13 +462,13 @@ function isFieldDiscrepancyCompatible(discrepancy: FieldDiscrepancy): boolean { * * The linear extension is represented as a lookup from each poset element to its index in the linear extension. */ -type LinearExtension = Map; +export type LinearExtension = Map; /** * A realizer for a partially-ordered set. 
See: * https://en.wikipedia.org/wiki/Order_dimension */ -type Realizer = LinearExtension[]; +export type Realizer = LinearExtension[]; /** * @privateRemarks @@ -468,7 +502,7 @@ const FieldKindIdentifiers = { * identifier * ``` */ -const fieldRealizer: Realizer = [ +export const fieldRealizer: Realizer = [ [ FieldKindIdentifiers.forbidden, FieldKindIdentifiers.identifier, @@ -485,7 +519,7 @@ const fieldRealizer: Realizer = [ ], ].map((extension) => new Map(extension.map((identifier, index) => [identifier, index]))); -const PosetComparisonResult = { +export const PosetComparisonResult = { Less: "<", Greater: ">", Equal: "=", @@ -494,7 +528,11 @@ const PosetComparisonResult = { type PosetComparisonResult = (typeof PosetComparisonResult)[keyof typeof PosetComparisonResult]; -function comparePosetElements(a: T, b: T, realizer: Realizer): PosetComparisonResult { +export function comparePosetElements( + a: T, + b: T, + realizer: Realizer, +): PosetComparisonResult { let hasLessThanResult = false; let hasGreaterThanResult = false; for (const extension of realizer) { @@ -517,7 +555,7 @@ function comparePosetElements(a: T, b: T, realizer: Realizer): PosetCompar : PosetComparisonResult.Equal; } -function posetLte(a: T, b: T, realizer: Realizer): boolean { +export function posetLte(a: T, b: T, realizer: Realizer): boolean { const comparison = comparePosetElements(a, b, realizer); return ( comparison === PosetComparisonResult.Less || comparison === PosetComparisonResult.Equal diff --git a/packages/dds/tree/src/feature-libraries/modular-schema/index.ts b/packages/dds/tree/src/feature-libraries/modular-schema/index.ts index 8378915b1208..a13179d02c28 100644 --- a/packages/dds/tree/src/feature-libraries/modular-schema/index.ts +++ b/packages/dds/tree/src/feature-libraries/modular-schema/index.ts @@ -77,4 +77,17 @@ export type { export { getAllowedContentDiscrepancies, isRepoSuperset, + type AllowedTypeDiscrepancy, + type FieldKindDiscrepancy, + type ValueSchemaDiscrepancy, + type FieldDiscrepancy, + type NodeDiscrepancy, + type NodeKindDiscrepancy, + type NodeFieldsDiscrepancy, + type LinearExtension, + type Realizer, + fieldRealizer, + PosetComparisonResult, + comparePosetElements, + posetLte, } from "./discrepancies.js"; diff --git a/packages/dds/tree/src/shared-tree/schematizeTree.ts b/packages/dds/tree/src/shared-tree/schematizeTree.ts index 233f55e5aa95..6ac813393303 100644 --- a/packages/dds/tree/src/shared-tree/schematizeTree.ts +++ b/packages/dds/tree/src/shared-tree/schematizeTree.ts @@ -7,7 +7,6 @@ import { assert, unreachableCase } from "@fluidframework/core-utils/internal"; import { AllowedUpdateType, - Compatibility, CursorLocationType, type ITreeCursorSynchronous, type TreeStoredSchema, @@ -24,7 +23,7 @@ import { import { fail, isReadonlyArray } from "../util/index.js"; import type { ITreeCheckout } from "./treeCheckout.js"; -import type { ViewSchema } from "../simple-tree/index.js"; +import { toStoredSchema, type ViewSchema } from "../simple-tree/index.js"; /** * Modify `storedSchema` and invoke `setInitialTree` when it's time to set the tree content. 
@@ -120,10 +119,7 @@ export function evaluateUpdate( ): UpdateType { const compatibility = viewSchema.checkCompatibility(checkout.storedSchema); - if ( - compatibility.read === Compatibility.Compatible && - compatibility.write === Compatibility.Compatible - ) { + if (compatibility.canUpgrade && compatibility.canView) { // Compatible as is return UpdateType.None; } @@ -133,13 +129,13 @@ export function evaluateUpdate( return UpdateType.Initialize; } - if (compatibility.read !== Compatibility.Compatible) { + if (!compatibility.canUpgrade) { // Existing stored schema permits trees which are incompatible with the view schema, so schema can not be updated return UpdateType.Incompatible; } - assert(compatibility.write === Compatibility.Incompatible, 0x8bd /* unexpected case */); - assert(compatibility.read === Compatibility.Compatible, 0x8be /* unexpected case */); + assert(!compatibility.canView, 0x8bd /* unexpected case */); + assert(compatibility.canUpgrade, 0x8be /* unexpected case */); // eslint-disable-next-line no-bitwise return allowedSchemaModifications & AllowedUpdateType.SchemaCompatible @@ -244,7 +240,7 @@ export function ensureSchema( return false; } case UpdateType.SchemaCompatible: { - checkout.updateSchema(viewSchema.schema); + checkout.updateSchema(toStoredSchema(viewSchema.schema)); return true; } case UpdateType.Initialize: { diff --git a/packages/dds/tree/src/shared-tree/schematizingTreeView.ts b/packages/dds/tree/src/shared-tree/schematizingTreeView.ts index bb49861c7276..5abd224c83d7 100644 --- a/packages/dds/tree/src/shared-tree/schematizingTreeView.ts +++ b/packages/dds/tree/src/shared-tree/schematizingTreeView.ts @@ -12,12 +12,7 @@ import { createEmitter } from "@fluid-internal/client-utils"; import { assert } from "@fluidframework/core-utils/internal"; import { UsageError } from "@fluidframework/telemetry-utils/internal"; -import { - AllowedUpdateType, - anchorSlot, - Compatibility, - type SchemaPolicy, -} from "../core/index.js"; +import { AllowedUpdateType, anchorSlot, type SchemaPolicy } from "../core/index.js"; import { type NodeKeyManager, defaultSchemaPolicy, @@ -58,7 +53,9 @@ import { HydratedContext, SimpleContextSlot, areImplicitFieldSchemaEqual, + createUnknownOptionalFieldPolicy, } from "../simple-tree/index.js"; + /** * Creating multiple tree views from the same checkout is not supported. This slot is used to detect if one already * exists and error if creating a second. @@ -116,14 +113,14 @@ export class SchematizingSimpleTreeView< } checkout.forest.anchors.slots.set(ViewSlot, this); - const policy = { + this.rootFieldSchema = normalizeFieldSchema(config.schema); + this.schemaPolicy = { ...defaultSchemaPolicy, validateSchema: config.enableSchemaValidation, + allowUnknownOptionalFields: createUnknownOptionalFieldPolicy(this.rootFieldSchema), }; - this.rootFieldSchema = normalizeFieldSchema(config.schema); - this.schemaPolicy = defaultSchemaPolicy; - this.viewSchema = new ViewSchema(policy, {}, toStoredSchema(this.rootFieldSchema)); + this.viewSchema = new ViewSchema(this.schemaPolicy, {}, this.rootFieldSchema); // This must be initialized before `update` can be called. 
this.currentCompatibility = { canView: false, @@ -166,16 +163,13 @@ export class SchematizingSimpleTreeView< this.nodeKeyManager, { schema: this.checkout.storedSchema, - policy: { - ...defaultSchemaPolicy, - validateSchema: this.config.enableSchemaValidation, - }, + policy: this.schemaPolicy, }, ); prepareContentForHydration(mapTree, this.checkout.forest); initialize(this.checkout, { - schema: this.viewSchema.schema, + schema: toStoredSchema(this.viewSchema.schema), initialTree: mapTree === undefined ? undefined : cursorForMapTreeNode(mapTree), }); }); @@ -202,7 +196,7 @@ export class SchematizingSimpleTreeView< AllowedUpdateType.SchemaCompatible, this.checkout, { - schema: this.viewSchema.schema, + schema: toStoredSchema(this.viewSchema.schema), initialTree: undefined, }, ); @@ -432,11 +426,7 @@ export function requireSchema( { const compatibility = viewSchema.checkCompatibility(checkout.storedSchema); - assert( - compatibility.write === Compatibility.Compatible && - compatibility.read === Compatibility.Compatible, - 0x8c3 /* requireSchema invoked with incompatible schema */, - ); + assert(compatibility.canView, 0x8c3 /* requireSchema invoked with incompatible schema */); } const view = new CheckoutFlexTreeView(checkout, schemaPolicy, nodeKeyManager, onDispose); diff --git a/packages/dds/tree/src/simple-tree/api/create.ts b/packages/dds/tree/src/simple-tree/api/create.ts index d2e962024b3f..100e011c402d 100644 --- a/packages/dds/tree/src/simple-tree/api/create.ts +++ b/packages/dds/tree/src/simple-tree/api/create.ts @@ -32,6 +32,7 @@ import { isFieldInSchema } from "../../feature-libraries/index.js"; import { toStoredSchema } from "../toStoredSchema.js"; import { inSchemaOrThrow, mapTreeFromNodeData } from "../toMapTree.js"; import { getUnhydratedContext } from "../createContext.js"; +import { createUnknownOptionalFieldPolicy } from "../objectNode.js"; /** * Construct tree content that is compatible with the field defined by the provided `schema`. @@ -122,7 +123,10 @@ export function createFromCursor( const flexSchema = context.flexContext.schema; const schemaValidationPolicy: SchemaAndPolicy = { - policy: defaultSchemaPolicy, + policy: { + ...defaultSchemaPolicy, + allowUnknownOptionalFields: createUnknownOptionalFieldPolicy(schema), + }, schema: context.flexContext.schema, }; diff --git a/packages/dds/tree/src/simple-tree/api/index.ts b/packages/dds/tree/src/simple-tree/api/index.ts index 83fda6bf3356..be037d6dce38 100644 --- a/packages/dds/tree/src/simple-tree/api/index.ts +++ b/packages/dds/tree/src/simple-tree/api/index.ts @@ -17,7 +17,11 @@ export { type TreeBranchEvents, asTreeViewAlpha, } from "./tree.js"; -export { SchemaFactory, type ScopedSchemaName } from "./schemaFactory.js"; +export { + SchemaFactory, + type ScopedSchemaName, + type SchemaFactoryObjectOptions, +} from "./schemaFactory.js"; export type { ValidateRecursiveSchema, FixRecursiveArraySchema, diff --git a/packages/dds/tree/src/simple-tree/api/schemaFactory.ts b/packages/dds/tree/src/simple-tree/api/schemaFactory.ts index 8a8efaf86628..ea79af26a0bc 100644 --- a/packages/dds/tree/src/simple-tree/api/schemaFactory.ts +++ b/packages/dds/tree/src/simple-tree/api/schemaFactory.ts @@ -18,6 +18,9 @@ import { getOrCreate, isReadonlyArray, } from "../../util/index.js"; +// This import is required for intellisense in @link doc comments on mouseover in VSCode. 
+// eslint-disable-next-line unused-imports/no-unused-imports, @typescript-eslint/no-unused-vars +import type { TreeAlpha } from "../../shared-tree/index.js"; import { booleanSchema, @@ -74,6 +77,7 @@ import type { import { createFieldSchemaUnsafe } from "./schemaFactoryRecursive.js"; import { TreeNodeValid } from "../treeNodeValid.js"; import { isLazy } from "../flexList.js"; + /** * Gets the leaf domain schema compatible with a given {@link TreeValue}. */ @@ -97,6 +101,58 @@ export function schemaFromValue(value: TreeValue): TreeNodeSchema { } } +/** + * Options when declaring an {@link SchemaFactory.object|object node}'s schema + */ +export interface SchemaFactoryObjectOptions { + /** + * Opt in to viewing documents which have unknown optional fields for this object's identifier, i.e. optional fields + * in the document schema which are not present in this object schema declaration. + * + * @defaultValue `false` + * @remarks + * The advantage of enabling this option is that it allows an application ecosystem with staged rollout to more quickly + * upgrade documents to include schema for new optional features. + * + * However, it does come with trade-offs that applications should weigh carefully when it comes to interactions between + * code and documents. + * When opening such documents, the API presented is still determined by the view schema. + * This can have implications on the behavior of edits or code which uses portions of the view schema, + * since this may inadvertently drop data which is present in those optional fields in the document schema. + * + * Consider the following example: + * + * ```typescript + * const sf = new SchemaFactory("com.example"); + * class PersonView extends sf.object("Person", { name: sf.string }, { allowUnknownOptionalFields: true }) {} + * class PersonStored extends sf.object("Person", { name: sf.string, nickname: sf.optional(sf.string) }) {} + * + * // Say we have a document which uses `PersonStored` in its schema, and application code constructs + * // a tree view using `PersonView`. If the application for some reason had implemented a function like this: + * function clonePerson(a: PersonView): PersonView { + * return new PersonView({ name: a.name }); + * } + * // ...or even like this: + * function clonePerson(a: PersonView): PersonView { + * return new PersonView({ ...a}) + * } + * // Then the alleged clone wouldn't actually clone the entire person in either case, it would drop the nickname. + * ``` + * + * If an application wants to be particularly careful to preserve all data on a node when editing it, it can use + * {@link TreeAlpha.(importVerbose:2)|import}/{@link TreeAlpha.(exportVerbose:2)|export} APIs with persistent keys. + * + * Note that public API methods which operate on entire nodes (such as `moveTo`, `moveToEnd`, etc. on arrays) do not encounter + * this problem as SharedTree's implementation stores the entire node in its lower layers. It's only when application code + * reaches into a node (either by accessing its fields, spreading it, or some other means) that this problem arises. + */ + allowUnknownOptionalFields?: boolean; +} + +export const defaultSchemaFactoryObjectOptions: Required = { + allowUnknownOptionalFields: false, +}; + /** * The name of a schema produced by {@link SchemaFactory}, including its optional scope prefix. 
* @@ -333,7 +389,12 @@ export class SchemaFactory< true, T > { - return objectSchema(this.scoped(name), fields, true); + return objectSchema( + this.scoped(name), + fields, + true, + defaultSchemaFactoryObjectOptions.allowUnknownOptionalFields, + ); } /** diff --git a/packages/dds/tree/src/simple-tree/api/storedSchema.ts b/packages/dds/tree/src/simple-tree/api/storedSchema.ts index 7a9fe7353b89..e26802c6a324 100644 --- a/packages/dds/tree/src/simple-tree/api/storedSchema.ts +++ b/packages/dds/tree/src/simple-tree/api/storedSchema.ts @@ -4,7 +4,7 @@ */ import type { ICodecOptions } from "../../codec/index.js"; -import { Compatibility, type TreeStoredSchema } from "../../core/index.js"; +import type { TreeStoredSchema } from "../../core/index.js"; import { defaultSchemaPolicy, encodeTreeSchema, @@ -59,7 +59,7 @@ export function extractPersistedSchema(schema: ImplicitFieldSchema): JsonCompati * opening a document that used the `persisted` schema and provided `view` to {@link ViewableTree.viewWith}. * * @param persisted - Schema persisted for a document. Typically persisted alongside the data and assumed to describe that data. - * @param view - Schema which would be used to view persisted content. Use {@link extractPersistedSchema} to convert the view schema into this format. + * @param view - Schema which would be used to view persisted content. * @param options - {@link ICodecOptions} used when parsing the provided schema. * @param canInitialize - Passed through to the return value unchanged and otherwise unused. * @returns The {@link SchemaCompatibilityStatus} a {@link TreeView} would report for this combination of schema. @@ -75,7 +75,7 @@ export function extractPersistedSchema(schema: ImplicitFieldSchema): JsonCompati * assert( * comparePersistedSchema( * require("./schema.json"), - * extractPersistedSchema(MySchema), + * MySchema, * { jsonValidator: typeboxValidator }, * false, * ).canUpgrade, @@ -85,14 +85,13 @@ export function extractPersistedSchema(schema: ImplicitFieldSchema): JsonCompati */ export function comparePersistedSchema( persisted: JsonCompatible, - view: JsonCompatible, + view: ImplicitFieldSchema, options: ICodecOptions, canInitialize: boolean, ): SchemaCompatibilityStatus { const schemaCodec = makeSchemaCodec(options); const stored = schemaCodec.decode(persisted as Format); - const viewParsed = schemaCodec.decode(view as Format); - const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, viewParsed); + const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, view); return comparePersistedSchemaInternal(stored, viewSchema, canInitialize); } @@ -105,22 +104,8 @@ export function comparePersistedSchemaInternal( viewSchema: ViewSchema, canInitialize: boolean, ): SchemaCompatibilityStatus { - const result = viewSchema.checkCompatibility(stored); - - // TODO: AB#8121: Weaken this check to support viewing under additional circumstances. - // In the near term, this should support viewing documents with additional optional fields in their schema on object types. - // Longer-term (as demand arises), we could also add APIs to constructing view schema to allow for more flexibility - // (e.g. 
out-of-schema content handlers could allow support for viewing docs which have extra allowed types in a particular field) - const canView = - result.write === Compatibility.Compatible && result.read === Compatibility.Compatible; - const canUpgrade = result.read === Compatibility.Compatible; - const isEquivalent = canView && canUpgrade; - const compatibility: SchemaCompatibilityStatus = { - canView, - canUpgrade, - isEquivalent, + return { + ...viewSchema.checkCompatibility(stored), canInitialize, }; - - return compatibility; } diff --git a/packages/dds/tree/src/simple-tree/api/tree.ts b/packages/dds/tree/src/simple-tree/api/tree.ts index 9dba1ea95b10..529b1312b19e 100644 --- a/packages/dds/tree/src/simple-tree/api/tree.ts +++ b/packages/dds/tree/src/simple-tree/api/tree.ts @@ -12,6 +12,10 @@ import type { RevertibleFactory, } from "../../core/index.js"; +// This is referenced by doc comments. +// eslint-disable-next-line @typescript-eslint/no-unused-vars, unused-imports/no-unused-imports +import type { TreeAlpha } from "../../shared-tree/index.js"; + import { type ImplicitFieldSchema, type InsertableField, @@ -31,6 +35,7 @@ import { markSchemaMostDerived } from "./schemaFactory.js"; import { fail, getOrCreate } from "../../util/index.js"; import type { MakeNominal } from "../../util/index.js"; import { walkFieldSchema } from "../walkFieldSchema.js"; + /** * A tree from which a {@link TreeView} can be created. * diff --git a/packages/dds/tree/src/simple-tree/api/view.ts b/packages/dds/tree/src/simple-tree/api/view.ts index 108066ef76d5..4cde4a003932 100644 --- a/packages/dds/tree/src/simple-tree/api/view.ts +++ b/packages/dds/tree/src/simple-tree/api/view.ts @@ -5,32 +5,56 @@ import { AdaptedViewSchema, + type TreeNodeStoredSchema, type Adapters, - Compatibility, type TreeFieldStoredSchema, type TreeNodeSchemaIdentifier, - type TreeNodeStoredSchema, type TreeStoredSchema, } from "../../core/index.js"; import { fail } from "../../util/index.js"; import { + FieldKinds, type FullSchemaPolicy, - allowsRepoSuperset, + type FieldDiscrepancy, + getAllowedContentDiscrepancies, isNeverTree, + PosetComparisonResult, + fieldRealizer, + comparePosetElements, } from "../../feature-libraries/index.js"; +import { + normalizeFieldSchema, + type FieldSchema, + type ImplicitFieldSchema, +} from "../schemaTypes.js"; +import { toStoredSchema } from "../toStoredSchema.js"; +import { unreachableCase } from "@fluidframework/core-utils/internal"; +import type { SchemaCompatibilityStatus } from "./tree.js"; /** * A collection of View information for schema, including policy. */ export class ViewSchema { /** - * @param schema - Cached conversion of view schema in the stored schema format. + * Cached conversion of the view schema in the stored schema format. + */ + private readonly viewSchemaAsStored: TreeStoredSchema; + /** + * Normalized view schema (implicitly allowed view schema types are converted to their canonical form). + */ + public readonly schema: FieldSchema; + + /** + * @param viewSchema - Schema for the root field of this view. */ public constructor( public readonly policy: FullSchemaPolicy, public readonly adapters: Adapters, - public readonly schema: TreeStoredSchema, - ) {} + viewSchema: ImplicitFieldSchema, + ) { + this.schema = normalizeFieldSchema(viewSchema); + this.viewSchemaAsStored = toStoredSchema(this.schema); + } /** * Determines the compatibility of a stored document @@ -42,53 +66,168 @@ export class ViewSchema { * TODO: this API violates the parse don't validate design philosophy. 
* It should be wrapped with (or replaced by) a parse style API. */ - public checkCompatibility(stored: TreeStoredSchema): { - read: Compatibility; - write: Compatibility; - writeAllowingStoredSchemaUpdates: Compatibility; - } { + public checkCompatibility( + stored: TreeStoredSchema, + ): Omit { // TODO: support adapters // const adapted = this.adaptRepo(stored); - const read = allowsRepoSuperset(this.policy, stored, this.schema) - ? Compatibility.Compatible - : // TODO: support adapters - // : allowsRepoSuperset(this.policy, adapted.adaptedForViewSchema, this.storedSchema) - // ? Compatibility.RequiresAdapters - Compatibility.Incompatible; - // TODO: Extract subset of adapters that are valid to use on stored - // TODO: separate adapters from schema updates - const write = allowsRepoSuperset(this.policy, this.schema, stored) - ? Compatibility.Compatible - : // TODO: support adapters - // : allowsRepoSuperset(this.policy, this.storedSchema, adapted.adaptedForViewSchema) - // TODO: IThis assumes adapters are bidirectional. - // Compatibility.RequiresAdapters - Compatibility.Incompatible; - - // TODO: compute this properly (and maybe include the set of schema changes needed for it?). - // Maybe updates would happen lazily when needed to store data? - // When willingness to updates can avoid need for some adapters, - // how should it be decided if the adapter should be used to avoid the update? - // TODO: is this case actually bi-variant, making this correct if we did it for each schema independently? - let writeAllowingStoredSchemaUpdates = - // TODO: This should consider just the updates needed - // (ex: when view covers a subset of stored after stored has a update to that subset). - allowsRepoSuperset(this.policy, stored, this.schema) - ? Compatibility.Compatible - : // TODO: this assumes adapters can translate in both directions. In general this will not be true. - // TODO: this also assumes that schema updates to the adapted repo would translate to - // updates on the stored schema, which is also likely untrue. - // // TODO: support adapters - // allowsRepoSuperset(this.policy, adapted.adaptedForViewSchema, this.storedSchema) - // ? Compatibility.RequiresAdapters // Requires schema updates. TODO: consider adapters that can update writes. - Compatibility.Incompatible; - - // Since the above does not consider partial updates, - // we can improve the tolerance a bit by considering the op-op update: - writeAllowingStoredSchemaUpdates = Math.max(writeAllowingStoredSchemaUpdates, write); - - return { read, write, writeAllowingStoredSchemaUpdates }; + // View schema allows a subset of documents that stored schema does, and the discrepancies are allowed by policy + // determined by the view schema (i.e. objects with extra optional fields in the stored schema have opted into allowing this. + // In the future, this would also include things like: + // - fields with more allowed types in the stored schema than in the view schema have out-of-schema "unknown content" adapters + let canView = true; + // View schema allows a superset of documents that stored schema does, hence the document could be upgraded to use a persisted version + // of this view schema as its stored schema. 
+ let canUpgrade = true; + + const updateCompatibilityFromFieldDiscrepancy = (discrepancy: FieldDiscrepancy): void => { + switch (discrepancy.mismatch) { + case "allowedTypes": { + // Since we only track the symmetric difference between the allowed types in the view and + // stored schemas, it's sufficient to check if any extra allowed types still exist in the + // stored schema. + if ( + discrepancy.stored.some( + (identifier) => + !isNeverTree(this.policy, stored, stored.nodeSchema.get(identifier)), + ) + ) { + // Stored schema has extra allowed types that the view schema does not. + canUpgrade = false; + canView = false; + } + + if ( + discrepancy.view.some( + (identifier) => + !isNeverTree( + this.policy, + this.viewSchemaAsStored, + this.viewSchemaAsStored.nodeSchema.get(identifier), + ), + ) + ) { + // View schema has extra allowed types that the stored schema does not. + canView = false; + } + break; + } + case "fieldKind": { + const result = comparePosetElements( + discrepancy.stored, + discrepancy.view, + fieldRealizer, + ); + + if (result === PosetComparisonResult.Greater) { + // Stored schema is more relaxed than view schema. + canUpgrade = false; + if ( + discrepancy.view === FieldKinds.forbidden.identifier && + discrepancy.identifier !== undefined && + this.policy.allowUnknownOptionalFields(discrepancy.identifier) + ) { + // When the application has opted into it, we allow viewing documents which have additional + // optional fields in the stored schema that are not present in the view schema. + } else { + canView = false; + } + } + + if (result === PosetComparisonResult.Less) { + // View schema is more relaxed than stored schema. + canView = false; + } + + if (result === PosetComparisonResult.Incomparable) { + canUpgrade = false; + canView = false; + } + + break; + } + case "valueSchema": { + canView = false; + canUpgrade = false; + break; + } + default: + unreachableCase(discrepancy); + } + }; + + for (const discrepancy of getAllowedContentDiscrepancies( + this.viewSchemaAsStored, + stored, + )) { + if (!canView && !canUpgrade) { + break; + } + + switch (discrepancy.mismatch) { + case "nodeKind": { + const viewNodeSchema = this.viewSchemaAsStored.nodeSchema.get( + discrepancy.identifier, + ); + const storedNodeSchema = stored.nodeSchema.get(discrepancy.identifier); + // We conservatively do not allow node types to change. + // The only time this might be valid in the sense that the data canonically converts is converting an object node + // to a map node over the union of all the object fields' types. + if (discrepancy.stored === undefined) { + const viewIsNever = + viewNodeSchema !== undefined + ? isNeverTree(this.policy, this.viewSchemaAsStored, viewNodeSchema) + : true; + if (!viewIsNever) { + // View schema has added a node type that the stored schema doesn't know about. + canView = false; + } + } else if (discrepancy.view === undefined) { + const storedIsNever = + storedNodeSchema !== undefined + ? isNeverTree(this.policy, stored, storedNodeSchema) + : true; + if (!storedIsNever) { + // Stored schema has a node type that the view schema doesn't know about. + canUpgrade = false; + } + } else { + // Node type exists in both schemas but has changed. We conservatively never allow this. + const storedIsNever = + storedNodeSchema !== undefined + ? isNeverTree(this.policy, stored, storedNodeSchema) + : true; + const viewIsNever = + viewNodeSchema !== undefined + ? 
isNeverTree(this.policy, this.viewSchemaAsStored, viewNodeSchema) + : true; + if (!storedIsNever || !viewIsNever) { + canView = false; + canUpgrade = false; + } + } + break; + } + case "valueSchema": + case "allowedTypes": + case "fieldKind": { + updateCompatibilityFromFieldDiscrepancy(discrepancy); + break; + } + case "fields": { + discrepancy.differences.forEach(updateCompatibilityFromFieldDiscrepancy); + break; + } + // No default + } + } + + return { + canView, + canUpgrade, + isEquivalent: canView && canUpgrade, + }; } /** @@ -102,8 +241,15 @@ export class ViewSchema { // since there never is a reason to have a never type as an adapter input, // and its impossible for an adapter to be correctly implemented if its output type is never // (unless its input is also never). + for (const adapter of this.adapters?.tree ?? []) { - if (isNeverTree(this.policy, this.schema, this.schema.nodeSchema.get(adapter.output))) { + if ( + isNeverTree( + this.policy, + this.viewSchemaAsStored, + this.viewSchemaAsStored.nodeSchema.get(adapter.output), + ) + ) { fail("tree adapter for stored adapter.output should not be never"); } } diff --git a/packages/dds/tree/src/simple-tree/index.ts b/packages/dds/tree/src/simple-tree/index.ts index 708def9ac6bd..fe762268e014 100644 --- a/packages/dds/tree/src/simple-tree/index.ts +++ b/packages/dds/tree/src/simple-tree/index.ts @@ -35,6 +35,7 @@ export { type SchemaCompatibilityStatus, type ITreeConfigurationOptions, SchemaFactory, + type SchemaFactoryObjectOptions, type ScopedSchemaName, type ValidateRecursiveSchema, type FixRecursiveArraySchema, @@ -170,6 +171,7 @@ export { type ObjectFromSchemaRecord, type TreeObjectNode, setField, + createUnknownOptionalFieldPolicy, } from "./objectNode.js"; export type { TreeMapNode, MapNodeInsertableData } from "./mapNode.js"; export { diff --git a/packages/dds/tree/src/simple-tree/objectNode.ts b/packages/dds/tree/src/simple-tree/objectNode.ts index 5d692dc6d13a..db62bb46eecb 100644 --- a/packages/dds/tree/src/simple-tree/objectNode.ts +++ b/packages/dds/tree/src/simple-tree/objectNode.ts @@ -6,7 +6,7 @@ import { assert, Lazy } from "@fluidframework/core-utils/internal"; import { UsageError } from "@fluidframework/telemetry-utils/internal"; -import type { FieldKey } from "../core/index.js"; +import type { FieldKey, SchemaPolicy } from "../core/index.js"; import { FieldKinds, type FlexTreeField, @@ -42,7 +42,11 @@ import { } from "./core/index.js"; import { mapTreeFromNodeData, type InsertableContent } from "./toMapTree.js"; import { type RestrictiveStringRecord, fail, type FlattenKeys } from "../util/index.js"; -import type { ObjectNodeSchema, ObjectNodeSchemaInternalData } from "./objectNodeTypes.js"; +import { + isObjectNodeSchema, + type ObjectNodeSchema, + type ObjectNodeSchemaInternalData, +} from "./objectNodeTypes.js"; import { TreeNodeValid, type MostDerivedData } from "./treeNodeValid.js"; import { getUnhydratedContext } from "./createContext.js"; @@ -330,6 +334,7 @@ export function objectSchema< identifier: TName, info: T, implicitlyConstructable: ImplicitlyConstructable, + allowUnknownOptionalFields: boolean, ): ObjectNodeSchema & ObjectNodeSchemaInternalData { // Ensure no collisions between final set of property keys, and final set of stored keys (including those // implicitly derived from property keys) @@ -368,6 +373,7 @@ export function objectSchema< ]), ); public static readonly identifierFieldKeys: readonly FieldKey[] = identifierFieldKeys; + public static readonly allowUnknownOptionalFields: boolean = 
allowUnknownOptionalFields; public static override prepareInstance( this: typeof TreeNodeValid, @@ -509,3 +515,21 @@ function assertUniqueKeys< derivedStoredKeys.add(storedKey); } } + +/** + * Creates a policy for allowing unknown optional fields on an object node which delegates to the policy defined + * on the object node's internal schema data. + */ +export function createUnknownOptionalFieldPolicy( + schema: ImplicitFieldSchema, +): SchemaPolicy["allowUnknownOptionalFields"] { + const context = getUnhydratedContext(schema); + return (identifier) => { + const storedSchema = context.schema.get(identifier); + return ( + storedSchema !== undefined && + isObjectNodeSchema(storedSchema) && + storedSchema.allowUnknownOptionalFields + ); + }; +} diff --git a/packages/dds/tree/src/simple-tree/objectNodeTypes.ts b/packages/dds/tree/src/simple-tree/objectNodeTypes.ts index 58743b6f1bf7..acf1ab5bb510 100644 --- a/packages/dds/tree/src/simple-tree/objectNodeTypes.ts +++ b/packages/dds/tree/src/simple-tree/objectNodeTypes.ts @@ -55,6 +55,11 @@ export interface ObjectNodeSchemaInternalData { * Stored keys which hold identifiers. */ readonly identifierFieldKeys: readonly FieldKey[]; + + /** + * Whether to tolerate (and preserve) additional unknown optional fields in instances of this object node. + */ + readonly allowUnknownOptionalFields: boolean; } export const ObjectNodeSchema = { diff --git a/packages/dds/tree/src/simple-tree/toStoredSchema.ts b/packages/dds/tree/src/simple-tree/toStoredSchema.ts index a657c96ec712..eaff11f9beb3 100644 --- a/packages/dds/tree/src/simple-tree/toStoredSchema.ts +++ b/packages/dds/tree/src/simple-tree/toStoredSchema.ts @@ -20,7 +20,7 @@ import { type TreeTypeSet, } from "../core/index.js"; import { FieldKinds, type FlexFieldKind } from "../feature-libraries/index.js"; -import { brand, fail, isReadonlyArray } from "../util/index.js"; +import { brand, fail, getOrCreate, isReadonlyArray } from "../util/index.js"; import { NodeKind, type TreeNodeSchema } from "./core/index.js"; import { FieldKind, @@ -33,29 +33,35 @@ import { LeafNodeSchema } from "./leafNodeSchema.js"; import { isObjectNodeSchema } from "./objectNodeTypes.js"; import { normalizeFlexListEager } from "./flexList.js"; +const viewToStoredCache = new WeakMap(); + /** * Converts a {@link ImplicitFieldSchema} into a {@link TreeStoredSchema}. */ export function toStoredSchema(root: ImplicitFieldSchema): TreeStoredSchema { - const nodeSchema: Map = new Map(); - walkFieldSchema(root, { - node(schema) { - if (nodeSchema.has(brand(schema.identifier))) { - // Use JSON.stringify to quote and escape identifier string. - throw new UsageError( - `Multiple schema encountered with the identifier ${JSON.stringify( - schema.identifier, - )}. Remove or rename them to avoid the collision.`, - ); - } - nodeSchema.set(brand(schema.identifier), getStoredSchema(schema)); - }, - }); + return getOrCreate(viewToStoredCache, root, () => { + const nodeSchema: Map = new Map(); + walkFieldSchema(root, { + node(schema) { + if (nodeSchema.has(brand(schema.identifier))) { + // Use JSON.stringify to quote and escape identifier string. + throw new UsageError( + `Multiple schema encountered with the identifier ${JSON.stringify( + schema.identifier, + )}. 
Remove or rename them to avoid the collision.`, + ); + } + nodeSchema.set(brand(schema.identifier), getStoredSchema(schema)); + }, + }); - return { - nodeSchema, - rootFieldSchema: convertField(root), - }; + const result: TreeStoredSchema = { + nodeSchema, + rootFieldSchema: convertField(root), + }; + viewToStoredCache.set(root, result); + return result; + }); } /** diff --git a/packages/dds/tree/src/test/feature-libraries/default-schema/schemaChecker.spec.ts b/packages/dds/tree/src/test/feature-libraries/default-schema/schemaChecker.spec.ts index 92d5018e2cba..09955f2cda64 100644 --- a/packages/dds/tree/src/test/feature-libraries/default-schema/schemaChecker.spec.ts +++ b/packages/dds/tree/src/test/feature-libraries/default-schema/schemaChecker.spec.ts @@ -55,6 +55,8 @@ function createSchemaAndPolicy( // Note: the value of 'validateSchema' doesn't matter for the tests in this file because they're testing a // layer where we already decided that we are doing validation and are validating that it works correctly. validateSchema: true, + // TODO: unit test other options in this file + allowUnknownOptionalFields: () => false, }, }; } diff --git a/packages/dds/tree/src/test/feature-libraries/modular-schema/discrepancies.spec.ts b/packages/dds/tree/src/test/feature-libraries/modular-schema/discrepancies.spec.ts index 2c25effee617..588c8aee8107 100644 --- a/packages/dds/tree/src/test/feature-libraries/modular-schema/discrepancies.spec.ts +++ b/packages/dds/tree/src/test/feature-libraries/modular-schema/discrepancies.spec.ts @@ -216,18 +216,21 @@ describe("Schema Discrepancies", () => { [ { identifier: undefined, + fieldKey: undefined, mismatch: "fieldKind", view: "Optional", stored: "Value", }, { identifier: testTreeNodeIdentifier, + fieldKey: undefined, mismatch: "allowedTypes", view: [], stored: ["string"], }, { identifier: testTreeNodeIdentifier, + fieldKey: undefined, mismatch: "fieldKind", view: "Value", stored: "Optional", @@ -240,6 +243,7 @@ describe("Schema Discrepancies", () => { [ { identifier: testTreeNodeIdentifier, + fieldKey: undefined, mismatch: "allowedTypes", view: ["number"], stored: ["array"], @@ -349,19 +353,22 @@ describe("Schema Discrepancies", () => { mismatch: "fields", differences: [ { - identifier: "x", + fieldKey: "x", + identifier: testTreeNodeIdentifier, mismatch: "allowedTypes", view: ["number"], stored: ["string"], }, { - identifier: "x", + fieldKey: "x", + identifier: testTreeNodeIdentifier, mismatch: "fieldKind", view: "Value", stored: "Optional", }, { - identifier: "y", + fieldKey: "y", + identifier: testTreeNodeIdentifier, mismatch: "fieldKind", view: "Forbidden", stored: "Optional", @@ -443,6 +450,7 @@ describe("Schema Discrepancies", () => { assert.deepEqual(Array.from(getAllowedContentDiscrepancies(neverTree, mapNodeSchema)), [ { identifier: testTreeNodeIdentifier, + fieldKey: undefined, mismatch: "allowedTypes", view: [], stored: ["number"], @@ -457,7 +465,8 @@ describe("Schema Discrepancies", () => { mismatch: "fields", differences: [ { - identifier: "x", + identifier: testTreeNodeIdentifier, + fieldKey: "x", mismatch: "allowedTypes", view: [], stored: ["number"], @@ -498,7 +507,8 @@ describe("Schema Discrepancies", () => { mismatch: "fields", differences: [ { - identifier: "x", + identifier: testTreeNodeIdentifier, + fieldKey: "x", mismatch: "fieldKind", view: "Forbidden", stored: "Value", @@ -516,13 +526,15 @@ describe("Schema Discrepancies", () => { mismatch: "fields", differences: [ { - identifier: "x", + identifier: testTreeNodeIdentifier, + fieldKey: 
"x", mismatch: "allowedTypes", view: [], stored: ["number"], }, { - identifier: "x", + identifier: testTreeNodeIdentifier, + fieldKey: "x", mismatch: "fieldKind", view: "Forbidden", stored: "Value", diff --git a/packages/dds/tree/src/test/feature-libraries/modular-schema/schemaEvolutionExamples.spec.ts b/packages/dds/tree/src/test/feature-libraries/modular-schema/schemaEvolutionExamples.spec.ts index 7ee6f2c484b0..6bf1451d36d1 100644 --- a/packages/dds/tree/src/test/feature-libraries/modular-schema/schemaEvolutionExamples.spec.ts +++ b/packages/dds/tree/src/test/feature-libraries/modular-schema/schemaEvolutionExamples.spec.ts @@ -7,7 +7,6 @@ import { strict as assert } from "node:assert"; import { type Adapters, - Compatibility, EmptyKey, type TreeFieldStoredSchema, type TreeNodeSchemaIdentifier, @@ -116,14 +115,10 @@ describe("Schema Evolution Examples", () => { * (since adapters are not implemented yet, and they are the nice way to handle that). */ it("basic usage", () => { - // Collect our view schema. - // This will represent our view schema for a simple canvas application. - const viewCollection = toStoredSchema(root); - // This is where legacy schema handling logic for schematize. const adapters: Adapters = {}; // Compose all the view information together. - const view = new ViewSchema(defaultSchemaPolicy, adapters, viewCollection); + const view = new ViewSchema(defaultSchemaPolicy, adapters, root); // Now lets imagine using this application on a new empty document. // TreeStoredSchemaRepository defaults to a state that permits no document states at all. @@ -138,18 +133,8 @@ describe("Schema Evolution Examples", () => { // Sadly for our application, we did not allow empty roots in our view schema, // nor did we provide an adapter capable of handling empty roots. // This means our application is unable to view this document. - assertEnumEqual(Compatibility, compat.read, Compatibility.Incompatible); - - // And since the view schema currently excludes empty roots, its also incompatible for writing: - assertEnumEqual(Compatibility, compat.write, Compatibility.Incompatible); - - // Additionally even updating the document schema can't save this app, - // since the new schema would be incompatible with the existing document content. - assertEnumEqual( - Compatibility, - compat.writeAllowingStoredSchemaUpdates, - Compatibility.Incompatible, - ); + // And since the view schema currently excludes empty roots, its also incompatible for upgrading: + assert.deepEqual(compat, { canView: false, canUpgrade: false, isEquivalent: false }); // This is where the app would inform the user that the document // is not compatible with their version of the application. @@ -165,25 +150,16 @@ describe("Schema Evolution Examples", () => { // This example picks the first approach. // Lets simulate the developers of the app making this change by modifying the view schema: - const viewCollection2 = toStoredSchema(tolerantRoot); - const view2 = new ViewSchema(defaultSchemaPolicy, adapters, viewCollection2); + const view2 = new ViewSchema(defaultSchemaPolicy, adapters, tolerantRoot); // When we open this document, we should check it's compatibility with our application: const compat = view2.checkCompatibility(stored); // The adjusted view schema can be used read this document, no adapters needed. 
- assertEnumEqual(Compatibility, compat.read, Compatibility.Compatible); + assert.equal(compat.canUpgrade, true); // However the document just has its empty root schema, // so the app could make changes that could not be written back. - assertEnumEqual(Compatibility, compat.write, Compatibility.Incompatible); - - // However, it is possible to update the schema in the document to match our schema - // (since the existing data in compatible with our schema) - assertEnumEqual( - Compatibility, - compat.writeAllowingStoredSchemaUpdates, - Compatibility.Compatible, - ); + assert.equal(compat.canView, false); // The app can consider this compatible and proceed if it is ok with updating schema on write. // There are a few approaches apps might want to take here, but we will assume one that seems reasonable: @@ -212,11 +188,12 @@ describe("Schema Evolution Examples", () => { // which will notify and applications with the document open. // They can recheck their compatibility: const compatNew = view2.checkCompatibility(stored); - const report = Array.from(getAllowedContentDiscrepancies(viewCollection2, stored)); + const report = Array.from( + getAllowedContentDiscrepancies(toStoredSchema(tolerantRoot), stored), + ); assert.deepEqual(report, []); - assertEnumEqual(Compatibility, compatNew.read, Compatibility.Compatible); // It is now possible to write our date into the document. - assertEnumEqual(Compatibility, compatNew.write, Compatibility.Compatible); + assert.deepEqual(compatNew, { canView: true, canUpgrade: true, isEquivalent: true }); // Now lets imagine some time passes, and the developers want to add a second content type: @@ -231,18 +208,11 @@ describe("Schema Evolution Examples", () => { // And canvas is still the same storage wise, but its view schema references the updated positionedCanvasItem2: const canvas2 = builder.array("Canvas", positionedCanvasItem2); // Once again we will simulate reloading the app with different schema by modifying the view schema. - const viewCollection3 = toStoredSchema(builder.optional(canvas2)); - const view3 = new ViewSchema(defaultSchemaPolicy, adapters, viewCollection3); + const view3 = new ViewSchema(defaultSchemaPolicy, adapters, builder.optional(canvas2)); // With this new schema, we can load the document just like before: const compat2 = view3.checkCompatibility(stored); - assertEnumEqual(Compatibility, compat2.read, Compatibility.Compatible); - assertEnumEqual(Compatibility, compat2.write, Compatibility.Incompatible); - assertEnumEqual( - Compatibility, - compat2.writeAllowingStoredSchemaUpdates, - Compatibility.Compatible, - ); + assert.deepEqual(compat2, { canView: false, canUpgrade: true, isEquivalent: false }); // This is the same case as above where we can choose to do a schema update if we want: assert(stored.tryUpdateTreeSchema(positionedCanvasItem2)); @@ -250,12 +220,11 @@ describe("Schema Evolution Examples", () => { // And recheck compat: const compat3 = view3.checkCompatibility(stored); - assertEnumEqual(Compatibility, compat3.read, Compatibility.Compatible); - assertEnumEqual(Compatibility, compat3.write, Compatibility.Compatible); + assert.deepEqual(compat3, { canView: true, canUpgrade: true, isEquivalent: true }); } }); - // TODO: support adapters + // TODO: support adapters. 
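
The example above reduces every compatibility question to the three flags returned by `ViewSchema.checkCompatibility`. A minimal sketch of how a host application might branch on them (the `upgrade` callback is a hypothetical stand-in for whatever stored-schema update the app performs, such as the `tryUpdateTreeSchema` calls shown above; import paths are illustrative and assume a test file near the ones in this patch):

```typescript
import type { TreeStoredSchema } from "../../core/index.js";
import type { ViewSchema } from "../../simple-tree/index.js";

/**
 * Decide whether a document with `stored` schema can be used with `view`,
 * upgrading the stored schema when that alone is sufficient.
 * Assumes `stored` is a live repository that `upgrade` mutates in place.
 */
function canUseDocument(
	view: ViewSchema,
	stored: TreeStoredSchema,
	upgrade: () => void,
): boolean {
	const compat = view.checkCompatibility(stored);
	if (compat.canView) {
		// Covers the isEquivalent case too: the document is readable as-is.
		return true;
	}
	if (compat.canUpgrade) {
		// The stored schema is a subset of the view schema: upgrade it, then re-check.
		upgrade();
		return view.checkCompatibility(stored).canView;
	}
	// Neither viewable nor upgradeable: the application is too old for this document.
	return false;
}
```
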
// function makeTolerantRootAdapter(view: TreeStoredSchema): FieldAdapter { // return { diff --git a/packages/dds/tree/src/test/schemaFactoryAlpha.ts b/packages/dds/tree/src/test/schemaFactoryAlpha.ts new file mode 100644 index 000000000000..14b3a8ce95ca --- /dev/null +++ b/packages/dds/tree/src/test/schemaFactoryAlpha.ts @@ -0,0 +1,90 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import type { ScopedSchemaName, InsertableObjectFromSchemaRecord } from "../internalTypes.js"; +import { + SchemaFactory, + type SchemaFactoryObjectOptions, + type TreeNodeSchemaClass, + type NodeKind, + type Unenforced, + type ImplicitFieldSchema, +} from "../simple-tree/index.js"; +// eslint-disable-next-line import/no-internal-modules +import { defaultSchemaFactoryObjectOptions } from "../simple-tree/api/schemaFactory.js"; +// eslint-disable-next-line import/no-internal-modules +import { type TreeObjectNode, objectSchema } from "../simple-tree/objectNode.js"; +import type { RestrictiveStringRecord } from "../util/index.js"; + +/** + * Copy of {@link SchemaFactory} with additional alpha APIs. + * + * @privateRemarks + * Not currently exported to the public API surface as doing so produces errors in API-extractor. + * + * Can be removed once additional object node features are deemed stable and on the base class. + */ +export class SchemaFactoryAlpha< + out TScope extends string | undefined = string | undefined, + TName extends number | string = string, +> extends SchemaFactory { + // TS has trouble with subclassing schema factory and produces errors in the definition of objectRecursive without + // explicit type annotations saying the return type is "the same as the parent class". There's not a great way to do + // that AFAICT without using an instantiation expression, but `super` is unsupported in such expressions. + private readonly baseKludge: SchemaFactory = this; + private scoped2(name: Name): ScopedSchemaName { + return ( + this.scope === undefined ? `${name}` : `${this.scope}.${name}` + ) as ScopedSchemaName; + } + + /** + * Define a {@link TreeNodeSchemaClass} for a {@link TreeObjectNode}. + * + * @param name - Unique identifier for this schema within this factory's scope. + * @param fields - Schema for fields of the object node's schema. Defines what children can be placed under each key. + */ + public override object< + const Name extends TName, + const T extends RestrictiveStringRecord, + >( + name: Name, + fields: T, + options?: SchemaFactoryObjectOptions, + ): TreeNodeSchemaClass< + ScopedSchemaName, + NodeKind.Object, + TreeObjectNode>, + object & InsertableObjectFromSchemaRecord, + true, + T + > { + return objectSchema( + this.scoped2(name), + fields, + true, + options?.allowUnknownOptionalFields ?? 
+ defaultSchemaFactoryObjectOptions.allowUnknownOptionalFields, + ); + } + + /** + * {@inheritdoc} + */ + public override objectRecursive< + const Name extends TName, + const T extends Unenforced>, + >( + name: Name, + t: T, + options?: SchemaFactoryObjectOptions, + ): ReturnType> { + return this.object( + name, + t as T & RestrictiveStringRecord, + options, + ) as unknown as ReturnType>; + } +} diff --git a/packages/dds/tree/src/test/shared-tree/schematizeTree.spec.ts b/packages/dds/tree/src/test/shared-tree/schematizeTree.spec.ts index b5d415101257..cd97df01a0a1 100644 --- a/packages/dds/tree/src/test/shared-tree/schematizeTree.spec.ts +++ b/packages/dds/tree/src/test/shared-tree/schematizeTree.spec.ts @@ -15,7 +15,7 @@ import { TreeStoredSchemaRepository, type AnchorSetRootEvents, } from "../../core/index.js"; -import { singleJsonCursor } from "../json/index.js"; +import { JsonUnion, singleJsonCursor } from "../json/index.js"; import { FieldKinds, allowsRepoSuperset, @@ -47,18 +47,17 @@ import { } from "../../simple-tree/index.js"; // eslint-disable-next-line import/no-internal-modules import { toStoredSchema } from "../../simple-tree/toStoredSchema.js"; -import { jsonSequenceRootSchema } from "../sequenceRootUtils.js"; import type { Transactor } from "../../shared-tree-core/index.js"; const builder = new SchemaFactory("test"); const root = builder.number; -const schema = toStoredSchema(root); +const schema = root; -const schemaGeneralized = toStoredSchema(builder.optional([root, builder.string])); -const schemaValueRoot = toStoredSchema([root, builder.string]); +const schemaGeneralized = builder.optional([root, builder.string]); +const schemaValueRoot = [root, builder.string]; // Schema for tree that must always be empty. -const emptySchema = toStoredSchema(builder.optional([])); +const emptySchema = builder.optional([]); function expectSchema(actual: TreeStoredSchema, expected: TreeStoredSchema): void { // Check schema match @@ -139,15 +138,24 @@ describe("schematizeTree", () => { }); } - testInitialize("optional-empty", { schema, initialTree: undefined }); - testInitialize("optional-full", { schema, initialTree: singleJsonCursor(5) }); - testInitialize("value", { schema: schemaValueRoot, initialTree: singleJsonCursor(6) }); + testInitialize("optional-empty", { + schema: toStoredSchema(schema), + initialTree: undefined, + }); + testInitialize("optional-full", { + schema: toStoredSchema(schema), + initialTree: singleJsonCursor(5), + }); + testInitialize("value", { + schema: toStoredSchema(schemaValueRoot), + initialTree: singleJsonCursor(6), + }); // TODO: Test schema validation of initial tree (once we have a utility for it) }); - function mockCheckout(InputSchema: TreeStoredSchema, isEmpty: boolean): ITreeCheckout { - const storedSchema = new TreeStoredSchemaRepository(InputSchema); + function mockCheckout(InputSchema: ImplicitFieldSchema, isEmpty: boolean): ITreeCheckout { + const storedSchema = new TreeStoredSchemaRepository(toStoredSchema(InputSchema)); const checkout: ITreeCheckout = { storedSchema, // eslint-disable-next-line @typescript-eslint/consistent-type-assertions @@ -185,13 +193,13 @@ describe("schematizeTree", () => { describe("evaluateUpdate", () => { describe("test cases", () => { - const testCases: [string, TreeStoredSchema, boolean][] = [ + const testCases: [string, ImplicitFieldSchema, boolean][] = [ ["empty", emptySchema, true], ["basic-optional-empty", schema, true], ["basic-optional", schema, false], ["basic-value", schemaValueRoot, false], - ["complex-empty", 
jsonSequenceRootSchema, true], - ["complex", jsonSequenceRootSchema, false], + ["complex-empty", JsonUnion, true], + ["complex", builder.arrayRecursive("root", JsonUnion), false], ]; for (const [name, data, isEmpty] of testCases) { it(name, () => { @@ -248,7 +256,7 @@ describe("schematizeTree", () => { describe("ensureSchema", () => { it("compatible empty schema", () => { const checkout = checkoutWithContent({ - schema: emptySchema, + schema: toStoredSchema(emptySchema), initialTree: undefined, }); const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, emptySchema); @@ -256,22 +264,22 @@ describe("schematizeTree", () => { }); it("initialize optional root", () => { - const emptyContent = { - schema: emptySchema, + const emptyContent: TreeStoredContent = { + schema: toStoredSchema(emptySchema), initialTree: undefined, }; const emptyCheckout = checkoutWithContent(emptyContent); const content: TreeStoredContent = { - schema: schemaGeneralized, + schema: toStoredSchema(schemaGeneralized), initialTree: singleJsonCursor(5), }; const initializedCheckout = checkoutWithContent(content); // Schema upgraded, but content not initialized const upgradedCheckout = checkoutWithContent({ - schema: schemaGeneralized, + schema: toStoredSchema(schemaGeneralized), initialTree: undefined, }); - const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, content.schema); + const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, schemaGeneralized); // Non updating cases { @@ -338,18 +346,18 @@ describe("schematizeTree", () => { }); it("initialize required root", () => { - const emptyContent = { - schema: emptySchema, + const emptyContent: TreeStoredContent = { + schema: toStoredSchema(emptySchema), initialTree: undefined, }; const emptyCheckout = checkoutWithContent(emptyContent); const content: TreeStoredContent = { - schema: schemaValueRoot, + schema: toStoredSchema(schemaValueRoot), initialTree: singleJsonCursor(5), }; const initializedCheckout = checkoutWithContent(content); - const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, content.schema); + const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, schemaValueRoot); // Non updating cases { @@ -398,23 +406,23 @@ describe("schematizeTree", () => { }); it("update non-empty", () => { - const initialContent = { - schema, + const initialContent: TreeStoredContent = { + schema: toStoredSchema(schema), get initialTree() { return singleJsonCursor(5); }, }; const initialCheckout = checkoutWithContent(initialContent); const content: TreeStoredContent = { - schema: schemaGeneralized, + schema: toStoredSchema(schemaGeneralized), initialTree: singleJsonCursor("Should not be used"), }; const updatedCheckout = checkoutWithContent({ - schema: schemaGeneralized, + schema: toStoredSchema(schemaGeneralized), initialTree: initialContent.initialTree, }); - const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, content.schema); + const viewSchema = new ViewSchema(defaultSchemaPolicy, {}, schemaGeneralized); // Non updating case { diff --git a/packages/dds/tree/src/test/shared-tree/schematizingTreeView.spec.ts b/packages/dds/tree/src/test/shared-tree/schematizingTreeView.spec.ts index 7858c7f5e5c1..64e52c64e444 100644 --- a/packages/dds/tree/src/test/shared-tree/schematizingTreeView.spec.ts +++ b/packages/dds/tree/src/test/shared-tree/schematizingTreeView.spec.ts @@ -29,6 +29,7 @@ import { } from "../utils.js"; import { insert } from "../sequenceRootUtils.js"; import type { TreeCheckout, TreeStoredContent } from "../../shared-tree/index.js"; +import { 
SchemaFactoryAlpha } from "../schemaFactoryAlpha.js"; const schema = new SchemaFactory("com.example"); const config = new TreeViewConfiguration({ schema: schema.number }); @@ -144,8 +145,6 @@ describe("SchematizingSimpleTreeView", () => { assert.deepEqual(log, [["rootChanged", 6]]); }); - // TODO: AB#8121: When adding support for additional optional fields, we may want a variant of this test which does the analogous flow using - // an intermediate state where canView is true but canUpgrade is false. it("Schema becomes un-upgradeable then exact match again", () => { const checkout = checkoutWithInitialTree(config, 5); const view = new SchematizingSimpleTreeView(checkout, config, new MockNodeKeyManager()); @@ -180,6 +179,140 @@ describe("SchematizingSimpleTreeView", () => { view.dispose(); }); + it("Open document whose stored schema has additional optional fields", () => { + // This sort of scenario might be reasonably encountered when an "older" version of an application opens + // up a document that has been created and/or edited by a "newer" version of an application (which has + // expanded the schema to include more information). + const factory = new SchemaFactoryAlpha(undefined); + class PersonGeneralized extends factory.object("Person", { + name: factory.string, + age: factory.number, + address: factory.optional(factory.string), + }) {} + class PersonSpecific extends factory.object( + "Person", + { + name: factory.string, + age: factory.number, + }, + { allowUnknownOptionalFields: true }, + ) {} + + const personConfig = new TreeViewConfiguration({ + schema: PersonSpecific, + }); + const personConfigGeneralied = new TreeViewConfiguration({ + schema: PersonGeneralized, + }); + const checkout = checkoutWithInitialTree( + personConfigGeneralied, + new PersonGeneralized({ name: "Alice", age: 42, address: "123 Main St" }), + ); + const viewSpecific = new SchematizingSimpleTreeView( + checkout, + personConfig, + new MockNodeKeyManager(), + ); + + assert.deepEqual(viewSpecific.compatibility, { + canView: true, + canUpgrade: false, + isEquivalent: false, + canInitialize: false, + }); + + assert.equal(Object.keys(viewSpecific.root).length, 2); + assert.equal(Object.entries(viewSpecific.root).length, 2); + assert.equal(viewSpecific.root.name, "Alice"); + assert.equal(viewSpecific.root.age, 42); + + viewSpecific.dispose(); + const viewGeneralized = new SchematizingSimpleTreeView( + checkout, + personConfigGeneralied, + new MockNodeKeyManager(), + ); + assert.deepEqual(viewGeneralized.compatibility, { + canView: true, + canUpgrade: true, + isEquivalent: true, + canInitialize: false, + }); + assert.equal(Object.keys(viewGeneralized.root).length, 3); + assert.equal(Object.entries(viewGeneralized.root).length, 3); + assert.equal(viewGeneralized.root.name, "Alice"); + assert.equal(viewGeneralized.root.age, 42); + assert.equal(viewGeneralized.root.address, "123 Main St"); + }); + + it("Calling moveToEnd on a more specific schema preserves a node's optional fields that were unknown to that schema", () => { + const factorySpecific = new SchemaFactoryAlpha(undefined); + const factoryGeneral = new SchemaFactoryAlpha(undefined); + class PersonGeneralized extends factorySpecific.object("Person", { + name: factoryGeneral.string, + age: factoryGeneral.number, + address: factoryGeneral.optional(factoryGeneral.string), + }) {} + class PersonSpecific extends factorySpecific.object( + "Person", + { + name: factorySpecific.string, + age: factorySpecific.number, + }, + { allowUnknownOptionalFields: true }, + ) {} + 
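
The tests in this file drive the internal `SchematizingSimpleTreeView` directly; at the public API surface the same check is made through `TreeView.compatibility`. A rough sketch of that flow, reusing the `PersonSpecific` view schema declared just above and assuming an `ITree` whose stored schema was written by the "newer" application (imports and names are illustrative; `allowUnknownOptionalFields` itself is still an alpha-only option):

```typescript
import { TreeViewConfiguration, type ITree } from "@fluidframework/tree";

/**
 * Returns the person's name if this client's (older) view schema can read the document,
 * or undefined if the document requires a newer application version.
 */
function tryReadPersonName(tree: ITree): string | undefined {
	const view = tree.viewWith(new TreeViewConfiguration({ schema: PersonSpecific }));
	if (!view.compatibility.canView) {
		view.dispose();
		return undefined;
	}
	// Fields unknown to this view schema (e.g. "address") are not visible here,
	// but they remain in the document for clients using the newer schema.
	const name = view.root.name;
	view.dispose();
	return name;
}
```

Disposing the view in both branches mirrors the tests above, which dispose one view before constructing another over the same checkout.
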
+ const peopleConfig = new TreeViewConfiguration({ + schema: factorySpecific.array(PersonSpecific), + }); + const peopleGeneralizedConfig = new TreeViewConfiguration({ + schema: factoryGeneral.array(PersonGeneralized), + }); + const checkout = checkoutWithInitialTree(peopleGeneralizedConfig, [ + new PersonGeneralized({ name: "Alice", age: 42, address: "123 Main St" }), + new PersonGeneralized({ name: "Bob", age: 24 }), + ]); + const viewSpecific = new SchematizingSimpleTreeView( + checkout, + peopleConfig, + new MockNodeKeyManager(), + ); + + assert.deepEqual(viewSpecific.compatibility, { + canView: true, + canUpgrade: false, + isEquivalent: false, + canInitialize: false, + }); + + viewSpecific.root.moveRangeToEnd(0, 1); + + // To the view that doesn't have "address" in its schema, the node appears as if it doesn't + // have an address... + assert.equal(Object.keys(viewSpecific.root[1]).length, 2); + assert.equal(viewSpecific.root[1].name, "Alice"); + assert.equal(viewSpecific.root[1].age, 42); + viewSpecific.dispose(); + + const viewGeneralized = new SchematizingSimpleTreeView( + checkout, + peopleGeneralizedConfig, + new MockNodeKeyManager(), + ); + assert.deepEqual(viewGeneralized.compatibility, { + canView: true, + canUpgrade: true, + isEquivalent: true, + canInitialize: false, + }); + + // ...however, despite that client making an edit to Alice, the field is preserved via the move APIs. + assert.equal(Object.keys(viewGeneralized.root[1]).length, 3); + assert.equal(viewGeneralized.root[1].name, "Alice"); + assert.equal(viewGeneralized.root[1].age, 42); + assert.equal(viewGeneralized.root[1].address, "123 Main St"); + }); + it("Open upgradable document, then upgrade schema", () => { const checkout = checkoutWithInitialTree(config, 5); const view = new SchematizingSimpleTreeView( diff --git a/packages/dds/tree/src/test/simple-tree/api/storedSchema.spec.ts b/packages/dds/tree/src/test/simple-tree/api/storedSchema.spec.ts index ad911b7d7bee..e7fa6bde1535 100644 --- a/packages/dds/tree/src/test/simple-tree/api/storedSchema.spec.ts +++ b/packages/dds/tree/src/test/simple-tree/api/storedSchema.spec.ts @@ -27,10 +27,9 @@ describe("simple-tree storedSchema", () => { // but might as will give it a simple smoke test for the various test schema. 
it(`comparePersistedSchema to self ${test.name}`, () => { const persistedA = extractPersistedSchema(test.schema); - const persistedB = extractPersistedSchema(test.schema); const status = comparePersistedSchema( persistedA, - persistedB, + test.schema, { jsonValidator: typeboxValidator, }, diff --git a/packages/dds/tree/src/test/simple-tree/api/treeApi.spec.ts b/packages/dds/tree/src/test/simple-tree/api/treeApi.spec.ts index 72de90a45980..f8dd3d15fc7a 100644 --- a/packages/dds/tree/src/test/simple-tree/api/treeApi.spec.ts +++ b/packages/dds/tree/src/test/simple-tree/api/treeApi.spec.ts @@ -46,6 +46,7 @@ import { testSimpleTrees } from "../../testTrees.js"; import { FluidClientVersion } from "../../../codec/index.js"; import { ajvValidator } from "../../codec/index.js"; import { TreeAlpha } from "../../../shared-tree/index.js"; +import { SchemaFactoryAlpha } from "../../schemaFactoryAlpha.js"; const schema = new SchemaFactory("com.example"); @@ -1301,6 +1302,36 @@ describe("treeNodeApi", () => { }); } } + + describe("with misaligned view and stored schema", () => { + it("does not preserve additional optional fields", () => { + // (because stored keys are not being used, see analogous test in roundtrip-stored) + const sf1 = new SchemaFactoryAlpha("com.example"); + const sf2 = new SchemaFactoryAlpha("com.example"); + class Point2D extends sf1.object( + "Point", + { + x: sf1.number, + y: sf1.number, + }, + { allowUnknownOptionalFields: true }, + ) {} + class Point3D extends sf2.object("Point", { + x: sf2.number, + y: sf2.number, + z: sf2.optional(sf2.number), + }) {} + + const testTree = new Point3D({ x: 1, y: 2, z: 3 }); + const exported = TreeAlpha.exportVerbose(testTree); + + // TODO:AB#26720 The error here should be more clear. + assert.throws( + () => TreeAlpha.importVerbose(Point2D, exported), + /missing field info/, + ); + }); + }); }); describe("roundtrip-stored", () => { @@ -1320,6 +1351,59 @@ describe("treeNodeApi", () => { }); } } + + describe("with misaligned view and stored schema", () => { + const sf1 = new SchemaFactoryAlpha("com.example"); + class Point3D extends sf1.object("Point", { + x: sf1.number, + y: sf1.number, + z: sf1.optional(sf1.number), + }) {} + + it("preserves additional allowed optional fields", () => { + const sf2 = new SchemaFactoryAlpha("com.example"); + + class Point2D extends sf2.object( + "Point", + { + x: sf2.number, + y: sf2.number, + }, + { allowUnknownOptionalFields: true }, + ) {} + const testTree = new Point3D({ x: 1, y: 2, z: 3 }); + const exported = TreeAlpha.exportVerbose(testTree, { useStoredKeys: true }); + const imported = TreeAlpha.importVerbose(Point2D, exported, { useStoredKeys: true }); + const exported2 = TreeAlpha.exportVerbose(imported, { useStoredKeys: true }); + const imported2 = TreeAlpha.importVerbose(Point3D, exported2, { + useStoredKeys: true, + }); + assert.deepEqual(exported, exported2); + assert.deepEqual(Object.keys(imported), ["x", "y"]); + assert.deepEqual(Object.keys(imported2), ["x", "y", "z"]); + assert.equal(imported2.z, 3); + }); + + it("errors on additional disallowed optional fields", () => { + const sf2 = new SchemaFactoryAlpha("com.example"); + + class Point2D extends sf2.object( + "Point", + { + x: sf2.number, + y: sf2.number, + }, + { allowUnknownOptionalFields: false }, + ) {} + const testTree = new Point3D({ x: 1, y: 2, z: 3 }); + const exported = TreeAlpha.exportVerbose(testTree, { useStoredKeys: true }); + + assert.throws( + () => TreeAlpha.importVerbose(Point2D, exported, { useStoredKeys: true }), + /Tree does 
not conform to schema./, + ); + }); + }); }); }); diff --git a/packages/dds/tree/src/test/simple-tree/schemaTypes.spec.ts b/packages/dds/tree/src/test/simple-tree/schemaTypes.spec.ts index eff7319dd90a..2e6fb0d55938 100644 --- a/packages/dds/tree/src/test/simple-tree/schemaTypes.spec.ts +++ b/packages/dds/tree/src/test/simple-tree/schemaTypes.spec.ts @@ -227,7 +227,7 @@ describe("schemaTypes", () => { } // Class that implements both TreeNodeSchemaNonClass and TreeNodeSchemaNonClass - class CustomizedBoth extends objectSchema("B", { x: [schema.number] }, true) { + class CustomizedBoth extends objectSchema("B", { x: [schema.number] }, true, false) { public customized = true; } diff --git a/packages/dds/tree/src/test/simple-tree/toMapTree.spec.ts b/packages/dds/tree/src/test/simple-tree/toMapTree.spec.ts index 983f730ec153..e0b9f37e9c1f 100644 --- a/packages/dds/tree/src/test/simple-tree/toMapTree.spec.ts +++ b/packages/dds/tree/src/test/simple-tree/toMapTree.spec.ts @@ -1294,6 +1294,9 @@ describe("toMapTree", () => { policy: { fieldKinds, validateSchema: true, + // toMapTree drops all extra fields, so varying this policy is unnecessary + // (schema validation only occurs after converting to a MapTree) + allowUnknownOptionalFields: () => false, }, }; } @@ -1400,6 +1403,20 @@ describe("toMapTree", () => { outOfSchemaExpectedError, ); }); + + it("Only imports data in the schema", () => { + const schemaValidationPolicy = createSchemaAndPolicyForObjectNode(); + // Note that despite the content containing keys not in the object schema, this test passes. + // This is by design: if an app author wants to preserve data that isn't in the schema (ex: to + // collaborate with other clients that have newer schema without erasing auxiliary data), they + // can use import/export tree APIs as noted in `SchemaFactoryObjectOptions`. + mapTreeFromNodeData( + { foo: "Hello world", notInSchemaKey: 5, anotherNotInSchemaKey: false }, + [myObjectSchema, schemaFactory.string], + new MockNodeKeyManager(), + schemaValidationPolicy, + ); + }); }); describe("Map node", () => { diff --git a/packages/dds/tree/src/test/simple-tree/viewSchema.spec.ts b/packages/dds/tree/src/test/simple-tree/viewSchema.spec.ts new file mode 100644 index 000000000000..5f0cdd33928f --- /dev/null +++ b/packages/dds/tree/src/test/simple-tree/viewSchema.spec.ts @@ -0,0 +1,369 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. 
+ */ + +import { strict as assert } from "node:assert"; +import { + storedEmptyFieldSchema, + type Adapters, + type TreeStoredSchema, +} from "../../core/index.js"; +import { defaultSchemaPolicy, type FullSchemaPolicy } from "../../feature-libraries/index.js"; +import { + toStoredSchema, + ViewSchema, + type ImplicitFieldSchema, + type SchemaCompatibilityStatus, +} from "../../simple-tree/index.js"; +import { createUnknownOptionalFieldPolicy } from "../../simple-tree/index.js"; +import { SchemaFactoryAlpha } from "../schemaFactoryAlpha.js"; + +const noAdapters: Adapters = {}; +const emptySchema: TreeStoredSchema = { + nodeSchema: new Map(), + rootFieldSchema: storedEmptyFieldSchema, +}; + +const factory = new SchemaFactoryAlpha(""); + +function expectCompatibility( + { view, stored }: { view: ImplicitFieldSchema; stored: TreeStoredSchema }, + expected: ReturnType, + policy: FullSchemaPolicy = { + ...defaultSchemaPolicy, + allowUnknownOptionalFields: createUnknownOptionalFieldPolicy(view), + }, +) { + const viewSchema = new ViewSchema(policy, noAdapters, view); + const compatibility = viewSchema.checkCompatibility(stored); + assert.deepEqual(compatibility, expected); +} + +describe("viewSchema", () => { + describe(".checkCompatibility", () => { + it("works with never trees", () => { + class NeverObject extends factory.objectRecursive("NeverObject", { + foo: factory.requiredRecursive([() => NeverObject]), + }) {} + + const neverField = factory.required([]); + expectCompatibility( + { view: NeverObject, stored: emptySchema }, + { canView: false, canUpgrade: false, isEquivalent: false }, + ); + + expectCompatibility( + { view: neverField, stored: emptySchema }, + { canView: false, canUpgrade: false, isEquivalent: false }, + ); + + // We could reasonably detect these cases as equivalent and update the test expectation here. + // Doing so would amount to normalizing optional fields to forbidden fields when they do not + // contain any constructible types. + // Until we have a use case for it, we can leave it as is (i.e. be stricter with compatibility + // in cases that realistic users probably won't encounter). 
+ expectCompatibility( + { view: factory.optional(NeverObject), stored: emptySchema }, + { canView: false, canUpgrade: true, isEquivalent: false }, + ); + expectCompatibility( + { view: factory.optional([]), stored: emptySchema }, + { canView: false, canUpgrade: true, isEquivalent: false }, + ); + }); + + describe("recognizes identical schema as equivalent", () => { + function expectSelfEquivalent(view: ImplicitFieldSchema) { + expectCompatibility( + { view, stored: toStoredSchema(view) }, + { canView: true, canUpgrade: true, isEquivalent: true }, + ); + } + it("empty schema", () => { + expectSelfEquivalent(factory.optional([])); + expectSelfEquivalent(factory.required([])); + }); + + it("object", () => { + expectSelfEquivalent( + factory.object("foo", { x: factory.number, y: factory.number, baz: factory.string }), + ); + }); + + it("map", () => { + expectSelfEquivalent(factory.map("foo", [factory.number, factory.boolean])); + }); + + it("array", () => { + expectSelfEquivalent(factory.array(factory.number)); + }); + + it("leaf", () => { + expectSelfEquivalent(factory.number); + expectSelfEquivalent(factory.boolean); + expectSelfEquivalent(factory.string); + }); + + it("recursive", () => { + class RecursiveObject extends factory.objectRecursive("foo", { + x: factory.optionalRecursive([() => RecursiveObject]), + }) {} + expectSelfEquivalent(RecursiveObject); + }); + }); + + describe("allows upgrades but not viewing when the view schema allows a strict superset of the stored schema", () => { + const expected: Omit = { + canView: false, + canUpgrade: true, + isEquivalent: false, + }; + + // Add allowed types to map node + it("view: SomethingMap ⊃ stored: NeverMap", () => { + class NeverMap extends factory.map("TestNode", []) {} + class SomethingMap extends factory.mapRecursive("TestNode", [factory.number]) {} + expectCompatibility( + { view: SomethingMap, stored: toStoredSchema(NeverMap) }, + expected, + ); + }); + + // Add allowed types to object node + it("view: FlexibleObject ⊃ stored: StricterObject", () => { + class StricterObject extends factory.object("TestNode", { + x: factory.number, + }) {} + class FlexibleObject extends factory.object("TestNode", { + x: [factory.number, factory.string], + }) {} + expectCompatibility( + { view: FlexibleObject, stored: toStoredSchema(StricterObject) }, + expected, + ); + }); + // Add optional field to existing schema + it("view: optional 3d Point ⊃ stored: 2d Point", () => { + class Point2D extends factory.object("Point", { + x: factory.number, + y: factory.number, + }) {} + class Point3D extends factory.object("Point", { + x: factory.number, + y: factory.number, + z: factory.optional(factory.number), + }) {} + expectCompatibility({ view: Point3D, stored: toStoredSchema(Point2D) }, expected); + }); + + describe("due to field kind relaxation", () => { + it("view: required field ⊃ stored: identifier field", () => { + // Identifiers are strings, so they should only be relaxable to fields which support strings. 
+ expectCompatibility( + { + view: factory.required(factory.string), + stored: toStoredSchema(factory.identifier), + }, + expected, + ); + expectCompatibility( + { + view: factory.required(factory.number), + stored: toStoredSchema(factory.identifier), + }, + { canView: false, canUpgrade: false, isEquivalent: false }, + ); + }); + it("view: optional field ⊃ stored: required field", () => { + expectCompatibility( + { + view: factory.optional(factory.number), + stored: toStoredSchema(factory.required(factory.number)), + }, + expected, + ); + }); + it("view: optional field ⊃ stored: forbidden field", () => { + expectCompatibility( + { + view: factory.optional(factory.number), + stored: emptySchema, + }, + expected, + ); + }); + // Note: despite optional fields being relaxable to sequence fields in the stored schema representation, + // this is not possible to recreate using the current public API due to differences in array and sequence design + }); + }); + + describe("allows viewing but not upgrading when the view schema has opted into allowing the differences", () => { + it("due to additional optional fields in the stored schema", () => { + class Point2D extends factory.object( + "Point", + { + x: factory.number, + y: factory.number, + }, + { allowUnknownOptionalFields: true }, + ) {} + class Point3D extends factory.object("Point", { + x: factory.number, + y: factory.number, + z: factory.optional(factory.number), + }) {} + expectCompatibility( + { view: Point2D, stored: toStoredSchema(Point3D) }, + { canView: true, canUpgrade: false, isEquivalent: false }, + ); + }); + }); + + describe("forbids viewing and upgrading", () => { + describe("when the view schema and stored schema are incomparable", () => { + // (i.e. neither is a subset of the other, hence each allows documents the other does not) + function expectIncomparability(a: ImplicitFieldSchema, b: ImplicitFieldSchema): void { + const expected: Omit = { + canView: false, + canUpgrade: false, + isEquivalent: false, + }; + expectCompatibility({ view: a, stored: toStoredSchema(b) }, expected); + expectCompatibility({ view: b, stored: toStoredSchema(a) }, expected); + } + + describe("due to an allowed type difference", () => { + it("at the root", () => { + expectIncomparability(factory.number, factory.string); + }); + + it("in an object", () => { + class IncompatibleObject1 extends factory.object("TestNode", { + x: factory.number, + }) {} + class IncompatibleObject2 extends factory.objectRecursive("TestNode", { + x: factory.optionalRecursive([() => IncompatibleObject2]), + }) {} + expectIncomparability(IncompatibleObject1, IncompatibleObject2); + }); + + it("in a map", () => { + class IncompatibleMap1 extends factory.map("TestNode", [ + factory.null, + factory.number, + ]) {} + class IncompatibleMap2 extends factory.map("TestNode", [ + factory.null, + factory.string, + ]) {} + expectIncomparability(IncompatibleMap1, IncompatibleMap2); + }); + }); + + it("due to array vs not array differences", () => { + expectIncomparability(factory.array(factory.number), factory.number); + expectIncomparability( + factory.array(factory.number), + factory.optional(factory.number), + ); + expectIncomparability(factory.array(factory.string), factory.identifier); + }); + + describe("due to a field kind difference", () => { + it("view: identifier vs stored: forbidden", () => { + expectIncomparability(factory.identifier, factory.required([])); + }); + + it("view: 2d Point vs stored: required 3d Point", () => { + class Point2D extends factory.object("Point", { + 
x: factory.number, + y: factory.number, + }) {} + class Point3D extends factory.object("Point", { + x: factory.number, + y: factory.number, + z: factory.number, + }) {} + expectIncomparability(Point2D, Point3D); + }); + }); + }); + + describe("when the view schema allows a subset of the stored schema's documents but in ways that misalign with allowed viewing policies", () => { + const expected: Omit = { + canView: false, + canUpgrade: false, + isEquivalent: false, + }; + + // Note: the decision to not allow is policy. See + // "allows viewing but not upgrading when the view schema has opted into allowing the differences" above. + it("stored schema has additional optional fields which view schema did not allow", () => { + class Point2D extends factory.object("Point", { + x: factory.number, + y: factory.number, + }) {} + class Point3D extends factory.object("Point", { + x: factory.number, + y: factory.number, + z: factory.optional(factory.number), + }) {} + expectCompatibility({ view: Point2D, stored: toStoredSchema(Point3D) }, expected); + }); + + // This case demonstrates some need for care when allowing view schema to open documents with more flexible stored schema + it("stored schema has optional fields where view schema expects content", () => { + expectCompatibility( + { + view: factory.identifier, + stored: toStoredSchema(factory.optional(factory.string)), + }, + expected, + ); + expectCompatibility( + { view: factory.number, stored: toStoredSchema(factory.optional(factory.number)) }, + expected, + ); + }); + + describe("stored schema has additional unadapted allowed types", () => { + it("at the root", () => { + expectCompatibility( + { + view: factory.number, + stored: toStoredSchema(factory.required([factory.number, factory.string])), + }, + expected, + ); + }); + + it("in an object", () => { + class IncompatibleObject1 extends factory.object("TestNode", { + x: factory.number, + }) {} + class IncompatibleObject2 extends factory.object("TestNode", { + x: [factory.number, factory.string], + }) {} + expectCompatibility( + { view: IncompatibleObject1, stored: toStoredSchema(IncompatibleObject2) }, + expected, + ); + }); + + it("in a map", () => { + class IncompatibleMap1 extends factory.map("TestNode", [factory.number]) {} + class IncompatibleMap2 extends factory.map("TestNode", [ + factory.number, + factory.string, + ]) {} + expectCompatibility( + { view: IncompatibleMap1, stored: toStoredSchema(IncompatibleMap2) }, + expected, + ); + }); + }); + }); + }); + }); +}); diff --git a/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md b/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md index 752b27794015..4d6bc3b4d57d 100644 --- a/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md +++ b/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md @@ -65,7 +65,7 @@ export interface CommitMetadata { } // @alpha -export function comparePersistedSchema(persisted: JsonCompatible, view: JsonCompatible, options: ICodecOptions, canInitialize: boolean): SchemaCompatibilityStatus; +export function comparePersistedSchema(persisted: JsonCompatible, view: ImplicitFieldSchema, options: ICodecOptions, canInitialize: boolean): SchemaCompatibilityStatus; // @alpha export type ConciseTree = Exclude | THandle | ConciseTree[] | { From 459bbd5bf79841ca75cdbc1985c5617b93803972 Mon Sep 17 00:00:00 2001 From: Joshua Smithrud <54606601+Josmithr@users.noreply.github.com> Date: Thu, 12 Dec 2024 15:52:38 -0800 Subject: [PATCH 2/6] 
docs(docs): Add some "how-to" documentation for website development (#23311) Includes notes on: - "Documents" vs "Pages" - Modifying sidebars - Docs versioning [AB#24262](https://dev.azure.com/fluidframework/235294da-091d-4c29-84fc-cdfc3d90890b/_workitems/edit/24262) --- docs/README.md | 58 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/docs/README.md b/docs/README.md index 8e7a12065a39..3e4a67fcff5c 100644 --- a/docs/README.md +++ b/docs/README.md @@ -168,6 +168,64 @@ Rationale: For more details about leveraging Mermaid diagrams in Docusaurus, see [here](https://docusaurus.io/docs/markdown-features/diagrams). +### Documents vs Pages + +_Documents_ and _Pages_ are distinct concepts in Docusaurus. +For an overview of each, see: [Documents](https://docusaurus.io/docs/docs-introduction) and [Pages](https://docusaurus.io/docs/creating-pages). + +Some primary distinctions: + +1. _Documents_ live under `docs` and `versioned_docs`. _Pages_ live under `src/pages`. +1. _Documents_ are versioned. _Pages_ are not. + +### The Sidebar + +For an overview of how to configure sidebars in Docusaurus, see [here](https://docusaurus.io/docs/sidebar). + +The site's "current" version sidebar is configured via `sidebars.ts`. + +Sidebars for other versions are configured via `versioned_sidebars/version-.json`. + +- Versioned sidebars do not yet support JS/TS file formats. + See . + +Note that sidebars are configured for documents under `docs` and `versioned_docs`; they do not apply to unversioned _Pages_. + +### Documentation Versioning + +For an overview of Docusaurus's versioning functionality, see [here](https://docusaurus.io/docs/versioning). + +We currently offer versioned documentation for each of our supported major versions. +This documentation is intended to be kept up-to-date with the most recent release of each major version series. + +For now, this means we publish documentation (including generated API documentation) for versions `1.x` and `2.x`. + +- We also support generating API documentation for the local repo code in local development only. + See [Local API docs build](#local-api-docs-build), but these are not intended to be published. + +#### Why Only Major Versions? + +We aim to keep the number of concurrently maintained site versions minimized, for a number of reasons: + +1. Developer overhead - the more versions of the docs we offer, the more work we take on to keep documentation for supported versions up to date. +2. User overhead - the more versions we offer, the more users have to think about when they engage with our website. +3. Build performance - Docusaurus's build isn't fast. The more versions we add to our suite, the longer our build times become. + +#### Updating the Site Version + +These steps are based on Docusaurus's tutorial [here](https://docusaurus.io/docs/versioning#tutorials). + +Note: generating the API documentation for the new "current" version will fail if the release branch for that version has not yet been created. + +1. Run `npx --no-install docusaurus docs:version v` from the root of this directory. + E.g., `... docusaurus docs:version v2` when prepping for `v3` documentation. + - This will copy the contents of `docs` into `versioned_docs` under the specified version ID. + - This will also generate a sidebar configuration for the copied version under `versioned_sidebars`. +1. 
Update `config/docs-versions.mjs` to update the version ID for the "current" version, and add the newly frozen version to the `otherVersions` list. + This will automatically update aspects of the site, including: + 1. Which versions of the API documentation are generated during the build + 1. The version selection drop-down in the site header + ### Best practices #### Markdown From a976a85d5a8a5703832975da35671ea9edb50a23 Mon Sep 17 00:00:00 2001 From: Joshua Smithrud <54606601+Josmithr@users.noreply.github.com> Date: Fri, 13 Dec 2024 10:04:09 -0800 Subject: [PATCH 3/6] refactor(docs): Use file path links (#23322) In accordance with the updated guidance in our docs README. [AB#25895](https://dev.azure.com/fluidframework/235294da-091d-4c29-84fc-cdfc3d90890b/_workitems/edit/25895) --- docs/docs/build/audience.mdx | 4 ++-- docs/docs/build/auth.mdx | 2 +- docs/docs/build/container-states-events.mdx | 14 +++++++------- docs/docs/build/containers.mdx | 14 +++++++------- docs/docs/build/data-modeling.mdx | 4 ++-- docs/docs/build/dds.mdx | 16 ++++++++-------- docs/docs/build/experimental-features.mdx | 2 +- docs/docs/build/overview.mdx | 18 +++++++++--------- docs/docs/build/packages.mdx | 6 +++--- docs/docs/data-structures/map.mdx | 18 +++++++++--------- docs/docs/data-structures/overview.mdx | 13 ++++++------- docs/docs/data-structures/tree.mdx | 12 ++++++------ docs/docs/deployment/azure-fluid-relay.mdx | 2 +- docs/docs/faq.mdx | 2 +- docs/docs/start/quick-start.mdx | 2 +- docs/docs/start/tree-start.mdx | 12 ++++++------ docs/docs/start/tutorial.mdx | 6 +++--- docs/docs/testing/testing.mdx | 6 +++--- docs/docs/testing/tinylicious.mdx | 2 +- 19 files changed, 77 insertions(+), 78 deletions(-) diff --git a/docs/docs/build/audience.mdx b/docs/docs/build/audience.mdx index 8d0b14dd826b..5347c7f65bce 100644 --- a/docs/docs/build/audience.mdx +++ b/docs/docs/build/audience.mdx @@ -5,7 +5,7 @@ sidebar_position: 8 The audience is the collection of users connected to a container. When your app creates a container using a service-specific client library, the app is provided with a service-specific audience object for that container as well. Your code can query the audience object for connected users and use that information to build rich and collaborative user presence features. -This document will explain how to use the audience APIs and then provide examples on how to use the audience to show user presence. For anything service-specific, the [Tinylicious](/docs/testing/tinylicious) Fluid service is used. +This document will explain how to use the audience APIs and then provide examples on how to use the audience to show user presence. For anything service-specific, the [Tinylicious](../testing/tinylicious.mdx) Fluid service is used. ## Working with the audience @@ -100,7 +100,7 @@ While the audience is the foundation for user presence features, the list of con Most presence scenarios will involve data that only a single user or client knows and needs to communicate to other audience members. Some of those scenarios will require the app to save data for each user for future sessions. For example, consider a scenario where you want to display how long each user has spent in your application. An active user's time should increment while connected, pause when they disconnect, and resume once they reconnect. This means that the time each user has spent must be persisted so it can survive disconnections. 
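
One way to realize that scenario is sketched below: each client periodically bumps only its own entry in a shared key-value DDS, so the running total lives in the container and survives disconnects. The structural `KeyValueDds` interface and all names are illustrative; in practice this would be a `SharedMap` (optionally holding `SharedCounter` handles) obtained from the container's `initialObjects`, as discussed next.

```typescript
// Structural stand-in for a SharedMap-like DDS, so this sketch does not depend on
// a particular package entry point. Any key-value DDS with get/set works here.
interface KeyValueDds {
	get<T>(key: string): T | undefined;
	set(key: string, value: unknown): void;
}

/**
 * Start accruing "minutes in document" for the given user.
 * Each client writes only its own key, so last-writer-wins merging is safe.
 * Returns a stop function to call when the client disconnects.
 */
function startTimeTracking(timeSpent: KeyValueDds, userId: string): () => void {
	const timer = setInterval(() => {
		const minutesSoFar = timeSpent.get<number>(userId) ?? 0;
		timeSpent.set(userId, minutesSoFar + 1);
	}, 60 * 1000);
	return () => clearInterval(timer);
}
```
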
-One option is to use a `SharedMap` object with a `SharedCounter` object as the value onto which each user will increment their time spent every minute (also see [Introducing distributed data structures](./dds)). All other connected users will then receive changes to that SharedMap automatically. Your app's UI can display data from the map for only users present in the audience. A returning user can find themselves in the map and resume from the latest state. +One option is to use a `SharedMap` object with a `SharedCounter` object as the value onto which each user will increment their time spent every minute (also see [Introducing distributed data structures](./dds.mdx)). All other connected users will then receive changes to that SharedMap automatically. Your app's UI can display data from the map for only users present in the audience. A returning user can find themselves in the map and resume from the latest state. #### Shared transient data diff --git a/docs/docs/build/auth.mdx b/docs/docs/build/auth.mdx index e520409e734e..0566cf992ae1 100644 --- a/docs/docs/build/auth.mdx +++ b/docs/docs/build/auth.mdx @@ -5,4 +5,4 @@ sidebar_position: 4 Security is critical to modern web applications. Fluid Framework, as a part of your web application architecture, is an important piece of infrastructure to secure. Fluid Framework is a layered architecture, and auth-related concepts are implemented based on the Fluid service it's connecting to. This means that the specifics of authentication will differ based on the Fluid service. -To learn how authentication and authorization works in Azure Fluid Relay, see [Authentication and authorization in your app](https://docs.microsoft.com/azure/azure-fluid-relay/concepts/authentication-authorization). Other Fluid services may differ. See [Available Fluid services](../deployment/service-options) for more information. +To learn how authentication and authorization works in Azure Fluid Relay, see [Authentication and authorization in your app](https://docs.microsoft.com/azure/azure-fluid-relay/concepts/authentication-authorization). Other Fluid services may differ. See [Available Fluid services](../deployment/service-options.mdx) for more information. diff --git a/docs/docs/build/container-states-events.mdx b/docs/docs/build/container-states-events.mdx index d042753c4249..907f103dedd6 100644 --- a/docs/docs/build/container-states-events.mdx +++ b/docs/docs/build/container-states-events.mdx @@ -5,7 +5,7 @@ sidebar_position: 3 import { ApiLink } from "@site/src/components/shortLinks"; -This article provides a detailed description of the lifecycle states of the containers and container events. It assumes that you are familiar with [Containers](./containers). +This article provides a detailed description of the lifecycle states of the containers and container events. It assumes that you are familiar with [Containers](./containers.mdx). :::note @@ -56,16 +56,16 @@ Details are below the diagram. #### Unpublished -When a [container is created](./containers#creating-a-container), it is in **unpublished** state and exists only on the creating client. +When a [container is created](./containers.mdx#creating-a-container), it is in **unpublished** state and exists only on the creating client. -Users, or your code, can start editing the data in the container right away, but the data is not shareable with other clients until the [container is published](./containers#publishing-a-container). 
+Users, or your code, can start editing the data in the container right away, but the data is not shareable with other clients until the [container is published](./containers.mdx#publishing-a-container). Use the unpublished state to create initial data to populate your shared objects if needed. This is often useful in scenarios where you want to make sure that all connected clients have a coherent initial state. For example, if collaboration involves tables, your code can set a minimum table size. #### Publishing -A container transitions to **publishing** state when `container.attach` method is called on the creating client. For details, see [Publishing a container](./containers#publishing-a-container). It is normally in this state only very briefly and then automatically transitions to **published** state when publication is complete. +A container transitions to **publishing** state when `container.attach` method is called on the creating client. For details, see [Publishing a container](./containers.mdx#publishing-a-container). It is normally in this state only very briefly and then automatically transitions to **published** state when publication is complete. :::note @@ -76,7 +76,7 @@ In the Fluid APIs, the terms "attach", "attaching", "attached" and "detached" me #### Published to the service When publishing completes, the container is **published**, so it exists in the Fluid service as well as the creating client. -Subsequent clients can [connect to the container](./containers#connecting-to-a-container), synchronize with its latest data values, and edit them. +Subsequent clients can [connect to the container](./containers.mdx#connecting-to-a-container), synchronize with its latest data values, and edit them. A container can never transition back to an unpublished state, but it can transition to a deleted state. @@ -241,10 +241,10 @@ In this state, the client is attempting to connect to the Fluid service, but has A container moves into the **establishing connection** state in any of the following circumstances: - On the creating client, your code calls the `container.attach` method. - For more information, see [Publishing a container](./containers#publishing-a-container). + For more information, see [Publishing a container](./containers.mdx#publishing-a-container). This method publishes the container _and_ connects the client to the service. - Your code calls the client's `getContainer` method on a client that has not previously been connected. - For more information, see [Connecting to a container](./containers#connecting-to-a-container). + For more information, see [Connecting to a container](./containers.mdx#connecting-to-a-container). - The Fluid client runtime tries to reconnect following a disconnection caused by a network problem. - Your code calls the `container.connect` method on a client that had become disconnected. diff --git a/docs/docs/build/containers.mdx b/docs/docs/build/containers.mdx index 112b99296468..11171ca20293 100644 --- a/docs/docs/build/containers.mdx +++ b/docs/docs/build/containers.mdx @@ -31,7 +31,7 @@ The device is a subsequent client in all future sessions. Your code creates containers using APIs provided by a service-specific client library. Each service-specific client library implements a common API for manipulating containers. -For example, the [Tinylicious library](/docs/testing/tinylicious) provides these APIs for the Tinylicious Fluid service. 
+For example, the [Tinylicious library](../testing/tinylicious.mdx) provides these APIs for the Tinylicious Fluid service. These common APIs enable your code to specify what shared objects should live in the `FluidContainer`, and to connect to the container once it is created. ### Container schema @@ -44,7 +44,7 @@ A schema can specify: - Some initial shared objects that are created as soon as the container is created, and are immediately and always available to all connected clients. - The types of shared objects that can be added to the container at runtime and made available for use by all connected clients. -As explained below, the same schema definition that is used to create the container must be provided when clients subsequently connect to the container. For more information about initial objects and dynamic object creation see [Data modeling](./data-modeling). +As explained below, the same schema definition that is used to create the container must be provided when clients subsequently connect to the container. For more information about initial objects and dynamic object creation see [Data modeling](./data-modeling.mdx). This example schema defines two initial objects, `layout` and `text`, and declares that objects of the distributed data structure (DDS) types `SharedCell` and `SharedString` can be created at runtime. @@ -77,7 +77,7 @@ Notes: - The `client` object is defined by the service-specific client library. See the documentation for the service you are using for more details about how to use its service-specific client library. - It is a good practice to destructure the object that is returned by `createContainer` into its two main parts; `container` and `services`. For more about the `services` object, see [Container services](#container-services). -A newly created container is in an **unpublished** state. An unpublished container is stored on the local client only and therefore no data is shared with other clients yet. But the data in it can, and sometimes should be, edited while it is unpublished. See [Container states and events](./container-states-events). +A newly created container is in an **unpublished** state. An unpublished container is stored on the local client only and therefore no data is shared with other clients yet. But the data in it can, and sometimes should be, edited while it is unpublished. See [Container states and events](./container-states-events.mdx). ### Publishing a container @@ -94,7 +94,7 @@ Once published, the Fluid container becomes an entity on Fluid service and subse Invoking the container's `attach` method returns the unique identifier for the container. Subsequent clients use this ID to connect to the container. -Note that once published, a container cannot be unpublished. (But it can be deleted. See [Deleting a container from the service](./container-states-events#deleting-a-container-from-the-service).) +Note that once published, a container cannot be unpublished. (But it can be deleted. See [Deleting a container from the service](./container-states-events.mdx#deleting-a-container-from-the-service).) ```typescript {linenos=inline,hl_lines=[10]} const schema = { @@ -108,7 +108,7 @@ const { container, services } = await client.createContainer(schema); const containerId = await container.attach(); ``` -In addition to publishing the container, the `attach` method also connects the creating client to the published container. 
See [Connecting to a container](#connecting-to-a-container) and [Connection status states](./container-states-events#connection-status-states). +In addition to publishing the container, the `attach` method also connects the creating client to the published container. See [Connecting to a container](#connecting-to-a-container) and [Connection status states](./container-states-events.mdx#connection-status-states). ### Connecting to a container @@ -131,7 +131,7 @@ const { container, services } = :::tip -This section provides only basic information about the _most important_ states that a container can be in. Details about _all_ container states, including state diagrams, state management, editing management, and container event handling are in [Container states and events](./container-states-events). +This section provides only basic information about the _most important_ states that a container can be in. Details about _all_ container states, including state diagrams, state management, editing management, and container event handling are in [Container states and events](./container-states-events.mdx). ::: @@ -191,4 +191,4 @@ For example, consider an education application where multiple teachers collabora When you create or connect to a container with `createContainer` or `getContainer`, the Fluid service will also return a service-specific _services_ object. This object contains references to useful services you can use to build richer applications. -An example of a container service is the [Audience](./audience), which provides user information for clients that are connected to the container. See [Working with the audience](./audience#working-with-the-audience) for more information. +An example of a container service is the [Audience](./audience.mdx), which provides user information for clients that are connected to the container. See [Working with the audience](./audience.mdx#working-with-the-audience) for more information. diff --git a/docs/docs/build/data-modeling.mdx b/docs/docs/build/data-modeling.mdx index 11c3cb9962fc..617cd1536ac8 100644 --- a/docs/docs/build/data-modeling.mdx +++ b/docs/docs/build/data-modeling.mdx @@ -24,7 +24,7 @@ The example below creates a new container with a `SharedMap` and a `SharedCell` About this code note: - `client` represents an object defined by the service-specific client library. See the documentation for the service you are using for more details about how to use its service-specific client library. -- It is a good practice to deconstruct the object that is returned by `createContainer` into its two main parts; `container` and `services`. For an example of the use of the latter, see [Working with the audience](./audience#working-with-the-audience). +- It is a good practice to deconstruct the object that is returned by `createContainer` into its two main parts; `container` and `services`. For an example of the use of the latter, see [Working with the audience](./audience.mdx#working-with-the-audience). ```typescript const schema = { @@ -131,7 +131,7 @@ map.on("valueChanged", (changed) => { } ``` -For more information about handles see [Handles](../concepts/handles). +For more information about handles see [Handles](../concepts/handles.mdx). 
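Putting the two halves of that pattern together, here is a small sketch (the `container` and `map` variables are assumed to come from the earlier examples on this page):

```typescript
// Create a DDS dynamically, then store its handle (not the object itself) in a map.
const newCell = await container.create(SharedCell);
map.set("cell", newCell.handle);

// Later, possibly on another client: read the handle back and resolve it to the DDS.
const cellHandle = map.get("cell");
const cell = await cellHandle.get();
```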
### When to use dynamic objects diff --git a/docs/docs/build/dds.mdx b/docs/docs/build/dds.mdx index eeb0d0f176c3..28fea307d3fb 100644 --- a/docs/docs/build/dds.mdx +++ b/docs/docs/build/dds.mdx @@ -10,7 +10,7 @@ DDSes are low-level data structures, while Data Objects are composed of DDSes an used to organize DDSes into semantically meaningful groupings for your scenario, as well as providing an API surface to your app's data. However, many Fluid applications will use only DDSes. -There are a number of shared objects built into the Fluid Framework. See [Distributed data structures](/docs/data-structures/overview) for more information. +There are a number of shared objects built into the Fluid Framework. See [Distributed data structures](../data-structures/overview.mdx) for more information. DDSes automatically ensure that each client has access to the same state. They're called _distributed data structures_ because they are similar to data structures used commonly when programming, like strings, maps/dictionaries, and @@ -40,7 +40,7 @@ that the merge behavior should match what users intend or expect as they are edi In Fluid, the merge behavior is defined by the DDS. The simplest merge strategy, employed by key-value distributed data structures like SharedMap, is _last writer wins_ (LWW). With this merge strategy, when multiple clients write different values to the same key, the value that was written last will overwrite the others. Refer to the -[documentation for each DDS](/docs/data-structures/overview) for more details about the merge +[documentation for each DDS](../data-structures/overview.mdx) for more details about the merge strategy it uses. ## Performance characteristics @@ -50,7 +50,7 @@ generally fall into two broad categories: _optimistic_ and _consensus-based_. :::note[See also] -- [Fluid Framework architecture](../concepts/architecture) +- [Fluid Framework architecture](../concepts/architecture.mdx) ::: @@ -106,11 +106,11 @@ When storing a DDS within another DDS, your code must store its handle, not the see [Using handles to store and retrieve shared objects][handles-example]. That's all you need to know about handles in order to use DDSes effectively. If you want to learn more about handles, -see [Fluid handles](../concepts/handles). +see [Fluid handles](../concepts/handles.mdx). :::note -If you are considering storing a DDS within another DDS in order to give your app's data a hierarchical structure, consider using a [SharedTree](/docs/data-structures/tree) DDS instead. +If you are considering storing a DDS within another DDS in order to give your app's data a hierarchical structure, consider using a [SharedTree](../data-structures/tree.mdx) DDS instead. ::: @@ -194,15 +194,15 @@ When you have more than one shape in your data model, you could create a _array_ These DDSes are used for storing key-value data. They are all optimistic and use a last-writer-wins merge policy. - [SharedMap][] -- a basic key-value distributed data structure. -- Map nodes in a [SharedTree][] -- a hierarchical data structure with three kinds of complex nodes; maps (similar to [SharedMap][]), arrays, and JavaScript objects. There are also several kinds of leaf nodes, including boolean, string, number, null, and [Fluid handles](../concepts/handles). +- Map nodes in a [SharedTree][] -- a hierarchical data structure with three kinds of complex nodes; maps (similar to [SharedMap][]), arrays, and JavaScript objects. 
There are also several kinds of leaf nodes, including boolean, string, number, null, and [Fluid handles](../concepts/handles.mdx). ### Array-like data -- Array nodes in a [SharedTree][] -- a hierarchical data structure with three kinds of complex nodes; maps (similar to [SharedMap][]), arrays, and JavaScript objects. There are also several kinds of leaf nodes, including boolean, string, number, null, and [Fluid handles](../concepts/handles). +- Array nodes in a [SharedTree][] -- a hierarchical data structure with three kinds of complex nodes; maps (similar to [SharedMap][]), arrays, and JavaScript objects. There are also several kinds of leaf nodes, including boolean, string, number, null, and [Fluid handles](../concepts/handles.mdx). ### Object data -- Object nodes in a [SharedTree][] -- a hierarchical data structure with three kinds of complex nodes; maps (similar to [SharedMap][]), arrays, and JavaScript objects. There are also several kinds of leaf nodes, including boolean, string, number, null, and [Fluid handles](../concepts/handles). +- Object nodes in a [SharedTree][] -- a hierarchical data structure with three kinds of complex nodes; maps (similar to [SharedMap][]), arrays, and JavaScript objects. There are also several kinds of leaf nodes, including boolean, string, number, null, and [Fluid handles](../concepts/handles.mdx). ### Specialized data structures diff --git a/docs/docs/build/experimental-features.mdx b/docs/docs/build/experimental-features.mdx index 46272c064207..bb5e1b13e1ff 100644 --- a/docs/docs/build/experimental-features.mdx +++ b/docs/docs/build/experimental-features.mdx @@ -23,7 +23,7 @@ To enable experimental features, you will be required to provide an implementati ### Observability -When a [container](./containers) is loaded, there will be several events logged to the provided logger. One of these events is `"fluid:telemetry:ContainerLoadStats"`. Within this event, there is a property `"featureGates"`, which will contain each of the experimental features (and the corresponding `ConfigType` values) that the container was loaded with. +When a [container](./containers.mdx) is loaded, there will be several events logged to the provided logger. One of these events is `"fluid:telemetry:ContainerLoadStats"`. Within this event, there is a property `"featureGates"`, which will contain each of the experimental features (and the corresponding `ConfigType` values) that the container was loaded with. ## Usage diff --git a/docs/docs/build/overview.mdx b/docs/docs/build/overview.mdx index 8bd368a3b9fb..4af79d830bb4 100644 --- a/docs/docs/build/overview.mdx +++ b/docs/docs/build/overview.mdx @@ -17,7 +17,7 @@ There are three primary concepts to understand when building an application with ### Service -Fluid clients require a centralized service that all connected clients use to send and receive operations. Fluid offers multiple service implementations that developers can use without any modifications. For each of them, there is a corresponding client library. You must use the client library that corresponds to the service you are connecting to. See [Available Fluid Services](../deployment/service-options) for more information about Fluid service options. +Fluid clients require a centralized service that all connected clients use to send and receive operations. Fluid offers multiple service implementations that developers can use without any modifications. For each of them, there is a corresponding client library. 
You must use the client library that corresponds to the service you are connecting to. See [Available Fluid Services](../deployment/service-options.mdx) for more information about Fluid service options. Each service-specific library adheres to a common API structure and has the primary goal of creating and retrieving container objects. The common structure enables you to switch from one service to another with minimal code changes. @@ -25,22 +25,22 @@ See [Service-specific client libraries](#service-specific-client-libraries) for #### Azure Fluid Relay -[Azure Fluid Relay](../deployment/azure-fluid-relay) is a cloud service that enables real-time collaboration on shared data models. It is a fully managed service that provides a secure, scalable, and reliable way to connect clients to each other and to the data models they share. +[Azure Fluid Relay](../deployment/azure-fluid-relay.mdx) is a cloud service that enables real-time collaboration on shared data models. It is a fully managed service that provides a secure, scalable, and reliable way to connect clients to each other and to the data models they share. #### SharePoint Embedded -Microsoft [SharePoint Embedded](../deployment/sharepoint-embedded) is a cloud-based file and document management system suitable for use in any application. It is a new API-only solution which enables app developers to harness the power of the Microsoft 365 file and document storage platform for any app, and is suitable for enterprises building line of business applications and ISVs building multi-tenant applications. +Microsoft [SharePoint Embedded](../deployment/sharepoint-embedded.mdx) is a cloud-based file and document management system suitable for use in any application. It is a new API-only solution which enables app developers to harness the power of the Microsoft 365 file and document storage platform for any app, and is suitable for enterprises building line of business applications and ISVs building multi-tenant applications. #### Tinylicious -The [Tinylicious service](/docs/testing/tinylicious) runs on your development computer and is used for development and testing. +The [Tinylicious service](../testing/tinylicious.mdx) runs on your development computer and is used for development and testing. It is used in Fluid examples throughout this documentation. ### Container The container is the primary unit of encapsulation in Fluid. It consists of a collection of shared objects and supporting APIs to manage the lifecycle of the container and the objects within it. New containers must be created from a client, but are bound to the data stored on the supporting server. After a container has been created, it can be accessed by other clients. -For more about containers see [Containers](./containers). +For more about containers see [Containers](./containers.mdx). ### Shared objects @@ -53,7 +53,7 @@ A Data Object contains one or more DDSes that are organized to enable a particul DDSes are low-level data structures, while Data Objects are composed of DDSes and other shared objects. Data Objects are used to organize DDSes into semantically meaningful groupings for your scenario, as well as providing an API surface to your app's data. -For more information about these types and the differences between them, see [Data modeling](./data-modeling) and [Introducing distributed data structures](./dds). 
+For more information about these types and the differences between them, see [Data modeling](./data-modeling.mdx) and [Introducing distributed data structures](./dds.mdx). ## Library structure @@ -71,7 +71,7 @@ Fluid works with multiple service implementations. Each service has a correspond For specifics about each service-specific client implementation see their corresponding documentation. -- The client library for the [Tinylicious](/docs/testing/tinylicious) service is in the package [@fluidframework/tinylicious-client](https://www.npmjs.com/package/@fluidframework/tinylicious-client). -- The client library for the [Azure Fluid Relay](../deployment/azure-fluid-relay) is in the package [@fluidframework/azure-client](https://www.npmjs.com/package/@fluidframework/azure-client). +- The client library for the [Tinylicious](../testing/tinylicious.mdx) service is in the package [@fluidframework/tinylicious-client](https://www.npmjs.com/package/@fluidframework/tinylicious-client). +- The client library for the [Azure Fluid Relay](../deployment/azure-fluid-relay.mdx) is in the package [@fluidframework/azure-client](https://www.npmjs.com/package/@fluidframework/azure-client). -For more information see [Packages](./packages). +For more information see [Packages](./packages.mdx). diff --git a/docs/docs/build/packages.mdx b/docs/docs/build/packages.mdx index 27eff2d73fae..07045ab09213 100644 --- a/docs/docs/build/packages.mdx +++ b/docs/docs/build/packages.mdx @@ -24,8 +24,8 @@ contained within itself, as well as to inspect the state of the collaboration se You'll use one or more shared objects in your container to model your collaborative data. The `fluid-framework` package includes three data structures that cover a broad range of scenarios: -1. [SharedMap](../data-structures/map), a map-like data structure for storing key/value pair data. -2. [SharedString](../data-structures/string), a data structure for string data. +1. [SharedMap](../data-structures/map.mdx), a map-like data structure for storing key/value pair data. +2. [SharedString](../data-structures/string.md), a data structure for string data. ## Package scopes @@ -36,7 +36,7 @@ Fluid Framework packages are published under one of the following npm scopes: - @fluid-internal - @fluid-tools -In addition to the scoped packages, two unscoped packages are published: the [fluid-framework][] package, described earlier, and the `tinylicious` package, which contains a minimal Fluid server. For more information, see [Tinylicious](../testing/tinylicious). +In addition to the scoped packages, two unscoped packages are published: the [fluid-framework][] package, described earlier, and the `tinylicious` package, which contains a minimal Fluid server. For more information, see [Tinylicious](../testing/tinylicious.mdx). Unless you are contributing to the Fluid Framework, you should only need the unscoped packages and packages from the **@fluidframework** scope. You can [read more about the scopes and their intent][scopes] in the Fluid Framework wiki. diff --git a/docs/docs/data-structures/map.mdx b/docs/docs/data-structures/map.mdx index f6ff894dc093..5c800984db68 100644 --- a/docs/docs/data-structures/map.mdx +++ b/docs/docs/data-structures/map.mdx @@ -16,15 +16,15 @@ For example, in a traditional `Map`, setting a key would only set it on the loca - You must only store the following as values in a `SharedMap`: - _Plain objects_ -- those that are safely JSON-serializable. 
If you store class instances, for example, then data synchronization will not work as expected. - - [Handles](/docs/concepts/handles) to other Fluid DDSes + - [Handles](../concepts/handles.mdx) to other Fluid DDSes - When storing objects as values in a SharedMap, changes to the object will be synchronized whole-for-whole. This means that individual changes to the properties of an object are not merged during synchronization. If you need this behavior you should store individual properties in the SharedMap instead of full objects. - See [Picking the right data structure](../build/dds#picking-the-right-data-structure) for more information. + See [Picking the right data structure](../build/dds.mdx#picking-the-right-data-structure) for more information. ::: -For additional background on DDSes and a general overview of their design, see [Introducing distributed data structures](../build/dds). +For additional background on DDSes and a general overview of their design, see [Introducing distributed data structures](../build/dds.mdx). ## Usage @@ -43,8 +43,8 @@ npm install fluid-framework The `FluidContainer` provides a container schema for defining which DDSes you would like to load from it. It provides two separate fields for establishing an initial roster of objects and dynamically creating new ones. -- For general guidance on using the `ContainerSchema`, please see [Data modeling](../build/data-modeling). -- For guidance on how to create/load a container using a service-specific client, please see [Containers - Creating and loading](../build/containers#creating--connecting). +- For general guidance on using the `ContainerSchema`, please see [Data modeling](../build/data-modeling.mdx). +- For guidance on how to create/load a container using a service-specific client, please see [Containers - Creating and loading](../build/containers.mdx#creating--connecting). Let's take a look at how you would specifically use the `ContainerSchema` for `SharedMap`. @@ -91,7 +91,7 @@ const { container, services } = await client.getContainer(id, schema); const newMap = await container.create(SharedMap); // Create a new SharedMap ``` -Once the async call to `create` returns, you can treat it the same as you were using the `SharedMap` instances from your initial objects above. The only caveat here is that you will need to maintain a pointer to your newly created object. To store it in another `SharedMap`, please see the [Storing shared objects](#storing-shared-objects) section below and for general guidance on storing DDS references as handles, please see [Using handles to store and retrieve shared objects](../build/data-modeling#using-handles-to-store-and-retrieve-shared-objects). +Once the async call to `create` returns, you can treat it the same as you were using the `SharedMap` instances from your initial objects above. The only caveat here is that you will need to maintain a pointer to your newly created object. To store it in another `SharedMap`, please see the [Storing shared objects](#storing-shared-objects) section below and for general guidance on storing DDS references as handles, please see [Using handles to store and retrieve shared objects](../build/data-modeling.mdx#using-handles-to-store-and-retrieve-shared-objects). ### API @@ -196,7 +196,7 @@ Because the local `set` call causes the `valueChanged` event to be sent, and you ### Storing objects -Storing objects in a `SharedMap` is very similar to storing primitives. 
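For example, a quick sketch (assuming a `map` obtained from the container's `initialObjects`, as in the earlier examples):

```typescript
// The whole object is the unit of storage; it is written and replaced as a single value.
map.set("shape", { kind: "circle", x: 10, y: 20 });

// To "update a field", read the current object and write a whole new object back.
const shape = map.get("shape");
map.set("shape", { ...shape, x: 15 });
```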
However, one thing to note is that all values in `SharedMap` are merged using a last writer wins (LWW) strategy. This means that if multiple clients are writing values to the same key, whoever made the last update will "win" and overwrite the others. While this is fine for primitives, you should be mindful when storing objects in `SharedMaps` if you are looking for individual fields within the object to be independently modified. See [Picking the right data structure](../build/dds#picking-the-right-data-structure) for more information. +Storing objects in a `SharedMap` is very similar to storing primitives. However, one thing to note is that all values in `SharedMap` are merged using a last writer wins (LWW) strategy. This means that if multiple clients are writing values to the same key, whoever made the last update will "win" and overwrite the others. While this is fine for primitives, you should be mindful when storing objects in `SharedMaps` if you are looking for individual fields within the object to be independently modified. See [Picking the right data structure](../build/dds.mdx#picking-the-right-data-structure) for more information. :::warning @@ -321,9 +321,9 @@ One way to think about this is that each value stored into the `SharedMap` is th One of the powerful features of DDSes is that they are nestable. A DDS can be stored in another DDS allowing you to dynamically set up your data hierarchy as best fits your application needs. -When storing a DDS within another DDS, you must store its [handle](../build/data-modeling#using-handles-to-store-and-retrieve-shared-objects), not the DDS itself. Similarly, when retrieving DDSes nested within other DDSes, you need to first get the object's handle and then get the object from the handle. This reference based approach allows the Fluid Framework to virtualize the data underneath, only loading objects when they are requested. +When storing a DDS within another DDS, you must store its [handle](../build/data-modeling.mdx#using-handles-to-store-and-retrieve-shared-objects), not the DDS itself. Similarly, when retrieving DDSes nested within other DDSes, you need to first get the object's handle and then get the object from the handle. This reference based approach allows the Fluid Framework to virtualize the data underneath, only loading objects when they are requested. -That's all you need to know about handles in order to use DDSes effectively. If you want to learn more about handles, see [Fluid handles](/docs/concepts/handles). +That's all you need to know about handles in order to use DDSes effectively. If you want to learn more about handles, see [Fluid handles](../concepts/handles.mdx). The following example demonstrates nesting DDSes using `SharedMap`. You specify an initial SharedMap as part of the `initialObjects` in the `ContainerSchema` and add the `SharedMap` type to `dynamicObjectTypes`. diff --git a/docs/docs/data-structures/overview.mdx b/docs/docs/data-structures/overview.mdx index d93070195fca..bc5b19494bca 100644 --- a/docs/docs/data-structures/overview.mdx +++ b/docs/docs/data-structures/overview.mdx @@ -9,8 +9,7 @@ who've used common data structures before. :::note -This article assumes that you are familiar with -[Introducing distributed data structures](/docs/build/dds). +This article assumes that you are familiar with [Introducing distributed data structures](../build/dds.mdx). 
::: @@ -41,14 +40,14 @@ Below we've enumerated the data structures and described when they may be most u ## SharedTree -[SharedTree](./tree) is a hierarchical data structure that looks and feels like simple JavaScript objects. -It is a tree of data with complex nodes, including maps that work similarly to [SharedMap](#sharedmap), and several kinds of leaf nodes, including boolean, string, number, null, and [Fluid handles](../concepts/handles). +[SharedTree](./tree.mdx) is a hierarchical data structure that looks and feels like simple JavaScript objects. +It is a tree of data with complex nodes, including maps that work similarly to [SharedMap](#sharedmap), and several kinds of leaf nodes, including boolean, string, number, null, and [Fluid handles](../concepts/handles.mdx). This is the recommended data structure for any new development. ## SharedMap -[SharedMap](./map) is a basic key-value data structure. +[SharedMap](./map.mdx) is a basic key-value data structure. It is optimistic and uses a last-writer-wins merge policy. Although the value of a pair can be a complex object, the value of any given pair can only be changed whole-for-whole. @@ -62,7 +61,7 @@ Although the value of a pair can be a complex object, the value of any given pai ## SharedString -The [SharedString](./string) DDS is used for unstructured text data that can be collaboratively edited. It is optimistic. +The [SharedString](./string.md) DDS is used for unstructured text data that can be collaboratively edited. It is optimistic. ## Consensus data structures @@ -85,7 +84,7 @@ Typical scenarios require the connected clients to "agree" on some course of act ## DDS Deprecation -[SharedTree](./tree) is an incredibly flexible DDS that allows developers to represent a variety of data models (primitives, objects, lists, maps in any nested orientation). +[SharedTree](./tree.mdx) is an incredibly flexible DDS that allows developers to represent a variety of data models (primitives, objects, lists, maps in any nested orientation). Due to this flexibility, it can also support features of various other DDSes in the Fluid repertoire. As SharedTree becomes more feature-rich, we plan to start reducing support for other DDSes that offer a sub-set of the functionality. This process takes the form of a deprecation journey for some of our existing DDSes, but rest assured, we will provide enough runway and support to migrate away from deprecated DDSes. diff --git a/docs/docs/data-structures/tree.mdx b/docs/docs/data-structures/tree.mdx index af6c89a31a0b..19d5b758955e 100644 --- a/docs/docs/data-structures/tree.mdx +++ b/docs/docs/data-structures/tree.mdx @@ -9,7 +9,7 @@ import { PackageLink } from "@site/src/components/shortLinks"; The `SharedTree` distributed data structure (DDS), available starting with Fluid Framework 2.0, is used to store most or all of your application's shared data in a hierarchical structure. -To get started working with `SharedTree` in your application, read this [quick start guide](../start/tree-start). +To get started working with `SharedTree` in your application, read this [quick start guide](../start/tree-start.mdx). A `SharedTree` has the following characteristics: @@ -32,7 +32,7 @@ The following leaf node types are available: - **number**: Works identically to a JavaScript JavaScript number. - **string**: Works identically to a JavaScript string. - **null**: Works identically to a JavaScript null. -- **FluidHandle**: A handle to a Fluid DDS or Data Object in the current container. 
For more information about handles see [Handles](../concepts/handles). +- **FluidHandle**: A handle to a Fluid DDS or Data Object in the current container. For more information about handles see [Handles](../concepts/handles.mdx). The following subsections describe the available internal node types. @@ -418,13 +418,13 @@ set(key: string, value: T) The `set()` method sets/changes the value of the item with the specified key. If the key is not present, the item is added to the map. Note the following: - The `T` can be any type that conforms to the map node's schema. For example, if the schema was defined with `class MyMap extends sf.map([sf.number, sf.string]);`, then `T` could be `number` or `string`. -- If multiple clients set the same key simultaneously, the key gets the value set by the last edit to apply. For the meaning of "simultaneously", see [Types of distributed data structures](./overview). +- If multiple clients set the same key simultaneously, the key gets the value set by the last edit to apply. For the meaning of "simultaneously", see [Types of distributed data structures](./overview.mdx). ```typescript delete(key: string): void ``` -The `delete()` method removes the item with the specified key. If one client sets a key and another deletes it simultaneously, the key is deleted only if the deletion op is the last one applied. For the meaning of "simultaneously", see [Types of distributed data structures](./overview). +The `delete()` method removes the item with the specified key. If one client sets a key and another deletes it simultaneously, the key is deleted only if the deletion op is the last one applied. For the meaning of "simultaneously", see [Types of distributed data structures](./overview.mdx). ##### Map node properties @@ -474,7 +474,7 @@ Array nodes have two methods that remove items from the node. Note the following - A removed item may be restored as a result of a simultaneous move operation from another client. For example, if one client removes items 3-5, and another client simultaneously moves items 4 and 5, then, if the move operation is ordered last, only item 3 is removed (items 4 and 5 are restored and moved to their destination by the move operation). If the remove operation is ordered last, then all three items will be removed, no matter where they reside. - Removal of items never overrides inserting (or moving in) items. For example, if one client removes items 10-15, and another client simultaneously inserts an item at index 12, the original items 10-15 are removed, but new item is inserted between item 9 and the item that used to be at index 16. This is true regardless of the order of the remove and insert operations. -For the meaning of "simultaneously", see [Types of distributed data structures](./overview). +For the meaning of "simultaneously", see [Types of distributed data structures](./overview.mdx). ```typescript removeAt(index: number) @@ -515,7 +515,7 @@ Note the following about these methods: - If multiple clients simultaneously move an item, then that item will be moved to the destination indicated by the move of the client whose edit is ordered last. - A moved item may be removed as a result of a simultaneous remove operation from another client. For example, if one client moves items 3-5, and another client simultaneously removes items 4 and 5, then, if the remove operation is ordered last, items 4 and 5 are removed from their destination by the remove operation. 
If the move operation is ordered last, then all three items will be moved to the destination. -For the meaning of "simultaneously", see [Types of distributed data structures](./overview). +For the meaning of "simultaneously", see [Types of distributed data structures](./overview.mdx). ### Events diff --git a/docs/docs/deployment/azure-fluid-relay.mdx b/docs/docs/deployment/azure-fluid-relay.mdx index 0b3d6da2bb35..7697527005b7 100644 --- a/docs/docs/deployment/azure-fluid-relay.mdx +++ b/docs/docs/deployment/azure-fluid-relay.mdx @@ -7,7 +7,7 @@ import { PackageLink } from "@site/src/components/shortLinks"; [Azure Fluid Relay](https://aka.ms/azurefluidrelay) is a cloud-hosted Fluid service. You can connect your Fluid application to an Azure Fluid Relay instance using the `AzureClient` in the @fluidframework/azure-client package. -AzureClient handles the logic of connecting your [Fluid container](../build/containers) to the service while keeping the container object itself service-agnostic. +AzureClient handles the logic of connecting your [Fluid container](../build/containers.mdx) to the service while keeping the container object itself service-agnostic. You can use one instance of this client to manage multiple containers. To learn more about using AzureClient and Azure Fluid Relay, see [Connect to an Azure Fluid Relay instance](https://docs.microsoft.com/azure/azure-fluid-relay/how-tos/connect-fluid-azure-service). diff --git a/docs/docs/faq.mdx b/docs/docs/faq.mdx index 45b40e406335..52a746d1c04b 100644 --- a/docs/docs/faq.mdx +++ b/docs/docs/faq.mdx @@ -101,7 +101,7 @@ while a SignalR solution designed to distribute state would require additional s ### Does Fluid use operational transforms? Fluid does not use Operational Transforms (OTs), but we learned a tremendous amount from the literature on OT. -While OT uses operations that can be applied out of order by transforming operations to account for recent changes, Fluid relies on a [Total Order Broadcast](./concepts/tob) to guarantee that all operations are applied in a specific order. +While OT uses operations that can be applied out of order by transforming operations to account for recent changes, Fluid relies on a [Total Order Broadcast](./concepts/tob.mdx) to guarantee that all operations are applied in a specific order. ### Does Fluid use CRDT? diff --git a/docs/docs/start/quick-start.mdx b/docs/docs/start/quick-start.mdx index 0a78f266d7b8..d23828424d45 100644 --- a/docs/docs/start/quick-start.mdx +++ b/docs/docs/start/quick-start.mdx @@ -61,4 +61,4 @@ that the state of the dice changes in both clients. ## Next step -Walk through the code of this dice roller app with [Tutorial: DiceRoller application](./tutorial) +Walk through the code of this dice roller app with [Tutorial: DiceRoller application](./tutorial.mdx) diff --git a/docs/docs/start/tree-start.mdx b/docs/docs/start/tree-start.mdx index c2185128482f..6827ad02441a 100644 --- a/docs/docs/start/tree-start.mdx +++ b/docs/docs/start/tree-start.mdx @@ -5,7 +5,7 @@ sidebar_position: 2 import { ApiLink } from "@site/src/components/shortLinks"; -`SharedTree` is a [distributed data structure](../data-structures/overview) that looks and feels like simple JavaScript objects with a type safe wrapper. +`SharedTree` is a [distributed data structure](../data-structures/overview.mdx) that looks and feels like simple JavaScript objects with a type safe wrapper. This guide will walk you through the basics of creating, configuring, and interacting with a `SharedTree` in your application. 
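As a preview of where this guide ends up (the names below are illustrative only), a typed tree is defined once with `SchemaFactory` and then edited like ordinary JavaScript objects:

```typescript
import { SchemaFactory } from "fluid-framework";

const sf = new SchemaFactory("com.example.todo");

class TodoItem extends sf.object("TodoItem", {
	title: sf.string,
	completed: sf.boolean,
}) {}

class TodoList extends sf.array("TodoList", TodoItem) {}

// Once a view of the tree is initialized with this schema, edits look like plain JS:
//   list.insertAtEnd(new TodoItem({ title: "Buy milk", completed: false }));
//   list[0].completed = true;
```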
## Creating a Tree @@ -15,7 +15,7 @@ This example creates a container using an Azure specific client. **note**: `enableRuntimeIdCompressor` must be enabled in the container runtime options in order to use `SharedTree` -See more info on creating and loading containers [here](../build/containers#creating-a-container). +See more info on creating and loading containers [here](../build/containers.mdx#creating-a-container). ```typescript import { AzureClient } from "@fluidframework/azure-client"; @@ -59,7 +59,7 @@ const schemaFactory = new SchemaFactory("some-schema-id-prob-a-uuid"); ``` `SchemaFactory` provides some methods for specifying collection types including `object()`, `array()`, and `map()`; and five primitive data types for specifying leaf nodes: `boolean`, `string`, `number`, `null`, and `handle`. -See [schema definition](../data-structures/tree) for more info on the provided types. +See [schema definition](../data-structures/tree.mdx) for more info on the provided types. You can define a schema by extending one of the built-in object types. As an example, let's write a schema for a todo list: @@ -182,7 +182,7 @@ class TodoList extends schemaFactory.object("TodoList", { ``` These methods are designed to merge well in collaborative settings without you having to think much about it. -See [schema definition](../data-structures/tree) for more details on the built-in editing methods. +See [schema definition](../data-structures/tree.mdx) for more details on the built-in editing methods. You can also read more about how these editing operations work in collaborative settings [here](https://github.com/microsoft/FluidFramework/blob/main/packages/dds/tree/docs/user-facing/merge-semantics.md). ### Grouping Edits into Transactions @@ -201,7 +201,7 @@ Tree.runTransaction(myNode, (node) => { }) ``` -See more information on transactions [here](../data-structures/tree). +See more information on transactions [here](../data-structures/tree.mdx). ### Undoing and Redoing Edits @@ -244,4 +244,4 @@ useEffect(() => { Note that any `Revertible`s obtained should be disposed of by the app author in order to free up the resources that are required to revert an edit. -See [this blog post](https://devblogs.microsoft.com/microsoft365dev/fluid-framework-undo-redo-and-transactions-in-sharedtree/) or [undo redo support](../data-structures/tree) for more information. +See [this blog post](https://devblogs.microsoft.com/microsoft365dev/fluid-framework-undo-redo-and-transactions-in-sharedtree/) or [undo redo support](../data-structures/tree.mdx) for more information. diff --git a/docs/docs/start/tutorial.mdx b/docs/docs/start/tutorial.mdx index eaba0463ebbd..1969848f3947 100644 --- a/docs/docs/start/tutorial.mdx +++ b/docs/docs/start/tutorial.mdx @@ -5,7 +5,7 @@ sidebar_position: 3 import { MockDiceRollerSample } from "@site/src/components/mockDiceRoller"; -In this walkthrough, you'll learn about using the Fluid Framework by examining the DiceRoller application at https://github.com/microsoft/FluidHelloWorld. To get started, go through the [Quick Start](./quick-start) guide. +In this walkthrough, you'll learn about using the Fluid Framework by examining the DiceRoller application at https://github.com/microsoft/FluidHelloWorld. To get started, go through the [Quick Start](./quick-start.mdx) guide. Date: Fri, 13 Dec 2024 10:11:19 -0800 Subject: [PATCH 4/6] fix(docs): Don't inherit `@system` notices from ancestry (#23312) It is valid for a non-`@system` API to extend a `@system` one. 
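A hypothetical illustration of the shape in question (the names are invented for this example):

```typescript
/**
 * Base surface that exists only so other interfaces can extend it.
 * @system
 */
export interface ExampleBase {
	readonly id: string;
}

/**
 * Public API that extends the system-only base. With this change it no longer
 * inherits the "System" alert from its ancestry in the generated docs.
 */
export interface Example extends ExampleBase {
	readonly name: string;
}
```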
See [here](https://fluidframework.com/docs/build/releases-and-apitags#api-support-levels) for more details on our tags, including `@system`. In fact, this is a common pattern - we introduce a base interface that some of our API members extend. We don't want customers to use that base interface directly, but it needs to exist for one reason or another. In this case, we don't want the implementations of that interface to inherit the `@system` notice. [AB#22762](https://dev.azure.com/fluidframework/235294da-091d-4c29-84fc-cdfc3d90890b/_workitems/edit/22762) --- docs/infra/api-markdown-documenter/render-api-documentation.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/infra/api-markdown-documenter/render-api-documentation.mjs b/docs/infra/api-markdown-documenter/render-api-documentation.mjs index cad96a3feb2e..3f58b96b3e6f 100644 --- a/docs/infra/api-markdown-documenter/render-api-documentation.mjs +++ b/docs/infra/api-markdown-documenter/render-api-documentation.mjs @@ -92,7 +92,7 @@ export async function renderApiDocumentation(inputDir, outputDir, uriRootDir, ap createDefaultLayout: layoutContent, getAlertsForItem: (apiItem) => { const alerts = []; - if (ApiItemUtilities.ancestryHasModifierTag(apiItem, "@system")) { + if (ApiItemUtilities.hasModifierTag(apiItem, "@system")) { alerts.push("System"); } else { if (ApiItemUtilities.isDeprecated(apiItem)) { From e8762e37cd5edbad36b78b5a40d62a730522e18f Mon Sep 17 00:00:00 2001 From: Tony Murphy Date: Fri, 13 Dec 2024 12:08:27 -0800 Subject: [PATCH 5/6] Merge-Tree and SharedString ISegment Deprecations (#23323) The current ISegment interface over-exposes a number of properties which do not have an external use case, and any external usage could result in damage to the underlying merge-tree including data corruption. The only use case that will continue to be supported is determining if a segment is removed. 
For this purpose we've add the following `function segmentIsRemoved(segment: ISegment): boolean` For example, checking if a segment is not removed would change as follows: ``` diff - if(segment.removedSeq === undefined){ + if(!segmentIsRemoved(segment)){ ``` The following properties are deprecated on ISegment and its implementations: - clientId - index - localMovedSeq - localRefs - localRemovedSeq - localSeq - movedClientsIds - movedSeq - movedSeqs - ordinal - removedClientIds - removedSeq - seq - wasMovedOnInsert Additionally, the following types are also deprecated, and will become internal: - IMergeNodeCommon - IMoveInfo - IRemovalInfo - LocalReferenceCollection --------- Co-authored-by: Alex Villarreal <716334+alexvy86@users.noreply.github.com> Co-authored-by: Abram Sanderson --- .changeset/real-grapes-kick.md | 41 ++++++ .../data-objects/webflow/src/view/layout.ts | 11 +- .../api-report/merge-tree.legacy.alpha.api.md | 69 ++++++--- .../dds/merge-tree/src/endOfTreeSegment.ts | 9 +- packages/dds/merge-tree/src/index.ts | 1 + packages/dds/merge-tree/src/localReference.ts | 1 + packages/dds/merge-tree/src/mergeTree.ts | 19 +++ packages/dds/merge-tree/src/mergeTreeNodes.ts | 131 +++++++++++++++++- packages/dds/merge-tree/src/partialLengths.ts | 4 + packages/dds/merge-tree/src/revertibles.ts | 3 + 10 files changed, 259 insertions(+), 30 deletions(-) create mode 100644 .changeset/real-grapes-kick.md diff --git a/.changeset/real-grapes-kick.md b/.changeset/real-grapes-kick.md new file mode 100644 index 000000000000..111d9fa44cda --- /dev/null +++ b/.changeset/real-grapes-kick.md @@ -0,0 +1,41 @@ +--- +"@fluidframework/merge-tree": minor +"@fluidframework/sequence": minor +--- +--- +"section": deprecation +--- + +Merge-Tree and SharedString ISegment Deprecations + +The current ISegment interface over-exposes a number of properties which do not have an external use case, and any external usage could result in damage to the underlying merge-tree including data corruption. + +The only use case that will continue to be supported is determining if a segment is removed. For this purpose we've added the free function `segmentIsRemoved(segment: ISegment): boolean`. + +For example, checking if a segment is not removed would change as follows: +``` diff +- if(segment.removedSeq === undefined){ ++ if(!segmentIsRemoved(segment)){ +``` + +The following properties are deprecated on ISegment and its implementations: +- clientId +- index +- localMovedSeq +- localRefs +- localRemovedSeq +- localSeq +- movedClientsIds +- movedSeq +- movedSeqs +- ordinal +- removedClientIds +- removedSeq +- seq +- wasMovedOnInsert + +Additionally, the following types are also deprecated, and will become internal (i.e. 
users of the Fluid Framework will not have access to them): +- IMergeNodeCommon +- IMoveInfo +- IRemovalInfo +- LocalReferenceCollection diff --git a/examples/data-objects/webflow/src/view/layout.ts b/examples/data-objects/webflow/src/view/layout.ts index 84685d9a68e8..25bc98eb3a8f 100644 --- a/examples/data-objects/webflow/src/view/layout.ts +++ b/examples/data-objects/webflow/src/view/layout.ts @@ -7,7 +7,10 @@ import assert from "assert"; import { EventEmitter } from "@fluid-example/example-utils"; -import { MergeTreeMaintenanceType } from "@fluidframework/merge-tree/internal"; +import { + MergeTreeMaintenanceType, + segmentIsRemoved, +} from "@fluidframework/merge-tree/internal"; import { ISegment, LocalReferencePosition, @@ -309,7 +312,7 @@ export class Layout extends EventEmitter { ); // Must not request a formatter for a removed segment. - assert.strictEqual(segment.removedSeq, undefined); + assert.strictEqual(segmentIsRemoved(segment), false); // If we've checkpointed this segment previously, we can potentially reuse our previous state to // minimize damage to the DOM. @@ -421,7 +424,7 @@ export class Layout extends EventEmitter { public nodeToSegment(node: Node): ISegment { const seg = this.nodeToSegmentMap.get(node); - return seg && (seg.removedSeq === undefined ? seg : undefined); + return seg && (!segmentIsRemoved(seg) ? seg : undefined); } public segmentAndOffsetToNodeAndOffset(segment: ISegment, offset: number) { @@ -609,7 +612,7 @@ export class Layout extends EventEmitter { // If the segment was removed, promptly remove any DOM nodes it emitted. for (const { segment } of e.ranges) { - if (segment.removedSeq) { + if (segmentIsRemoved(segment)) { this.removeSegment(segment); } } diff --git a/packages/dds/merge-tree/api-report/merge-tree.legacy.alpha.api.md b/packages/dds/merge-tree/api-report/merge-tree.legacy.alpha.api.md index dfcebd7202e5..83c61ee8a1e8 100644 --- a/packages/dds/merge-tree/api-report/merge-tree.legacy.alpha.api.md +++ b/packages/dds/merge-tree/api-report/merge-tree.legacy.alpha.api.md @@ -27,7 +27,7 @@ export abstract class BaseSegment implements ISegment { cachedLength: number; // (undocumented) canAppend(segment: ISegment): boolean; - // (undocumented) + // @deprecated (undocumented) clientId: number; // (undocumented) abstract clone(): ISegment; @@ -37,33 +37,33 @@ export abstract class BaseSegment implements ISegment { protected abstract createSplitSegmentAt(pos: number): BaseSegment | undefined; // (undocumented) hasProperty(key: string): boolean; - // (undocumented) + // @deprecated (undocumented) index: number; // (undocumented) isLeaf(): this is ISegment; - // (undocumented) + // @deprecated (undocumented) localMovedSeq?: number; - // (undocumented) + // @deprecated (undocumented) localRefs?: LocalReferenceCollection; - // (undocumented) + // @deprecated (undocumented) localRemovedSeq?: number; - // (undocumented) + // @deprecated (undocumented) localSeq?: number; - // (undocumented) + // @deprecated (undocumented) movedClientIds?: number[]; - // (undocumented) + // @deprecated (undocumented) movedSeq?: number; - // (undocumented) + // @deprecated (undocumented) movedSeqs?: number[]; - // (undocumented) + // @deprecated (undocumented) ordinal: string; // (undocumented) properties?: PropertySet; - // (undocumented) + // @deprecated (undocumented) removedClientIds?: number[]; - // (undocumented) + // @deprecated (undocumented) removedSeq?: number; - // (undocumented) + // @deprecated (undocumented) seq: number; // (undocumented) splitAt(pos: number): 
ISegment | undefined; @@ -73,7 +73,7 @@ export abstract class BaseSegment implements ISegment { readonly trackingCollection: TrackingGroupCollection; // (undocumented) abstract readonly type: string; - // (undocumented) + // @deprecated (undocumented) wasMovedOnInsert?: boolean | undefined; } @@ -160,7 +160,7 @@ export interface IMarkerDef { refType?: ReferenceType; } -// @alpha +// @alpha @deprecated export interface IMergeNodeCommon { index: number; // (undocumented) @@ -320,7 +320,7 @@ export interface IMergeTreeSegmentDelta { segment: ISegment; } -// @alpha +// @alpha @deprecated export interface IMoveInfo { localMovedSeq?: number; movedClientIds: number[]; @@ -345,7 +345,7 @@ export interface IRelativePosition { offset?: number; } -// @alpha +// @alpha @deprecated export interface IRemovalInfo { localRemovedSeq?: number; removedClientIds: number[]; @@ -353,21 +353,47 @@ export interface IRemovalInfo { } // @alpha -export interface ISegment extends IMergeNodeCommon, Partial, Partial { +export interface ISegment { // (undocumented) append(segment: ISegment): void; attribution?: IAttributionCollection; cachedLength: number; // (undocumented) canAppend(segment: ISegment): boolean; + // @deprecated clientId: number; // (undocumented) clone(): ISegment; + // @deprecated readonly endpointType?: "start" | "end"; + // @deprecated + index: number; + // (undocumented) + isLeaf(): this is ISegment; + // @deprecated + localMovedSeq?: number; + // @deprecated localRefs?: LocalReferenceCollection; + // @deprecated localRemovedSeq?: number; + // @deprecated localSeq?: number; + // @deprecated + movedClientIds?: number[]; + // @deprecated + movedSeq?: number; + // @deprecated + movedSeqs?: number[]; + // @deprecated + moveDst?: ReferencePosition; + // @deprecated + ordinal: string; properties?: PropertySet; + // @deprecated + removedClientIds?: number[]; + // @deprecated + removedSeq?: number; + // @deprecated seq?: number; // (undocumented) splitAt(pos: number): ISegment | undefined; @@ -377,6 +403,8 @@ export interface ISegment extends IMergeNodeCommon, Partial, Parti readonly trackingCollection: TrackingGroupCollection; // (undocumented) readonly type: string; + // @deprecated + wasMovedOnInsert?: boolean; } // @alpha (undocumented) @@ -399,7 +427,7 @@ export interface ITrackingGroup { unlink(trackable: Trackable): boolean; } -// @alpha @sealed +// @alpha @sealed @deprecated export class LocalReferenceCollection { [Symbol.iterator](): { next(): IteratorResult; @@ -568,6 +596,9 @@ export const reservedMarkerIdKey = "markerId"; // @alpha export function revertMergeTreeDeltaRevertibles(driver: MergeTreeRevertibleDriver, revertibles: MergeTreeDeltaRevertible[]): void; +// @alpha +export function segmentIsRemoved(segment: ISegment): boolean; + // @alpha (undocumented) export interface SequenceOffsets { // (undocumented) diff --git a/packages/dds/merge-tree/src/endOfTreeSegment.ts b/packages/dds/merge-tree/src/endOfTreeSegment.ts index 555d5b62b659..f85a928e4d6d 100644 --- a/packages/dds/merge-tree/src/endOfTreeSegment.ts +++ b/packages/dds/merge-tree/src/endOfTreeSegment.ts @@ -6,10 +6,12 @@ import { assert } from "@fluidframework/core-utils/internal"; import { LocalClientId } from "./constants.js"; +// eslint-disable-next-line import/no-deprecated import { LocalReferenceCollection } from "./localReference.js"; import { MergeTree } from "./mergeTree.js"; import { NodeAction, depthFirstNodeWalk } from "./mergeTreeNodeWalk.js"; -import { IRemovalInfo, ISegment, ISegmentLeaf, type MergeBlock } from 
"./mergeTreeNodes.js"; +// eslint-disable-next-line import/no-deprecated +import { ISegment, ISegmentLeaf, type MergeBlock } from "./mergeTreeNodes.js"; /** * This is a special segment that is not bound or known by the merge tree itself, @@ -71,6 +73,7 @@ abstract class BaseEndpointSegment { abstract get ordinal(): string; + // eslint-disable-next-line import/no-deprecated localRefs?: LocalReferenceCollection; /* @@ -99,7 +102,7 @@ const notSupported = (): never => { /** * The position immediately prior to the start of the tree */ -export class StartOfTreeSegment extends BaseEndpointSegment implements ISegment, IRemovalInfo { +export class StartOfTreeSegment extends BaseEndpointSegment implements ISegment { type: string = "StartOfTreeSegment"; readonly endpointType = "start"; @@ -149,7 +152,7 @@ export class StartOfTreeSegment extends BaseEndpointSegment implements ISegment, /** * The position immediately after the end of the tree */ -export class EndOfTreeSegment extends BaseEndpointSegment implements ISegment, IRemovalInfo { +export class EndOfTreeSegment extends BaseEndpointSegment implements ISegment { type: string = "EndOfTreeSegment"; readonly endpointType = "end"; diff --git a/packages/dds/merge-tree/src/index.ts b/packages/dds/merge-tree/src/index.ts index 75d1c454ca89..02bb51c078b6 100644 --- a/packages/dds/merge-tree/src/index.ts +++ b/packages/dds/merge-tree/src/index.ts @@ -63,6 +63,7 @@ export { IMergeNodeCommon, IMoveInfo, IRemovalInfo, + segmentIsRemoved, ISegment, ISegmentAction, Marker, diff --git a/packages/dds/merge-tree/src/localReference.ts b/packages/dds/merge-tree/src/localReference.ts index cc89f89d0aaa..02b96924ca6c 100644 --- a/packages/dds/merge-tree/src/localReference.ts +++ b/packages/dds/merge-tree/src/localReference.ts @@ -228,6 +228,7 @@ export function setValidateRefCount( * * @legacy * @alpha + * @deprecated - This class will be removed in 2.20 with no replacement. 
*/ export class LocalReferenceCollection { public static append(seg1: ISegment, seg2: ISegment): void { diff --git a/packages/dds/merge-tree/src/mergeTree.ts b/packages/dds/merge-tree/src/mergeTree.ts index 9dd4857661f6..9df717458d48 100644 --- a/packages/dds/merge-tree/src/mergeTree.ts +++ b/packages/dds/merge-tree/src/mergeTree.ts @@ -20,6 +20,7 @@ import { } from "./constants.js"; import { EndOfTreeSegment, StartOfTreeSegment } from "./endOfTreeSegment.js"; import { + // eslint-disable-next-line import/no-deprecated LocalReferenceCollection, LocalReferencePosition, SlidingPreference, @@ -46,7 +47,9 @@ import { // eslint-disable-next-line import/no-deprecated CollaborationWindow, IMergeNode, + // eslint-disable-next-line import/no-deprecated IMoveInfo, + // eslint-disable-next-line import/no-deprecated IRemovalInfo, ISegmentAction, ISegmentChanges, @@ -100,6 +103,7 @@ import { Side, type InteriorSequencePlace } from "./sequencePlace.js"; import { SortedSegmentSet } from "./sortedSegmentSet.js"; import { zamboniSegments } from "./zamboni.js"; +// eslint-disable-next-line import/no-deprecated function markSegmentMoved(seg: ISegmentLeaf, moveInfo: IMoveInfo): void { seg.moveDst = moveInfo.moveDst; seg.movedClientIds = [...moveInfo.movedClientIds]; @@ -109,19 +113,23 @@ function markSegmentMoved(seg: ISegmentLeaf, moveInfo: IMoveInfo): void { seg.wasMovedOnInsert = moveInfo.wasMovedOnInsert; } +// eslint-disable-next-line import/no-deprecated function isMoved(segment: ISegmentLeaf): segment is ISegmentLeaf & IMoveInfo { return toMoveInfo(segment) !== undefined; } +// eslint-disable-next-line import/no-deprecated function isRemoved(segment: ISegmentLeaf): segment is ISegmentLeaf & IRemovalInfo { return toRemovalInfo(segment) !== undefined; } +// eslint-disable-next-line import/no-deprecated function isRemovedAndAcked(segment: ISegmentLeaf): segment is ISegmentLeaf & IRemovalInfo { const removalInfo = toRemovalInfo(segment); return removalInfo !== undefined && removalInfo.removedSeq !== UnassignedSequenceNumber; } +// eslint-disable-next-line import/no-deprecated function isMovedAndAcked(segment: ISegmentLeaf): segment is ISegmentLeaf & IMoveInfo { const moveInfo = toMoveInfo(segment); return moveInfo !== undefined && moveInfo.movedSeq !== UnassignedSequenceNumber; @@ -180,6 +188,7 @@ function ackSegment( } case MergeTreeDeltaType.REMOVE: { + // eslint-disable-next-line import/no-deprecated const removalInfo: IRemovalInfo | undefined = toRemovalInfo(segment); assert(removalInfo !== undefined, 0x046 /* "On remove ack, missing removal info!" */); segment.localRemovedSeq = undefined; @@ -192,6 +201,7 @@ function ackSegment( case MergeTreeDeltaType.OBLITERATE: case MergeTreeDeltaType.OBLITERATE_SIDED: { + // eslint-disable-next-line import/no-deprecated const moveInfo: IMoveInfo | undefined = toMoveInfo(segment); assert(moveInfo !== undefined, 0x86e /* On obliterate ack, missing move info! */); const obliterateInfo = segmentGroup.obliterateInfo; @@ -877,7 +887,9 @@ export class MergeTree { */ private slideAckedRemovedSegmentReferences(segments: ISegmentLeaf[]): void { // References are slid in groups to preserve their order. 
+ // eslint-disable-next-line import/no-deprecated let currentForwardSlideGroup: LocalReferenceCollection[] = []; + // eslint-disable-next-line import/no-deprecated let currentBackwardSlideGroup: LocalReferenceCollection[] = []; let currentForwardMaybeEndpoint: "start" | "end" | undefined; @@ -891,6 +903,7 @@ export class MergeTree { const slideGroup = ( currentSlideDestination: ISegmentLeaf | undefined, currentSlideIsForward: boolean | undefined, + // eslint-disable-next-line import/no-deprecated currentSlideGroup: LocalReferenceCollection[], pred: (ref: LocalReferencePosition) => boolean, maybeEndpoint: "start" | "end" | undefined, @@ -915,6 +928,7 @@ export class MergeTree { if (maybeEndpoint) { const endpoint = maybeEndpoint === "start" ? this.startOfTree : this.endOfTree; + // eslint-disable-next-line import/no-deprecated const localRefs = LocalReferenceCollection.setOrGet(endpoint); if (currentSlideIsForward) { localRefs.addBeforeTombstones(...endpointRefsToAdd); @@ -934,6 +948,7 @@ export class MergeTree { } } } else { + // eslint-disable-next-line import/no-deprecated const localRefs = LocalReferenceCollection.setOrGet(currentSlideDestination); if (currentSlideIsForward) { localRefs.addBeforeTombstones(...nonEndpointRefsToAdd); @@ -947,11 +962,13 @@ export class MergeTree { segment: ISegmentLeaf, currentSlideDestination: ISegmentLeaf | undefined, currentSlideIsForward: boolean | undefined, + // eslint-disable-next-line import/no-deprecated currentSlideGroup: LocalReferenceCollection[], pred: (ref: LocalReferencePosition) => boolean, slidingPreference: SlidingPreference, currentMaybeEndpoint: "start" | "end" | undefined, reassign: ( + // eslint-disable-next-line import/no-deprecated localRefs: LocalReferenceCollection, slideToSegment: ISegmentLeaf | undefined, slideIsForward: boolean, @@ -1649,6 +1666,7 @@ export class MergeTree { } if (oldest && newest?.clientId !== clientId) { + // eslint-disable-next-line import/no-deprecated const moveInfo: IMoveInfo = { movedClientIds, movedSeq: oldest.seq, @@ -2510,6 +2528,7 @@ export class MergeTree { segment = _segment; } + // eslint-disable-next-line import/no-deprecated const localRefs = LocalReferenceCollection.setOrGet(segment); const segRef = localRefs.createLocalRef( diff --git a/packages/dds/merge-tree/src/mergeTreeNodes.ts b/packages/dds/merge-tree/src/mergeTreeNodes.ts index 316b709fd915..3d8d8b83680e 100644 --- a/packages/dds/merge-tree/src/mergeTreeNodes.ts +++ b/packages/dds/merge-tree/src/mergeTreeNodes.ts @@ -14,6 +14,7 @@ import { UnassignedSequenceNumber, UniversalSequenceNumber, } from "./constants.js"; +// eslint-disable-next-line import/no-deprecated import { LocalReferenceCollection, type LocalReferencePosition } from "./localReference.js"; import { TrackingGroupCollection } from "./mergeTreeTracking.js"; import { IJSONSegment, IMarkerDef, ReferenceType } from "./ops.js"; @@ -34,6 +35,7 @@ import { PropertiesManager } from "./segmentPropertiesManager.js"; * Common properties for a node in a merge tree. * @legacy * @alpha + * @deprecated - This interface will be removed in 2.20 with no replacement. 
*/ export interface IMergeNodeCommon { /** @@ -58,9 +60,13 @@ export interface IMergeNodeCommon { * * @internal */ -export type ISegmentInternal = ISegment & { - localRefs?: LocalReferenceCollection; -}; +export type ISegmentInternal = Omit & + Partial & + Partial & + Partial & { + // eslint-disable-next-line import/no-deprecated + localRefs?: LocalReferenceCollection; + }; /** * We use tiered interface to control visibility of segment properties. @@ -89,6 +95,7 @@ export type IMergeNode = MergeBlock | ISegmentLeaf; * Contains removal information associated to an {@link ISegment}. * @legacy * @alpha + * @deprecated - This interface will be removed in 2.20 with no replacement. */ export interface IRemovalInfo { /** @@ -133,6 +140,7 @@ export function toRemovalInfo( * in the future, when moves _are_ supported. * @legacy * @alpha + * @deprecated - This interface will be removed in 2.20 with no replacement. */ export interface IMoveInfo { /** @@ -211,7 +219,7 @@ export function toMoveInfo(maybe: Partial | undefined): IMoveInfo | u * @legacy * @alpha */ -export interface ISegment extends IMergeNodeCommon, Partial, Partial { +export interface ISegment { readonly type: string; readonly trackingCollection: TrackingGroupCollection; @@ -223,6 +231,7 @@ export interface ISegment extends IMergeNodeCommon, Partial, Parti * after the tree. These segments cannot be referenced by regular operations * and exist primarily as a bucket for local references to slide onto during * deletion of regular segments. + * @deprecated - This property will be removed in 2.20 with no replacement. */ readonly endpointType?: "start" | "end"; @@ -254,6 +263,7 @@ export interface ISegment extends IMergeNodeCommon, Partial, Parti * * @privateRemarks * See {@link CollaborationWindow.localSeq} for more information on the semantics of localSeq. + * @deprecated - This property will be removed in 2.20 with no replacement. */ localSeq?: number; /** @@ -265,20 +275,25 @@ export interface ISegment extends IMergeNodeCommon, Partial, Parti * * @privateRemarks * See {@link CollaborationWindow.localSeq} for more information on the semantics of localSeq. + * @deprecated - This property will be removed in 2.20 with no replacement. */ localRemovedSeq?: number; /** * Seq at which this segment was inserted. * If undefined, it is assumed the segment was inserted prior to the collab window's minimum sequence number. + * @deprecated - This property will be removed in 2.20 with no replacement. */ seq?: number; /** * Short clientId for the client that inserted this segment. + * @deprecated - This property will be removed in 2.20 with no replacement. */ clientId: number; /** * Local references added to this segment. + * @deprecated - This property will be removed in 2.20 with no replacement. */ + // eslint-disable-next-line import/no-deprecated localRefs?: LocalReferenceCollection; /** * Properties that have been added to this segment via annotation. @@ -292,6 +307,70 @@ export interface ISegment extends IMergeNodeCommon, Partial, Parti // Changing this to something other than any would break consumers. // eslint-disable-next-line @typescript-eslint/no-explicit-any toJSONObject(): any; + isLeaf(): this is ISegment; + + /** + * {@inheritDoc @fluidframework/merge-tree#IMergeNodeCommon.index} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + index: number; + /** + * {@inheritDoc @fluidframework/merge-tree#IMergeNodeCommon.ordinal} + * @deprecated - This property will be removed in 2.20 with no replacement. 
+ */ + ordinal: string; + + /** + * {@inheritDoc @fluidframework/merge-tree#IRemovalInfo.removedSeq} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + removedSeq?: number; + /** + * {@inheritDoc @fluidframework/merge-tree#IRemovalInfo.removedClientIds} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + removedClientIds?: number[]; + /** + * {@inheritDoc @fluidframework/merge-tree#IMoveInfo.localMovedSeq} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + localMovedSeq?: number; + /** + * {@inheritDoc @fluidframework/merge-tree#IMoveInfo.movedSeq} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + movedSeq?: number; + + /** + * {@inheritDoc @fluidframework/merge-tree#IMoveInfo.movedSeqs} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + movedSeqs?: number[]; + /** + * {@inheritDoc @fluidframework/merge-tree#IMoveInfo.moveDst} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + moveDst?: ReferencePosition; + /** + * {@inheritDoc @fluidframework/merge-tree#IMoveInfo.movedClientIds} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + movedClientIds?: number[]; + /** + * {@inheritDoc @fluidframework/merge-tree#IMoveInfo.wasMovedOnInsert} + * @deprecated - This property will be removed in 2.20 with no replacement. + */ + wasMovedOnInsert?: boolean; +} + +/** + * Determine if a segment has been removed. + * @legacy + * @alpha + */ +export function segmentIsRemoved(segment: ISegment): boolean { + const leaf: ISegmentLeaf = segment; + return leaf.removedSeq !== undefined; } /** @@ -494,15 +573,45 @@ export function seqLTE(seq: number, minOrRefSeq: number): boolean { * @alpha */ export abstract class BaseSegment implements ISegment { + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public clientId: number = LocalClientId; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public seq: number = UniversalSequenceNumber; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public removedSeq?: number; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public removedClientIds?: number[]; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public movedSeq?: number; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public movedSeqs?: number[]; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public movedClientIds?: number[]; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public wasMovedOnInsert?: boolean | undefined; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public index: number = 0; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public ordinal: string = ""; public cachedLength: number = 0; @@ -513,10 +622,23 @@ export abstract class BaseSegment implements ISegment { public attribution?: IAttributionCollection; public properties?: PropertySet; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. 
+ */ + // eslint-disable-next-line import/no-deprecated public localRefs?: LocalReferenceCollection; public abstract readonly type: string; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public localSeq?: number; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public localRemovedSeq?: number; + /** + * @deprecated - This property will be removed in 2.20 with no replacement. + */ public localMovedSeq?: number; public constructor(properties?: PropertySet) { @@ -610,6 +732,7 @@ export abstract class BaseSegment implements ISegment { public append(other: ISegment): void { // Note: Must call 'appendLocalRefs' before modifying this segment's length as // 'this.cachedLength' is used to adjust the offsets of the local refs. + // eslint-disable-next-line import/no-deprecated LocalReferenceCollection.append(this, other); if (this.attribution) { assert( diff --git a/packages/dds/merge-tree/src/partialLengths.ts b/packages/dds/merge-tree/src/partialLengths.ts index e971a618fd95..297a529ba907 100644 --- a/packages/dds/merge-tree/src/partialLengths.ts +++ b/packages/dds/merge-tree/src/partialLengths.ts @@ -12,7 +12,9 @@ import { // eslint-disable-next-line import/no-deprecated CollaborationWindow, IMergeNode, + // eslint-disable-next-line import/no-deprecated IMoveInfo, + // eslint-disable-next-line import/no-deprecated IRemovalInfo, ISegmentLeaf, compareNumbers, @@ -639,7 +641,9 @@ export class PartialSequenceLengths { private static insertSegment( combinedPartialLengths: PartialSequenceLengths, segment: ISegmentLeaf, + // eslint-disable-next-line import/no-deprecated removalInfo?: IRemovalInfo, + // eslint-disable-next-line import/no-deprecated moveInfo?: IMoveInfo, ): void { const removalIsLocal = diff --git a/packages/dds/merge-tree/src/revertibles.ts b/packages/dds/merge-tree/src/revertibles.ts index b50b3329eb89..dad9056b6747 100644 --- a/packages/dds/merge-tree/src/revertibles.ts +++ b/packages/dds/merge-tree/src/revertibles.ts @@ -8,6 +8,7 @@ import { UsageError } from "@fluidframework/telemetry-utils/internal"; import { DoublyLinkedList } from "./collections/index.js"; import { EndOfTreeSegment } from "./endOfTreeSegment.js"; +// eslint-disable-next-line import/no-deprecated import { LocalReferenceCollection, LocalReferencePosition } from "./localReference.js"; import { MergeTree, findRootMergeBlock } from "./mergeTree.js"; import { IMergeTreeDeltaCallbackArgs } from "./mergeTreeDeltaCallback.js"; @@ -98,6 +99,7 @@ function findMergeTreeWithRevert(trackable: Trackable): MergeTreeWithRevert { const refCallbacks: MergeTreeWithRevert["__mergeTreeRevertible"]["refCallbacks"] = { afterSlide: (r: LocalReferencePosition) => { if (mergeTree.referencePositionToLocalPosition(r) === DetachedReferencePosition) { + // eslint-disable-next-line import/no-deprecated const refs = LocalReferenceCollection.setOrGet(detachedReferences); refs.addAfterTombstones([r]); } @@ -354,6 +356,7 @@ function revertLocalRemove( } if (insertRef !== undefined) { + // eslint-disable-next-line import/no-deprecated const localRefs = LocalReferenceCollection.setOrGet(insertSegment); if (insertRef.before?.empty === false) { localRefs.addBeforeTombstones(insertRef.before.map((n) => n.data)); From 89b1375543e9ad37430f6c680d4dc7a1d88bf5aa Mon Sep 17 00:00:00 2001 From: Mark Fields Date: Fri, 13 Dec 2024 15:32:21 -0800 Subject: [PATCH 6/6] Follow-ups about IContainerRuntimeOptionsInternal and flushMode option (#23309) These are follow-up changes to #23272 * 
Some post-merge PR discussions * Removing usages of `flushMode` so it can be removed when the time comes --- .../container-runtime.legacy.alpha.api.md | 2 +- .../container-runtime/src/containerRuntime.ts | 16 +++- .../src/test/containerRuntime.spec.ts | 81 ++++++++----------- .../src/test/opsOnReconnect.spec.ts | 3 +- .../src/test/batching.spec.ts | 8 +- .../src/test/flushModeValidation.spec.ts | 4 +- .../test-service-load/src/optionsMatrix.ts | 8 +- .../test/test-utils/src/testObjectProvider.ts | 4 +- 8 files changed, 60 insertions(+), 66 deletions(-) diff --git a/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md b/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md index d52db70782ad..4036d7fe5f8e 100644 --- a/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md +++ b/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md @@ -34,7 +34,7 @@ export enum ContainerMessageType { // @alpha export class ContainerRuntime extends TypedEventEmitter implements IContainerRuntime, IRuntime, ISummarizerRuntime, ISummarizerInternalsProvider, IProvideFluidHandleContext { - protected constructor(context: IContainerContext, registry: IFluidDataStoreRegistry, metadata: IContainerRuntimeMetadata | undefined, electedSummarizerData: ISerializedElection | undefined, chunks: [string, string[]][], dataStoreAliasMap: [string, string][], runtimeOptions: Readonly>, containerScope: FluidObject, baseLogger: ITelemetryBaseLogger, existing: boolean, blobManagerSnapshot: IBlobManagerLoadInfo, _storage: IDocumentStorageService, createIdCompressor: () => Promise, documentsSchemaController: DocumentsSchemaController, featureGatesForTelemetry: Record, provideEntryPoint: (containerRuntime: IContainerRuntime) => Promise, requestHandler?: ((request: IRequest, runtime: IContainerRuntime) => Promise) | undefined, summaryConfiguration?: ISummaryConfiguration, recentBatchInfo?: [number, string][]); + protected constructor(context: IContainerContext, registry: IFluidDataStoreRegistry, metadata: IContainerRuntimeMetadata | undefined, electedSummarizerData: ISerializedElection | undefined, chunks: [string, string[]][], dataStoreAliasMap: [string, string][], runtimeOptions: Readonly> & IContainerRuntimeOptions>, containerScope: FluidObject, baseLogger: ITelemetryBaseLogger, existing: boolean, blobManagerSnapshot: IBlobManagerLoadInfo, _storage: IDocumentStorageService, createIdCompressor: () => Promise, documentsSchemaController: DocumentsSchemaController, featureGatesForTelemetry: Record, provideEntryPoint: (containerRuntime: IContainerRuntime) => Promise, requestHandler?: ((request: IRequest, runtime: IContainerRuntime) => Promise) | undefined, summaryConfiguration?: ISummaryConfiguration, recentBatchInfo?: [number, string][]); // (undocumented) protected addContainerStateToSummary(summaryTree: ISummaryTreeWithStats, fullTree: boolean, trackState: boolean, telemetryContext?: ITelemetryContext): void; addedGCOutboundRoute(fromPath: string, toPath: string, messageTimestampMs?: number): void; diff --git a/packages/runtime/container-runtime/src/containerRuntime.ts b/packages/runtime/container-runtime/src/containerRuntime.ts index 0316c9d4a243..7565890bf485 100644 --- a/packages/runtime/container-runtime/src/containerRuntime.ts +++ b/packages/runtime/container-runtime/src/containerRuntime.ts @@ -941,7 +941,7 @@ export class ContainerRuntime chunkSizeInBytes = defaultChunkSizeInBytes, enableGroupedBatching = true, 
explicitSchemaControl = false, - } = runtimeOptions as IContainerRuntimeOptionsInternal; + }: IContainerRuntimeOptionsInternal = runtimeOptions; const registry = new FluidDataStoreRegistry(registryEntries); @@ -1120,7 +1120,7 @@ export class ContainerRuntime const featureGatesForTelemetry: Record = {}; - // Make sure we've got all the options including internal ones (even though we have to cast back to IContainerRuntimeOptions below) + // Make sure we've got all the options including internal ones const internalRuntimeOptions: Readonly> = { summaryOptions, gcOptions, @@ -1517,7 +1517,10 @@ export class ContainerRuntime electedSummarizerData: ISerializedElection | undefined, chunks: [string, string[]][], dataStoreAliasMap: [string, string][], - runtimeOptions: Readonly>, + runtimeOptions: Readonly< + Required> & + IContainerRuntimeOptions // Let flushMode and enabledGroupedBatching be optional now since they're soon to be removed + >, private readonly containerScope: FluidObject, // Create a custom ITelemetryBaseLogger to output telemetry events. public readonly baseLogger: ITelemetryBaseLogger, @@ -1562,7 +1565,12 @@ export class ContainerRuntime snapshotWithContents, } = context; - this.runtimeOptions = runtimeOptions; + // Backfill in defaults for the internal runtimeOptions, since they may not be present on the provided runtimeOptions object + this.runtimeOptions = { + flushMode: defaultFlushMode, + enableGroupedBatching: true, + ...runtimeOptions, + }; this.logger = createChildLogger({ logger: this.baseLogger }); this.mc = createChildMonitoringContext({ diff --git a/packages/runtime/container-runtime/src/test/containerRuntime.spec.ts b/packages/runtime/container-runtime/src/test/containerRuntime.spec.ts index 85c1abfe7046..35d384955fa2 100644 --- a/packages/runtime/container-runtime/src/test/containerRuntime.spec.ts +++ b/packages/runtime/container-runtime/src/test/containerRuntime.spec.ts @@ -68,6 +68,7 @@ import { IPendingRuntimeState, defaultPendingOpsWaitTimeoutMs, getSingleUseLegacyLogCallback, + type IContainerRuntimeOptionsInternal, } from "../containerRuntime.js"; import { ContainerMessageType, @@ -266,7 +267,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -302,13 +302,14 @@ describe("Runtime", () => { }); it("Override default flush mode using options", async () => { + const runtimeOptions: IContainerRuntimeOptionsInternal = { + flushMode: FlushMode.Immediate, + }; const containerRuntime = await ContainerRuntime.loadRuntime({ context: getMockContext() as IContainerContext, registryEntries: [], existing: false, - runtimeOptions: { - flushMode: FlushMode.Immediate, - }, + runtimeOptions, provideEntryPoint: mockProvideEntryPoint, }); @@ -327,9 +328,7 @@ describe("Runtime", () => { }) as IContainerContext, registryEntries: [], existing: false, - runtimeOptions: { - flushMode: FlushMode.TurnBased, - }, + runtimeOptions: {}, provideEntryPoint: mockProvideEntryPoint, }); (containerRuntime as any).ensureNoDataModelChanges = (callback) => { @@ -374,9 +373,7 @@ describe("Runtime", () => { }) as IContainerContext, registryEntries: [], existing: false, - runtimeOptions: { - flushMode: FlushMode.TurnBased, - }, + runtimeOptions: {}, provideEntryPoint: mockProvideEntryPoint, }); @@ -481,18 +478,20 @@ describe("Runtime", () => { beforeEach(async () => { mockContext = getMockContextForOrderSequentially(); + const runtimeOptions: 
IContainerRuntimeOptionsInternal = { + summaryOptions: { + summaryConfigOverrides: { + state: "disabled", + }, + }, + flushMode, + }; + containerRuntime = await ContainerRuntime.loadRuntime({ context: mockContext as IContainerContext, registryEntries: [], existing: false, - runtimeOptions: { - summaryOptions: { - summaryConfigOverrides: { - state: "disabled", - }, - }, - flushMode, - }, + runtimeOptions, provideEntryPoint: mockProvideEntryPoint, }); containerErrors.length = 0; @@ -689,16 +688,17 @@ describe("Runtime", () => { }); beforeEach(async () => { + const runtimeOptions: IContainerRuntimeOptionsInternal = { + summaryOptions: { + summaryConfigOverrides: { state: "disabled" }, + }, + flushMode, + }; containerRuntime = await ContainerRuntime.loadRuntime({ context: getMockContextForOrderSequentially() as IContainerContext, registryEntries: [], existing: false, - runtimeOptions: { - summaryOptions: { - summaryConfigOverrides: { state: "disabled" }, - }, - flushMode, - }, + runtimeOptions, provideEntryPoint: mockProvideEntryPoint, }); containerErrors.length = 0; @@ -1392,7 +1392,7 @@ describe("Runtime", () => { mockLogger = new MockLogger(); }); - const runtimeOptions = { + const runtimeOptions: IContainerRuntimeOptionsInternal = { compressionOptions: { minimumBatchSizeInBytes: 1024 * 1024, compressionAlgorithm: CompressionAlgorithms.lz4, @@ -1402,7 +1402,7 @@ describe("Runtime", () => { enableGroupedBatching: true, }; - const defaultRuntimeOptions = { + const defaultRuntimeOptions: IContainerRuntimeOptionsInternal = { summaryOptions: {}, gcOptions: {}, loadSequenceNumberVerification: "close", @@ -1416,7 +1416,7 @@ describe("Runtime", () => { enableRuntimeIdCompressor: undefined, enableGroupedBatching: false, explicitSchemaControl: false, - } satisfies IContainerRuntimeOptions; + }; const mergedRuntimeOptions = { ...defaultRuntimeOptions, ...runtimeOptions }; it("Container load stats", async () => { @@ -1490,6 +1490,10 @@ describe("Runtime", () => { }; }; + const runtimeOptions: IContainerRuntimeOptionsInternal = { + flushMode: FlushModeExperimental.Async as unknown as FlushMode, + }; + [ undefined, new Map([["referenceSequenceNumbers", false]]), @@ -1503,9 +1507,7 @@ describe("Runtime", () => { context: localGetMockContext(features) as IContainerContext, registryEntries: [], existing: false, - runtimeOptions: { - flushMode: FlushModeExperimental.Async as unknown as FlushMode, - }, + runtimeOptions, provideEntryPoint: mockProvideEntryPoint, }); @@ -1526,9 +1528,7 @@ describe("Runtime", () => { ) as IContainerContext, registryEntries: [], existing: false, - runtimeOptions: { - flushMode: FlushModeExperimental.Async as unknown as FlushMode, - }, + runtimeOptions, provideEntryPoint: mockProvideEntryPoint, }); @@ -1840,7 +1840,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -1872,7 +1871,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -1914,7 +1912,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -1961,7 +1958,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, 
enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -1983,7 +1979,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2031,7 +2026,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2081,7 +2075,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2120,7 +2113,6 @@ describe("Runtime", () => { registryEntries: [], existing: false, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2334,7 +2326,6 @@ describe("Runtime", () => { registryEntries: [["@fluid-example/smde", Promise.resolve(entryDefault)]], existing: true, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2363,7 +2354,6 @@ describe("Runtime", () => { registryEntries: [["@fluid-example/smde", Promise.resolve(entryDefault)]], existing: true, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2412,7 +2402,6 @@ describe("Runtime", () => { registryEntries: [["@fluid-example/smde", Promise.resolve(entryDefault)]], existing: true, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2472,7 +2461,6 @@ describe("Runtime", () => { registryEntries: [["@fluid-example/smde", Promise.resolve(entryDefault)]], existing: true, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2514,7 +2502,6 @@ describe("Runtime", () => { registryEntries: [["@fluid-example/smde", Promise.resolve(entryDefault)]], existing: true, runtimeOptions: { - flushMode: FlushMode.TurnBased, enableRuntimeIdCompressor: "on", }, provideEntryPoint: mockProvideEntryPoint, @@ -2608,7 +2595,6 @@ describe("Runtime", () => { requestHandler: undefined, runtimeOptions: { enableGroupedBatching: false, - flushMode: FlushMode.TurnBased, }, provideEntryPoint: mockProvideEntryPoint, }); @@ -3184,7 +3170,6 @@ describe("Runtime", () => { requestHandler: undefined, runtimeOptions: { enableGroupedBatching: false, - flushMode: FlushMode.TurnBased, }, provideEntryPoint: mockProvideEntryPoint, }); diff --git a/packages/test/local-server-tests/src/test/opsOnReconnect.spec.ts b/packages/test/local-server-tests/src/test/opsOnReconnect.spec.ts index 79502f22d371..e0175742165e 100644 --- a/packages/test/local-server-tests/src/test/opsOnReconnect.spec.ts +++ b/packages/test/local-server-tests/src/test/opsOnReconnect.spec.ts @@ -15,6 +15,7 @@ import { import { ContainerMessageType, IContainerRuntimeOptions, + type IContainerRuntimeOptionsInternal, } from "@fluidframework/container-runtime/internal"; import { IFluidHandle, IFluidLoadable } from "@fluidframework/core-interfaces"; import { ISequencedDocumentMessage } from "@fluidframework/driver-definitions/internal"; @@ -105,7 +106,7 @@ describe("Ops on Reconnect", () => { } async function setupFirstContainer( - runtimeOptions: IContainerRuntimeOptions = { flushMode: 
FlushMode.Immediate }, + runtimeOptions: IContainerRuntimeOptionsInternal = { flushMode: FlushMode.Immediate }, ) { // Create the first container, dataObject and DDSes. container1 = await createContainer(runtimeOptions); diff --git a/packages/test/test-end-to-end-tests/src/test/batching.spec.ts b/packages/test/test-end-to-end-tests/src/test/batching.spec.ts index 629ee3fd957a..369a540cafce 100644 --- a/packages/test/test-end-to-end-tests/src/test/batching.spec.ts +++ b/packages/test/test-end-to-end-tests/src/test/batching.spec.ts @@ -10,7 +10,7 @@ import { IContainer } from "@fluidframework/container-definitions/internal"; import { CompressionAlgorithms, ContainerMessageType, - IContainerRuntimeOptions, + type IContainerRuntimeOptionsInternal, } from "@fluidframework/container-runtime/internal"; import { IContainerRuntime } from "@fluidframework/container-runtime-definitions/internal"; import { ConfigTypes, IConfigProviderBase } from "@fluidframework/core-interfaces"; @@ -123,7 +123,7 @@ describeCompat("Flushing ops", "NoCompat", (getTestObjectProvider, apis) => { let dataObject2map2: ISharedMap; async function setupContainers( - runtimeOptions?: IContainerRuntimeOptions, + runtimeOptions?: IContainerRuntimeOptionsInternal, disableOfflineLoad = false, ) { if (disableOfflineLoad) { @@ -169,7 +169,7 @@ describeCompat("Flushing ops", "NoCompat", (getTestObjectProvider, apis) => { let dataObject2BatchMessages: ISequencedDocumentMessage[] = []; function testFlushingUsingOrderSequentially( - options: IContainerRuntimeOptions, + options: IContainerRuntimeOptionsInternal, disableOfflineLoad, ) { beforeEach("setupBatchMessageListeners", async () => { @@ -550,7 +550,7 @@ describeCompat("Flushing ops", "NoCompat", (getTestObjectProvider, apis) => { } function testAutomaticFlushingUsingOrderSequentially( - options: IContainerRuntimeOptions, + options: IContainerRuntimeOptionsInternal, disableOfflineLoad, ) { beforeEach("setupContainers", async () => { diff --git a/packages/test/test-end-to-end-tests/src/test/flushModeValidation.spec.ts b/packages/test/test-end-to-end-tests/src/test/flushModeValidation.spec.ts index dc1739e4dc3a..e58d0361c521 100644 --- a/packages/test/test-end-to-end-tests/src/test/flushModeValidation.spec.ts +++ b/packages/test/test-end-to-end-tests/src/test/flushModeValidation.spec.ts @@ -6,7 +6,7 @@ import { strict as assert } from "assert"; import { describeCompat } from "@fluid-private/test-version-utils"; -import { IContainerRuntimeOptions } from "@fluidframework/container-runtime/internal"; +import { type IContainerRuntimeOptionsInternal } from "@fluidframework/container-runtime/internal"; import type { ISharedMap } from "@fluidframework/map/internal"; import { FlushMode } from "@fluidframework/runtime-definitions/internal"; import { @@ -43,7 +43,7 @@ describeCompat("Flush mode validation", "NoCompat", (getTestObjectProvider, apis } }); - async function setupContainer(runtimeOptions?: IContainerRuntimeOptions) { + async function setupContainer(runtimeOptions?: IContainerRuntimeOptionsInternal) { const configCopy = { ...testContainerConfig, runtimeOptions }; // Create a Container for the first client. 
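
The following is a minimal sketch, not part of this patch, of the pattern the test diffs above converge on: a test that still needs the legacy `flushMode` override types its options object as `IContainerRuntimeOptionsInternal`, while tests that were passing the default `FlushMode.TurnBased` simply drop the property and rely on the default. The mock helpers in the sketch are hypothetical stand-ins for the fixtures used in `containerRuntime.spec.ts`; only the imports and the `ContainerRuntime.loadRuntime` call shape come from the hunks above.

```typescript
// Illustrative sketch only — not part of the patch.
import {
	ContainerRuntime,
	type IContainerRuntimeOptionsInternal,
} from "@fluidframework/container-runtime/internal";
import { FlushMode } from "@fluidframework/runtime-definitions/internal";

// Hypothetical test fixtures, assumed to mirror the spec file above.
declare function getMockContext(): any;
declare function mockProvideEntryPoint(runtime: unknown): Promise<any>;

// Only tests that genuinely need Immediate flushing keep a `flushMode` override,
// and they must type their options as the internal interface to see the property.
const runtimeOptions: IContainerRuntimeOptionsInternal = {
	flushMode: FlushMode.Immediate,
};

export async function loadImmediateFlushRuntime() {
	// Same loadRuntime call shape as the spec tests; runtimeOptions is passed as a
	// pre-typed object rather than an inline literal so the internal type applies.
	return ContainerRuntime.loadRuntime({
		context: getMockContext(),
		registryEntries: [],
		existing: false,
		runtimeOptions,
		provideEntryPoint: mockProvideEntryPoint,
	});
}
```
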
diff --git a/packages/test/test-service-load/src/optionsMatrix.ts b/packages/test/test-service-load/src/optionsMatrix.ts index ed8c568606c4..e33f6d3d22d8 100644 --- a/packages/test/test-service-load/src/optionsMatrix.ts +++ b/packages/test/test-service-load/src/optionsMatrix.ts @@ -14,9 +14,9 @@ import { import { ILoaderOptions } from "@fluidframework/container-loader/internal"; import { CompressionAlgorithms, - IContainerRuntimeOptions, IGCRuntimeOptions, ISummaryRuntimeOptions, + type IContainerRuntimeOptionsInternal, } from "@fluidframework/container-runtime/internal"; import { ConfigTypes } from "@fluidframework/core-interfaces"; import { LoggingError } from "@fluidframework/telemetry-utils/internal"; @@ -88,7 +88,7 @@ const summaryOptionsMatrix: OptionsMatrix = { export function generateRuntimeOptions( seed: number, - overrides: Partial> | undefined, + overrides: Partial> | undefined, ) { const gcOptions = generatePairwiseOptions( applyOverrides(gcOptionsMatrix, overrides?.gcOptions as any), @@ -100,7 +100,7 @@ export function generateRuntimeOptions( seed, ); - const runtimeOptionsMatrix: OptionsMatrix = { + const runtimeOptionsMatrix: OptionsMatrix = { gcOptions: [undefined, ...gcOptions], summaryOptions: [undefined, ...summaryOptions], loadSequenceNumberVerification: [undefined], @@ -116,7 +116,7 @@ export function generateRuntimeOptions( explicitSchemaControl: [true, false], }; - return generatePairwiseOptions( + return generatePairwiseOptions( applyOverrides(runtimeOptionsMatrix, { ...overrides, gcOptions: undefined, diff --git a/packages/test/test-utils/src/testObjectProvider.ts b/packages/test/test-utils/src/testObjectProvider.ts index a144182f8041..15ad3828cc3a 100644 --- a/packages/test/test-utils/src/testObjectProvider.ts +++ b/packages/test/test-utils/src/testObjectProvider.ts @@ -15,7 +15,7 @@ import { Loader, waitContainerToCatchUp as waitContainerToCatchUp_original, } from "@fluidframework/container-loader/internal"; -import { IContainerRuntimeOptions } from "@fluidframework/container-runtime/internal"; +import { type IContainerRuntimeOptionsInternal } from "@fluidframework/container-runtime/internal"; import { IRequestHeader, ITelemetryBaseEvent, @@ -235,7 +235,7 @@ export interface ITestContainerConfig { registry?: ChannelFactoryRegistry; /** Container runtime options for the container instance */ - runtimeOptions?: IContainerRuntimeOptions; + runtimeOptions?: IContainerRuntimeOptionsInternal; /** Whether this runtime should be instantiated using a mixed-in attributor class */ enableAttribution?: boolean;