diff --git a/src/index.js b/src/index.js index e38caef7..628e61f4 100644 --- a/src/index.js +++ b/src/index.js @@ -55,16 +55,14 @@ export { encodeStateAsUpdate, encodeStateAsUpdateV2, encodeStateVector, - encodeStateVectorV2, UndoManager, decodeSnapshot, encodeSnapshot, decodeSnapshotV2, encodeSnapshotV2, decodeStateVector, - decodeStateVectorV2, - encodeRelativePosition, - decodeRelativePosition, + logUpdate, + logUpdateV2, relativePositionToJSON, isDeleted, isParentOf, @@ -73,5 +71,15 @@ export { tryGc, transact, AbstractConnector, - logType + logType, + mergeUpdates, + mergeUpdatesV2, + parseUpdateMeta, + parseUpdateMetaV2, + encodeStateVectorFromUpdate, + encodeStateVectorFromUpdateV2, + encodeRelativePosition, + decodeRelativePosition, + diffUpdate, + diffUpdateV2 } from './internals.js' diff --git a/src/internals.js b/src/internals.js index 1984739b..bc386f0a 100644 --- a/src/internals.js +++ b/src/internals.js @@ -15,6 +15,7 @@ export * from './utils/Snapshot.js' export * from './utils/StructStore.js' export * from './utils/Transaction.js' export * from './utils/UndoManager.js' +export * from './utils/updates.js' export * from './utils/YEvent.js' export * from './types/AbstractType.js' @@ -39,3 +40,4 @@ export * from './structs/ContentAny.js' export * from './structs/ContentString.js' export * from './structs/ContentType.js' export * from './structs/Item.js' +export * from './structs/Skip.js' diff --git a/src/structs/AbstractStruct.js b/src/structs/AbstractStruct.js index fc335a73..889be3ab 100644 --- a/src/structs/AbstractStruct.js +++ b/src/structs/AbstractStruct.js @@ -1,6 +1,6 @@ import { - AbstractUpdateEncoder, ID, Transaction // eslint-disable-line + UpdateEncoderV1, UpdateEncoderV2, ID, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -34,7 +34,7 @@ export class AbstractStruct { } /** - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. 
* @param {number} offset * @param {number} encodingRef */ diff --git a/src/structs/ContentAny.js b/src/structs/ContentAny.js index f00128df..613144d8 100644 --- a/src/structs/ContentAny.js +++ b/src/structs/ContentAny.js @@ -1,5 +1,5 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, StructStore // eslint-disable-line + UpdateEncoderV1, UpdateEncoderV2, UpdateDecoderV1, UpdateDecoderV2, Transaction, Item, StructStore // eslint-disable-line } from '../internals.js' export class ContentAny { @@ -74,7 +74,7 @@ export class ContentAny { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -95,7 +95,7 @@ export class ContentAny { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentAny} */ export const readContentAny = decoder => { diff --git a/src/structs/ContentBinary.js b/src/structs/ContentBinary.js index 15d92aa2..30e55fe5 100644 --- a/src/structs/ContentBinary.js +++ b/src/structs/ContentBinary.js @@ -1,5 +1,5 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -70,7 +70,7 @@ export class ContentBinary { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -86,7 +86,7 @@ export class ContentBinary { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder * @return {ContentBinary} */ export const readContentBinary = decoder => new ContentBinary(decoder.readBuf()) diff --git a/src/structs/ContentDeleted.js b/src/structs/ContentDeleted.js index 7e00bebe..7225e1f6 100644 --- a/src/structs/ContentDeleted.js +++ b/src/structs/ContentDeleted.js @@ -1,7 +1,7 @@ import { addToDeleteSet, - AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line } from '../internals.js' export class ContentDeleted { @@ -77,7 +77,7 @@ export class ContentDeleted { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -95,7 +95,7 @@ export class ContentDeleted { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder * @return {ContentDeleted} */ export const readContentDeleted = decoder => new ContentDeleted(decoder.readLen()) diff --git a/src/structs/ContentDoc.js b/src/structs/ContentDoc.js index 10101896..939734c1 100644 --- a/src/structs/ContentDoc.js +++ b/src/structs/ContentDoc.js @@ -1,6 +1,6 @@ import { - Doc, AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Transaction, Item // eslint-disable-line + Doc, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -110,7 +110,7 @@ export class ContentDoc { gc (store) { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | 
UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -129,7 +129,7 @@ export class ContentDoc { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentDoc} */ export const readContentDoc = decoder => new ContentDoc(new Doc({ guid: decoder.readString(), ...decoder.readAny() })) diff --git a/src/structs/ContentEmbed.js b/src/structs/ContentEmbed.js index 66b922e3..27dcd3fa 100644 --- a/src/structs/ContentEmbed.js +++ b/src/structs/ContentEmbed.js @@ -1,6 +1,6 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -74,7 +74,7 @@ export class ContentEmbed { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -92,7 +92,7 @@ export class ContentEmbed { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentEmbed} */ export const readContentEmbed = decoder => new ContentEmbed(decoder.readJSON()) diff --git a/src/structs/ContentFormat.js b/src/structs/ContentFormat.js index 43716408..fb2610d2 100644 --- a/src/structs/ContentFormat.js +++ b/src/structs/ContentFormat.js @@ -1,6 +1,6 @@ import { - AbstractType, AbstractUpdateDecoder, AbstractUpdateEncoder, Item, StructStore, Transaction // eslint-disable-line + AbstractType, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Item, StructStore, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -80,7 +80,7 @@ export class ContentFormat { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -97,7 +97,7 @@ export class ContentFormat { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentFormat} */ export const readContentFormat = decoder => new ContentFormat(decoder.readString(), decoder.readJSON()) diff --git a/src/structs/ContentJSON.js b/src/structs/ContentJSON.js index baf4c46c..29b2f754 100644 --- a/src/structs/ContentJSON.js +++ b/src/structs/ContentJSON.js @@ -1,5 +1,5 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, StructStore // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line } from '../internals.js' /** @@ -77,7 +77,7 @@ export class ContentJSON { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -100,7 +100,7 @@ export class ContentJSON { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentJSON} */ export const readContentJSON = decoder => { diff --git a/src/structs/ContentString.js b/src/structs/ContentString.js index 10151e67..fa023ef9 100644 --- a/src/structs/ContentString.js +++ b/src/structs/ContentString.js @@ -1,5 +1,5 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, 
StructStore // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line } from '../internals.js' /** @@ -88,7 +88,7 @@ export class ContentString { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -106,7 +106,7 @@ export class ContentString { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentString} */ export const readContentString = decoder => new ContentString(decoder.readString()) diff --git a/src/structs/ContentType.js b/src/structs/ContentType.js index 7fd45570..3be38298 100644 --- a/src/structs/ContentType.js +++ b/src/structs/ContentType.js @@ -7,13 +7,13 @@ import { readYXmlFragment, readYXmlHook, readYXmlText, - AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' /** - * @type {Array>} + * @type {Array>} * @private */ export const typeRefs = [ @@ -148,7 +148,7 @@ export class ContentType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -166,7 +166,7 @@ export class ContentType { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentType} */ export const readContentType = decoder => new ContentType(typeRefs[decoder.readTypeRef()](decoder)) diff --git a/src/structs/GC.js b/src/structs/GC.js index 0b9e4244..42d71f73 100644 --- a/src/structs/GC.js +++ b/src/structs/GC.js @@ -2,7 +2,7 @@ import { AbstractStruct, addStruct, - AbstractUpdateEncoder, StructStore, Transaction, ID // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line } from '../internals.js' export const structGCRefNumber = 0 @@ -22,6 +22,9 @@ export class GC extends AbstractStruct { * @return {boolean} */ mergeWith (right) { + if (this.constructor !== right.constructor) { + return false + } this.length += right.length return true } @@ -39,7 +42,7 @@ export class GC extends AbstractStruct { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { diff --git a/src/structs/Item.js b/src/structs/Item.js index 4e171224..95ef3f6a 100644 --- a/src/structs/Item.js +++ b/src/structs/Item.js @@ -22,7 +22,7 @@ import { readContentFormat, readContentType, addChangedTypeToTransaction, - AbstractUpdateDecoder, AbstractUpdateEncoder, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -554,6 +554,7 @@ export class Item extends AbstractStruct { */ mergeWith (right) { if ( + this.constructor === right.constructor && compareIDs(right.origin, this.lastId) && this.right === right && compareIDs(this.rightOrigin, 
right.rightOrigin) && @@ -619,7 +620,7 @@ export class Item extends AbstractStruct { * * This is called when this Item is sent to a remote peer. * - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. * @param {number} offset */ write (encoder, offset) { @@ -639,16 +640,26 @@ export class Item extends AbstractStruct { } if (origin === null && rightOrigin === null) { const parent = /** @type {AbstractType} */ (this.parent) - const parentItem = parent._item - if (parentItem === null) { - // parent type on y._map - // find the correct key - const ykey = findRootTypeKey(parent) + if (parent._item !== undefined) { + const parentItem = parent._item + if (parentItem === null) { + // parent type on y._map + // find the correct key + const ykey = findRootTypeKey(parent) + encoder.writeParentInfo(true) // write parentYKey + encoder.writeString(ykey) + } else { + encoder.writeParentInfo(false) // write parent id + encoder.writeLeftID(parentItem.id) + } + } else if (parent.constructor === String) { // this edge case was added by differential updates encoder.writeParentInfo(true) // write parentYKey - encoder.writeString(ykey) - } else { + encoder.writeString(parent) + } else if (parent.constructor === ID) { encoder.writeParentInfo(false) // write parent id - encoder.writeLeftID(parentItem.id) + encoder.writeLeftID(parent) + } else { + error.unexpectedCase() } if (parentSub !== null) { encoder.writeString(parentSub) @@ -659,7 +670,7 @@ export class Item extends AbstractStruct { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @param {number} info */ export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS5](decoder) @@ -667,10 +678,10 @@ export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS /** * A lookup map for reading Item content. 
* - * @type {Array} + * @type {Array} */ export const contentRefs = [ - () => { throw error.unexpectedCase() }, // GC is not ItemContent + () => { error.unexpectedCase() }, // GC is not ItemContent readContentDeleted, // 1 readContentJSON, // 2 readContentBinary, // 3 @@ -679,7 +690,8 @@ export const contentRefs = [ readContentFormat, // 6 readContentType, // 7 readContentAny, // 8 - readContentDoc // 9 + readContentDoc, // 9 + () => { error.unexpectedCase() } // 10 - Skip is not ItemContent ] /** @@ -759,7 +771,7 @@ export class AbstractContent { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { diff --git a/src/structs/Skip.js b/src/structs/Skip.js new file mode 100644 index 00000000..c763f853 --- /dev/null +++ b/src/structs/Skip.js @@ -0,0 +1,60 @@ + +import { + AbstractStruct, + UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line +} from '../internals.js' +import * as error from 'lib0/error.js' +import * as encoding from 'lib0/encoding.js' + +export const structSkipRefNumber = 10 + +/** + * @private + */ +export class Skip extends AbstractStruct { + get deleted () { + return true + } + + delete () {} + + /** + * @param {Skip} right + * @return {boolean} + */ + mergeWith (right) { + if (this.constructor !== right.constructor) { + return false + } + this.length += right.length + return true + } + + /** + * @param {Transaction} transaction + * @param {number} offset + */ + integrate (transaction, offset) { + // skip structs cannot be integrated + error.unexpectedCase() + } + + /** + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder + * @param {number} offset + */ + write (encoder, offset) { + encoder.writeInfo(structSkipRefNumber) + // write as VarUint because Skips can't make use of predictable length-encoding + encoding.writeVarUint(encoder.restEncoder, this.length - offset) + } + + /** + * @param {Transaction} transaction + * @param {StructStore} store + * @return {null | number} + */ + getMissing (transaction, store) { + return null + } +} diff --git a/src/types/AbstractType.js b/src/types/AbstractType.js index 3b6aa581..76a88a33 100644 --- a/src/types/AbstractType.js +++ b/src/types/AbstractType.js @@ -11,7 +11,7 @@ import { ContentAny, ContentBinary, getItemCleanStart, - ContentDoc, YText, YArray, AbstractUpdateEncoder, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line + ContentDoc, YText, YArray, UpdateEncoderV1, UpdateEncoderV2, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line } from '../internals.js' import * as map from 'lib0/map.js' @@ -324,7 +324,7 @@ export class AbstractType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { } diff --git a/src/types/YArray.js b/src/types/YArray.js index cadb16b5..cefc908b 100644 --- a/src/types/YArray.js +++ b/src/types/YArray.js @@ -15,7 +15,7 @@ import { YArrayRefID, callTypeObservers, transact, - ArraySearchMarker, AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, Transaction, Item // eslint-disable-line + ArraySearchMarker, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line } from '../internals.js' import { typeListSlice } from './AbstractType.js' @@ -241,7 +241,7 @@ export class YArray extends AbstractType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | 
UpdateEncoderV2} encoder */ _write (encoder) { encoder.writeTypeRef(YArrayRefID) @@ -249,7 +249,7 @@ export class YArray extends AbstractType { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * * @private * @function diff --git a/src/types/YMap.js b/src/types/YMap.js index efaa16f5..2cb0342a 100644 --- a/src/types/YMap.js +++ b/src/types/YMap.js @@ -14,7 +14,7 @@ import { YMapRefID, callTypeObservers, transact, - AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, Transaction, Item // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line } from '../internals.js' import * as iterator from 'lib0/iterator.js' @@ -238,7 +238,7 @@ export class YMap extends AbstractType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { encoder.writeTypeRef(YMapRefID) @@ -246,7 +246,7 @@ export class YMap extends AbstractType { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * * @private * @function diff --git a/src/types/YText.js b/src/types/YText.js index b5694507..638846fa 100644 --- a/src/types/YText.js +++ b/src/types/YText.js @@ -26,7 +26,7 @@ import { typeMapGet, typeMapGetAll, updateMarkerChanges, - ArraySearchMarker, AbstractUpdateDecoder, AbstractUpdateEncoder, ID, Doc, Item, Snapshot, Transaction // eslint-disable-line + ArraySearchMarker, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ID, Doc, Item, Snapshot, Transaction // eslint-disable-line } from '../internals.js' import * as object from 'lib0/object.js' @@ -1205,7 +1205,7 @@ export class YText extends AbstractType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { encoder.writeTypeRef(YTextRefID) @@ -1213,7 +1213,7 @@ export class YText extends AbstractType { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YText} * * @private diff --git a/src/types/YXmlElement.js b/src/types/YXmlElement.js index 3b64fa51..b3ce6ef9 100644 --- a/src/types/YXmlElement.js +++ b/src/types/YXmlElement.js @@ -8,7 +8,7 @@ import { typeMapGetAll, typeListForEach, YXmlElementRefID, - YXmlText, ContentType, AbstractType, AbstractUpdateDecoder, AbstractUpdateEncoder, Snapshot, Doc, Item // eslint-disable-line + YXmlText, ContentType, AbstractType, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Snapshot, Doc, Item // eslint-disable-line } from '../internals.js' /** @@ -208,7 +208,7 @@ export class YXmlElement extends YXmlFragment { * * This is called when this Item is sent to a remote peer. * - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. 
*/ _write (encoder) { encoder.writeTypeRef(YXmlElementRefID) @@ -217,7 +217,7 @@ export class YXmlElement extends YXmlFragment { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YXmlElement} * * @function diff --git a/src/types/YXmlFragment.js b/src/types/YXmlFragment.js index 21c6a3a0..1fd060b0 100644 --- a/src/types/YXmlFragment.js +++ b/src/types/YXmlFragment.js @@ -17,7 +17,7 @@ import { transact, typeListGet, typeListSlice, - AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, ContentType, Transaction, Item, YXmlText, YXmlHook, Snapshot // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, ContentType, Transaction, Item, YXmlText, YXmlHook, Snapshot // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -410,7 +410,7 @@ export class YXmlFragment extends AbstractType { * * This is called when this Item is sent to a remote peer. * - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. */ _write (encoder) { encoder.writeTypeRef(YXmlFragmentRefID) @@ -418,7 +418,7 @@ export class YXmlFragment extends AbstractType { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YXmlFragment} * * @private diff --git a/src/types/YXmlHook.js b/src/types/YXmlHook.js index e28f70a3..be8c759b 100644 --- a/src/types/YXmlHook.js +++ b/src/types/YXmlHook.js @@ -2,7 +2,7 @@ import { YMap, YXmlHookRefID, - AbstractUpdateDecoder, AbstractUpdateEncoder // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line } from '../internals.js' /** @@ -76,7 +76,7 @@ export class YXmlHook extends YMap { * * This is called when this Item is sent to a remote peer. * - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. 
*/ _write (encoder) { encoder.writeTypeRef(YXmlHookRefID) @@ -85,7 +85,7 @@ export class YXmlHook extends YMap { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YXmlHook} * * @private diff --git a/src/types/YXmlText.js b/src/types/YXmlText.js index dd8d892d..470ce70f 100644 --- a/src/types/YXmlText.js +++ b/src/types/YXmlText.js @@ -2,7 +2,7 @@ import { YText, YXmlTextRefID, - ContentType, YXmlElement, AbstractUpdateDecoder, AbstractUpdateEncoder // eslint-disable-line + ContentType, YXmlElement, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, // eslint-disable-line } from '../internals.js' /** @@ -104,7 +104,7 @@ export class YXmlText extends YText { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { encoder.writeTypeRef(YXmlTextRefID) @@ -112,7 +112,7 @@ export class YXmlText extends YText { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YXmlText} * * @private diff --git a/src/utils/DeleteSet.js b/src/utils/DeleteSet.js index 658f5087..d7251faa 100644 --- a/src/utils/DeleteSet.js +++ b/src/utils/DeleteSet.js @@ -4,7 +4,8 @@ import { getState, splitItem, iterateStructs, - AbstractUpdateDecoder, AbstractDSDecoder, AbstractDSEncoder, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line + UpdateEncoderV2, + DSDecoderV1, DSEncoderV1, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line } from '../internals.js' import * as array from 'lib0/array.js' @@ -121,8 +122,8 @@ export const sortAndMergeDeleteSet = ds => { for (i = 1, j = 1; i < dels.length; i++) { const left = dels[j - 1] const right = dels[i] - if (left.clock + left.len === right.clock) { - left.len += right.len + if (left.clock + left.len >= right.clock) { + left.len = math.max(left.len, right.clock + right.len - left.clock) } else { if (j < i) { dels[j] = right @@ -210,7 +211,7 @@ export const createDeleteSetFromStructStore = ss => { } /** - * @param {AbstractDSEncoder} encoder + * @param {DSEncoderV1 | DSEncoderV2} encoder * @param {DeleteSet} ds * * @private @@ -232,7 +233,7 @@ export const writeDeleteSet = (encoder, ds) => { } /** - * @param {AbstractDSDecoder} decoder + * @param {DSDecoderV1 | DSDecoderV2} decoder * @return {DeleteSet} * * @private @@ -260,9 +261,10 @@ export const readDeleteSet = decoder => { */ /** - * @param {AbstractDSDecoder} decoder + * @param {DSDecoderV1 | DSDecoderV2} decoder * @param {Transaction} transaction * @param {StructStore} store + * @return {Uint8Array|null} Returns a v2 update containing all deletes that couldn't be applied yet; or null if all deletes were applied successfully. 
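+ * readUpdateV2 keeps the returned update on `store.pendingDs` and re-applies it whenever another update is read, so these deletes take effect once the missing structs arrive.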
* * @private * @function @@ -315,9 +317,10 @@ export const readAndApplyDeleteSet = (decoder, transaction, store) => { } } if (unappliedDS.clients.size > 0) { - // TODO: no need for encoding+decoding ds anymore - const unappliedDSEncoder = new DSEncoderV2() - writeDeleteSet(unappliedDSEncoder, unappliedDS) - store.pendingDeleteReaders.push(new DSDecoderV2(decoding.createDecoder((unappliedDSEncoder.toUint8Array())))) + const ds = new UpdateEncoderV2() + encoding.writeVarUint(ds.restEncoder, 0) // encode 0 structs + writeDeleteSet(ds, unappliedDS) + return ds.toUint8Array() } + return null } diff --git a/src/utils/Snapshot.js b/src/utils/Snapshot.js index ccd923e8..50c64772 100644 --- a/src/utils/Snapshot.js +++ b/src/utils/Snapshot.js @@ -14,9 +14,8 @@ import { getState, findIndexSS, UpdateEncoderV2, - DefaultDSEncoder, applyUpdateV2, - AbstractDSDecoder, AbstractDSEncoder, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line + DSEncoderV1, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line } from '../internals.js' import * as map from 'lib0/map.js' @@ -78,7 +77,7 @@ export const equalSnapshots = (snap1, snap2) => { /** * @param {Snapshot} snapshot - * @param {AbstractDSEncoder} [encoder] + * @param {DSEncoderV1 | DSEncoderV2} [encoder] * @return {Uint8Array} */ export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => { @@ -91,11 +90,11 @@ export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => { * @param {Snapshot} snapshot * @return {Uint8Array} */ -export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DefaultDSEncoder()) +export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DSEncoderV1()) /** * @param {Uint8Array} buf - * @param {AbstractDSDecoder} [decoder] + * @param {DSDecoderV1 | DSDecoderV2} [decoder] * @return {Snapshot} */ export const decodeSnapshotV2 = (buf, decoder = new DSDecoderV2(decoding.createDecoder(buf))) => { diff --git a/src/utils/StructStore.js b/src/utils/StructStore.js index f2c7fd8a..4a773127 100644 --- a/src/utils/StructStore.js +++ b/src/utils/StructStore.js @@ -15,24 +15,13 @@ export class StructStore { */ this.clients = new Map() /** - * Store incompleted struct reads here - * `i` denotes to the next read operation - * We could shift the array of refs instead, but shift is incredible - * slow in Chrome for arrays with more than 100k elements - * @see tryResumePendingStructRefs - * @type {Map}>} + * @type {null | { missing: Map, update: Uint8Array }} */ - this.pendingClientsStructRefs = new Map() + this.pendingStructs = null /** - * Stack of pending structs waiting for struct dependencies - * Maximum length of stack is structReaders.size - * @type {Array} + * @type {null | Uint8Array} */ - this.pendingStack = [] - /** - * @type {Array} - */ - this.pendingDeleteReaders = [] + this.pendingDs = null } } diff --git a/src/utils/Transaction.js b/src/utils/Transaction.js index 10867038..1733b675 100644 --- a/src/utils/Transaction.js +++ b/src/utils/Transaction.js @@ -11,7 +11,7 @@ import { Item, generateNewClientId, createID, - AbstractUpdateEncoder, GC, StructStore, UpdateEncoderV2, DefaultUpdateEncoder, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, GC, StructStore, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line } from '../internals.js' import * as map from 'lib0/map.js' @@ -118,7 +118,7 @@ export class 
Transaction { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Transaction} transaction * @return {boolean} Whether data was written. */ @@ -337,7 +337,7 @@ const cleanupTransactions = (transactionCleanups, i) => { // @todo Merge all the transactions into one and provide send the data as a single update message doc.emit('afterTransactionCleanup', [transaction, doc]) if (doc._observers.has('update')) { - const encoder = new DefaultUpdateEncoder() + const encoder = new UpdateEncoderV1() const hasContent = writeUpdateMessageFromTransaction(encoder, transaction) if (hasContent) { doc.emit('update', [encoder.toUint8Array(), transaction.origin, doc, transaction]) diff --git a/src/utils/UpdateDecoder.js b/src/utils/UpdateDecoder.js index 372c7749..74568650 100644 --- a/src/utils/UpdateDecoder.js +++ b/src/utils/UpdateDecoder.js @@ -1,129 +1,9 @@ import * as buffer from 'lib0/buffer.js' -import * as error from 'lib0/error.js' import * as decoding from 'lib0/decoding.js' import { ID, createID } from '../internals.js' -export class AbstractDSDecoder { - /** - * @param {decoding.Decoder} decoder - */ - constructor (decoder) { - this.restDecoder = decoder - error.methodUnimplemented() - } - - resetDsCurVal () { } - - /** - * @return {number} - */ - readDsClock () { - error.methodUnimplemented() - } - - /** - * @return {number} - */ - readDsLen () { - error.methodUnimplemented() - } -} - -export class AbstractUpdateDecoder extends AbstractDSDecoder { - /** - * @return {ID} - */ - readLeftID () { - error.methodUnimplemented() - } - - /** - * @return {ID} - */ - readRightID () { - error.methodUnimplemented() - } - - /** - * Read the next client id. - * Use this in favor of readID whenever possible to reduce the number of objects created. - * - * @return {number} - */ - readClient () { - error.methodUnimplemented() - } - - /** - * @return {number} info An unsigned 8-bit integer - */ - readInfo () { - error.methodUnimplemented() - } - - /** - * @return {string} - */ - readString () { - error.methodUnimplemented() - } - - /** - * @return {boolean} isKey - */ - readParentInfo () { - error.methodUnimplemented() - } - - /** - * @return {number} info An unsigned 8-bit integer - */ - readTypeRef () { - error.methodUnimplemented() - } - - /** - * Write len of a struct - well suited for Opt RLE encoder. - * - * @return {number} len - */ - readLen () { - error.methodUnimplemented() - } - - /** - * @return {any} - */ - readAny () { - error.methodUnimplemented() - } - - /** - * @return {Uint8Array} - */ - readBuf () { - error.methodUnimplemented() - } - - /** - * Legacy implementation uses JSON parse. We use any-decoding in v2. 
- * - * @return {any} - */ - readJSON () { - error.methodUnimplemented() - } - - /** - * @return {string} - */ - readKey () { - error.methodUnimplemented() - } -} - export class DSDecoderV1 { /** * @param {decoding.Decoder} decoder @@ -247,6 +127,9 @@ export class DSDecoderV2 { * @param {decoding.Decoder} decoder */ constructor (decoder) { + /** + * @private + */ this.dsCurrVal = 0 this.restDecoder = decoder } @@ -255,11 +138,17 @@ export class DSDecoderV2 { this.dsCurrVal = 0 } + /** + * @return {number} + */ readDsClock () { this.dsCurrVal += decoding.readVarUint(this.restDecoder) return this.dsCurrVal } + /** + * @return {number} + */ readDsLen () { const diff = decoding.readVarUint(this.restDecoder) + 1 this.dsCurrVal += diff @@ -280,7 +169,7 @@ export class UpdateDecoderV2 extends DSDecoderV2 { * @type {Array} */ this.keys = [] - decoding.readUint8(decoder) // read feature flag - currently unused + decoding.readVarUint(decoder) // read feature flag - currently unused this.keyClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder)) this.clientDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder)) this.leftClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder)) diff --git a/src/utils/UpdateEncoder.js b/src/utils/UpdateEncoder.js index eb13fdc3..764cf7f0 100644 --- a/src/utils/UpdateEncoder.js +++ b/src/utils/UpdateEncoder.js @@ -6,110 +6,9 @@ import { ID // eslint-disable-line } from '../internals.js' -export class AbstractDSEncoder { - constructor () { - this.restEncoder = encoding.createEncoder() - } - - /** - * @return {Uint8Array} - */ - toUint8Array () { - error.methodUnimplemented() - } - - /** - * Resets the ds value to 0. - * The v2 encoder uses this information to reset the initial diff value. - */ - resetDsCurVal () { } - - /** - * @param {number} clock - */ - writeDsClock (clock) { } - - /** - * @param {number} len - */ - writeDsLen (len) { } -} - -export class AbstractUpdateEncoder extends AbstractDSEncoder { - /** - * @return {Uint8Array} - */ - toUint8Array () { - error.methodUnimplemented() - } - - /** - * @param {ID} id - */ - writeLeftID (id) { } - - /** - * @param {ID} id - */ - writeRightID (id) { } - - /** - * Use writeClient and writeClock instead of writeID if possible. - * @param {number} client - */ - writeClient (client) { } - - /** - * @param {number} info An unsigned 8-bit integer - */ - writeInfo (info) { } - - /** - * @param {string} s - */ - writeString (s) { } - - /** - * @param {boolean} isYKey - */ - writeParentInfo (isYKey) { } - - /** - * @param {number} info An unsigned 8-bit integer - */ - writeTypeRef (info) { } - - /** - * Write len of a struct - well suited for Opt RLE encoder. 
- * - * @param {number} len - */ - writeLen (len) { } - - /** - * @param {any} any - */ - writeAny (any) { } - - /** - * @param {Uint8Array} buf - */ - writeBuf (buf) { } - - /** - * @param {any} embed - */ - writeJSON (embed) { } - - /** - * @param {string} key - */ - writeKey (key) { } -} - export class DSEncoderV1 { constructor () { - this.restEncoder = new encoding.Encoder() + this.restEncoder = encoding.createEncoder() } toUint8Array () { @@ -228,7 +127,7 @@ export class UpdateEncoderV1 extends DSEncoderV1 { export class DSEncoderV2 { constructor () { - this.restEncoder = new encoding.Encoder() // encodes all the rest / non-optimized + this.restEncoder = encoding.createEncoder() // encodes all the rest / non-optimized this.dsCurrVal = 0 } @@ -288,7 +187,7 @@ export class UpdateEncoderV2 extends DSEncoderV2 { toUint8Array () { const encoder = encoding.createEncoder() - encoding.writeUint8(encoder, 0) // this is a feature flag that we might use in the future + encoding.writeVarUint(encoder, 0) // this is a feature flag that we might use in the future encoding.writeVarUint8Array(encoder, this.keyClockEncoder.toUint8Array()) encoding.writeVarUint8Array(encoder, this.clientEncoder.toUint8Array()) encoding.writeVarUint8Array(encoder, this.leftClockEncoder.toUint8Array()) diff --git a/src/utils/encoding.js b/src/utils/encoding.js index 9efd16ee..c81bdab8 100644 --- a/src/utils/encoding.js +++ b/src/utils/encoding.js @@ -29,39 +29,23 @@ import { UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, - DSDecoderV2, DSEncoderV2, DSDecoderV1, DSEncoderV1, - AbstractDSEncoder, AbstractDSDecoder, AbstractUpdateEncoder, AbstractUpdateDecoder, AbstractContent, Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line + mergeUpdatesV2, + Skip, + diffUpdateV2, + DSDecoderV2, Doc, Transaction, GC, Item, StructStore // eslint-disable-line } from '../internals.js' import * as encoding from 'lib0/encoding.js' import * as decoding from 'lib0/decoding.js' import * as binary from 'lib0/binary.js' import * as map from 'lib0/map.js' - -export let DefaultDSEncoder = DSEncoderV1 -export let DefaultDSDecoder = DSDecoderV1 -export let DefaultUpdateEncoder = UpdateEncoderV1 -export let DefaultUpdateDecoder = UpdateDecoderV1 - -export const useV1Encoding = () => { - DefaultDSEncoder = DSEncoderV1 - DefaultDSDecoder = DSDecoderV1 - DefaultUpdateEncoder = UpdateEncoderV1 - DefaultUpdateDecoder = UpdateDecoderV1 -} - -export const useV2Encoding = () => { - DefaultDSEncoder = DSEncoderV2 - DefaultDSDecoder = DSDecoderV2 - DefaultUpdateEncoder = UpdateEncoderV2 - DefaultUpdateDecoder = UpdateDecoderV2 -} +import * as math from 'lib0/math.js' /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Array} structs All structs by `client` * @param {number} client * @param {number} clock write structs starting with `ID(client,clock)` @@ -70,6 +54,7 @@ export const useV2Encoding = () => { */ const writeStructs = (encoder, structs, client, clock) => { // write first id + clock = math.max(clock, structs[0].id.clock) // make sure the first id exists const startNewStructs = findIndexSS(structs, clock) // write # encoded structs encoding.writeVarUint(encoder.restEncoder, structs.length - startNewStructs) @@ -84,7 +69,7 @@ const writeStructs = (encoder, structs, client, clock) => { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {StructStore} store * @param {Map} _sm * @@ -116,15 +101,18 @@ export const 
writeClientsStructs = (encoder, store, _sm) => { } /** - * @param {AbstractUpdateDecoder} decoder The decoder object to read data from. - * @param {Map>} clientRefs + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from. * @param {Doc} doc - * @return {Map>} + * @return {Map }>} * * @private * @function */ -export const readClientsStructRefs = (decoder, clientRefs, doc) => { +export const readClientsStructRefs = (decoder, doc) => { + /** + * @type {Map }>} + */ + const clientRefs = map.create() const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder) for (let i = 0; i < numOfStateUpdates; i++) { const numberOfStructs = decoding.readVarUint(decoder.restDecoder) @@ -135,61 +123,72 @@ export const readClientsStructRefs = (decoder, clientRefs, doc) => { const client = decoder.readClient() let clock = decoding.readVarUint(decoder.restDecoder) // const start = performance.now() - clientRefs.set(client, refs) + clientRefs.set(client, { i: 0, refs }) for (let i = 0; i < numberOfStructs; i++) { const info = decoder.readInfo() - if ((binary.BITS5 & info) !== 0) { - /** - * The optimized implementation doesn't use any variables because inlining variables is faster. - * Below a non-optimized version is shown that implements the basic algorithm with - * a few comments - */ - const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 - // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` - // and we read the next string as parentYKey. - // It indicates how we store/retrieve parent from `y.share` - // @type {string|null} - const struct = new Item( - createID(client, clock), - null, // leftd - (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin - null, // right - (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin - cantCopyParentInfo ? (decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent - cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub - readItemContent(decoder, info) // item content - ) - /* A non-optimized implementation of the above algorithm: + switch (binary.BITS5 & info) { + case 0: { // GC + const len = decoder.readLen() + refs[i] = new GC(createID(client, clock), len) + clock += len + break + } + case 10: { // Skip Struct (nothing to apply) + // @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing. + const len = decoding.readVarUint(decoder.restDecoder) + refs[i] = new Skip(createID(client, clock), len) + clock += len + break + } + default: { // Item with content + /** + * The optimized implementation doesn't use any variables because inlining variables is faster. + * Below a non-optimized version is shown that implements the basic algorithm with + * a few comments + */ + const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 + // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` + // and we read the next string as parentYKey. + // It indicates how we store/retrieve parent from `y.share` + // @type {string|null} + const struct = new Item( + createID(client, clock), + null, // leftd + (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin + null, // right + (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin + cantCopyParentInfo ? 
(decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent + cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub + readItemContent(decoder, info) // item content + ) + /* A non-optimized implementation of the above algorithm: - // The item that was originally to the left of this item. - const origin = (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null - // The item that was originally to the right of this item. - const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null - const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 - const hasParentYKey = cantCopyParentInfo ? decoder.readParentInfo() : false - // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` - // and we read the next string as parentYKey. - // It indicates how we store/retrieve parent from `y.share` - // @type {string|null} - const parentYKey = cantCopyParentInfo && hasParentYKey ? decoder.readString() : null + // The item that was originally to the left of this item. + const origin = (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null + // The item that was originally to the right of this item. + const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null + const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 + const hasParentYKey = cantCopyParentInfo ? decoder.readParentInfo() : false + // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` + // and we read the next string as parentYKey. + // It indicates how we store/retrieve parent from `y.share` + // @type {string|null} + const parentYKey = cantCopyParentInfo && hasParentYKey ? decoder.readString() : null - const struct = new Item( - createID(client, clock), - null, // leftd - origin, // origin - null, // right - rightOrigin, // right origin - cantCopyParentInfo && !hasParentYKey ? decoder.readLeftID() : (parentYKey !== null ? doc.get(parentYKey) : null), // parent - cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub - readItemContent(decoder, info) // item content - ) - */ - refs[i] = struct - clock += struct.length - } else { - const len = decoder.readLen() - refs[i] = new GC(createID(client, clock), len) - clock += len + const struct = new Item( + createID(client, clock), + null, // leftd + origin, // origin + null, // right + rightOrigin, // right origin + cantCopyParentInfo && !hasParentYKey ? decoder.readLeftID() : (parentYKey !== null ? doc.get(parentYKey) : null), // parent + cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? 
decoder.readString() : null, // parentSub + readItemContent(decoder, info) // item content + ) + */ + refs[i] = struct + clock += struct.length + } } } // console.log('time to read: ', performance.now() - start) // @todo remove @@ -218,26 +217,32 @@ export const readClientsStructRefs = (decoder, clientRefs, doc) => { * * @param {Transaction} transaction * @param {StructStore} store + * @param {Map} clientsStructRefs + * @return { null | { update: Uint8Array, missing: Map } } * * @private * @function */ -const resumeStructIntegration = (transaction, store) => { - const stack = store.pendingStack // @todo don't forget to append stackhead at the end - const clientsStructRefs = store.pendingClientsStructRefs +const integrateStructs = (transaction, store, clientsStructRefs) => { + /** + * @type {Array} + */ + const stack = [] // sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user. - const clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b) + let clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b) if (clientsStructRefsIds.length === 0) { - return + return null } const getNextStructTarget = () => { + if (clientsStructRefsIds.length === 0) { + return null + } let nextStructsTarget = /** @type {{i:number,refs:Array}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1])) while (nextStructsTarget.refs.length === nextStructsTarget.i) { clientsStructRefsIds.pop() if (clientsStructRefsIds.length > 0) { nextStructsTarget = /** @type {{i:number,refs:Array}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1])) } else { - store.pendingClientsStructRefs.clear() return null } } @@ -245,98 +250,115 @@ const resumeStructIntegration = (transaction, store) => { } let curStructsTarget = getNextStructTarget() if (curStructsTarget === null && stack.length === 0) { - return + return null + } + + /** + * @type {StructStore} + */ + const restStructs = new StructStore() + const missingSV = new Map() + /** + * @param {number} client + * @param {number} clock + */ + const updateMissingSv = (client, clock) => { + const mclock = missingSV.get(client) + if (mclock == null || mclock > clock) { + missingSV.set(client, clock) + } } /** * @type {GC|Item} */ - let stackHead = stack.length > 0 - ? /** @type {GC|Item} */ (stack.pop()) - : /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++] + let stackHead = /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++] // caching the state because it is used very often const state = new Map() + + const addStackToRestSS = () => { + for (const item of stack) { + const client = item.id.client + const unapplicableItems = clientsStructRefs.get(client) + if (unapplicableItems) { + // decrement because we weren't able to apply previous operation + unapplicableItems.i-- + restStructs.clients.set(client, unapplicableItems.refs.slice(unapplicableItems.i)) + clientsStructRefs.delete(client) + unapplicableItems.i = 0 + unapplicableItems.refs = [] + } else { + // item was the last item on clientsStructRefs and the field was already cleared. 
Add item to restStructs and continue + restStructs.clients.set(client, [item]) + } + // remove client from clientsStructRefsIds to prevent users from applying the same update again + clientsStructRefsIds = clientsStructRefsIds.filter(c => c !== client) + } + stack.length = 0 + } + // iterate over all struct readers until we are done while (true) { - const localClock = map.setIfUndefined(state, stackHead.id.client, () => getState(store, stackHead.id.client)) - const offset = stackHead.id.clock < localClock ? localClock - stackHead.id.clock : 0 - if (stackHead.id.clock + offset !== localClock) { - // A previous message from this client is missing - // check if there is a pending structRef with a smaller clock and switch them - /** - * @type {{ refs: Array, i: number }} - */ - const structRefs = clientsStructRefs.get(stackHead.id.client) || { refs: [], i: 0 } - if (structRefs.refs.length !== structRefs.i) { - const r = structRefs.refs[structRefs.i] - if (r.id.clock < stackHead.id.clock) { - // put ref with smaller clock on stack instead and continue - structRefs.refs[structRefs.i] = stackHead - stackHead = r - // sort the set because this approach might bring the list out of order - structRefs.refs = structRefs.refs.slice(structRefs.i).sort((r1, r2) => r1.id.clock - r2.id.clock) - structRefs.i = 0 - continue - } - } - // wait until missing struct is available - stack.push(stackHead) - return - } - const missing = stackHead.getMissing(transaction, store) - if (missing === null) { - if (offset === 0 || offset < stackHead.length) { - stackHead.integrate(transaction, offset) - state.set(stackHead.id.client, stackHead.id.clock + stackHead.length) - } - // iterate to next stackHead - if (stack.length > 0) { - stackHead = /** @type {GC|Item} */ (stack.pop()) - } else if (curStructsTarget !== null && curStructsTarget.i < curStructsTarget.refs.length) { - stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]) - } else { - curStructsTarget = getNextStructTarget() - if (curStructsTarget === null) { - // we are done! - break - } else { - stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]) - } - } - } else { - // get the struct reader that has the missing struct - /** - * @type {{ refs: Array, i: number }} - */ - const structRefs = clientsStructRefs.get(missing) || { refs: [], i: 0 } - if (structRefs.refs.length === structRefs.i) { - // This update message causally depends on another update message. 
+ if (stackHead.constructor !== Skip) { + const localClock = map.setIfUndefined(state, stackHead.id.client, () => getState(store, stackHead.id.client)) + const offset = localClock - stackHead.id.clock + if (offset < 0) { + // update from the same client is missing stack.push(stackHead) - return + updateMissingSv(stackHead.id.client, stackHead.id.clock - 1) + // hid a dead wall, add all items from stack to restSS + addStackToRestSS() + } else { + const missing = stackHead.getMissing(transaction, store) + if (missing !== null) { + stack.push(stackHead) + // get the struct reader that has the missing struct + /** + * @type {{ refs: Array, i: number }} + */ + const structRefs = clientsStructRefs.get(/** @type {number} */ (missing)) || { refs: [], i: 0 } + if (structRefs.refs.length === structRefs.i) { + // This update message causally depends on another update message that doesn't exist yet + updateMissingSv(/** @type {number} */ (missing), getState(store, missing)) + addStackToRestSS() + } else { + stackHead = structRefs.refs[structRefs.i++] + continue + } + } else if (offset === 0 || offset < stackHead.length) { + // all fine, apply the stackhead + stackHead.integrate(transaction, offset) + state.set(stackHead.id.client, stackHead.id.clock + stackHead.length) + } + } + } + // iterate to next stackHead + if (stack.length > 0) { + stackHead = /** @type {GC|Item} */ (stack.pop()) + } else if (curStructsTarget !== null && curStructsTarget.i < curStructsTarget.refs.length) { + stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]) + } else { + curStructsTarget = getNextStructTarget() + if (curStructsTarget === null) { + // we are done! + break + } else { + stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]) } - stack.push(stackHead) - stackHead = structRefs.refs[structRefs.i++] } } - store.pendingClientsStructRefs.clear() -} - -/** - * @param {Transaction} transaction - * @param {StructStore} store - * - * @private - * @function - */ -export const tryResumePendingDeleteReaders = (transaction, store) => { - const pendingReaders = store.pendingDeleteReaders - store.pendingDeleteReaders = [] - for (let i = 0; i < pendingReaders.length; i++) { - readAndApplyDeleteSet(pendingReaders[i], transaction, store) + if (restStructs.clients.size > 0) { + const encoder = new UpdateEncoderV2() + writeClientsStructs(encoder, restStructs, new Map()) + // write empty deleteset + // writeDeleteSet(encoder, new DeleteSet()) + encoding.writeVarUint(encoder.restEncoder, 0) // => no need for an extra function call, just write 0 deletes + return { missing: missingSV, update: encoder.toUint8Array() } } + return null } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Transaction} transaction * * @private @@ -344,78 +366,6 @@ export const tryResumePendingDeleteReaders = (transaction, store) => { */ export const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState) -/** - * @param {StructStore} store - * @param {Map>} clientsStructsRefs - * - * @private - * @function - */ -const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => { - const pendingClientsStructRefs = store.pendingClientsStructRefs - clientsStructsRefs.forEach((structRefs, client) => { - const pendingStructRefs = pendingClientsStructRefs.get(client) - if (pendingStructRefs === undefined) { - pendingClientsStructRefs.set(client, { refs: structRefs, i: 0 
}) - } else { - // merge into existing structRefs - const merged = pendingStructRefs.i > 0 ? pendingStructRefs.refs.slice(pendingStructRefs.i) : pendingStructRefs.refs - for (let i = 0; i < structRefs.length; i++) { - merged.push(structRefs[i]) - } - pendingStructRefs.i = 0 - pendingStructRefs.refs = merged.sort((r1, r2) => r1.id.clock - r2.id.clock) - } - }) -} - -/** - * @param {Map,i:number}>} pendingClientsStructRefs - */ -const cleanupPendingStructs = pendingClientsStructRefs => { - // cleanup pendingClientsStructs if not fully finished - pendingClientsStructRefs.forEach((refs, client) => { - if (refs.i === refs.refs.length) { - pendingClientsStructRefs.delete(client) - } else { - refs.refs.splice(0, refs.i) - refs.i = 0 - } - }) -} - -/** - * Read the next Item in a Decoder and fill this Item with the read data. - * - * This is called when data is received from a remote peer. - * - * @param {AbstractUpdateDecoder} decoder The decoder object to read data from. - * @param {Transaction} transaction - * @param {StructStore} store - * - * @private - * @function - */ -export const readStructs = (decoder, transaction, store) => { - const clientsStructRefs = new Map() - // let start = performance.now() - readClientsStructRefs(decoder, clientsStructRefs, transaction.doc) - // console.log('time to read structs: ', performance.now() - start) // @todo remove - // start = performance.now() - mergeReadStructsIntoPendingReads(store, clientsStructRefs) - // console.log('time to merge: ', performance.now() - start) // @todo remove - // start = performance.now() - resumeStructIntegration(transaction, store) - // console.log('time to integrate: ', performance.now() - start) // @todo remove - // start = performance.now() - cleanupPendingStructs(store.pendingClientsStructRefs) - // console.log('time to cleanup: ', performance.now() - start) // @todo remove - // start = performance.now() - tryResumePendingDeleteReaders(transaction, store) - // console.log('time to resume delete readers: ', performance.now() - start) // @todo remove - // start = performance.now() -} - /** * Read and apply a document update. 
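+ * Structs whose dependencies are not yet available are parked on `store.pendingStructs` (an encoded update plus the missing state vector) and are integrated automatically once a later update closes the gap.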
* @@ -424,14 +374,75 @@ export const readStructs = (decoder, transaction, store) => { * @param {decoding.Decoder} decoder * @param {Doc} ydoc * @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))` - * @param {AbstractUpdateDecoder} [structDecoder] + * @param {UpdateDecoderV1 | UpdateDecoderV2} [structDecoder] * * @function */ export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = new UpdateDecoderV2(decoder)) => transact(ydoc, transaction => { - readStructs(structDecoder, transaction, ydoc.store) - readAndApplyDeleteSet(structDecoder, transaction, ydoc.store) + let retry = false + const doc = transaction.doc + const store = doc.store + // let start = performance.now() + const ss = readClientsStructRefs(structDecoder, doc) + // console.log('time to read structs: ', performance.now() - start) // @todo remove + // start = performance.now() + // console.log('time to merge: ', performance.now() - start) // @todo remove + // start = performance.now() + const restStructs = integrateStructs(transaction, store, ss) + const pending = store.pendingStructs + if (pending) { + // check if we can apply something + for (const [client, clock] of pending.missing) { + if (clock < getState(store, client)) { + retry = true + break + } + } + if (restStructs) { + // merge restStructs into store.pending + for (const [client, clock] of restStructs.missing) { + const mclock = pending.missing.get(client) + if (mclock == null || mclock > clock) { + pending.missing.set(client, clock) + } + } + pending.update = mergeUpdatesV2([pending.update, restStructs.update]) + } + } else { + store.pendingStructs = restStructs + } + // console.log('time to integrate: ', performance.now() - start) // @todo remove + // start = performance.now() + const dsRest = readAndApplyDeleteSet(structDecoder, transaction, store) + if (store.pendingDs) { + // @todo we could make a lower-bound state-vector check as we do above + const pendingDSUpdate = new UpdateDecoderV2(decoding.createDecoder(store.pendingDs)) + decoding.readVarUint(pendingDSUpdate.restDecoder) // read 0 structs, because we only encode deletes in pendingdsupdate + const dsRest2 = readAndApplyDeleteSet(pendingDSUpdate, transaction, store) + if (dsRest && dsRest2) { + // case 1: ds1 != null && ds2 != null + store.pendingDs = mergeUpdatesV2([dsRest, dsRest2]) + } else { + // case 2: ds1 != null + // case 3: ds2 != null + // case 4: ds1 == null && ds2 == null + store.pendingDs = dsRest || dsRest2 + } + } else { + // Either dsRest == null && pendingDs == null OR dsRest != null + store.pendingDs = dsRest + } + // console.log('time to cleanup: ', performance.now() - start) // @todo remove + // start = performance.now() + + // console.log('time to resume delete readers: ', performance.now() - start) // @todo remove + // start = performance.now() + if (retry) { + const update = /** @type {{update: Uint8Array}} */ (store.pendingStructs).update + store.pendingStructs = null + applyUpdateV2(transaction.doc, update) + } }, transactionOrigin, false) /** @@ -445,7 +456,7 @@ export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = n * * @function */ -export const readUpdate = (decoder, ydoc, transactionOrigin) => readUpdateV2(decoder, ydoc, transactionOrigin, new DefaultUpdateDecoder(decoder)) +export const readUpdate = (decoder, ydoc, transactionOrigin) => readUpdateV2(decoder, ydoc, transactionOrigin, new UpdateDecoderV1(decoder)) /** * Apply a document update created by, for example, 
`y.on('update', update => ..)` or `update = encodeStateAsUpdate()`. @@ -475,13 +486,13 @@ export const applyUpdateV2 = (ydoc, update, transactionOrigin, YDecoder = Update * * @function */ -export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, DefaultUpdateDecoder) +export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, UpdateDecoderV1) /** * Write all the document as a single update message. If you specify the state of the remote client (`targetStateVector`) it will * only write the operations that are missing. * - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Doc} doc * @param {Map<number,number>} [targetStateVector] The state of the target that receives the update. Leave empty to write all known structs * * @function */ @@ -500,15 +511,29 @@ export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map()) * * @param {Doc} doc * @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs - * @param {AbstractUpdateEncoder} [encoder] + * @param {UpdateEncoderV1 | UpdateEncoderV2} [encoder] * @return {Uint8Array} * * @function */ -export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector, encoder = new UpdateEncoderV2()) => { - const targetStateVector = encodedTargetStateVector == null ? new Map() : decodeStateVector(encodedTargetStateVector) +export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector = new Uint8Array([0]), encoder = new UpdateEncoderV2()) => { + const targetStateVector = decodeStateVector(encodedTargetStateVector) writeStateAsUpdate(encoder, doc, targetStateVector) - return encoder.toUint8Array() + const updates = [encoder.toUint8Array()] + // also add the pending updates (if there are any) + // @todo support different encoders + if (encoder.constructor === UpdateEncoderV2) { + if (doc.store.pendingDs) { + updates.push(doc.store.pendingDs) + } + if (doc.store.pendingStructs) { + updates.push(diffUpdateV2(doc.store.pendingStructs.update, encodedTargetStateVector)) + } + if (updates.length > 1) { + return mergeUpdatesV2(updates) + } + } + return updates[0] } /** @@ -523,12 +548,12 @@ export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector, encoder = n * * @function */ -export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStateAsUpdateV2(doc, encodedTargetStateVector, new DefaultUpdateEncoder()) +export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStateAsUpdateV2(doc, encodedTargetStateVector, new UpdateEncoderV1()) /** * Read state vector from Decoder and return as Map * - * @param {AbstractDSDecoder} decoder + * @param {DSDecoderV1 | DSDecoderV2} decoder * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client. * * @function @@ -552,7 +577,7 @@ export const readStateVector = decoder => { * * @function */ -export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState))) +// export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState))) /** * Read decodedState and return State as Map.
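// A minimal usage sketch of the encoding API above, combining the exports for a one-round
// sync between two documents. All calls are public Yjs exports; `docA` and `docB` are
// placeholder names used only for illustration.
import * as Y from 'yjs'

const docA = new Y.Doc()
const docB = new Y.Doc()
docA.getArray('array').insert(0, [1, 2, 3])

// docB describes what it already has ...
const stateVectorB = Y.encodeStateVector(docB)
// ... docA answers with only the operations docB is missing ...
const missingOps = Y.encodeStateAsUpdate(docA, stateVectorB)
// ... which docB then applies.
Y.applyUpdate(docB, missingOps)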
@@ -562,10 +587,10 @@ export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoder * * @function */ -export const decodeStateVector = decodedState => readStateVector(new DefaultDSDecoder(decoding.createDecoder(decodedState))) +export const decodeStateVector = decodedState => readStateVector(new DSDecoderV1(decoding.createDecoder(decodedState))) /** - * @param {AbstractDSEncoder} encoder + * @param {DSEncoderV1 | DSEncoderV2} encoder * @param {Map} sv * @function */ @@ -579,7 +604,7 @@ export const writeStateVector = (encoder, sv) => { } /** - * @param {AbstractDSEncoder} encoder + * @param {DSEncoderV1 | DSEncoderV2} encoder * @param {Doc} doc * * @function @@ -589,23 +614,27 @@ export const writeDocumentStateVector = (encoder, doc) => writeStateVector(encod /** * Encode State as Uint8Array. * - * @param {Doc} doc - * @param {AbstractDSEncoder} [encoder] + * @param {Doc|Map} doc + * @param {DSEncoderV1 | DSEncoderV2} [encoder] * @return {Uint8Array} * * @function */ export const encodeStateVectorV2 = (doc, encoder = new DSEncoderV2()) => { - writeDocumentStateVector(encoder, doc) + if (doc instanceof Map) { + writeStateVector(encoder, doc) + } else { + writeDocumentStateVector(encoder, doc) + } return encoder.toUint8Array() } /** * Encode State as Uint8Array. * - * @param {Doc} doc + * @param {Doc|Map} doc * @return {Uint8Array} * * @function */ -export const encodeStateVector = doc => encodeStateVectorV2(doc, new DefaultDSEncoder()) +export const encodeStateVector = doc => encodeStateVectorV2(doc, new DSEncoderV1()) diff --git a/src/utils/updates.js b/src/utils/updates.js new file mode 100644 index 00000000..4bac25bc --- /dev/null +++ b/src/utils/updates.js @@ -0,0 +1,510 @@ + +import * as binary from 'lib0/binary.js' +import * as decoding from 'lib0/decoding.js' +import * as encoding from 'lib0/encoding.js' +import * as logging from 'lib0/logging.js' +import * as math from 'lib0/math.js' +import { + createID, + readItemContent, + readDeleteSet, + writeDeleteSet, + Skip, + mergeDeleteSets, + DSEncoderV1, + DSEncoderV2, + decodeStateVector, + Item, GC, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line +} from '../internals.js' + +/** + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder + */ +function * lazyStructReaderGenerator (decoder) { + const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder) + for (let i = 0; i < numOfStateUpdates; i++) { + const numberOfStructs = decoding.readVarUint(decoder.restDecoder) + const client = decoder.readClient() + let clock = decoding.readVarUint(decoder.restDecoder) + for (let i = 0; i < numberOfStructs; i++) { + const info = decoder.readInfo() + // @todo use switch instead of ifs + if (info === 10) { + const len = decoding.readVarUint(decoder.restDecoder) + yield new Skip(createID(client, clock), len) + clock += len + } else if ((binary.BITS5 & info) !== 0) { + const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 + // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` + // and we read the next string as parentYKey. + // It indicates how we store/retrieve parent from `y.share` + // @type {string|null} + const struct = new Item( + createID(client, clock), + null, // left + (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin + null, // right + (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin + // @ts-ignore Force writing a string here. 
+ cantCopyParentInfo ? (decoder.readParentInfo() ? decoder.readString() : decoder.readLeftID()) : null, // parent + cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub + readItemContent(decoder, info) // item content + ) + yield struct + clock += struct.length + } else { + const len = decoder.readLen() + yield new GC(createID(client, clock), len) + clock += len + } + } + } +} + +export class LazyStructReader { + /** + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder + * @param {boolean} filterSkips + */ + constructor (decoder, filterSkips) { + this.gen = lazyStructReaderGenerator(decoder) + /** + * @type {null | Item | Skip | GC} + */ + this.curr = null + this.done = false + this.filterSkips = filterSkips + this.next() + } + + /** + * @return {Item | GC | Skip |null} + */ + next () { + // ignore "Skip" structs + do { + this.curr = this.gen.next().value || null + } while (this.filterSkips && this.curr !== null && this.curr.constructor === Skip) + return this.curr + } +} + +/** + * @param {Uint8Array} update + * + */ +export const logUpdate = update => logUpdateV2(update, UpdateDecoderV1) + +/** + * @param {Uint8Array} update + * @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder] + * + */ +export const logUpdateV2 = (update, YDecoder = UpdateDecoderV2) => { + const structs = [] + const updateDecoder = new YDecoder(decoding.createDecoder(update)) + const lazyDecoder = new LazyStructReader(updateDecoder, false) + for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) { + structs.push(curr) + } + logging.print('Structs: ', structs) + const ds = readDeleteSet(updateDecoder) + logging.print('DeleteSet: ', ds) +} + +export class LazyStructWriter { + /** + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder + */ + constructor (encoder) { + this.currClient = 0 + this.startClock = 0 + this.written = 0 + this.encoder = encoder + /** + * We want to write operations lazily, but also we need to know beforehand how many operations we want to write for each client. + * + * This kind of meta-information (#clients, #structs-per-client-written) is written to the restEncoder. + * + * We fragment the restEncoder and store a slice of it per-client until we know how many clients there are. + * When we flush (toUint8Array) we write the restEncoder using the fragments and the meta-information. 
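// Debugging sketch for the log helpers defined above: dump the structs and the delete set
// contained in an update. `doc` is a placeholder Y.Doc and `Y` is assumed to be imported
// as `import * as Y from 'yjs'`.
const v1Update = Y.encodeStateAsUpdate(doc)
Y.logUpdate(v1Update) // prints 'Structs: [...]' followed by 'DeleteSet: ...'
// V2-encoded updates go through the V2 variant:
Y.logUpdateV2(Y.encodeStateAsUpdateV2(doc))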
+ * + * @type {Array<{ written: number, restEncoder: Uint8Array }>} + */ + this.clientStructs = [] + } +} + +/** + * @param {Array} updates + * @return {Uint8Array} + */ +export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV1) + +/** + * @param {Uint8Array} update + * @param {typeof DSEncoderV1 | typeof DSEncoderV2} YEncoder + * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder + * @return {Uint8Array} + */ +export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YDecoder = UpdateDecoderV2) => { + const encoder = new YEncoder() + const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), true) + let curr = updateDecoder.curr + if (curr !== null) { + let size = 1 + let currClient = curr.id.client + let currClock = curr.id.clock + let stopCounting = false + for (; curr !== null; curr = updateDecoder.next()) { + if (currClient !== curr.id.client) { + size++ + // We found a new client + // write what we have to the encoder + encoding.writeVarUint(encoder.restEncoder, currClient) + encoding.writeVarUint(encoder.restEncoder, currClock) + currClient = curr.id.client + stopCounting = false + } + if (curr.constructor === Skip) { + stopCounting = true + } + if (!stopCounting) { + currClock = curr.id.clock + curr.length + } + } + // write what we have + encoding.writeVarUint(encoder.restEncoder, currClient) + encoding.writeVarUint(encoder.restEncoder, currClock) + // prepend the size of the state vector + const enc = encoding.createEncoder() + encoding.writeVarUint(enc, size) + encoding.writeBinaryEncoder(enc, encoder.restEncoder) + encoder.restEncoder = enc + return encoder.toUint8Array() + } else { + encoding.writeVarUint(encoder.restEncoder, 0) + return encoder.toUint8Array() + } +} + +/** + * @param {Uint8Array} update + * @return {Uint8Array} + */ +export const encodeStateVectorFromUpdate = update => encodeStateVectorFromUpdateV2(update, DSEncoderV1, UpdateDecoderV1) + +/** + * @param {Uint8Array} update + * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder + * @return {{ from: Map, to: Map }} + */ +export const parseUpdateMetaV2 = (update, YDecoder = UpdateDecoderV2) => { + /** + * @type {Map} + */ + const from = new Map() + /** + * @type {Map} + */ + const to = new Map() + const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), false) + let curr = updateDecoder.curr + if (curr !== null) { + let currClient = curr.id.client + let currClock = curr.id.clock + // write the beginning to `from` + from.set(currClient, currClock) + for (; curr !== null; curr = updateDecoder.next()) { + if (currClient !== curr.id.client) { + // We found a new client + // write the end to `to` + to.set(currClient, currClock) + // write the beginning to `from` + from.set(curr.id.client, curr.id.clock) + // update currClient + currClient = curr.id.client + } + currClock = curr.id.clock + curr.length + } + // write the end to `to` + to.set(currClient, currClock) + } + return { from, to } +} + +/** + * @param {Uint8Array} update + * @return {{ from: Map, to: Map }} + */ +export const parseUpdateMeta = update => parseUpdateMetaV2(update, UpdateDecoderV1) + +/** + * This method is intended to slice any kind of struct and retrieve the right part. + * It does not handle side-effects, so it should only be used by the lazy-encoder. 
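// Sketch of inspecting an update without instantiating a Y.Doc, using the helpers defined
// above (`storedUpdates` is a placeholder for updates captured from 'update' events; `Y` is
// the yjs module).
const mergedUpdate = Y.mergeUpdates(storedUpdates)
// Clock range covered per client, as { from, to } maps of client → clock:
const meta = Y.parseUpdateMeta(mergedUpdate)
// State vector equivalent to applying `mergedUpdate` to an empty document:
const svFromUpdate = Y.encodeStateVectorFromUpdate(mergedUpdate)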
+ * + * @param {Item | GC | Skip} left + * @param {number} diff + * @return {Item | GC} + */ +const sliceStruct = (left, diff) => { + if (left.constructor === GC) { + const { client, clock } = left.id + return new GC(createID(client, clock + diff), left.length - diff) + } else if (left.constructor === Skip) { + const { client, clock } = left.id + return new Skip(createID(client, clock + diff), left.length - diff) + } else { + const leftItem = /** @type {Item} */ (left) + const { client, clock } = leftItem.id + return new Item( + createID(client, clock + diff), + null, + createID(client, clock + diff - 1), + null, + leftItem.rightOrigin, + leftItem.parent, + leftItem.parentSub, + leftItem.content.splice(diff) + ) + } +} + +/** + * + * This function works similarly to `readUpdateV2`. + * + * @param {Array} updates + * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder] + * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder] + * @return {Uint8Array} + */ +export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => { + const updateDecoders = updates.map(update => new YDecoder(decoding.createDecoder(update))) + let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder, true)) + + /** + * @todo we don't need offset because we always slice before + * @type {null | { struct: Item | GC | Skip, offset: number }} + */ + let currWrite = null + + const updateEncoder = new YEncoder() + // write structs lazily + const lazyStructEncoder = new LazyStructWriter(updateEncoder) + + // Note: We need to ensure that all lazyStructDecoders are fully consumed + // Note: Should merge document updates whenever possible - even from different updates + // Note: Should handle that some operations cannot be applied yet () + + while (true) { + // Write higher clients first ⇒ sort by clientID & clock and remove decoders without content + lazyStructDecoders = lazyStructDecoders.filter(dec => dec.curr !== null) + lazyStructDecoders.sort( + /** @type {function(any,any):number} */ (dec1, dec2) => { + if (dec1.curr.id.client === dec2.curr.id.client) { + const clockDiff = dec1.curr.id.clock - dec2.curr.id.clock + if (clockDiff === 0) { + return dec1.curr.constructor === dec2.curr.constructor ? 0 : ( + dec1.curr.constructor === Skip ? 
1 : -1 + ) + } else { + return clockDiff + } + } else { + return dec2.curr.id.client - dec1.curr.id.client + } + } + ) + if (lazyStructDecoders.length === 0) { + break + } + const currDecoder = lazyStructDecoders[0] + // write from currDecoder until the next operation is from another client or if filler-struct + // then we need to reorder the decoders and find the next operation to write + const firstClient = /** @type {Item | GC} */ (currDecoder.curr).id.client + + if (currWrite !== null) { + let curr = /** @type {Item | GC | null} */ (currDecoder.curr) + + // iterate until we find something that we haven't written already + // remember: first the high client-ids are written + while (curr !== null && curr.id.clock + curr.length <= currWrite.struct.id.clock + currWrite.struct.length && curr.id.client >= currWrite.struct.id.client) { + curr = currDecoder.next() + } + if (curr === null || curr.id.client !== firstClient) { + continue + } + + if (firstClient !== currWrite.struct.id.client) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = { struct: curr, offset: 0 } + currDecoder.next() + } else { + if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) { + // @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock) + if (currWrite.struct.constructor === Skip) { + // extend existing skip + currWrite.struct.length = curr.id.clock + curr.length - currWrite.struct.id.clock + } else { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + const diff = curr.id.clock - currWrite.struct.id.clock - currWrite.struct.length + /** + * @type {Skip} + */ + const struct = new Skip(createID(firstClient, currWrite.struct.id.clock + currWrite.struct.length), diff) + currWrite = { struct, offset: 0 } + } + } else { // if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) { + const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock + if (diff > 0) { + if (currWrite.struct.constructor === Skip) { + // prefer to slice Skip because the other struct might contain more information + currWrite.struct.length -= diff + } else { + curr = sliceStruct(curr, diff) + } + } + if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = { struct: curr, offset: 0 } + currDecoder.next() + } + } + } + } else { + currWrite = { struct: /** @type {Item | GC} */ (currDecoder.curr), offset: 0 } + currDecoder.next() + } + for ( + let next = currDecoder.curr; + next !== null && next.id.client === firstClient && next.id.clock === currWrite.struct.id.clock + currWrite.struct.length && next.constructor !== Skip; + next = currDecoder.next() + ) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = { struct: next, offset: 0 } + } + } + if (currWrite !== null) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = null + } + finishLazyStructWriting(lazyStructEncoder) + + const dss = updateDecoders.map(decoder => readDeleteSet(decoder)) + const ds = mergeDeleteSets(dss) + writeDeleteSet(updateEncoder, ds) + return updateEncoder.toUint8Array() +} + +/** + * @param {Uint8Array} update + * @param {Uint8Array} sv + * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder] + * @param {typeof UpdateEncoderV1 | typeof 
UpdateEncoderV2} [YEncoder] + */ +export const diffUpdateV2 = (update, sv, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => { + const state = decodeStateVector(sv) + const encoder = new YEncoder() + const lazyStructWriter = new LazyStructWriter(encoder) + const decoder = new YDecoder(decoding.createDecoder(update)) + const reader = new LazyStructReader(decoder, false) + while (reader.curr) { + const curr = reader.curr + const currClient = curr.id.client + const svClock = state.get(currClient) || 0 + if (reader.curr.constructor === Skip) { + // the first written struct shouldn't be a skip + reader.next() + continue + } + if (curr.id.clock + curr.length > svClock) { + writeStructToLazyStructWriter(lazyStructWriter, curr, math.max(svClock - curr.id.clock, 0)) + reader.next() + while (reader.curr && reader.curr.id.client === currClient) { + writeStructToLazyStructWriter(lazyStructWriter, reader.curr, 0) + reader.next() + } + } else { + // read until something new comes up + while (reader.curr && reader.curr.id.client === currClient && reader.curr.id.clock + reader.curr.length <= svClock) { + reader.next() + } + } + } + finishLazyStructWriting(lazyStructWriter) + // write ds + const ds = readDeleteSet(decoder) + writeDeleteSet(encoder, ds) + return encoder.toUint8Array() +} + +/** + * @param {Uint8Array} update + * @param {Uint8Array} sv + */ +export const diffUpdate = (update, sv) => diffUpdateV2(update, sv, UpdateDecoderV1, UpdateEncoderV1) + +/** + * @param {LazyStructWriter} lazyWriter + */ +const flushLazyStructWriter = lazyWriter => { + if (lazyWriter.written > 0) { + lazyWriter.clientStructs.push({ written: lazyWriter.written, restEncoder: encoding.toUint8Array(lazyWriter.encoder.restEncoder) }) + lazyWriter.encoder.restEncoder = encoding.createEncoder() + lazyWriter.written = 0 + } +} + +/** + * @param {LazyStructWriter} lazyWriter + * @param {Item | GC} struct + * @param {number} offset + */ +const writeStructToLazyStructWriter = (lazyWriter, struct, offset) => { + // flush curr if we start another client + if (lazyWriter.written > 0 && lazyWriter.currClient !== struct.id.client) { + flushLazyStructWriter(lazyWriter) + } + if (lazyWriter.written === 0) { + lazyWriter.currClient = struct.id.client + // write next client + lazyWriter.encoder.writeClient(struct.id.client) + // write startClock + encoding.writeVarUint(lazyWriter.encoder.restEncoder, struct.id.clock + offset) + } + struct.write(lazyWriter.encoder, offset) + lazyWriter.written++ +} +/** + * Call this function when we collected all parts and want to + * put all the parts together. After calling this method, + * you can continue using the UpdateEncoder. + * + * @param {LazyStructWriter} lazyWriter + */ +const finishLazyStructWriting = (lazyWriter) => { + flushLazyStructWriter(lazyWriter) + + // this is a fresh encoder because we called flushCurr + const restEncoder = lazyWriter.encoder.restEncoder + + /** + * Now we put all the fragments together. + * This works similarly to `writeClientsStructs` + */ + + // write # states that were updated - i.e. 
the clients + encoding.writeVarUint(restEncoder, lazyWriter.clientStructs.length) + + for (let i = 0; i < lazyWriter.clientStructs.length; i++) { + const partStructs = lazyWriter.clientStructs[i] + /** + * Works similarly to `writeStructs` + */ + // write # encoded structs + encoding.writeVarUint(restEncoder, partStructs.written) + // write the rest of the fragment + encoding.writeUint8Array(restEncoder, partStructs.restEncoder) + } +} diff --git a/tests/encoding.tests.js b/tests/encoding.tests.js index 65563e9d..c5afd833 100644 --- a/tests/encoding.tests.js +++ b/tests/encoding.tests.js @@ -22,7 +22,7 @@ import { * @param {t.TestCase} tc */ export const testStructReferences = tc => { - t.assert(contentRefs.length === 10) + t.assert(contentRefs.length === 11) t.assert(contentRefs[1] === readContentDeleted) t.assert(contentRefs[2] === readContentJSON) // TODO: deprecate content json? t.assert(contentRefs[3] === readContentBinary) @@ -32,6 +32,7 @@ export const testStructReferences = tc => { t.assert(contentRefs[7] === readContentType) t.assert(contentRefs[8] === readContentAny) t.assert(contentRefs[9] === readContentDoc) + // contentRefs[10] is reserved for Skip structs } /** diff --git a/tests/index.js b/tests/index.js index df701f20..53a85b90 100644 --- a/tests/index.js +++ b/tests/index.js @@ -8,6 +8,7 @@ import * as undoredo from './undo-redo.tests.js' import * as compatibility from './compatibility.tests.js' import * as doc from './doc.tests.js' import * as snapshot from './snapshot.tests.js' +import * as updates from './updates.tests.js' import * as relativePositions from './relativePositions.tests.js' import { runTests } from 'lib0/testing.js' @@ -18,7 +19,7 @@ if (isBrowser) { log.createVConsole(document.body) } runTests({ - doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, relativePositions + doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, updates, relativePositions }).then(success => { /* istanbul ignore next */ if (isNode) { diff --git a/tests/testHelper.js b/tests/testHelper.js index 2409af79..5ebb7836 100644 --- a/tests/testHelper.js +++ b/tests/testHelper.js @@ -27,6 +27,39 @@ const broadcastMessage = (y, m) => { } } +export let useV2 = false + +export const encV1 = { + encodeStateAsUpdate: Y.encodeStateAsUpdate, + mergeUpdates: Y.mergeUpdates, + applyUpdate: Y.applyUpdate, + logUpdate: Y.logUpdate, + updateEventName: 'update', + diffUpdate: Y.diffUpdate +} + +export const encV2 = { + encodeStateAsUpdate: Y.encodeStateAsUpdateV2, + mergeUpdates: Y.mergeUpdatesV2, + applyUpdate: Y.applyUpdateV2, + logUpdate: Y.logUpdateV2, + updateEventName: 'updateV2', + diffUpdate: Y.diffUpdateV2 +} + +export let enc = encV1 + +const useV1Encoding = () => { + useV2 = false + enc = encV1 +} + +const useV2Encoding = () => { + console.error('sync protocol doesnt support v2 protocol yet, fallback to v1 encoding') // @Todo + useV2 = false + enc = encV1 +} + export class TestYInstance extends Y.Doc { /** * @param {TestConnector} testConnector @@ -44,12 +77,19 @@ export class TestYInstance extends Y.Doc { */ this.receiving = new Map() testConnector.allConns.add(this) + /** + * The list of received updates. + * We are going to merge them later using Y.mergeUpdates and check if the resulting document is correct. 
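// The pattern the test helper exercises, shown in isolation (a sketch; `doc` is a
// placeholder Y.Doc): collect every raw 'update' event and later combine the collected
// updates into one equivalent update instead of replaying them one by one.
const collected = []
doc.on('update', update => collected.push(update))
doc.getText('text').insert(0, 'hello')
doc.getText('text').insert(5, ' world')
// A single update with the same effect as applying all collected updates in order:
const combined = Y.mergeUpdates(collected)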
+ * @type {Array} + */ + this.updates = [] // set up observe on local model - this.on('update', /** @param {Uint8Array} update @param {any} origin */ (update, origin) => { + this.on(enc.updateEventName, /** @param {Uint8Array} update @param {any} origin */ (update, origin) => { if (origin !== testConnector) { const encoder = encoding.createEncoder() syncProtocol.writeUpdate(encoder, update) broadcastMessage(this, encoding.toUint8Array(encoder)) + this.updates.push(update) } }) this.connect() @@ -162,6 +202,17 @@ export class TestConnector { // send reply message sender._receive(encoding.toUint8Array(encoder), receiver) } + { + // If update message, add the received message to the list of received messages + const decoder = decoding.createDecoder(m) + const messageType = decoding.readVarUint(decoder) + switch (messageType) { + case syncProtocol.messageYjsUpdate: + case syncProtocol.messageYjsSyncStep2: + receiver.updates.push(decoding.readVarUint8Array(decoder)) + break + } + } return true } return false @@ -240,9 +291,9 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => { const gen = tc.prng // choose an encoding approach at random if (prng.bool(gen)) { - Y.useV2Encoding() + useV2Encoding() } else { - Y.useV1Encoding() + useV1Encoding() } const testConnector = new TestConnector(gen) @@ -258,7 +309,7 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => { } testConnector.syncAll() result.testObjects = result.users.map(initTestObject || (() => null)) - Y.useV1Encoding() + useV1Encoding() return /** @type {any} */ (result) } @@ -274,14 +325,21 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => { export const compare = users => { users.forEach(u => u.connect()) while (users[0].tc.flushAllMessages()) {} + // For each document, merge all received document updates with Y.mergeUpdates and create a new document which will be added to the list of "users" + // This ensures that mergeUpdates works correctly + const mergedDocs = users.map(user => { + const ydoc = new Y.Doc() + enc.applyUpdate(ydoc, enc.mergeUpdates(user.updates)) + return ydoc + }) + users.push(.../** @type {any} */(mergedDocs)) const userArrayValues = users.map(u => u.getArray('array').toJSON()) const userMapValues = users.map(u => u.getMap('map').toJSON()) const userXmlValues = users.map(u => u.get('xml', Y.YXmlElement).toString()) const userTextValues = users.map(u => u.getText('text').toDelta()) for (const u of users) { - t.assert(u.store.pendingDeleteReaders.length === 0) - t.assert(u.store.pendingStack.length === 0) - t.assert(u.store.pendingClientsStructRefs.size === 0) + t.assert(u.store.pendingDs === null) + t.assert(u.store.pendingStructs === null) } // Test Array iterator t.compare(users[0].getArray('array').toArray(), Array.from(users[0].getArray('array'))) diff --git a/tests/updates.tests.js b/tests/updates.tests.js new file mode 100644 index 00000000..9c006642 --- /dev/null +++ b/tests/updates.tests.js @@ -0,0 +1,246 @@ +import * as t from 'lib0/testing.js' +import { init, compare } from './testHelper.js' // eslint-disable-line +import * as Y from '../src/index.js' +import { readClientsStructRefs, readDeleteSet, UpdateDecoderV2, UpdateEncoderV2, writeDeleteSet } from '../src/internals.js' +import * as encoding from 'lib0/encoding.js' +import * as decoding from 'lib0/decoding.js' + +/** + * @typedef {Object} Enc + * @property {function(Array):Uint8Array} Enc.mergeUpdates + * @property {function(Y.Doc):Uint8Array} Enc.encodeStateAsUpdate + * @property {function(Y.Doc, 
Uint8Array):void} Enc.applyUpdate + * @property {function(Uint8Array):void} Enc.logUpdate + * @property {function(Uint8Array):{from:Map,to:Map}} Enc.parseUpdateMeta + * @property {function(Y.Doc):Uint8Array} Enc.encodeStateVector + * @property {function(Uint8Array):Uint8Array} Enc.encodeStateVectorFromUpdate + * @property {string} Enc.updateEventName + * @property {string} Enc.description + * @property {function(Uint8Array, Uint8Array):Uint8Array} Enc.diffUpdate + */ + +/** + * @type {Enc} + */ +const encV1 = { + mergeUpdates: Y.mergeUpdates, + encodeStateAsUpdate: Y.encodeStateAsUpdate, + applyUpdate: Y.applyUpdate, + logUpdate: Y.logUpdate, + parseUpdateMeta: Y.parseUpdateMeta, + encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdate, + encodeStateVector: Y.encodeStateVector, + updateEventName: 'update', + description: 'V1', + diffUpdate: Y.diffUpdate +} + +/** + * @type {Enc} + */ +const encV2 = { + mergeUpdates: Y.mergeUpdatesV2, + encodeStateAsUpdate: Y.encodeStateAsUpdateV2, + applyUpdate: Y.applyUpdateV2, + logUpdate: Y.logUpdateV2, + parseUpdateMeta: Y.parseUpdateMetaV2, + encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2, + encodeStateVector: Y.encodeStateVector, + updateEventName: 'updateV2', + description: 'V2', + diffUpdate: Y.diffUpdateV2 +} + +/** + * @type {Enc} + */ +const encDoc = { + mergeUpdates: (updates) => { + const ydoc = new Y.Doc({ gc: false }) + updates.forEach(update => { + Y.applyUpdateV2(ydoc, update) + }) + return Y.encodeStateAsUpdateV2(ydoc) + }, + encodeStateAsUpdate: Y.encodeStateAsUpdateV2, + applyUpdate: Y.applyUpdateV2, + logUpdate: Y.logUpdateV2, + parseUpdateMeta: Y.parseUpdateMetaV2, + encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2, + encodeStateVector: Y.encodeStateVector, + updateEventName: 'updateV2', + description: 'Merge via Y.Doc', + /** + * @param {Uint8Array} update + * @param {Uint8Array} sv + */ + diffUpdate: (update, sv) => { + const ydoc = new Y.Doc({ gc: false }) + Y.applyUpdateV2(ydoc, update) + return Y.encodeStateAsUpdateV2(ydoc, sv) + } +} + +const encoders = [encV1, encV2, encDoc] + +/** + * @param {Array} users + * @param {Enc} enc + */ +const fromUpdates = (users, enc) => { + const updates = users.map(user => + enc.encodeStateAsUpdate(user) + ) + const ydoc = new Y.Doc() + enc.applyUpdate(ydoc, enc.mergeUpdates(updates)) + return ydoc +} + +/** + * @param {t.TestCase} tc + */ +export const testMergeUpdates = tc => { + const { users, array0, array1 } = init(tc, { users: 3 }) + + array0.insert(0, [1]) + array1.insert(0, [2]) + + compare(users) + encoders.forEach(enc => { + const merged = fromUpdates(users, enc) + t.compareArrays(array0.toArray(), merged.getArray('array').toArray()) + }) +} + +/** + * @param {Y.Doc} ydoc + * @param {Array} updates - expecting at least 4 updates + * @param {Enc} enc + * @param {boolean} hasDeletes + */ +const checkUpdateCases = (ydoc, updates, enc, hasDeletes) => { + const cases = [] + + // Case 1: Simple case, simply merge everything + cases.push(enc.mergeUpdates(updates)) + + // Case 2: Overlapping updates + cases.push(enc.mergeUpdates([ + enc.mergeUpdates(updates.slice(2)), + enc.mergeUpdates(updates.slice(0, 2)) + ])) + + // Case 3: Overlapping updates + cases.push(enc.mergeUpdates([ + enc.mergeUpdates(updates.slice(2)), + enc.mergeUpdates(updates.slice(1, 3)), + updates[0] + ])) + + // Case 4: Separated updates (containing skips) + cases.push(enc.mergeUpdates([ + enc.mergeUpdates([updates[0], updates[2]]), + enc.mergeUpdates([updates[1], updates[3]]), + 
enc.mergeUpdates(updates.slice(4)) + ])) + + // Case 5: overlapping with many duplicates + cases.push(enc.mergeUpdates(cases)) + + // const targetState = enc.encodeStateAsUpdate(ydoc) + // t.info('Target State: ') + // enc.logUpdate(targetState) + + cases.forEach((mergedUpdates, i) => { + // t.info('State Case $' + i + ':') + // enc.logUpdate(updates) + const merged = new Y.Doc({ gc: false }) + enc.applyUpdate(merged, mergedUpdates) + t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray()) + t.compare(enc.encodeStateVector(merged), enc.encodeStateVectorFromUpdate(mergedUpdates)) + + if (enc.updateEventName !== 'update') { // @todo should this also work on legacy updates? + for (let j = 1; j < updates.length; j++) { + const partMerged = enc.mergeUpdates(updates.slice(j)) + const partMeta = enc.parseUpdateMeta(partMerged) + const targetSV = Y.encodeStateVectorFromUpdateV2(Y.mergeUpdatesV2(updates.slice(0, j))) + const diffed = enc.diffUpdate(mergedUpdates, targetSV) + const diffedMeta = enc.parseUpdateMeta(diffed) + const decDiffedSV = Y.decodeStateVector(enc.encodeStateVectorFromUpdate(diffed)) + t.compare(partMeta, diffedMeta) + t.compare(decDiffedSV, partMeta.to) + { + // We can't do the following + // - t.compare(diffed, mergedDeletes) + // because diffed contains the set of all deletes. + // So we add all deletes from `diffed` to `partDeletes` and compare them + const decoder = decoding.createDecoder(diffed) + const updateDecoder = new UpdateDecoderV2(decoder) + readClientsStructRefs(updateDecoder, new Y.Doc()) + const ds = readDeleteSet(updateDecoder) + const updateEncoder = new UpdateEncoderV2() + encoding.writeVarUint(updateEncoder.restEncoder, 0) // 0 structs + writeDeleteSet(updateEncoder, ds) + const deletesUpdate = updateEncoder.toUint8Array() + const mergedDeletes = Y.mergeUpdatesV2([deletesUpdate, partMerged]) + if (!hasDeletes || enc !== encDoc) { + // deletes will almost definitely lead to different encoders because of the mergeStruct feature that is present in encDoc + t.compare(diffed, mergedDeletes) + } + } + } + } + + const meta = enc.parseUpdateMeta(mergedUpdates) + meta.from.forEach((clock, client) => t.assert(clock === 0)) + meta.to.forEach((clock, client) => { + const structs = /** @type {Array<Y.Item>} */ (merged.store.clients.get(client)) + const lastStruct = structs[structs.length - 1] + t.assert(lastStruct.id.clock + lastStruct.length === clock) + }) + }) +} + +/** + * @param {t.TestCase} tc + */ +export const testMergeUpdates1 = tc => { + encoders.forEach((enc, i) => { + t.info(`Using encoder: ${enc.description}`) + const ydoc = new Y.Doc({ gc: false }) + const updates = /** @type {Array<Uint8Array>} */ ([]) + ydoc.on(enc.updateEventName, update => { updates.push(update) }) + + const array = ydoc.getArray() + array.insert(0, [1]) + array.insert(0, [2]) + array.insert(0, [3]) + array.insert(0, [4]) + + checkUpdateCases(ydoc, updates, enc, false) + }) +} + +/** + * @param {t.TestCase} tc + */ +export const testMergeUpdates2 = tc => { + encoders.forEach((enc, i) => { + t.info(`Using encoder: ${enc.description}`) + const ydoc = new Y.Doc({ gc: false }) + const updates = /** @type {Array<Uint8Array>} */ ([]) + ydoc.on(enc.updateEventName, update => { updates.push(update) }) + + const array = ydoc.getArray() + array.insert(0, [1, 2]) + array.delete(1, 1) + array.insert(0, [3, 4]) + array.delete(1, 2) + + checkUpdateCases(ydoc, updates, enc, true) + }) +} + +/** + * @todo be able to apply Skip structs to Yjs docs + */ diff --git a/tests/y-array.tests.js b/tests/y-array.tests.js index
45022ed6..f2c0e86f 100644 --- a/tests/y-array.tests.js +++ b/tests/y-array.tests.js @@ -64,7 +64,7 @@ export const testInsertThreeElementsTryRegetProperty = tc => { * @param {t.TestCase} tc */ export const testConcurrentInsertWithThreeConflicts = tc => { - var { users, array0, array1, array2 } = init(tc, { users: 3 }) + const { users, array0, array1, array2 } = init(tc, { users: 3 }) array0.insert(0, [0]) array1.insert(0, [1]) array2.insert(0, [2]) @@ -107,7 +107,7 @@ export const testInsertionsInLateSync = tc => { * @param {t.TestCase} tc */ export const testDisconnectReallyPreventsSendingMessages = tc => { - var { testConnector, users, array0, array1 } = init(tc, { users: 3 }) + const { testConnector, users, array0, array1 } = init(tc, { users: 3 }) array0.insert(0, ['x', 'y']) testConnector.flushAllMessages() users[1].disconnect() @@ -388,13 +388,13 @@ const getUniqueNumber = () => _uniqueNumber++ const arrayTransactions = [ function insert (user, gen) { const yarray = user.getArray('array') - var uniqueNumber = getUniqueNumber() - var content = [] - var len = prng.int32(gen, 1, 4) - for (var i = 0; i < len; i++) { + const uniqueNumber = getUniqueNumber() + const content = [] + const len = prng.int32(gen, 1, 4) + for (let i = 0; i < len; i++) { content.push(uniqueNumber) } - var pos = prng.int32(gen, 0, yarray.length) + const pos = prng.int32(gen, 0, yarray.length) const oldContent = yarray.toArray() yarray.insert(pos, content) oldContent.splice(pos, 0, ...content) @@ -402,28 +402,28 @@ const arrayTransactions = [ }, function insertTypeArray (user, gen) { const yarray = user.getArray('array') - var pos = prng.int32(gen, 0, yarray.length) + const pos = prng.int32(gen, 0, yarray.length) yarray.insert(pos, [new Y.Array()]) - var array2 = yarray.get(pos) + const array2 = yarray.get(pos) array2.insert(0, [1, 2, 3, 4]) }, function insertTypeMap (user, gen) { const yarray = user.getArray('array') - var pos = prng.int32(gen, 0, yarray.length) + const pos = prng.int32(gen, 0, yarray.length) yarray.insert(pos, [new Y.Map()]) - var map = yarray.get(pos) + const map = yarray.get(pos) map.set('someprop', 42) map.set('someprop', 43) map.set('someprop', 44) }, function _delete (user, gen) { const yarray = user.getArray('array') - var length = yarray.length + const length = yarray.length if (length > 0) { - var somePos = prng.int32(gen, 0, length - 1) - var delLength = prng.int32(gen, 1, math.min(2, length - somePos)) + let somePos = prng.int32(gen, 0, length - 1) + let delLength = prng.int32(gen, 1, math.min(2, length - somePos)) if (prng.bool(gen)) { - var type = yarray.get(somePos) + const type = yarray.get(somePos) if (type.length > 0) { somePos = prng.int32(gen, 0, type.length - 1) delLength = prng.int32(gen, 0, math.min(2, type.length - somePos)) diff --git a/tests/y-map.tests.js b/tests/y-map.tests.js index 2c3d128a..c1540a76 100644 --- a/tests/y-map.tests.js +++ b/tests/y-map.tests.js @@ -138,7 +138,7 @@ export const testGetAndSetOfMapPropertySyncs = tc => { t.compare(map0.get('stuff'), 'stuffy') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.compare(u.get('stuff'), 'stuffy') } compare(users) @@ -153,7 +153,7 @@ export const testGetAndSetOfMapPropertyWithConflict = tc => { map1.set('stuff', 'c1') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.compare(u.get('stuff'), 'c1') } compare(users) @@ -183,7 +183,7 @@ export const 
testGetAndSetAndDeleteOfMapProperty = tc => { map1.delete('stuff') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.assert(u.get('stuff') === undefined) } compare(users) @@ -200,7 +200,7 @@ export const testGetAndSetOfMapPropertyWithThreeConflicts = tc => { map2.set('stuff', 'c3') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.compare(u.get('stuff'), 'c3') } compare(users) @@ -223,7 +223,7 @@ export const testGetAndSetAndDeleteOfMapPropertyWithThreeConflicts = tc => { map3.delete('stuff') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.assert(u.get('stuff') === undefined) } compare(users) @@ -296,7 +296,7 @@ export const testObserversUsingObservedeep = tc => { * @param {Object} should */ const compareEvent = (is, should) => { - for (var key in should) { + for (const key in should) { t.compare(should[key], is[key]) } } @@ -474,12 +474,12 @@ export const testYmapEventHasCorrectValueWhenSettingAPrimitiveFromOtherUser = tc const mapTransactions = [ function set (user, gen) { const key = prng.oneOf(gen, ['one', 'two']) - var value = prng.utf16String(gen) + const value = prng.utf16String(gen) user.getMap('map').set(key, value) }, function setType (user, gen) { const key = prng.oneOf(gen, ['one', 'two']) - var type = prng.oneOf(gen, [new Y.Array(), new Y.Map()]) + const type = prng.oneOf(gen, [new Y.Array(), new Y.Map()]) user.getMap('map').set(key, type) if (type instanceof Y.Array) { type.insert(0, [1, 2, 3, 4])
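// Wrapping up the update-level API exercised by these tests, as a sketch: a server can
// answer a client's state vector from stored updates alone, without loading a Y.Doc.
// `storedUpdates` and `remoteStateVector` are placeholder names.
const fullUpdate = Y.mergeUpdates(storedUpdates)
const forClient = Y.diffUpdate(fullUpdate, remoteStateVector)
// The V2 encoding has matching helpers: Y.mergeUpdatesV2 / Y.diffUpdateV2.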