From 7a45be8c88ba17c0817faf1d146b0f4003412741 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Mon, 7 Dec 2020 19:47:48 +0100 Subject: [PATCH 01/23] add merge tests for #263 --- src/index.js | 3 +- src/internals.js | 1 + src/utils/updates.js | 16 +++++++++++ tests/index.js | 3 +- tests/updates.tests.js | 64 ++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 85 insertions(+), 2 deletions(-) create mode 100644 src/utils/updates.js create mode 100644 tests/updates.tests.js diff --git a/src/index.js b/src/index.js index 3daecee2..af555a23 100644 --- a/src/index.js +++ b/src/index.js @@ -72,5 +72,6 @@ export { tryGc, transact, AbstractConnector, - logType + logType, + mergeUpdates } from './internals.js' diff --git a/src/internals.js b/src/internals.js index 1984739b..5d32f0ad 100644 --- a/src/internals.js +++ b/src/internals.js @@ -15,6 +15,7 @@ export * from './utils/Snapshot.js' export * from './utils/StructStore.js' export * from './utils/Transaction.js' export * from './utils/UndoManager.js' +export * from './utils/updates.js' export * from './utils/YEvent.js' export * from './types/AbstractType.js' diff --git a/src/utils/updates.js b/src/utils/updates.js new file mode 100644 index 00000000..946ff9cb --- /dev/null +++ b/src/utils/updates.js @@ -0,0 +1,16 @@ + +/** + * @param {Array} updates + * @return {Uint8Array} + */ +export const mergeUpdates = updates => { + return updates[0] +} + +/** + * @param {Uint8Array} update + * @param {Uint8Array} sv + */ +export const diffUpdate = (update, sv) => { + return update +} diff --git a/tests/index.js b/tests/index.js index aec3ae5a..f8b453ed 100644 --- a/tests/index.js +++ b/tests/index.js @@ -8,6 +8,7 @@ import * as undoredo from './undo-redo.tests.js' import * as compatibility from './compatibility.tests.js' import * as doc from './doc.tests.js' import * as snapshot from './snapshot.tests.js' +import * as updates from './updates.tests.js' import { runTests } from 'lib0/testing.js' import { isBrowser, isNode } from 'lib0/environment.js' @@ -17,7 +18,7 @@ if (isBrowser) { log.createVConsole(document.body) } runTests({ - doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot + doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, updates }).then(success => { /* istanbul ignore next */ if (isNode) { diff --git a/tests/updates.tests.js b/tests/updates.tests.js new file mode 100644 index 00000000..70084273 --- /dev/null +++ b/tests/updates.tests.js @@ -0,0 +1,64 @@ +import * as t from 'lib0/testing.js' +import { init, compare } from './testHelper.js' // eslint-disable-line +import * as Y from '../src/index.js' + +/** + * @param {Array} users + */ +const fromUpdates = users => { + const updates = users.map(user => + Y.encodeStateAsUpdate(user) + ) + const ydoc = new Y.Doc() + Y.applyUpdate(ydoc, Y.mergeUpdates(updates)) + return ydoc +} + +/** + * @param {t.TestCase} tc + */ +export const testMergeUpdates = tc => { + const { users, array0, array1 } = init(tc, { users: 3 }) + + array0.insert(0, [1]) + array1.insert(0, [2]) + + const merged = fromUpdates(users) + compare(users) + t.compareArrays(array0.toArray(), merged.getArray('array').toArray()) +} + +/** + * @param {t.TestCase} tc + */ +export const testMergeUpdatesWrongOrder = tc => { + const ydoc = new Y.Doc() + const updates = /** @type {Array} */ ([]) + ydoc.on('update', update => { updates.push(update) }) + + const array = ydoc.getArray() + array.insert(0, [1]) + array.insert(0, [2]) + array.insert(0, [3]) + array.insert(0, [4]) + + const wrongOrder = 
Y.mergeUpdates([ + Y.mergeUpdates(updates.slice(2)), + Y.mergeUpdates(updates.slice(0, 2)) + ]) + const overlapping = Y.mergeUpdates([ + Y.mergeUpdates(updates.slice(2)), + Y.mergeUpdates(updates.slice(1, 3)), + updates[0] + ]) + const separated = Y.mergeUpdates([ + Y.mergeUpdates([updates[0], updates[2]]), + Y.mergeUpdates([updates[1], updates[3]]) + ]) + + ;[wrongOrder, overlapping, separated].forEach(updates => { + const merged = new Y.Doc() + Y.applyUpdate(merged, updates) + t.compareArrays(merged.getArray().toArray(), array.toArray()) + }) +} From 2c708b647d6bb96f0c91398abb9683c20f296f8e Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Tue, 8 Dec 2020 20:20:40 +0100 Subject: [PATCH 02/23] write lazy encoder & decoder - #263 --- src/utils/updates.js | 140 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 140 insertions(+) diff --git a/src/utils/updates.js b/src/utils/updates.js index 946ff9cb..b7cab8c7 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -1,4 +1,13 @@ +import * as binary from 'lib0/binary.js' +import * as decoding from 'lib0/decoding.js' +import * as encoding from 'lib0/encoding.js' +import { + createID, + readItemContent, + Item, GC, AbstractUpdateDecoder, AbstractUpdateEncoder, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line +} from '../internals.js' + /** * @param {Array} updates * @return {Uint8Array} @@ -14,3 +23,134 @@ export const mergeUpdates = updates => { export const diffUpdate = (update, sv) => { return update } + +/** + * @param {AbstractUpdateDecoder} decoder + */ +export function * lazyStructReaderGenerator (decoder) { + const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder) + for (let i = 0; i < numOfStateUpdates; i++) { + const numberOfStructs = decoding.readVarUint(decoder.restDecoder) + const client = decoder.readClient() + let clock = decoding.readVarUint(decoder.restDecoder) + for (let i = 0; i < numberOfStructs; i++) { + const info = decoder.readInfo() + if ((binary.BITS5 & info) !== 0) { + /** + * The optimized implementation doesn't use any variables because inlining variables is faster. + * Below a non-optimized version is shown that implements the basic algorithm with + * a few comments + */ + const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 + // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` + // and we read the next string as parentYKey. + // It indicates how we store/retrieve parent from `y.share` + // @type {string|null} + const struct = new Item( + createID(client, clock), + null, // leftd + (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin + null, // right + (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin + // @ts-ignore Force writing a string here. + cantCopyParentInfo ? (decoder.readParentInfo() ? decoder.readString() : decoder.readLeftID()) : null, // parent + cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? 
decoder.readString() : null, // parentSub + readItemContent(decoder, info) // item content + ) + yield struct + clock += struct.length + } else { + const len = decoder.readLen() + yield new GC(createID(client, clock), len) + clock += len + } + } + } +} + +export class LazyStructReader { + /** + * @param {AbstractUpdateDecoder} decoder + */ + constructor (decoder) { + this.gen = lazyStructReaderGenerator(decoder) + /** + * @type {null | Item | GC} + */ + this.curr = null + this.done = false + this.next() + } + + /** + * @return {Item | GC | null} + */ + next () { + return (this.curr = this.gen.next().value || null) + } +} + +export class LazyStructWriter { + /** + * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} YEncoder + */ + constructor (YEncoder) { + this.fresh = true + this.currClient = 0 + this.startClock = 0 + this.written = 0 + this.encoder = new YEncoder() + /** + * @type {Array<{ client: number, clock: number, written: number, encoder: UpdateEncoderV1 | UpdateEncoderV2 }>} + */ + this.clientStructs = [] + this.YEncoder = YEncoder + } + + flushCurr () { + if (!this.fresh) { + this.clientStructs.push({ client: this.currClient, clock: this.startClock, written: this.written, encoder: this.encoder }) + this.fresh = true + } + } + + /** + * @param {Item | GC} struct + * @param {number} offset + */ + write (struct, offset) { + // flush curr if we start another client + if (!this.fresh && this.currClient !== struct.id.client) { + this.flushCurr() + this.currClient = struct.id.client + this.startClock = struct.id.clock + } + struct.write(this.encoder, offset) + this.written++ + } + + toUint8Array () { + const encoder = new this.YEncoder() + + /** + * Works similarly to `writeClientsStructs` + */ + + // write # states that were updated - i.e. the clients + encoding.writeVarUint(encoder.restEncoder, this.clientStructs.length) + + for (let i = 0; i < this.clientStructs.length; i++) { + const partStructs = this.clientStructs[i] + /** + * Works similarly to `writeStructs` + */ + // write # encoded structs + encoding.writeVarUint(encoder.restEncoder, partStructs.written) + encoder.writeClient(partStructs.client) + encoding.writeVarUint(encoder.restEncoder, partStructs.clock) + // append what we have been written so far + encoder.append(partStructs.encoder) + } + return encoder.toUint8Array() + } +} From 783c4d8209dd243e71b856f7759e8dc8def1e145 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 9 Dec 2020 17:48:45 +0100 Subject: [PATCH 03/23] write #263 append logic --- src/utils/updates.js | 44 ++++++++++++++++++++++++++++---------------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/src/utils/updates.js b/src/utils/updates.js index b7cab8c7..9bfcfc01 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -92,24 +92,31 @@ export class LazyStructReader { export class LazyStructWriter { /** - * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} YEncoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ - constructor (YEncoder) { + constructor (encoder) { this.fresh = true this.currClient = 0 this.startClock = 0 this.written = 0 - this.encoder = new YEncoder() + this.encoder = encoder /** - * @type {Array<{ client: number, clock: number, written: number, encoder: UpdateEncoderV1 | UpdateEncoderV2 }>} + * We want to write operations lazily, but also we need to know beforehand how many operations we want to write for each client. + * + * This kind of meta-information (#clients, #structs-per-client-written) is written to the restEncoder. 
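+     * The final message then has the same layout as a regular update:
+     * [#clients] followed by, per client, [#structs] [client] [startClock] [struct data].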
+ * + * We fragment the restEncoder and store a slice of it per-client until we know how many clients there are. + * When we flush (toUint8Array) we write the restEncoder using the fragments and the meta-information. + * + * @type {Array<{ written: number, restEncoder: Uint8Array }>} */ this.clientStructs = [] - this.YEncoder = YEncoder } flushCurr () { if (!this.fresh) { - this.clientStructs.push({ client: this.currClient, clock: this.startClock, written: this.written, encoder: this.encoder }) + this.clientStructs.push({ written: this.written, restEncoder: encoding.toUint8Array(this.encoder.restEncoder) }) + this.encoder.restEncoder = encoding.createEncoder() this.fresh = true } } @@ -123,21 +130,28 @@ export class LazyStructWriter { if (!this.fresh && this.currClient !== struct.id.client) { this.flushCurr() this.currClient = struct.id.client - this.startClock = struct.id.clock + // write next client + this.encoder.writeClient(struct.id.client) + // write startClock + encoding.writeVarUint(this.encoder.restEncoder, struct.id.clock) } struct.write(this.encoder, offset) this.written++ } toUint8Array () { - const encoder = new this.YEncoder() + this.flushCurr() + + // this is a fresh encoder because we called flushCurr + const restEncoder = this.encoder.restEncoder /** - * Works similarly to `writeClientsStructs` + * Now we put all the fragments together. + * This works similarly to `writeClientsStructs` */ // write # states that were updated - i.e. the clients - encoding.writeVarUint(encoder.restEncoder, this.clientStructs.length) + encoding.writeVarUint(restEncoder, this.clientStructs.length) for (let i = 0; i < this.clientStructs.length; i++) { const partStructs = this.clientStructs[i] @@ -145,12 +159,10 @@ export class LazyStructWriter { * Works similarly to `writeStructs` */ // write # encoded structs - encoding.writeVarUint(encoder.restEncoder, partStructs.written) - encoder.writeClient(partStructs.client) - encoding.writeVarUint(encoder.restEncoder, partStructs.clock) - // append what we have been written so far - encoder.append(partStructs.encoder) + encoding.writeVarUint(restEncoder, partStructs.written) + // write the rest of the fragment + encoding.writeUint8Array(restEncoder, partStructs.restEncoder) } - return encoder.toUint8Array() + return this.encoder.toUint8Array() } } From 320da29b69d7128e28f726c0deb00f9b58534256 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Thu, 10 Dec 2020 18:06:35 +0100 Subject: [PATCH 04/23] implement merge-logic - #263 --- src/utils/updates.js | 262 ++++++++++++++++++++++++++++++++----------- 1 file changed, 194 insertions(+), 68 deletions(-) diff --git a/src/utils/updates.js b/src/utils/updates.js index 9bfcfc01..40de0152 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -5,29 +5,13 @@ import * as encoding from 'lib0/encoding.js' import { createID, readItemContent, - Item, GC, AbstractUpdateDecoder, AbstractUpdateEncoder, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line + Item, GC, AbstractUpdateDecoder, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line } from '../internals.js' -/** - * @param {Array} updates - * @return {Uint8Array} - */ -export const mergeUpdates = updates => { - return updates[0] -} - -/** - * @param {Uint8Array} update - * @param {Uint8Array} sv - */ -export const diffUpdate = (update, sv) => { - return update -} - /** * @param {AbstractUpdateDecoder} decoder */ -export function * lazyStructReaderGenerator (decoder) { +function * 
lazyStructReaderGenerator (decoder) { const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder) for (let i = 0; i < numOfStateUpdates; i++) { const numberOfStructs = decoding.readVarUint(decoder.restDecoder) @@ -96,6 +80,13 @@ export class LazyStructWriter { */ constructor (encoder) { this.fresh = true + /** + * We keep the last written struct around in case we want to + * merge it with the next written struct. + * When a new struct is received: if mergeable ⇒ merge; otherwise ⇒ write curr and keep new struct around. + * @type {null | Item | GC} + */ + this.curr = null this.currClient = 0 this.startClock = 0 this.written = 0 @@ -112,57 +103,192 @@ export class LazyStructWriter { */ this.clientStructs = [] } +} - flushCurr () { - if (!this.fresh) { - this.clientStructs.push({ written: this.written, restEncoder: encoding.toUint8Array(this.encoder.restEncoder) }) - this.encoder.restEncoder = encoding.createEncoder() - this.fresh = true - } - } +/** + * @param {Array} updates + * @return {Uint8Array} + */ +export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV2) - /** - * @param {Item | GC} struct - * @param {number} offset - */ - write (struct, offset) { - // flush curr if we start another client - if (!this.fresh && this.currClient !== struct.id.client) { - this.flushCurr() - this.currClient = struct.id.client - // write next client - this.encoder.writeClient(struct.id.client) - // write startClock - encoding.writeVarUint(this.encoder.restEncoder, struct.id.clock) - } - struct.write(this.encoder, offset) - this.written++ - } - - toUint8Array () { - this.flushCurr() - - // this is a fresh encoder because we called flushCurr - const restEncoder = this.encoder.restEncoder - - /** - * Now we put all the fragments together. - * This works similarly to `writeClientsStructs` - */ - - // write # states that were updated - i.e. the clients - encoding.writeVarUint(restEncoder, this.clientStructs.length) - - for (let i = 0; i < this.clientStructs.length; i++) { - const partStructs = this.clientStructs[i] - /** - * Works similarly to `writeStructs` - */ - // write # encoded structs - encoding.writeVarUint(restEncoder, partStructs.written) - // write the rest of the fragment - encoding.writeUint8Array(restEncoder, partStructs.restEncoder) - } - return this.encoder.toUint8Array() +/** + * This method is intended to slice any kind of struct and retrieve the right part. + * It does not handle side-effects, so it should only be used by the lazy-encoder. + * + * @param {Item | GC} left + * @param {number} diff + * @return {Item | GC} + */ +const sliceStruct = (left, diff) => { + if (left.constructor === GC) { + const { client, clock } = left.id + return new GC(createID(client, clock + diff), left.length - diff) + } else { + const leftItem = /** @type {Item} */ (left) + const { client, clock } = leftItem.id + return new Item( + createID(client, clock + diff), + null, + createID(client, clock + diff - 1), + null, + leftItem.rightOrigin, + leftItem.parent, + leftItem.parentSub, + leftItem.content.splice(diff) + ) + } +} + +/** + * + * This function works similarly to `readUpdateV2`. 
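+ * It combines several document updates into a single update without applying
+ * them to a Y.Doc first, e.g. `const merged = mergeUpdatesV2(updates)`.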
+ * + * @param {Array} updates + * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder] + * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder] + * @return {Uint8Array} + */ +export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => { + const updateDecoders = updates.map(update => new UpdateDecoderV1(decoding.createDecoder(update))) + let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder)) + + /** + * @todo we don't need offset because we always slice before + * @type {null | { struct: Item | GC, offset: number }} + */ + let currWrite = null + + const updateEncoder = new YEncoder() + // write structs lazily + const lazyStructEncoder = new LazyStructWriter(updateEncoder) + + // Note: We need to ensure that all lazyStructDecoders are fully consumed + // Note: Should merge document updates whenever possible - even from different updates + // Note: Should handle that some operations cannot be applied yet () + + while (true) { + // Write higher clients first ⇒ sort by clientID & clock and remove decoders without content + lazyStructDecoders = lazyStructDecoders.filter(dec => dec.curr !== null) + lazyStructDecoders.sort( + /** @type {function(any,any):number} */ (dec1, dec2) => + dec1.curr.id.client === dec2.curr.id.client + ? dec1.curr.id.clock - dec2.curr.id.clock + : dec1.curr.id.client - dec2.curr.id.client + ) + if (lazyStructDecoders.length === 0) { + break + } + const currDecoder = lazyStructDecoders[0] + // write from currDecoder until the next operation is from another client or if filler-struct + // then we need to reorder the decoders and find the next operation to write + const firstClient = /** @type {Item | GC} */ (currDecoder.curr).id.client + if (currWrite !== null) { + let curr = /** @type {Item | GC} */ (currDecoder.curr) + if (firstClient !== currWrite.struct.id.client) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = { struct: curr, offset: 0 } + currDecoder.next() + } else if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) { + // @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock) + throw new Error('unhandled case') // @Todo ! + } else if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) { + const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock + if (diff > 0) { + curr = sliceStruct(curr, diff) + } + if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = { struct: curr, offset: 0 } + currDecoder.next() + } + } + } else { + currWrite = { struct: /** @type {Item | GC} */ (currDecoder.curr), offset: 0 } + currDecoder.next() + } + for ( + let next = currDecoder.curr; + next !== null && next.id.client === firstClient && next.id.clock === currWrite.struct.id.clock + currWrite.struct.length; // @Todo && next.constructor !== skippable + next = currDecoder.next() + ) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = { struct: next, offset: 0 } + } + } + finishLazyStructWriting(lazyStructEncoder) + + // Read DeleteSets and merge them. + // Write merged deleteset. 
+ // -- updateEncoder.writeDs() + return updateEncoder.toUint8Array() +} + +/** + * @param {Uint8Array} update + * @param {Uint8Array} sv + */ +export const diffUpdate = (update, sv) => { + return update +} + +/** + * @param {LazyStructWriter} lazyWriter + */ +const flushLazyStructWriter = lazyWriter => { + if (!lazyWriter.fresh) { + lazyWriter.clientStructs.push({ written: lazyWriter.written, restEncoder: encoding.toUint8Array(lazyWriter.encoder.restEncoder) }) + lazyWriter.encoder.restEncoder = encoding.createEncoder() + lazyWriter.fresh = true + } +} + +/** + * @param {LazyStructWriter} lazyWriter + * @param {Item | GC} struct + * @param {number} offset + */ +const writeStructToLazyStructWriter = (lazyWriter, struct, offset) => { + // flush curr if we start another client + if (!lazyWriter.fresh && lazyWriter.currClient !== struct.id.client) { + flushLazyStructWriter(lazyWriter) + lazyWriter.currClient = struct.id.client + // write next client + lazyWriter.encoder.writeClient(struct.id.client) + // write startClock + encoding.writeVarUint(lazyWriter.encoder.restEncoder, struct.id.clock) + } + struct.write(lazyWriter.encoder, offset) + lazyWriter.written++ +} +/** + * Call this function when we collected all parts and want to + * put all the parts together. After calling this method, + * you can continue using the UpdateEncoder. + * + * @param {LazyStructWriter} lazyWriter + */ +const finishLazyStructWriting = (lazyWriter) => { + flushLazyStructWriter(lazyWriter) + + // this is a fresh encoder because we called flushCurr + const restEncoder = lazyWriter.encoder.restEncoder + + /** + * Now we put all the fragments together. + * This works similarly to `writeClientsStructs` + */ + + // write # states that were updated - i.e. the clients + encoding.writeVarUint(restEncoder, lazyWriter.clientStructs.length) + + for (let i = 0; i < lazyWriter.clientStructs.length; i++) { + const partStructs = lazyWriter.clientStructs[i] + /** + * Works similarly to `writeStructs` + */ + // write # encoded structs + encoding.writeVarUint(restEncoder, partStructs.written) + // write the rest of the fragment + encoding.writeUint8Array(restEncoder, partStructs.restEncoder) } } From 1e0fd60df4a8535d95dbbc40f8ea93e2d288faad Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Sat, 12 Dec 2020 22:40:55 +0100 Subject: [PATCH 05/23] proper merge for deletesets --- src/utils/DeleteSet.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/utils/DeleteSet.js b/src/utils/DeleteSet.js index 658f5087..60b2af1f 100644 --- a/src/utils/DeleteSet.js +++ b/src/utils/DeleteSet.js @@ -121,8 +121,8 @@ export const sortAndMergeDeleteSet = ds => { for (i = 1, j = 1; i < dels.length; i++) { const left = dels[j - 1] const right = dels[i] - if (left.clock + left.len === right.clock) { - left.len += right.len + if (left.clock + left.len >= right.clock) { + left.len = math.max(left.len, right.clock + right.len - left.clock) } else { if (j < i) { dels[j] = right From c8534ea6bcb09efe461fd48593d747ef471868ed Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Sat, 12 Dec 2020 22:48:10 +0100 Subject: [PATCH 06/23] merging delete-sets #263 --- src/utils/updates.js | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/src/utils/updates.js b/src/utils/updates.js index 40de0152..827eb9c2 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -5,6 +5,9 @@ import * as encoding from 'lib0/encoding.js' import { createID, readItemContent, + readDeleteSet, + writeDeleteSet, + mergeDeleteSets, Item, 
GC, AbstractUpdateDecoder, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line } from '../internals.js' @@ -20,11 +23,6 @@ function * lazyStructReaderGenerator (decoder) { for (let i = 0; i < numberOfStructs; i++) { const info = decoder.readInfo() if ((binary.BITS5 & info) !== 0) { - /** - * The optimized implementation doesn't use any variables because inlining variables is faster. - * Below a non-optimized version is shown that implements the basic algorithm with - * a few comments - */ const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` // and we read the next string as parentYKey. @@ -32,7 +30,7 @@ function * lazyStructReaderGenerator (decoder) { // @type {string|null} const struct = new Item( createID(client, clock), - null, // leftd + null, // left (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin null, // right (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin @@ -217,9 +215,9 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U } finishLazyStructWriting(lazyStructEncoder) - // Read DeleteSets and merge them. - // Write merged deleteset. - // -- updateEncoder.writeDs() + const dss = updateDecoders.map(decoder => readDeleteSet(decoder)) + const ds = mergeDeleteSets(dss) + writeDeleteSet(updateEncoder, ds) return updateEncoder.toUint8Array() } From 004a781a5635ce370f1cbc157512df3a72c76d93 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Sun, 13 Dec 2020 16:24:43 +0100 Subject: [PATCH 07/23] basic merge works. fixes first test #263 --- src/structs/Item.js | 21 +++++++++++++-------- src/utils/updates.js | 15 ++++++++++----- 2 files changed, 23 insertions(+), 13 deletions(-) diff --git a/src/structs/Item.js b/src/structs/Item.js index 4e171224..b4c10074 100644 --- a/src/structs/Item.js +++ b/src/structs/Item.js @@ -639,16 +639,21 @@ export class Item extends AbstractStruct { } if (origin === null && rightOrigin === null) { const parent = /** @type {AbstractType} */ (this.parent) - const parentItem = parent._item - if (parentItem === null) { - // parent type on y._map - // find the correct key - const ykey = findRootTypeKey(parent) + if (parent.constructor === String) { // this edge case was added by differential updates encoder.writeParentInfo(true) // write parentYKey - encoder.writeString(ykey) + encoder.writeString(parent) } else { - encoder.writeParentInfo(false) // write parent id - encoder.writeLeftID(parentItem.id) + const parentItem = parent._item + if (parentItem === null) { + // parent type on y._map + // find the correct key + const ykey = findRootTypeKey(parent) + encoder.writeParentInfo(true) // write parentYKey + encoder.writeString(ykey) + } else { + encoder.writeParentInfo(false) // write parent id + encoder.writeLeftID(parentItem.id) + } } if (parentSub !== null) { encoder.writeString(parentSub) diff --git a/src/utils/updates.js b/src/utils/updates.js index 827eb9c2..c2285cce 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -77,7 +77,6 @@ export class LazyStructWriter { * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ constructor (encoder) { - this.fresh = true /** * We keep the last written struct around in case we want to * merge it with the next written struct. 
@@ -107,7 +106,7 @@ export class LazyStructWriter { * @param {Array} updates * @return {Uint8Array} */ -export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV2) +export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV1) /** * This method is intended to slice any kind of struct and retrieve the right part. @@ -213,6 +212,10 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U currWrite = { struct: next, offset: 0 } } } + if (currWrite !== null) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = null + } finishLazyStructWriting(lazyStructEncoder) const dss = updateDecoders.map(decoder => readDeleteSet(decoder)) @@ -233,10 +236,10 @@ export const diffUpdate = (update, sv) => { * @param {LazyStructWriter} lazyWriter */ const flushLazyStructWriter = lazyWriter => { - if (!lazyWriter.fresh) { + if (lazyWriter.written > 0) { lazyWriter.clientStructs.push({ written: lazyWriter.written, restEncoder: encoding.toUint8Array(lazyWriter.encoder.restEncoder) }) lazyWriter.encoder.restEncoder = encoding.createEncoder() - lazyWriter.fresh = true + lazyWriter.written = 0 } } @@ -247,8 +250,10 @@ const flushLazyStructWriter = lazyWriter => { */ const writeStructToLazyStructWriter = (lazyWriter, struct, offset) => { // flush curr if we start another client - if (!lazyWriter.fresh && lazyWriter.currClient !== struct.id.client) { + if (lazyWriter.written > 0 && lazyWriter.currClient !== struct.id.client) { flushLazyStructWriter(lazyWriter) + } + if (lazyWriter.written === 0) { lazyWriter.currClient = struct.id.client // write next client lazyWriter.encoder.writeClient(struct.id.client) From f8341220c3e51d01e81987bc029d0e9ae5cca0fc Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Tue, 15 Dec 2020 15:39:08 +0100 Subject: [PATCH 08/23] first working version that also considers holes in document updates - #263 --- src/internals.js | 1 + src/structs/GC.js | 3 ++ src/structs/Item.js | 6 ++-- src/utils/updates.js | 68 +++++++++++++++++++++++++++++++----------- tests/updates.tests.js | 8 ++++- 5 files changed, 65 insertions(+), 21 deletions(-) diff --git a/src/internals.js b/src/internals.js index 5d32f0ad..bc386f0a 100644 --- a/src/internals.js +++ b/src/internals.js @@ -40,3 +40,4 @@ export * from './structs/ContentAny.js' export * from './structs/ContentString.js' export * from './structs/ContentType.js' export * from './structs/Item.js' +export * from './structs/Skip.js' diff --git a/src/structs/GC.js b/src/structs/GC.js index 0b9e4244..110f68b6 100644 --- a/src/structs/GC.js +++ b/src/structs/GC.js @@ -22,6 +22,9 @@ export class GC extends AbstractStruct { * @return {boolean} */ mergeWith (right) { + if (this.constructor !== right.constructor) { + return false + } this.length += right.length return true } diff --git a/src/structs/Item.js b/src/structs/Item.js index b4c10074..c63dff84 100644 --- a/src/structs/Item.js +++ b/src/structs/Item.js @@ -554,6 +554,7 @@ export class Item extends AbstractStruct { */ mergeWith (right) { if ( + this.constructor === right.constructor && compareIDs(right.origin, this.lastId) && this.right === right && compareIDs(this.rightOrigin, right.rightOrigin) && @@ -675,7 +676,7 @@ export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS * @type {Array} */ export const contentRefs = [ - () => { throw error.unexpectedCase() }, // GC is not ItemContent + () => { error.unexpectedCase() }, // GC is not 
ItemContent readContentDeleted, // 1 readContentJSON, // 2 readContentBinary, // 3 @@ -684,7 +685,8 @@ export const contentRefs = [ readContentFormat, // 6 readContentType, // 7 readContentAny, // 8 - readContentDoc // 9 + readContentDoc, // 9 + () => { error.unexpectedCase() } // 10 - Skip is not ItemContent ] /** diff --git a/src/utils/updates.js b/src/utils/updates.js index c2285cce..e0b6733c 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -7,6 +7,7 @@ import { readItemContent, readDeleteSet, writeDeleteSet, + Skip, mergeDeleteSets, Item, GC, AbstractUpdateDecoder, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line } from '../internals.js' @@ -22,7 +23,12 @@ function * lazyStructReaderGenerator (decoder) { let clock = decoding.readVarUint(decoder.restDecoder) for (let i = 0; i < numberOfStructs; i++) { const info = decoder.readInfo() - if ((binary.BITS5 & info) !== 0) { + // @todo use switch instead of ifs + if (info === 10) { + const len = decoder.readLen() + yield new Skip(createID(client, clock), len) + clock += len + } else if ((binary.BITS5 & info) !== 0) { const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` // and we read the next string as parentYKey. @@ -68,7 +74,11 @@ export class LazyStructReader { * @return {Item | GC | null} */ next () { - return (this.curr = this.gen.next().value || null) + // ignore "Skip" structs + do { + this.curr = this.gen.next().value || null + } while (this.curr !== null && this.curr.constructor === Skip) + return this.curr } } @@ -77,13 +87,6 @@ export class LazyStructWriter { * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ constructor (encoder) { - /** - * We keep the last written struct around in case we want to - * merge it with the next written struct. - * When a new struct is received: if mergeable ⇒ merge; otherwise ⇒ write curr and keep new struct around. - * @type {null | Item | GC} - */ - this.curr = null this.currClient = 0 this.startClock = 0 this.written = 0 @@ -112,7 +115,7 @@ export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, * This method is intended to slice any kind of struct and retrieve the right part. * It does not handle side-effects, so it should only be used by the lazy-encoder. 
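+ * E.g. slicing a GC with clock = 0 and length = 10 at diff = 4 yields a GC with clock = 4 and length = 6.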
* - * @param {Item | GC} left + * @param {Item | GC | Skip} left * @param {number} diff * @return {Item | GC} */ @@ -120,6 +123,9 @@ const sliceStruct = (left, diff) => { if (left.constructor === GC) { const { client, clock } = left.id return new GC(createID(client, clock + diff), left.length - diff) + } else if (left.constructor === Skip) { + const { client, clock } = left.id + return new Skip(createID(client, clock + diff), left.length - diff) } else { const leftItem = /** @type {Item} */ (left) const { client, clock } = leftItem.id @@ -151,7 +157,7 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U /** * @todo we don't need offset because we always slice before - * @type {null | { struct: Item | GC, offset: number }} + * @type {null | { struct: Item | GC | Skip, offset: number }} */ let currWrite = null @@ -167,10 +173,20 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U // Write higher clients first ⇒ sort by clientID & clock and remove decoders without content lazyStructDecoders = lazyStructDecoders.filter(dec => dec.curr !== null) lazyStructDecoders.sort( - /** @type {function(any,any):number} */ (dec1, dec2) => - dec1.curr.id.client === dec2.curr.id.client - ? dec1.curr.id.clock - dec2.curr.id.clock - : dec1.curr.id.client - dec2.curr.id.client + /** @type {function(any,any):number} */ (dec1, dec2) => { + if (dec1.curr.id.client === dec2.curr.id.client) { + const clockDiff = dec1.curr.id.clock - dec2.curr.id.clock + if (clockDiff === 0) { + return dec1.curr.constructor === dec2.curr.constructor ? 0 : ( + dec1.curr.constructor === Skip ? 1 : -1 + ) + } else { + return clockDiff + } + } else { + return dec2.curr.id.client - dec1.curr.id.client + } + } ) if (lazyStructDecoders.length === 0) { break @@ -187,11 +203,27 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U currDecoder.next() } else if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) { // @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock) - throw new Error('unhandled case') // @Todo ! 
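+        // fill the gap between the last written struct and `curr` with a Skip struct (or extend an existing one)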
+ if (currWrite.struct.constructor === Skip) { + // extend existing skip + currWrite.struct.length = curr.id.clock + curr.length - currWrite.struct.id.clock + } else { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + const diff = curr.id.clock - currWrite.struct.id.clock - currWrite.struct.length + /** + * @type {Skip} + */ + const struct = new Skip(createID(firstClient, currWrite.struct.id.clock + currWrite.struct.length), diff) + currWrite = { struct, offset: 0 } + } } else if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) { const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock if (diff > 0) { - curr = sliceStruct(curr, diff) + if (currWrite.struct.constructor === Skip) { + // prefer to slice Skip because the other struct might contain more information + currWrite.struct.length -= diff + } else { + curr = sliceStruct(curr, diff) + } } if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) { writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) @@ -205,7 +237,7 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U } for ( let next = currDecoder.curr; - next !== null && next.id.client === firstClient && next.id.clock === currWrite.struct.id.clock + currWrite.struct.length; // @Todo && next.constructor !== skippable + next !== null && next.id.client === firstClient && next.id.clock === currWrite.struct.id.clock + currWrite.struct.length && next.constructor !== Skip; next = currDecoder.next() ) { writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) diff --git a/tests/updates.tests.js b/tests/updates.tests.js index 70084273..108c5535 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -56,9 +56,15 @@ export const testMergeUpdatesWrongOrder = tc => { Y.mergeUpdates([updates[1], updates[3]]) ]) - ;[wrongOrder, overlapping, separated].forEach(updates => { + const targetState = Y.encodeStateAsUpdate(ydoc) + ;[wrongOrder, overlapping, separated].forEach((updates, i) => { const merged = new Y.Doc() Y.applyUpdate(merged, updates) t.compareArrays(merged.getArray().toArray(), array.toArray()) + t.compare(updates, targetState) }) } + +/** + * @todo be able to apply Skip structs to Yjs docs + */ From 22aef63d8ae92697f1367731a68ac125d739629d Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 16 Dec 2020 21:08:18 +0100 Subject: [PATCH 09/23] add Skip struct --- src/structs/Skip.js | 58 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) create mode 100644 src/structs/Skip.js diff --git a/src/structs/Skip.js b/src/structs/Skip.js new file mode 100644 index 00000000..5b0f7b35 --- /dev/null +++ b/src/structs/Skip.js @@ -0,0 +1,58 @@ + +import { + AbstractStruct, + AbstractUpdateEncoder, StructStore, Transaction, ID // eslint-disable-line +} from '../internals.js' +import * as error from 'lib0/error.js' + +export const structSkipRefNumber = 10 + +/** + * @private + */ +export class Skip extends AbstractStruct { + get deleted () { + return true + } + + delete () {} + + /** + * @param {Skip} right + * @return {boolean} + */ + mergeWith (right) { + if (this.constructor !== right.constructor) { + return false + } + this.length += right.length + return true + } + + /** + * @param {Transaction} transaction + * @param {number} offset + */ + integrate (transaction, offset) { + // skip structs cannot be integrated + error.unexpectedCase() + } + + /** + * @param {AbstractUpdateEncoder} 
encoder + * @param {number} offset + */ + write (encoder, offset) { + encoder.writeInfo(structSkipRefNumber) + encoder.writeLen(this.length - offset) + } + + /** + * @param {Transaction} transaction + * @param {StructStore} store + * @return {null | number} + */ + getMissing (transaction, store) { + return null + } +} From 072947c0bb7af4dce773194e1bb5481d5b14993a Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 16 Dec 2020 21:25:00 +0100 Subject: [PATCH 10/23] implement update logging --- src/index.js | 2 ++ src/utils/updates.js | 21 +++++++++++++++++++++ tests/updates.tests.js | 4 ++++ 3 files changed, 27 insertions(+) diff --git a/src/index.js b/src/index.js index af555a23..831461bf 100644 --- a/src/index.js +++ b/src/index.js @@ -65,6 +65,8 @@ export { encodeSnapshotV2, decodeStateVector, decodeStateVectorV2, + logUpdate, + logUpdateV2, isDeleted, isParentOf, equalSnapshots, diff --git a/src/utils/updates.js b/src/utils/updates.js index e0b6733c..9fa49f8a 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -2,6 +2,7 @@ import * as binary from 'lib0/binary.js' import * as decoding from 'lib0/decoding.js' import * as encoding from 'lib0/encoding.js' +import * as logging from 'lib0/logging.js' import { createID, readItemContent, @@ -82,6 +83,26 @@ export class LazyStructReader { } } +/** + * @param {Uint8Array} update + * + */ +export const logUpdate = update => logUpdateV2(update, UpdateDecoderV1) + +/** + * @param {Uint8Array} update + * @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder] + * + */ +export const logUpdateV2 = (update, YDecoder = UpdateDecoderV2) => { + const structs = [] + const lazyDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update))) + for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) { + structs.push(curr) + } + logging.print(structs) +} + export class LazyStructWriter { /** * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder diff --git a/tests/updates.tests.js b/tests/updates.tests.js index 108c5535..25fec654 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -57,7 +57,11 @@ export const testMergeUpdatesWrongOrder = tc => { ]) const targetState = Y.encodeStateAsUpdate(ydoc) + t.info('Target State: ') + Y.logUpdate(targetState) ;[wrongOrder, overlapping, separated].forEach((updates, i) => { + t.info('State $' + i + ':') + Y.logUpdate(updates) const merged = new Y.Doc() Y.applyUpdate(merged, updates) t.compareArrays(merged.getArray().toArray(), array.toArray()) From 0b23d5aeeb72d42fcb2e61d7fcc82672a7c8d948 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 16 Dec 2020 22:53:11 +0100 Subject: [PATCH 11/23] First working version of differential updates - #263 --- src/utils/updates.js | 65 +++++++++++++++++++++++++----------------- tests/updates.tests.js | 5 +++- 2 files changed, 43 insertions(+), 27 deletions(-) diff --git a/src/utils/updates.js b/src/utils/updates.js index 9fa49f8a..90441b0b 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -216,40 +216,53 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U // write from currDecoder until the next operation is from another client or if filler-struct // then we need to reorder the decoders and find the next operation to write const firstClient = /** @type {Item | GC} */ (currDecoder.curr).id.client + if (currWrite !== null) { - let curr = /** @type {Item | GC} */ (currDecoder.curr) + let curr = /** @type {Item | GC | null} */ (currDecoder.curr) + + // iterate until we find 
something that we haven't written already + // remember: first the high client-ids are written + while (curr !== null && curr.id.clock + curr.length <= currWrite.struct.id.clock + currWrite.struct.length && curr.id.client >= currWrite.struct.id.client) { + curr = currDecoder.next() + } + if (curr === null || curr.id.client !== firstClient) { + continue + } + if (firstClient !== currWrite.struct.id.client) { writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) currWrite = { struct: curr, offset: 0 } currDecoder.next() - } else if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) { - // @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock) - if (currWrite.struct.constructor === Skip) { - // extend existing skip - currWrite.struct.length = curr.id.clock + curr.length - currWrite.struct.id.clock - } else { - writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) - const diff = curr.id.clock - currWrite.struct.id.clock - currWrite.struct.length - /** - * @type {Skip} - */ - const struct = new Skip(createID(firstClient, currWrite.struct.id.clock + currWrite.struct.length), diff) - currWrite = { struct, offset: 0 } - } - } else if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) { - const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock - if (diff > 0) { + } else { + if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) { + // @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock) if (currWrite.struct.constructor === Skip) { - // prefer to slice Skip because the other struct might contain more information - currWrite.struct.length -= diff + // extend existing skip + currWrite.struct.length = curr.id.clock + curr.length - currWrite.struct.id.clock } else { - curr = sliceStruct(curr, diff) + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + const diff = curr.id.clock - currWrite.struct.id.clock - currWrite.struct.length + /** + * @type {Skip} + */ + const struct = new Skip(createID(firstClient, currWrite.struct.id.clock + currWrite.struct.length), diff) + currWrite = { struct, offset: 0 } + } + } else { // if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) { + const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock + if (diff > 0) { + if (currWrite.struct.constructor === Skip) { + // prefer to slice Skip because the other struct might contain more information + currWrite.struct.length -= diff + } else { + curr = sliceStruct(curr, diff) + } + } + if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) { + writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) + currWrite = { struct: curr, offset: 0 } + currDecoder.next() } - } - if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) { - writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset) - currWrite = { struct: curr, offset: 0 } - currDecoder.next() } } } else { diff --git a/tests/updates.tests.js b/tests/updates.tests.js index 25fec654..d52388c8 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -59,7 +59,10 @@ export const testMergeUpdatesWrongOrder = tc => { const targetState = Y.encodeStateAsUpdate(ydoc) t.info('Target State: ') Y.logUpdate(targetState) - ;[wrongOrder, 
overlapping, separated].forEach((updates, i) => { + const allcases = [wrongOrder, overlapping, separated] + // case 4: merging all cases above + allcases.push(Y.mergeUpdates(allcases)) + allcases.forEach((updates, i) => { t.info('State $' + i + ':') Y.logUpdate(updates) const merged = new Y.Doc() From ba833983740a43bf11bf121f4425c937e117e0e8 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 16 Dec 2020 22:58:22 +0100 Subject: [PATCH 12/23] fix tests --- tests/encoding.tests.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/encoding.tests.js b/tests/encoding.tests.js index 65563e9d..c5afd833 100644 --- a/tests/encoding.tests.js +++ b/tests/encoding.tests.js @@ -22,7 +22,7 @@ import { * @param {t.TestCase} tc */ export const testStructReferences = tc => { - t.assert(contentRefs.length === 10) + t.assert(contentRefs.length === 11) t.assert(contentRefs[1] === readContentDeleted) t.assert(contentRefs[2] === readContentJSON) // TODO: deprecate content json? t.assert(contentRefs[3] === readContentBinary) @@ -32,6 +32,7 @@ export const testStructReferences = tc => { t.assert(contentRefs[7] === readContentType) t.assert(contentRefs[8] === readContentAny) t.assert(contentRefs[9] === readContentDoc) + // contentRefs[10] is reserved for Skip structs } /** From 47221c26c49b5d4b12ca14d6edaf7860c6e4b4b4 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 16 Dec 2020 23:26:38 +0100 Subject: [PATCH 13/23] test with v1 and v2 encoding --- src/index.js | 3 +- src/utils/updates.js | 2 +- tests/updates.tests.js | 69 +++++++++++++++++++++++++++--------------- 3 files changed, 47 insertions(+), 27 deletions(-) diff --git a/src/index.js b/src/index.js index 831461bf..e4ad99e1 100644 --- a/src/index.js +++ b/src/index.js @@ -75,5 +75,6 @@ export { transact, AbstractConnector, logType, - mergeUpdates + mergeUpdates, + mergeUpdatesV2 } from './internals.js' diff --git a/src/utils/updates.js b/src/utils/updates.js index 90441b0b..4c524d45 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -173,7 +173,7 @@ const sliceStruct = (left, diff) => { * @return {Uint8Array} */ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => { - const updateDecoders = updates.map(update => new UpdateDecoderV1(decoding.createDecoder(update))) + const updateDecoders = updates.map(update => new YDecoder(decoding.createDecoder(update))) let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder)) /** diff --git a/tests/updates.tests.js b/tests/updates.tests.js index d52388c8..e91401b7 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -2,15 +2,23 @@ import * as t from 'lib0/testing.js' import { init, compare } from './testHelper.js' // eslint-disable-line import * as Y from '../src/index.js' +const useV2 = true + +const encodeStateAsUpdate = useV2 ? Y.encodeStateAsUpdateV2 : Y.encodeStateAsUpdate +const mergeUpdates = useV2 ? Y.mergeUpdatesV2 : Y.mergeUpdates +const applyUpdate = useV2 ? Y.applyUpdateV2 : Y.applyUpdate +const logUpdate = useV2 ? Y.logUpdateV2 : Y.logUpdate +const updateEventName = useV2 ? 
'updateV2' : 'update' + /** * @param {Array} users */ const fromUpdates = users => { const updates = users.map(user => - Y.encodeStateAsUpdate(user) + encodeStateAsUpdate(user) ) const ydoc = new Y.Doc() - Y.applyUpdate(ydoc, Y.mergeUpdates(updates)) + applyUpdate(ydoc, mergeUpdates(updates)) return ydoc } @@ -34,7 +42,7 @@ export const testMergeUpdates = tc => { export const testMergeUpdatesWrongOrder = tc => { const ydoc = new Y.Doc() const updates = /** @type {Array} */ ([]) - ydoc.on('update', update => { updates.push(update) }) + ydoc.on(updateEventName, update => { updates.push(update) }) const array = ydoc.getArray() array.insert(0, [1]) @@ -42,31 +50,42 @@ export const testMergeUpdatesWrongOrder = tc => { array.insert(0, [3]) array.insert(0, [4]) - const wrongOrder = Y.mergeUpdates([ - Y.mergeUpdates(updates.slice(2)), - Y.mergeUpdates(updates.slice(0, 2)) - ]) - const overlapping = Y.mergeUpdates([ - Y.mergeUpdates(updates.slice(2)), - Y.mergeUpdates(updates.slice(1, 3)), - updates[0] - ]) - const separated = Y.mergeUpdates([ - Y.mergeUpdates([updates[0], updates[2]]), - Y.mergeUpdates([updates[1], updates[3]]) - ]) + const cases = [] - const targetState = Y.encodeStateAsUpdate(ydoc) + // Case 1: Simple case, simply merge everything + cases.push(mergeUpdates(updates)) + + // Case 2: Overlapping updates + cases.push(mergeUpdates([ + mergeUpdates(updates.slice(2)), + mergeUpdates(updates.slice(0, 2)) + ])) + + // Case 3: Overlapping updates + cases.push(mergeUpdates([ + mergeUpdates(updates.slice(2)), + mergeUpdates(updates.slice(1, 3)), + updates[0] + ])) + + // Case 4: Separated updates (containing skips) + cases.push(mergeUpdates([ + mergeUpdates([updates[0], updates[2]]), + mergeUpdates([updates[1], updates[3]]) + ])) + + // Case 5: overlapping with many duplicates + cases.push(mergeUpdates(cases)) + + const targetState = encodeStateAsUpdate(ydoc) t.info('Target State: ') - Y.logUpdate(targetState) - const allcases = [wrongOrder, overlapping, separated] - // case 4: merging all cases above - allcases.push(Y.mergeUpdates(allcases)) - allcases.forEach((updates, i) => { - t.info('State $' + i + ':') - Y.logUpdate(updates) + logUpdate(targetState) + + cases.forEach((updates, i) => { + t.info('State Case $' + i + ':') + logUpdate(updates) const merged = new Y.Doc() - Y.applyUpdate(merged, updates) + applyUpdate(merged, updates) t.compareArrays(merged.getArray().toArray(), array.toArray()) t.compare(updates, targetState) }) From d8868c47e14444e3a60759a40037b02c761170f1 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 16 Dec 2020 23:45:28 +0100 Subject: [PATCH 14/23] test case for deletes + fix --- src/utils/updates.js | 7 ++++-- tests/updates.tests.js | 55 ++++++++++++++++++++++++++++++------------ 2 files changed, 45 insertions(+), 17 deletions(-) diff --git a/src/utils/updates.js b/src/utils/updates.js index 4c524d45..ce46fa5d 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -96,11 +96,14 @@ export const logUpdate = update => logUpdateV2(update, UpdateDecoderV1) */ export const logUpdateV2 = (update, YDecoder = UpdateDecoderV2) => { const structs = [] - const lazyDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update))) + const updateDecoder = new YDecoder(decoding.createDecoder(update)) + const lazyDecoder = new LazyStructReader(updateDecoder) for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) { structs.push(curr) } - logging.print(structs) + logging.print('Structs: ', structs) + const ds = readDeleteSet(updateDecoder) 
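+  // the delete set is encoded after the structs, so it can be read once all structs have been consumed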
+ logging.print('DeleteSet: ', ds) } export class LazyStructWriter { diff --git a/tests/updates.tests.js b/tests/updates.tests.js index e91401b7..dfa08420 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -37,19 +37,10 @@ export const testMergeUpdates = tc => { } /** - * @param {t.TestCase} tc + * @param {Y.Doc} ydoc + * @param {Array} updates - expecting at least 4 updates */ -export const testMergeUpdatesWrongOrder = tc => { - const ydoc = new Y.Doc() - const updates = /** @type {Array} */ ([]) - ydoc.on(updateEventName, update => { updates.push(update) }) - - const array = ydoc.getArray() - array.insert(0, [1]) - array.insert(0, [2]) - array.insert(0, [3]) - array.insert(0, [4]) - +const checkUpdateCases = (ydoc, updates) => { const cases = [] // Case 1: Simple case, simply merge everything @@ -71,7 +62,8 @@ export const testMergeUpdatesWrongOrder = tc => { // Case 4: Separated updates (containing skips) cases.push(mergeUpdates([ mergeUpdates([updates[0], updates[2]]), - mergeUpdates([updates[1], updates[3]]) + mergeUpdates([updates[1], updates[3]]), + mergeUpdates(updates.slice(4)) ])) // Case 5: overlapping with many duplicates @@ -86,11 +78,44 @@ export const testMergeUpdatesWrongOrder = tc => { logUpdate(updates) const merged = new Y.Doc() applyUpdate(merged, updates) - t.compareArrays(merged.getArray().toArray(), array.toArray()) - t.compare(updates, targetState) + t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray()) }) } +/** + * @param {t.TestCase} tc + */ +export const testMergeUpdates1 = tc => { + const ydoc = new Y.Doc({ gc: false }) + const updates = /** @type {Array} */ ([]) + ydoc.on(updateEventName, update => { updates.push(update) }) + + const array = ydoc.getArray() + array.insert(0, [1]) + array.insert(0, [2]) + array.insert(0, [3]) + array.insert(0, [4]) + + checkUpdateCases(ydoc, updates) +} + +/** + * @param {t.TestCase} tc + */ +export const testMergeUpdates2 = tc => { + const ydoc = new Y.Doc({ gc: false }) + const updates = /** @type {Array} */ ([]) + ydoc.on(updateEventName, update => { updates.push(update) }) + + const array = ydoc.getArray() + array.insert(0, [1, 2]) + array.delete(1, 1) + array.insert(0, [3, 4]) + array.delete(1, 2) + + checkUpdateCases(ydoc, updates) +} + /** * @todo be able to apply Skip structs to Yjs docs */ From fbbf085278e842c9f5c173f7f591b2d2f3863465 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Thu, 17 Dec 2020 21:50:39 +0100 Subject: [PATCH 15/23] add mergeUpdates tests to comparison framework --- src/structs/Item.js | 13 ++++++--- src/utils/Snapshot.js | 4 +-- src/utils/Transaction.js | 5 ++-- src/utils/encoding.js | 29 ++++--------------- tests/testHelper.js | 61 +++++++++++++++++++++++++++++++++++++--- 5 files changed, 76 insertions(+), 36 deletions(-) diff --git a/src/structs/Item.js b/src/structs/Item.js index c63dff84..adc64023 100644 --- a/src/structs/Item.js +++ b/src/structs/Item.js @@ -640,10 +640,7 @@ export class Item extends AbstractStruct { } if (origin === null && rightOrigin === null) { const parent = /** @type {AbstractType} */ (this.parent) - if (parent.constructor === String) { // this edge case was added by differential updates - encoder.writeParentInfo(true) // write parentYKey - encoder.writeString(parent) - } else { + if (parent._item !== undefined) { const parentItem = parent._item if (parentItem === null) { // parent type on y._map @@ -655,6 +652,14 @@ export class Item extends AbstractStruct { encoder.writeParentInfo(false) // write parent id 
encoder.writeLeftID(parentItem.id) } + } else if (parent.constructor === String) { // this edge case was added by differential updates + encoder.writeParentInfo(true) // write parentYKey + encoder.writeString(parent) + } else if (parent.constructor === ID) { + encoder.writeParentInfo(false) // write parent id + encoder.writeLeftID(parent) + } else { + error.unexpectedCase() } if (parentSub !== null) { encoder.writeString(parentSub) diff --git a/src/utils/Snapshot.js b/src/utils/Snapshot.js index ccd923e8..263bfa23 100644 --- a/src/utils/Snapshot.js +++ b/src/utils/Snapshot.js @@ -14,7 +14,6 @@ import { getState, findIndexSS, UpdateEncoderV2, - DefaultDSEncoder, applyUpdateV2, AbstractDSDecoder, AbstractDSEncoder, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line } from '../internals.js' @@ -23,6 +22,7 @@ import * as map from 'lib0/map.js' import * as set from 'lib0/set.js' import * as decoding from 'lib0/decoding.js' import * as encoding from 'lib0/encoding.js' +import { DSEncoderV1 } from './UpdateEncoder.js' export class Snapshot { /** @@ -91,7 +91,7 @@ export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => { * @param {Snapshot} snapshot * @return {Uint8Array} */ -export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DefaultDSEncoder()) +export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DSEncoderV1()) /** * @param {Uint8Array} buf diff --git a/src/utils/Transaction.js b/src/utils/Transaction.js index 4a1746ac..929d061b 100644 --- a/src/utils/Transaction.js +++ b/src/utils/Transaction.js @@ -11,7 +11,7 @@ import { Item, generateNewClientId, createID, - AbstractUpdateEncoder, GC, StructStore, UpdateEncoderV2, DefaultUpdateEncoder, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line + AbstractUpdateEncoder, GC, StructStore, UpdateEncoderV2, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line } from '../internals.js' import * as map from 'lib0/map.js' @@ -19,6 +19,7 @@ import * as math from 'lib0/math.js' import * as set from 'lib0/set.js' import * as logging from 'lib0/logging.js' import { callAll } from 'lib0/function.js' +import { UpdateEncoderV1 } from './UpdateEncoder.js' /** * A transaction is created for every change on the Yjs model. 
It is possible @@ -337,7 +338,7 @@ const cleanupTransactions = (transactionCleanups, i) => { // @todo Merge all the transactions into one and provide send the data as a single update message doc.emit('afterTransactionCleanup', [transaction, doc]) if (doc._observers.has('update')) { - const encoder = new DefaultUpdateEncoder() + const encoder = new UpdateEncoderV1() const hasContent = writeUpdateMessageFromTransaction(encoder, transaction) if (hasContent) { doc.emit('update', [encoder.toUint8Array(), transaction.origin, doc]) diff --git a/src/utils/encoding.js b/src/utils/encoding.js index 9efd16ee..56cb6836 100644 --- a/src/utils/encoding.js +++ b/src/utils/encoding.js @@ -41,25 +41,6 @@ import * as decoding from 'lib0/decoding.js' import * as binary from 'lib0/binary.js' import * as map from 'lib0/map.js' -export let DefaultDSEncoder = DSEncoderV1 -export let DefaultDSDecoder = DSDecoderV1 -export let DefaultUpdateEncoder = UpdateEncoderV1 -export let DefaultUpdateDecoder = UpdateDecoderV1 - -export const useV1Encoding = () => { - DefaultDSEncoder = DSEncoderV1 - DefaultDSDecoder = DSDecoderV1 - DefaultUpdateEncoder = UpdateEncoderV1 - DefaultUpdateDecoder = UpdateDecoderV1 -} - -export const useV2Encoding = () => { - DefaultDSEncoder = DSEncoderV2 - DefaultDSDecoder = DSDecoderV2 - DefaultUpdateEncoder = UpdateEncoderV2 - DefaultUpdateDecoder = UpdateDecoderV2 -} - /** * @param {AbstractUpdateEncoder} encoder * @param {Array} structs All structs by `client` @@ -445,7 +426,7 @@ export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = n * * @function */ -export const readUpdate = (decoder, ydoc, transactionOrigin) => readUpdateV2(decoder, ydoc, transactionOrigin, new DefaultUpdateDecoder(decoder)) +export const readUpdate = (decoder, ydoc, transactionOrigin) => readUpdateV2(decoder, ydoc, transactionOrigin, new UpdateDecoderV1(decoder)) /** * Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`. @@ -475,7 +456,7 @@ export const applyUpdateV2 = (ydoc, update, transactionOrigin, YDecoder = Update * * @function */ -export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, DefaultUpdateDecoder) +export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, UpdateDecoderV1) /** * Write all the document as a single update message. 
If you specify the state of the remote client (`targetStateVector`) it will @@ -523,7 +504,7 @@ export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector, encoder = n * * @function */ -export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStateAsUpdateV2(doc, encodedTargetStateVector, new DefaultUpdateEncoder()) +export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStateAsUpdateV2(doc, encodedTargetStateVector, new UpdateEncoderV1()) /** * Read state vector from Decoder and return as Map @@ -562,7 +543,7 @@ export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoder * * @function */ -export const decodeStateVector = decodedState => readStateVector(new DefaultDSDecoder(decoding.createDecoder(decodedState))) +export const decodeStateVector = decodedState => readStateVector(new DSDecoderV1(decoding.createDecoder(decodedState))) /** * @param {AbstractDSEncoder} encoder @@ -608,4 +589,4 @@ export const encodeStateVectorV2 = (doc, encoder = new DSEncoderV2()) => { * * @function */ -export const encodeStateVector = doc => encodeStateVectorV2(doc, new DefaultDSEncoder()) +export const encodeStateVector = doc => encodeStateVectorV2(doc, new DSEncoderV1()) diff --git a/tests/testHelper.js b/tests/testHelper.js index 2409af79..e7186268 100644 --- a/tests/testHelper.js +++ b/tests/testHelper.js @@ -27,6 +27,33 @@ const broadcastMessage = (y, m) => { } } +let useV2 = false + +export let encodeStateAsUpdate = Y.encodeStateAsUpdate +export let mergeUpdates = Y.mergeUpdates +export let applyUpdate = Y.applyUpdate +export let logUpdate = Y.logUpdate +export let updateEventName = 'update' + +const setEncoders = () => { + encodeStateAsUpdate = useV2 ? Y.encodeStateAsUpdateV2 : Y.encodeStateAsUpdate + mergeUpdates = useV2 ? Y.mergeUpdatesV2 : Y.mergeUpdates + applyUpdate = useV2 ? Y.applyUpdateV2 : Y.applyUpdate + logUpdate = useV2 ? Y.logUpdateV2 : Y.logUpdate + updateEventName = useV2 ? 'updateV2' : 'update' +} + +const useV1Encoding = () => { + useV2 = false + setEncoders() +} + +const useV2Encoding = () => { + useV2 = false + console.error('sync protocol doesnt support v2 protocol yet, fallback to v1 encoding') // @Todo + setEncoders() +} + export class TestYInstance extends Y.Doc { /** * @param {TestConnector} testConnector @@ -44,12 +71,19 @@ export class TestYInstance extends Y.Doc { */ this.receiving = new Map() testConnector.allConns.add(this) + /** + * The list of received updates. + * We are going to merge them later using Y.mergeUpdates and check if the resulting document is correct. 
+ * @type {Array} + */ + this.updates = [] // set up observe on local model - this.on('update', /** @param {Uint8Array} update @param {any} origin */ (update, origin) => { + this.on(updateEventName, /** @param {Uint8Array} update @param {any} origin */ (update, origin) => { if (origin !== testConnector) { const encoder = encoding.createEncoder() syncProtocol.writeUpdate(encoder, update) broadcastMessage(this, encoding.toUint8Array(encoder)) + this.updates.push(update) } }) this.connect() @@ -162,6 +196,17 @@ export class TestConnector { // send reply message sender._receive(encoding.toUint8Array(encoder), receiver) } + { + // If update message, add the received message to the list of received messages + const decoder = decoding.createDecoder(m) + const messageType = decoding.readVarUint(decoder) + switch (messageType) { + case syncProtocol.messageYjsUpdate: + case syncProtocol.messageYjsSyncStep2: + receiver.updates.push(decoding.readVarUint8Array(decoder)) + break + } + } return true } return false @@ -240,9 +285,9 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => { const gen = tc.prng // choose an encoding approach at random if (prng.bool(gen)) { - Y.useV2Encoding() + useV2Encoding() } else { - Y.useV1Encoding() + useV1Encoding() } const testConnector = new TestConnector(gen) @@ -258,7 +303,7 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => { } testConnector.syncAll() result.testObjects = result.users.map(initTestObject || (() => null)) - Y.useV1Encoding() + useV1Encoding() return /** @type {any} */ (result) } @@ -274,6 +319,14 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => { export const compare = users => { users.forEach(u => u.connect()) while (users[0].tc.flushAllMessages()) {} + // For each document, merge all received document updates with Y.mergeUpdates and create a new document which will be added to the list of "users" + // This ensures that mergeUpdates works correctly + const mergedDocs = users.map(user => { + const ydoc = new Y.Doc() + applyUpdate(ydoc, mergeUpdates(user.updates)) + return ydoc + }) + users.push(.../** @type {any} */(mergedDocs)) const userArrayValues = users.map(u => u.getArray('array').toJSON()) const userMapValues = users.map(u => u.getMap('map').toJSON()) const userXmlValues = users.map(u => u.get('xml', Y.YXmlElement).toString()) From 4c929c68086c9186202ee67b6c2952dd1fb632fa Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Sat, 19 Dec 2020 16:29:17 +0100 Subject: [PATCH 16/23] lint & refactoring --- README.md | 3 +- funding.cjs | 2 +- src/utils/DeleteSet.js | 8 +-- src/utils/Snapshot.js | 4 +- src/utils/UpdateDecoder.js | 5 +- src/utils/UpdateEncoder.js | 107 ++----------------------------------- src/utils/encoding.js | 18 +++---- src/utils/updates.js | 48 +++++++++++++++++ tests/y-array.tests.js | 30 +++++------ tests/y-map.tests.js | 16 +++--- 10 files changed, 96 insertions(+), 145 deletions(-) diff --git a/README.md b/README.md index eca608ec..2e2b1833 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,8 @@ Sponsorship also comes with special perks! [![Become a Sponsor](https://img.shie collaboratively organize radio broadcasts. * [Cattaz](http://cattaz.io/) A wiki that can run custom applications in the wiki pages. -* [Alldone](https://alldoneapp.com/) A next-gen project management and collaboration platform. +* [Alldone](https://alldoneapp.com/) A next-gen project management and + collaboration platform. 
## Table of Contents diff --git a/funding.cjs b/funding.cjs index 66083e5b..aaf7d47f 100644 --- a/funding.cjs +++ b/funding.cjs @@ -7,4 +7,4 @@ log.print( log.GREY, 'The project has grown considerably in the past year. Too much for me to maintain\nin my spare time. Several companies built their products with Yjs.\nYet, this project receives very little funding. Yjs is far from done. I want to\ncreate more awesome extensions and work on the growing number of open issues.\n', log.BOLD, 'Dear user, the future of this project entirely depends on you.\n') log.print(log.BLUE, log.BOLD, 'Please start funding the project now: https://github.com/sponsors/dmonad \n') -log.print(log.GREY, '(This message will be removed when I achieved my funding goal)\n\n') \ No newline at end of file +log.print(log.GREY, '(This message will be removed when I achieved my funding goal)\n\n') diff --git a/src/utils/DeleteSet.js b/src/utils/DeleteSet.js index 60b2af1f..2c50f781 100644 --- a/src/utils/DeleteSet.js +++ b/src/utils/DeleteSet.js @@ -4,7 +4,7 @@ import { getState, splitItem, iterateStructs, - AbstractUpdateDecoder, AbstractDSDecoder, AbstractDSEncoder, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line + DSDecoderV1, DSEncoderV1, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line } from '../internals.js' import * as array from 'lib0/array.js' @@ -210,7 +210,7 @@ export const createDeleteSetFromStructStore = ss => { } /** - * @param {AbstractDSEncoder} encoder + * @param {DSEncoderV1 | DSEncoderV2} encoder * @param {DeleteSet} ds * * @private @@ -232,7 +232,7 @@ export const writeDeleteSet = (encoder, ds) => { } /** - * @param {AbstractDSDecoder} decoder + * @param {DSDecoderV1 | DSDecoderV2} decoder * @return {DeleteSet} * * @private @@ -260,7 +260,7 @@ export const readDeleteSet = decoder => { */ /** - * @param {AbstractDSDecoder} decoder + * @param {DSDecoderV1 | DSDecoderV2} decoder * @param {Transaction} transaction * @param {StructStore} store * diff --git a/src/utils/Snapshot.js b/src/utils/Snapshot.js index 263bfa23..102a9c35 100644 --- a/src/utils/Snapshot.js +++ b/src/utils/Snapshot.js @@ -15,7 +15,7 @@ import { findIndexSS, UpdateEncoderV2, applyUpdateV2, - AbstractDSDecoder, AbstractDSEncoder, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line + AbstractDSDecoder, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line } from '../internals.js' import * as map from 'lib0/map.js' @@ -78,7 +78,7 @@ export const equalSnapshots = (snap1, snap2) => { /** * @param {Snapshot} snapshot - * @param {AbstractDSEncoder} [encoder] + * @param {DSEncoderV1 | DSEncoderV2} [encoder] * @return {Uint8Array} */ export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => { diff --git a/src/utils/UpdateDecoder.js b/src/utils/UpdateDecoder.js index 372c7749..9f50c7a7 100644 --- a/src/utils/UpdateDecoder.js +++ b/src/utils/UpdateDecoder.js @@ -10,6 +10,9 @@ export class AbstractDSDecoder { * @param {decoding.Decoder} decoder */ constructor (decoder) { + /** + * @type {decoding.Decoder} + */ this.restDecoder = decoder error.methodUnimplemented() } @@ -280,7 +283,7 @@ export class UpdateDecoderV2 extends DSDecoderV2 { * @type {Array} */ this.keys = [] - decoding.readUint8(decoder) // read feature flag - currently unused + decoding.readVarUint(decoder) // read feature flag - currently unused this.keyClockDecoder = new 
decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder)) this.clientDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder)) this.leftClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder)) diff --git a/src/utils/UpdateEncoder.js b/src/utils/UpdateEncoder.js index eb13fdc3..764cf7f0 100644 --- a/src/utils/UpdateEncoder.js +++ b/src/utils/UpdateEncoder.js @@ -6,110 +6,9 @@ import { ID // eslint-disable-line } from '../internals.js' -export class AbstractDSEncoder { - constructor () { - this.restEncoder = encoding.createEncoder() - } - - /** - * @return {Uint8Array} - */ - toUint8Array () { - error.methodUnimplemented() - } - - /** - * Resets the ds value to 0. - * The v2 encoder uses this information to reset the initial diff value. - */ - resetDsCurVal () { } - - /** - * @param {number} clock - */ - writeDsClock (clock) { } - - /** - * @param {number} len - */ - writeDsLen (len) { } -} - -export class AbstractUpdateEncoder extends AbstractDSEncoder { - /** - * @return {Uint8Array} - */ - toUint8Array () { - error.methodUnimplemented() - } - - /** - * @param {ID} id - */ - writeLeftID (id) { } - - /** - * @param {ID} id - */ - writeRightID (id) { } - - /** - * Use writeClient and writeClock instead of writeID if possible. - * @param {number} client - */ - writeClient (client) { } - - /** - * @param {number} info An unsigned 8-bit integer - */ - writeInfo (info) { } - - /** - * @param {string} s - */ - writeString (s) { } - - /** - * @param {boolean} isYKey - */ - writeParentInfo (isYKey) { } - - /** - * @param {number} info An unsigned 8-bit integer - */ - writeTypeRef (info) { } - - /** - * Write len of a struct - well suited for Opt RLE encoder. - * - * @param {number} len - */ - writeLen (len) { } - - /** - * @param {any} any - */ - writeAny (any) { } - - /** - * @param {Uint8Array} buf - */ - writeBuf (buf) { } - - /** - * @param {any} embed - */ - writeJSON (embed) { } - - /** - * @param {string} key - */ - writeKey (key) { } -} - export class DSEncoderV1 { constructor () { - this.restEncoder = new encoding.Encoder() + this.restEncoder = encoding.createEncoder() } toUint8Array () { @@ -228,7 +127,7 @@ export class UpdateEncoderV1 extends DSEncoderV1 { export class DSEncoderV2 { constructor () { - this.restEncoder = new encoding.Encoder() // encodes all the rest / non-optimized + this.restEncoder = encoding.createEncoder() // encodes all the rest / non-optimized this.dsCurrVal = 0 } @@ -288,7 +187,7 @@ export class UpdateEncoderV2 extends DSEncoderV2 { toUint8Array () { const encoder = encoding.createEncoder() - encoding.writeUint8(encoder, 0) // this is a feature flag that we might use in the future + encoding.writeVarUint(encoder, 0) // this is a feature flag that we might use in the future encoding.writeVarUint8Array(encoder, this.keyClockEncoder.toUint8Array()) encoding.writeVarUint8Array(encoder, this.clientEncoder.toUint8Array()) encoding.writeVarUint8Array(encoder, this.leftClockEncoder.toUint8Array()) diff --git a/src/utils/encoding.js b/src/utils/encoding.js index 56cb6836..7a29e7d4 100644 --- a/src/utils/encoding.js +++ b/src/utils/encoding.js @@ -33,7 +33,7 @@ import { DSEncoderV2, DSDecoderV1, DSEncoderV1, - AbstractDSEncoder, AbstractDSDecoder, AbstractUpdateEncoder, AbstractUpdateDecoder, AbstractContent, Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line + AbstractDSDecoder, AbstractUpdateDecoder, Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line } from '../internals.js' import * 
as encoding from 'lib0/encoding.js' @@ -42,7 +42,7 @@ import * as binary from 'lib0/binary.js' import * as map from 'lib0/map.js' /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Array} structs All structs by `client` * @param {number} client * @param {number} clock write structs starting with `ID(client,clock)` @@ -65,7 +65,7 @@ const writeStructs = (encoder, structs, client, clock) => { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {StructStore} store * @param {Map} _sm * @@ -317,7 +317,7 @@ export const tryResumePendingDeleteReaders = (transaction, store) => { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Transaction} transaction * * @private @@ -462,7 +462,7 @@ export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(yd * Write all the document as a single update message. If you specify the state of the remote client (`targetStateVector`) it will * only write the operations that are missing. * - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Doc} doc * @param {Map} [targetStateVector] The state of the target that receives the update. Leave empty to write all known structs * @@ -481,7 +481,7 @@ export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map()) * * @param {Doc} doc * @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs - * @param {AbstractUpdateEncoder} [encoder] + * @param {UpdateEncoderV1 | UpdateEncoderV2} [encoder] * @return {Uint8Array} * * @function @@ -546,7 +546,7 @@ export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoder export const decodeStateVector = decodedState => readStateVector(new DSDecoderV1(decoding.createDecoder(decodedState))) /** - * @param {AbstractDSEncoder} encoder + * @param {DSEncoderV1 | DSEncoderV2} encoder * @param {Map} sv * @function */ @@ -560,7 +560,7 @@ export const writeStateVector = (encoder, sv) => { } /** - * @param {AbstractDSEncoder} encoder + * @param {DSEncoderV1 | DSEncoderV2} encoder * @param {Doc} doc * * @function @@ -571,7 +571,7 @@ export const writeDocumentStateVector = (encoder, doc) => writeStateVector(encod * Encode State as Uint8Array. 
* * @param {Doc} doc - * @param {AbstractDSEncoder} [encoder] + * @param {DSEncoderV1 | DSEncoderV2} [encoder] * @return {Uint8Array} * * @function diff --git a/src/utils/updates.js b/src/utils/updates.js index ce46fa5d..9794366f 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -10,6 +10,8 @@ import { writeDeleteSet, Skip, mergeDeleteSets, + DSEncoderV1, + DSEncoderV2, Item, GC, AbstractUpdateDecoder, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line } from '../internals.js' @@ -135,6 +137,52 @@ export class LazyStructWriter { */ export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV1) +/** + * @param {Uint8Array} update + * @param {typeof DSEncoderV1 | typeof DSEncoderV2} YEncoder + * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder + * @return {Uint8Array} + */ +export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YDecoder = UpdateDecoderV2) => { + const encoder = new YEncoder() + const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update))) + let curr = updateDecoder.curr + if (curr !== null) { + let size = 1 + let currClient = curr.id.client + let currClock = curr.id.clock + for (; curr !== null; curr = updateDecoder.next()) { + if (currClient !== curr.id.client) { + size++ + // We found a new client + // write what we have to the encoder + encoding.writeVarUint(encoder.restEncoder, currClient) + encoding.writeVarUint(encoder.restEncoder, currClock) + currClient = curr.id.client + } + currClock = curr.id.clock + curr.length + } + // write what we have + encoding.writeVarUint(encoder.restEncoder, currClient) + encoding.writeVarUint(encoder.restEncoder, currClock) + // prepend the size of the state vector + const enc = encoding.createEncoder() + encoding.writeVarUint(enc, size) + encoding.writeBinaryEncoder(enc, encoder.restEncoder) + encoder.restEncoder = enc + return encoder.toUint8Array() + } else { + encoding.writeVarUint(encoder.restEncoder, 0) + return encoder.toUint8Array() + } +} + +/** + * @param {Uint8Array} update + * @return {Uint8Array} + */ +export const encodeStateVectorFromUpdate = update => encodeStateVectorFromUpdateV2(update, DSEncoderV1, UpdateDecoderV2) + /** * This method is intended to slice any kind of struct and retrieve the right part. * It does not handle side-effects, so it should only be used by the lazy-encoder. 
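The encodeStateVectorFromUpdateV2 helper added above computes a state vector directly from an encoded update: it lazily iterates the structs in the update and records, per client, the clock reached by the last struct, so no Y.Doc has to be instantiated just to learn what an update already contains. Below is a minimal sketch of the intended workflow with the V1 wrapper, assuming it is exported from the package entry point (which may only happen later in this series); the document names are made up for illustration.

import * as Y from 'yjs' // assumes the published package exposes the new helper

const doc1 = new Y.Doc()
doc1.getArray('array').insert(0, [1, 2, 3])

// A full update captured earlier, e.g. persisted by a provider or database.
const storedUpdate = Y.encodeStateAsUpdate(doc1)

// Derive the state vector straight from the stored update, without applying it.
const sv = Y.encodeStateVectorFromUpdate(storedUpdate)

// Another peer only sends the operations that are missing relative to that state vector.
const doc2 = new Y.Doc()
doc2.getArray('array').insert(0, ['x'])
const diff = Y.encodeStateAsUpdate(doc2, sv)

// Applying the stored update plus the diff yields the merged state.
const merged = new Y.Doc()
Y.applyUpdate(merged, storedUpdate)
Y.applyUpdate(merged, diff)
console.log(merged.getArray('array').toArray()) // contains 1, 2, 3 and 'x'; order depends on client ids
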
diff --git a/tests/y-array.tests.js b/tests/y-array.tests.js index 45022ed6..f2c0e86f 100644 --- a/tests/y-array.tests.js +++ b/tests/y-array.tests.js @@ -64,7 +64,7 @@ export const testInsertThreeElementsTryRegetProperty = tc => { * @param {t.TestCase} tc */ export const testConcurrentInsertWithThreeConflicts = tc => { - var { users, array0, array1, array2 } = init(tc, { users: 3 }) + const { users, array0, array1, array2 } = init(tc, { users: 3 }) array0.insert(0, [0]) array1.insert(0, [1]) array2.insert(0, [2]) @@ -107,7 +107,7 @@ export const testInsertionsInLateSync = tc => { * @param {t.TestCase} tc */ export const testDisconnectReallyPreventsSendingMessages = tc => { - var { testConnector, users, array0, array1 } = init(tc, { users: 3 }) + const { testConnector, users, array0, array1 } = init(tc, { users: 3 }) array0.insert(0, ['x', 'y']) testConnector.flushAllMessages() users[1].disconnect() @@ -388,13 +388,13 @@ const getUniqueNumber = () => _uniqueNumber++ const arrayTransactions = [ function insert (user, gen) { const yarray = user.getArray('array') - var uniqueNumber = getUniqueNumber() - var content = [] - var len = prng.int32(gen, 1, 4) - for (var i = 0; i < len; i++) { + const uniqueNumber = getUniqueNumber() + const content = [] + const len = prng.int32(gen, 1, 4) + for (let i = 0; i < len; i++) { content.push(uniqueNumber) } - var pos = prng.int32(gen, 0, yarray.length) + const pos = prng.int32(gen, 0, yarray.length) const oldContent = yarray.toArray() yarray.insert(pos, content) oldContent.splice(pos, 0, ...content) @@ -402,28 +402,28 @@ const arrayTransactions = [ }, function insertTypeArray (user, gen) { const yarray = user.getArray('array') - var pos = prng.int32(gen, 0, yarray.length) + const pos = prng.int32(gen, 0, yarray.length) yarray.insert(pos, [new Y.Array()]) - var array2 = yarray.get(pos) + const array2 = yarray.get(pos) array2.insert(0, [1, 2, 3, 4]) }, function insertTypeMap (user, gen) { const yarray = user.getArray('array') - var pos = prng.int32(gen, 0, yarray.length) + const pos = prng.int32(gen, 0, yarray.length) yarray.insert(pos, [new Y.Map()]) - var map = yarray.get(pos) + const map = yarray.get(pos) map.set('someprop', 42) map.set('someprop', 43) map.set('someprop', 44) }, function _delete (user, gen) { const yarray = user.getArray('array') - var length = yarray.length + const length = yarray.length if (length > 0) { - var somePos = prng.int32(gen, 0, length - 1) - var delLength = prng.int32(gen, 1, math.min(2, length - somePos)) + let somePos = prng.int32(gen, 0, length - 1) + let delLength = prng.int32(gen, 1, math.min(2, length - somePos)) if (prng.bool(gen)) { - var type = yarray.get(somePos) + const type = yarray.get(somePos) if (type.length > 0) { somePos = prng.int32(gen, 0, type.length - 1) delLength = prng.int32(gen, 0, math.min(2, type.length - somePos)) diff --git a/tests/y-map.tests.js b/tests/y-map.tests.js index 2c3d128a..c1540a76 100644 --- a/tests/y-map.tests.js +++ b/tests/y-map.tests.js @@ -138,7 +138,7 @@ export const testGetAndSetOfMapPropertySyncs = tc => { t.compare(map0.get('stuff'), 'stuffy') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.compare(u.get('stuff'), 'stuffy') } compare(users) @@ -153,7 +153,7 @@ export const testGetAndSetOfMapPropertyWithConflict = tc => { map1.set('stuff', 'c1') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.compare(u.get('stuff'), 'c1') } 
compare(users) @@ -183,7 +183,7 @@ export const testGetAndSetAndDeleteOfMapProperty = tc => { map1.delete('stuff') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.assert(u.get('stuff') === undefined) } compare(users) @@ -200,7 +200,7 @@ export const testGetAndSetOfMapPropertyWithThreeConflicts = tc => { map2.set('stuff', 'c3') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.compare(u.get('stuff'), 'c3') } compare(users) @@ -223,7 +223,7 @@ export const testGetAndSetAndDeleteOfMapPropertyWithThreeConflicts = tc => { map3.delete('stuff') testConnector.flushAllMessages() for (const user of users) { - var u = user.getMap('map') + const u = user.getMap('map') t.assert(u.get('stuff') === undefined) } compare(users) @@ -296,7 +296,7 @@ export const testObserversUsingObservedeep = tc => { * @param {Object} should */ const compareEvent = (is, should) => { - for (var key in should) { + for (const key in should) { t.compare(should[key], is[key]) } } @@ -474,12 +474,12 @@ export const testYmapEventHasCorrectValueWhenSettingAPrimitiveFromOtherUser = tc const mapTransactions = [ function set (user, gen) { const key = prng.oneOf(gen, ['one', 'two']) - var value = prng.utf16String(gen) + const value = prng.utf16String(gen) user.getMap('map').set(key, value) }, function setType (user, gen) { const key = prng.oneOf(gen, ['one', 'two']) - var type = prng.oneOf(gen, [new Y.Array(), new Y.Map()]) + const type = prng.oneOf(gen, [new Y.Array(), new Y.Map()]) user.getMap('map').set(key, type) if (type instanceof Y.Array) { type.insert(0, [1, 2, 3, 4]) From 0a40b541e80b24d5ac03292bc4befe8bb64d5ba4 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Tue, 29 Dec 2020 16:59:27 +0100 Subject: [PATCH 17/23] test with all encoders --- src/structs/AbstractStruct.js | 4 +- src/structs/ContentAny.js | 6 +- src/structs/ContentBinary.js | 6 +- src/structs/ContentDeleted.js | 6 +- src/structs/ContentDoc.js | 6 +- src/structs/ContentEmbed.js | 6 +- src/structs/ContentFormat.js | 6 +- src/structs/ContentJSON.js | 6 +- src/structs/ContentString.js | 6 +- src/structs/ContentType.js | 8 +-- src/structs/GC.js | 4 +- src/structs/Item.js | 10 +-- src/structs/Skip.js | 4 +- src/types/AbstractType.js | 4 +- src/types/YArray.js | 6 +- src/types/YMap.js | 6 +- src/types/YText.js | 6 +- src/types/YXmlElement.js | 6 +- src/types/YXmlFragment.js | 6 +- src/types/YXmlHook.js | 6 +- src/types/YXmlText.js | 6 +- src/utils/Snapshot.js | 5 +- src/utils/Transaction.js | 5 +- src/utils/UpdateDecoder.js | 132 +++------------------------------- src/utils/encoding.js | 10 +-- src/utils/updates.js | 6 +- tests/updates.tests.js | 112 +++++++++++++++++------------ 27 files changed, 149 insertions(+), 245 deletions(-) diff --git a/src/structs/AbstractStruct.js b/src/structs/AbstractStruct.js index fc335a73..889be3ab 100644 --- a/src/structs/AbstractStruct.js +++ b/src/structs/AbstractStruct.js @@ -1,6 +1,6 @@ import { - AbstractUpdateEncoder, ID, Transaction // eslint-disable-line + UpdateEncoderV1, UpdateEncoderV2, ID, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -34,7 +34,7 @@ export class AbstractStruct { } /** - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. 
* @param {number} offset * @param {number} encodingRef */ diff --git a/src/structs/ContentAny.js b/src/structs/ContentAny.js index f00128df..613144d8 100644 --- a/src/structs/ContentAny.js +++ b/src/structs/ContentAny.js @@ -1,5 +1,5 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, StructStore // eslint-disable-line + UpdateEncoderV1, UpdateEncoderV2, UpdateDecoderV1, UpdateDecoderV2, Transaction, Item, StructStore // eslint-disable-line } from '../internals.js' export class ContentAny { @@ -74,7 +74,7 @@ export class ContentAny { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -95,7 +95,7 @@ export class ContentAny { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentAny} */ export const readContentAny = decoder => { diff --git a/src/structs/ContentBinary.js b/src/structs/ContentBinary.js index 15d92aa2..30e55fe5 100644 --- a/src/structs/ContentBinary.js +++ b/src/structs/ContentBinary.js @@ -1,5 +1,5 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -70,7 +70,7 @@ export class ContentBinary { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -86,7 +86,7 @@ export class ContentBinary { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder * @return {ContentBinary} */ export const readContentBinary = decoder => new ContentBinary(decoder.readBuf()) diff --git a/src/structs/ContentDeleted.js b/src/structs/ContentDeleted.js index 7e00bebe..7225e1f6 100644 --- a/src/structs/ContentDeleted.js +++ b/src/structs/ContentDeleted.js @@ -1,7 +1,7 @@ import { addToDeleteSet, - AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line } from '../internals.js' export class ContentDeleted { @@ -77,7 +77,7 @@ export class ContentDeleted { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -95,7 +95,7 @@ export class ContentDeleted { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder * @return {ContentDeleted} */ export const readContentDeleted = decoder => new ContentDeleted(decoder.readLen()) diff --git a/src/structs/ContentDoc.js b/src/structs/ContentDoc.js index 10101896..939734c1 100644 --- a/src/structs/ContentDoc.js +++ b/src/structs/ContentDoc.js @@ -1,6 +1,6 @@ import { - Doc, AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Transaction, Item // eslint-disable-line + Doc, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -110,7 +110,7 @@ export class ContentDoc { gc (store) { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | 
UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -129,7 +129,7 @@ export class ContentDoc { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentDoc} */ export const readContentDoc = decoder => new ContentDoc(new Doc({ guid: decoder.readString(), ...decoder.readAny() })) diff --git a/src/structs/ContentEmbed.js b/src/structs/ContentEmbed.js index 66b922e3..27dcd3fa 100644 --- a/src/structs/ContentEmbed.js +++ b/src/structs/ContentEmbed.js @@ -1,6 +1,6 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -74,7 +74,7 @@ export class ContentEmbed { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -92,7 +92,7 @@ export class ContentEmbed { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentEmbed} */ export const readContentEmbed = decoder => new ContentEmbed(decoder.readJSON()) diff --git a/src/structs/ContentFormat.js b/src/structs/ContentFormat.js index 43716408..fb2610d2 100644 --- a/src/structs/ContentFormat.js +++ b/src/structs/ContentFormat.js @@ -1,6 +1,6 @@ import { - AbstractType, AbstractUpdateDecoder, AbstractUpdateEncoder, Item, StructStore, Transaction // eslint-disable-line + AbstractType, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Item, StructStore, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -80,7 +80,7 @@ export class ContentFormat { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -97,7 +97,7 @@ export class ContentFormat { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentFormat} */ export const readContentFormat = decoder => new ContentFormat(decoder.readString(), decoder.readJSON()) diff --git a/src/structs/ContentJSON.js b/src/structs/ContentJSON.js index baf4c46c..29b2f754 100644 --- a/src/structs/ContentJSON.js +++ b/src/structs/ContentJSON.js @@ -1,5 +1,5 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, StructStore // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line } from '../internals.js' /** @@ -77,7 +77,7 @@ export class ContentJSON { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -100,7 +100,7 @@ export class ContentJSON { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentJSON} */ export const readContentJSON = decoder => { diff --git a/src/structs/ContentString.js b/src/structs/ContentString.js index 10151e67..fa023ef9 100644 --- a/src/structs/ContentString.js +++ b/src/structs/ContentString.js @@ -1,5 +1,5 @@ import { - AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, 
StructStore // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line } from '../internals.js' /** @@ -88,7 +88,7 @@ export class ContentString { */ gc (store) {} /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -106,7 +106,7 @@ export class ContentString { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentString} */ export const readContentString = decoder => new ContentString(decoder.readString()) diff --git a/src/structs/ContentType.js b/src/structs/ContentType.js index 7fd45570..3be38298 100644 --- a/src/structs/ContentType.js +++ b/src/structs/ContentType.js @@ -7,13 +7,13 @@ import { readYXmlFragment, readYXmlHook, readYXmlText, - AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' /** - * @type {Array>} + * @type {Array>} * @private */ export const typeRefs = [ @@ -148,7 +148,7 @@ export class ContentType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { @@ -166,7 +166,7 @@ export class ContentType { /** * @private * - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {ContentType} */ export const readContentType = decoder => new ContentType(typeRefs[decoder.readTypeRef()](decoder)) diff --git a/src/structs/GC.js b/src/structs/GC.js index 110f68b6..42d71f73 100644 --- a/src/structs/GC.js +++ b/src/structs/GC.js @@ -2,7 +2,7 @@ import { AbstractStruct, addStruct, - AbstractUpdateEncoder, StructStore, Transaction, ID // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line } from '../internals.js' export const structGCRefNumber = 0 @@ -42,7 +42,7 @@ export class GC extends AbstractStruct { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { diff --git a/src/structs/Item.js b/src/structs/Item.js index adc64023..95ef3f6a 100644 --- a/src/structs/Item.js +++ b/src/structs/Item.js @@ -22,7 +22,7 @@ import { readContentFormat, readContentType, addChangedTypeToTransaction, - AbstractUpdateDecoder, AbstractUpdateEncoder, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -620,7 +620,7 @@ export class Item extends AbstractStruct { * * This is called when this Item is sent to a remote peer. * - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. 
* @param {number} offset */ write (encoder, offset) { @@ -670,7 +670,7 @@ export class Item extends AbstractStruct { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @param {number} info */ export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS5](decoder) @@ -678,7 +678,7 @@ export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS /** * A lookup map for reading Item content. * - * @type {Array} + * @type {Array} */ export const contentRefs = [ () => { error.unexpectedCase() }, // GC is not ItemContent @@ -771,7 +771,7 @@ export class AbstractContent { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { diff --git a/src/structs/Skip.js b/src/structs/Skip.js index 5b0f7b35..eaec5c06 100644 --- a/src/structs/Skip.js +++ b/src/structs/Skip.js @@ -1,7 +1,7 @@ import { AbstractStruct, - AbstractUpdateEncoder, StructStore, Transaction, ID // eslint-disable-line + UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -39,7 +39,7 @@ export class Skip extends AbstractStruct { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {number} offset */ write (encoder, offset) { diff --git a/src/types/AbstractType.js b/src/types/AbstractType.js index 3b6aa581..76a88a33 100644 --- a/src/types/AbstractType.js +++ b/src/types/AbstractType.js @@ -11,7 +11,7 @@ import { ContentAny, ContentBinary, getItemCleanStart, - ContentDoc, YText, YArray, AbstractUpdateEncoder, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line + ContentDoc, YText, YArray, UpdateEncoderV1, UpdateEncoderV2, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line } from '../internals.js' import * as map from 'lib0/map.js' @@ -324,7 +324,7 @@ export class AbstractType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { } diff --git a/src/types/YArray.js b/src/types/YArray.js index cadb16b5..cefc908b 100644 --- a/src/types/YArray.js +++ b/src/types/YArray.js @@ -15,7 +15,7 @@ import { YArrayRefID, callTypeObservers, transact, - ArraySearchMarker, AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, Transaction, Item // eslint-disable-line + ArraySearchMarker, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line } from '../internals.js' import { typeListSlice } from './AbstractType.js' @@ -241,7 +241,7 @@ export class YArray extends AbstractType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { encoder.writeTypeRef(YArrayRefID) @@ -249,7 +249,7 @@ export class YArray extends AbstractType { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * * @private * @function diff --git a/src/types/YMap.js b/src/types/YMap.js index efaa16f5..2cb0342a 100644 --- a/src/types/YMap.js +++ b/src/types/YMap.js @@ -14,7 +14,7 @@ import { YMapRefID, callTypeObservers, transact, - AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, Transaction, Item // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line } 
from '../internals.js' import * as iterator from 'lib0/iterator.js' @@ -238,7 +238,7 @@ export class YMap extends AbstractType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { encoder.writeTypeRef(YMapRefID) @@ -246,7 +246,7 @@ export class YMap extends AbstractType { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * * @private * @function diff --git a/src/types/YText.js b/src/types/YText.js index a12d0747..8457ea4d 100644 --- a/src/types/YText.js +++ b/src/types/YText.js @@ -26,7 +26,7 @@ import { typeMapGet, typeMapGetAll, updateMarkerChanges, - ArraySearchMarker, AbstractUpdateDecoder, AbstractUpdateEncoder, ID, Doc, Item, Snapshot, Transaction // eslint-disable-line + ArraySearchMarker, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ID, Doc, Item, Snapshot, Transaction // eslint-disable-line } from '../internals.js' import * as object from 'lib0/object.js' @@ -1203,7 +1203,7 @@ export class YText extends AbstractType { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { encoder.writeTypeRef(YTextRefID) @@ -1211,7 +1211,7 @@ export class YText extends AbstractType { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YText} * * @private diff --git a/src/types/YXmlElement.js b/src/types/YXmlElement.js index 3b64fa51..b3ce6ef9 100644 --- a/src/types/YXmlElement.js +++ b/src/types/YXmlElement.js @@ -8,7 +8,7 @@ import { typeMapGetAll, typeListForEach, YXmlElementRefID, - YXmlText, ContentType, AbstractType, AbstractUpdateDecoder, AbstractUpdateEncoder, Snapshot, Doc, Item // eslint-disable-line + YXmlText, ContentType, AbstractType, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Snapshot, Doc, Item // eslint-disable-line } from '../internals.js' /** @@ -208,7 +208,7 @@ export class YXmlElement extends YXmlFragment { * * This is called when this Item is sent to a remote peer. * - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. */ _write (encoder) { encoder.writeTypeRef(YXmlElementRefID) @@ -217,7 +217,7 @@ export class YXmlElement extends YXmlFragment { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YXmlElement} * * @function diff --git a/src/types/YXmlFragment.js b/src/types/YXmlFragment.js index 21c6a3a0..1fd060b0 100644 --- a/src/types/YXmlFragment.js +++ b/src/types/YXmlFragment.js @@ -17,7 +17,7 @@ import { transact, typeListGet, typeListSlice, - AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, ContentType, Transaction, Item, YXmlText, YXmlHook, Snapshot // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, ContentType, Transaction, Item, YXmlText, YXmlHook, Snapshot // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' @@ -410,7 +410,7 @@ export class YXmlFragment extends AbstractType { * * This is called when this Item is sent to a remote peer. * - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. 
*/ _write (encoder) { encoder.writeTypeRef(YXmlFragmentRefID) @@ -418,7 +418,7 @@ export class YXmlFragment extends AbstractType { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YXmlFragment} * * @private diff --git a/src/types/YXmlHook.js b/src/types/YXmlHook.js index e28f70a3..be8c759b 100644 --- a/src/types/YXmlHook.js +++ b/src/types/YXmlHook.js @@ -2,7 +2,7 @@ import { YMap, YXmlHookRefID, - AbstractUpdateDecoder, AbstractUpdateEncoder // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line } from '../internals.js' /** @@ -76,7 +76,7 @@ export class YXmlHook extends YMap { * * This is called when this Item is sent to a remote peer. * - * @param {AbstractUpdateEncoder} encoder The encoder to write data to. + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to. */ _write (encoder) { encoder.writeTypeRef(YXmlHookRefID) @@ -85,7 +85,7 @@ export class YXmlHook extends YMap { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YXmlHook} * * @private diff --git a/src/types/YXmlText.js b/src/types/YXmlText.js index dd8d892d..470ce70f 100644 --- a/src/types/YXmlText.js +++ b/src/types/YXmlText.js @@ -2,7 +2,7 @@ import { YText, YXmlTextRefID, - ContentType, YXmlElement, AbstractUpdateDecoder, AbstractUpdateEncoder // eslint-disable-line + ContentType, YXmlElement, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, // eslint-disable-line } from '../internals.js' /** @@ -104,7 +104,7 @@ export class YXmlText extends YText { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder */ _write (encoder) { encoder.writeTypeRef(YXmlTextRefID) @@ -112,7 +112,7 @@ export class YXmlText extends YText { } /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder * @return {YXmlText} * * @private diff --git a/src/utils/Snapshot.js b/src/utils/Snapshot.js index 102a9c35..50c64772 100644 --- a/src/utils/Snapshot.js +++ b/src/utils/Snapshot.js @@ -15,14 +15,13 @@ import { findIndexSS, UpdateEncoderV2, applyUpdateV2, - AbstractDSDecoder, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line + DSEncoderV1, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line } from '../internals.js' import * as map from 'lib0/map.js' import * as set from 'lib0/set.js' import * as decoding from 'lib0/decoding.js' import * as encoding from 'lib0/encoding.js' -import { DSEncoderV1 } from './UpdateEncoder.js' export class Snapshot { /** @@ -95,7 +94,7 @@ export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DSEncod /** * @param {Uint8Array} buf - * @param {AbstractDSDecoder} [decoder] + * @param {DSDecoderV1 | DSDecoderV2} [decoder] * @return {Snapshot} */ export const decodeSnapshotV2 = (buf, decoder = new DSDecoderV2(decoding.createDecoder(buf))) => { diff --git a/src/utils/Transaction.js b/src/utils/Transaction.js index 929d061b..1273d26e 100644 --- a/src/utils/Transaction.js +++ b/src/utils/Transaction.js @@ -11,7 +11,7 @@ import { Item, generateNewClientId, createID, - AbstractUpdateEncoder, GC, StructStore, UpdateEncoderV2, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line + UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, GC, StructStore, AbstractType, AbstractStruct, 
YEvent, Doc // eslint-disable-line } from '../internals.js' import * as map from 'lib0/map.js' @@ -19,7 +19,6 @@ import * as math from 'lib0/math.js' import * as set from 'lib0/set.js' import * as logging from 'lib0/logging.js' import { callAll } from 'lib0/function.js' -import { UpdateEncoderV1 } from './UpdateEncoder.js' /** * A transaction is created for every change on the Yjs model. It is possible @@ -119,7 +118,7 @@ export class Transaction { } /** - * @param {AbstractUpdateEncoder} encoder + * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Transaction} transaction * @return {boolean} Whether data was written. */ diff --git a/src/utils/UpdateDecoder.js b/src/utils/UpdateDecoder.js index 9f50c7a7..74568650 100644 --- a/src/utils/UpdateDecoder.js +++ b/src/utils/UpdateDecoder.js @@ -1,132 +1,9 @@ import * as buffer from 'lib0/buffer.js' -import * as error from 'lib0/error.js' import * as decoding from 'lib0/decoding.js' import { ID, createID } from '../internals.js' -export class AbstractDSDecoder { - /** - * @param {decoding.Decoder} decoder - */ - constructor (decoder) { - /** - * @type {decoding.Decoder} - */ - this.restDecoder = decoder - error.methodUnimplemented() - } - - resetDsCurVal () { } - - /** - * @return {number} - */ - readDsClock () { - error.methodUnimplemented() - } - - /** - * @return {number} - */ - readDsLen () { - error.methodUnimplemented() - } -} - -export class AbstractUpdateDecoder extends AbstractDSDecoder { - /** - * @return {ID} - */ - readLeftID () { - error.methodUnimplemented() - } - - /** - * @return {ID} - */ - readRightID () { - error.methodUnimplemented() - } - - /** - * Read the next client id. - * Use this in favor of readID whenever possible to reduce the number of objects created. - * - * @return {number} - */ - readClient () { - error.methodUnimplemented() - } - - /** - * @return {number} info An unsigned 8-bit integer - */ - readInfo () { - error.methodUnimplemented() - } - - /** - * @return {string} - */ - readString () { - error.methodUnimplemented() - } - - /** - * @return {boolean} isKey - */ - readParentInfo () { - error.methodUnimplemented() - } - - /** - * @return {number} info An unsigned 8-bit integer - */ - readTypeRef () { - error.methodUnimplemented() - } - - /** - * Write len of a struct - well suited for Opt RLE encoder. - * - * @return {number} len - */ - readLen () { - error.methodUnimplemented() - } - - /** - * @return {any} - */ - readAny () { - error.methodUnimplemented() - } - - /** - * @return {Uint8Array} - */ - readBuf () { - error.methodUnimplemented() - } - - /** - * Legacy implementation uses JSON parse. We use any-decoding in v2. 
- * - * @return {any} - */ - readJSON () { - error.methodUnimplemented() - } - - /** - * @return {string} - */ - readKey () { - error.methodUnimplemented() - } -} - export class DSDecoderV1 { /** * @param {decoding.Decoder} decoder @@ -250,6 +127,9 @@ export class DSDecoderV2 { * @param {decoding.Decoder} decoder */ constructor (decoder) { + /** + * @private + */ this.dsCurrVal = 0 this.restDecoder = decoder } @@ -258,11 +138,17 @@ export class DSDecoderV2 { this.dsCurrVal = 0 } + /** + * @return {number} + */ readDsClock () { this.dsCurrVal += decoding.readVarUint(this.restDecoder) return this.dsCurrVal } + /** + * @return {number} + */ readDsLen () { const diff = decoding.readVarUint(this.restDecoder) + 1 this.dsCurrVal += diff diff --git a/src/utils/encoding.js b/src/utils/encoding.js index 7a29e7d4..1318cd3d 100644 --- a/src/utils/encoding.js +++ b/src/utils/encoding.js @@ -33,7 +33,7 @@ import { DSEncoderV2, DSDecoderV1, DSEncoderV1, - AbstractDSDecoder, AbstractUpdateDecoder, Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line + Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line } from '../internals.js' import * as encoding from 'lib0/encoding.js' @@ -97,7 +97,7 @@ export const writeClientsStructs = (encoder, store, _sm) => { } /** - * @param {AbstractUpdateDecoder} decoder The decoder object to read data from. + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from. * @param {Map>} clientRefs * @param {Doc} doc * @return {Map>} @@ -370,7 +370,7 @@ const cleanupPendingStructs = pendingClientsStructRefs => { * * This is called when data is received from a remote peer. * - * @param {AbstractUpdateDecoder} decoder The decoder object to read data from. + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from. * @param {Transaction} transaction * @param {StructStore} store * @@ -405,7 +405,7 @@ export const readStructs = (decoder, transaction, store) => { * @param {decoding.Decoder} decoder * @param {Doc} ydoc * @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))` - * @param {AbstractUpdateDecoder} [structDecoder] + * @param {UpdateDecoderV1 | UpdateDecoderV2} [structDecoder] * * @function */ @@ -509,7 +509,7 @@ export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStat /** * Read state vector from Decoder and return as Map * - * @param {AbstractDSDecoder} decoder + * @param {DSDecoderV1 | DSDecoderV2} decoder * @return {Map} Maps `client` to the number next expected `clock` from that client. 
* * @function diff --git a/src/utils/updates.js b/src/utils/updates.js index 9794366f..dfdd0bf0 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -12,11 +12,11 @@ import { mergeDeleteSets, DSEncoderV1, DSEncoderV2, - Item, GC, AbstractUpdateDecoder, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line + Item, GC, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line } from '../internals.js' /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder */ function * lazyStructReaderGenerator (decoder) { const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder) @@ -61,7 +61,7 @@ function * lazyStructReaderGenerator (decoder) { export class LazyStructReader { /** - * @param {AbstractUpdateDecoder} decoder + * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder */ constructor (decoder) { this.gen = lazyStructReaderGenerator(decoder) diff --git a/tests/updates.tests.js b/tests/updates.tests.js index dfa08420..20960d09 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -2,23 +2,34 @@ import * as t from 'lib0/testing.js' import { init, compare } from './testHelper.js' // eslint-disable-line import * as Y from '../src/index.js' -const useV2 = true +const encV1 = { + mergeUpdates: Y.mergeUpdates, + encodeStateAsUpdate: Y.encodeStateAsUpdate, + applyUpdate: Y.applyUpdate, + logUpdate: Y.logUpdate, + updateEventName: 'update' +} -const encodeStateAsUpdate = useV2 ? Y.encodeStateAsUpdateV2 : Y.encodeStateAsUpdate -const mergeUpdates = useV2 ? Y.mergeUpdatesV2 : Y.mergeUpdates -const applyUpdate = useV2 ? Y.applyUpdateV2 : Y.applyUpdate -const logUpdate = useV2 ? Y.logUpdateV2 : Y.logUpdate -const updateEventName = useV2 ? 
'updateV2' : 'update' +const encV2 = { + mergeUpdates: Y.mergeUpdatesV2, + encodeStateAsUpdate: Y.encodeStateAsUpdateV2, + applyUpdate: Y.applyUpdateV2, + logUpdate: Y.logUpdateV2, + updateEventName: 'updateV2' +} + +const encoders = [encV1, encV2] /** * @param {Array} users + * @param {encV1 | encV2} enc */ -const fromUpdates = users => { +const fromUpdates = (users, enc) => { const updates = users.map(user => - encodeStateAsUpdate(user) + enc.encodeStateAsUpdate(user) ) const ydoc = new Y.Doc() - applyUpdate(ydoc, mergeUpdates(updates)) + enc.applyUpdate(ydoc, enc.mergeUpdates(updates)) return ydoc } @@ -31,53 +42,56 @@ export const testMergeUpdates = tc => { array0.insert(0, [1]) array1.insert(0, [2]) - const merged = fromUpdates(users) compare(users) - t.compareArrays(array0.toArray(), merged.getArray('array').toArray()) + encoders.forEach(enc => { + const merged = fromUpdates(users, enc) + t.compareArrays(array0.toArray(), merged.getArray('array').toArray()) + }) } /** * @param {Y.Doc} ydoc * @param {Array} updates - expecting at least 4 updates + * @param {encV1 | encV2} enc */ -const checkUpdateCases = (ydoc, updates) => { +const checkUpdateCases = (ydoc, updates, enc) => { const cases = [] // Case 1: Simple case, simply merge everything - cases.push(mergeUpdates(updates)) + cases.push(enc.mergeUpdates(updates)) // Case 2: Overlapping updates - cases.push(mergeUpdates([ - mergeUpdates(updates.slice(2)), - mergeUpdates(updates.slice(0, 2)) + cases.push(enc.mergeUpdates([ + enc.mergeUpdates(updates.slice(2)), + enc.mergeUpdates(updates.slice(0, 2)) ])) // Case 3: Overlapping updates - cases.push(mergeUpdates([ - mergeUpdates(updates.slice(2)), - mergeUpdates(updates.slice(1, 3)), + cases.push(enc.mergeUpdates([ + enc.mergeUpdates(updates.slice(2)), + enc.mergeUpdates(updates.slice(1, 3)), updates[0] ])) // Case 4: Separated updates (containing skips) - cases.push(mergeUpdates([ - mergeUpdates([updates[0], updates[2]]), - mergeUpdates([updates[1], updates[3]]), - mergeUpdates(updates.slice(4)) + cases.push(enc.mergeUpdates([ + enc.mergeUpdates([updates[0], updates[2]]), + enc.mergeUpdates([updates[1], updates[3]]), + enc.mergeUpdates(updates.slice(4)) ])) // Case 5: overlapping with many duplicates - cases.push(mergeUpdates(cases)) + cases.push(enc.mergeUpdates(cases)) - const targetState = encodeStateAsUpdate(ydoc) + const targetState = enc.encodeStateAsUpdate(ydoc) t.info('Target State: ') - logUpdate(targetState) + enc.logUpdate(targetState) cases.forEach((updates, i) => { t.info('State Case $' + i + ':') - logUpdate(updates) + enc.logUpdate(updates) const merged = new Y.Doc() - applyUpdate(merged, updates) + enc.applyUpdate(merged, updates) t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray()) }) } @@ -86,34 +100,40 @@ const checkUpdateCases = (ydoc, updates) => { * @param {t.TestCase} tc */ export const testMergeUpdates1 = tc => { - const ydoc = new Y.Doc({ gc: false }) - const updates = /** @type {Array} */ ([]) - ydoc.on(updateEventName, update => { updates.push(update) }) + encoders.forEach((enc, i) => { + t.info(`Using V${i + 1} encoder.`) + const ydoc = new Y.Doc({ gc: false }) + const updates = /** @type {Array} */ ([]) + ydoc.on(enc.updateEventName, update => { updates.push(update) }) - const array = ydoc.getArray() - array.insert(0, [1]) - array.insert(0, [2]) - array.insert(0, [3]) - array.insert(0, [4]) + const array = ydoc.getArray() + array.insert(0, [1]) + array.insert(0, [2]) + array.insert(0, [3]) + array.insert(0, [4]) - checkUpdateCases(ydoc, 
updates) + checkUpdateCases(ydoc, updates, enc) + }) } /** * @param {t.TestCase} tc */ export const testMergeUpdates2 = tc => { - const ydoc = new Y.Doc({ gc: false }) - const updates = /** @type {Array} */ ([]) - ydoc.on(updateEventName, update => { updates.push(update) }) + encoders.forEach((enc, i) => { + t.info(`Using V${i + 1} encoder.`) + const ydoc = new Y.Doc({ gc: false }) + const updates = /** @type {Array} */ ([]) + ydoc.on(enc.updateEventName, update => { updates.push(update) }) - const array = ydoc.getArray() - array.insert(0, [1, 2]) - array.delete(1, 1) - array.insert(0, [3, 4]) - array.delete(1, 2) + const array = ydoc.getArray() + array.insert(0, [1, 2]) + array.delete(1, 1) + array.insert(0, [3, 4]) + array.delete(1, 2) - checkUpdateCases(ydoc, updates) + checkUpdateCases(ydoc, updates, enc) + }) } /** From 8013b4ef5c9697ce4ce77f33e52ee2cffaa36259 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Tue, 29 Dec 2020 17:07:25 +0100 Subject: [PATCH 18/23] lint --- tests/testHelper.js | 40 ++++++++++++++++++++++------------------ 1 file changed, 22 insertions(+), 18 deletions(-) diff --git a/tests/testHelper.js b/tests/testHelper.js index e7186268..d02b5ae2 100644 --- a/tests/testHelper.js +++ b/tests/testHelper.js @@ -27,31 +27,35 @@ const broadcastMessage = (y, m) => { } } -let useV2 = false +export let useV2 = false -export let encodeStateAsUpdate = Y.encodeStateAsUpdate -export let mergeUpdates = Y.mergeUpdates -export let applyUpdate = Y.applyUpdate -export let logUpdate = Y.logUpdate -export let updateEventName = 'update' - -const setEncoders = () => { - encodeStateAsUpdate = useV2 ? Y.encodeStateAsUpdateV2 : Y.encodeStateAsUpdate - mergeUpdates = useV2 ? Y.mergeUpdatesV2 : Y.mergeUpdates - applyUpdate = useV2 ? Y.applyUpdateV2 : Y.applyUpdate - logUpdate = useV2 ? Y.logUpdateV2 : Y.logUpdate - updateEventName = useV2 ? 
'updateV2' : 'update' +export const encV1 = { + encodeStateAsUpdate: Y.encodeStateAsUpdate, + mergeUpdates: Y.mergeUpdates, + applyUpdate: Y.applyUpdate, + logUpdate: Y.logUpdate, + updateEventName: 'update' } +export const encV2 = { + encodeStateAsUpdate: Y.encodeStateAsUpdateV2, + mergeUpdates: Y.mergeUpdatesV2, + applyUpdate: Y.applyUpdateV2, + logUpdate: Y.logUpdateV2, + updateEventName: 'updateV2' +} + +export let enc = encV1 + const useV1Encoding = () => { useV2 = false - setEncoders() + enc = encV1 } const useV2Encoding = () => { - useV2 = false console.error('sync protocol doesnt support v2 protocol yet, fallback to v1 encoding') // @Todo - setEncoders() + useV2 = false + enc = encV1 } export class TestYInstance extends Y.Doc { @@ -78,7 +82,7 @@ export class TestYInstance extends Y.Doc { */ this.updates = [] // set up observe on local model - this.on(updateEventName, /** @param {Uint8Array} update @param {any} origin */ (update, origin) => { + this.on(enc.updateEventName, /** @param {Uint8Array} update @param {any} origin */ (update, origin) => { if (origin !== testConnector) { const encoder = encoding.createEncoder() syncProtocol.writeUpdate(encoder, update) @@ -323,7 +327,7 @@ export const compare = users => { // This ensures that mergeUpdates works correctly const mergedDocs = users.map(user => { const ydoc = new Y.Doc() - applyUpdate(ydoc, mergeUpdates(user.updates)) + enc.applyUpdate(ydoc, enc.mergeUpdates(user.updates)) return ydoc }) users.push(.../** @type {any} */(mergedDocs)) From 8fadec4dcdbf6ba60302bff6e67f892bd20e3184 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 30 Dec 2020 19:32:00 +0100 Subject: [PATCH 19/23] add test for merging via Y.Doc instance (should encode pending updates as well) --- tests/updates.tests.js | 50 ++++++++++++++++++++++++++++++++++++------ 1 file changed, 43 insertions(+), 7 deletions(-) diff --git a/tests/updates.tests.js b/tests/updates.tests.js index 20960d09..d733823c 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -2,27 +2,63 @@ import * as t from 'lib0/testing.js' import { init, compare } from './testHelper.js' // eslint-disable-line import * as Y from '../src/index.js' +/** + * @typedef {Object} Enc + * @property {function(Array):Uint8Array} Enc.mergeUpdates + * @property {function(Y.Doc):Uint8Array} Enc.encodeStateAsUpdate + * @property {function(Y.Doc, Uint8Array):void} Enc.applyUpdate + * @property {function(Uint8Array):void} Enc.logUpdate + * @property {string} Enc.updateEventName + * @property {string} Enc.description + */ + +/** + * @type {Enc} + */ const encV1 = { mergeUpdates: Y.mergeUpdates, encodeStateAsUpdate: Y.encodeStateAsUpdate, applyUpdate: Y.applyUpdate, logUpdate: Y.logUpdate, - updateEventName: 'update' + updateEventName: 'update', + description: 'V1' } +/** + * @type {Enc} + */ const encV2 = { mergeUpdates: Y.mergeUpdatesV2, encodeStateAsUpdate: Y.encodeStateAsUpdateV2, applyUpdate: Y.applyUpdateV2, logUpdate: Y.logUpdateV2, - updateEventName: 'updateV2' + updateEventName: 'updateV2', + description: 'V2' } -const encoders = [encV1, encV2] +/** + * @type {Enc} + */ +const encDoc = { + mergeUpdates: (updates) => { + const ydoc = new Y.Doc() + updates.forEach(update => { + Y.applyUpdate(ydoc, update) + }) + return Y.encodeStateAsUpdate(ydoc) + }, + encodeStateAsUpdate: Y.encodeStateAsUpdate, + applyUpdate: Y.applyUpdate, + logUpdate: Y.logUpdate, + updateEventName: 'update', + description: 'Merge via Y.Doc' +} + +const encoders = [encV1, encV2, encDoc] /** * @param {Array} users - * @param 
{encV1 | encV2} enc + * @param {Enc} enc */ const fromUpdates = (users, enc) => { const updates = users.map(user => @@ -52,7 +88,7 @@ export const testMergeUpdates = tc => { /** * @param {Y.Doc} ydoc * @param {Array} updates - expecting at least 4 updates - * @param {encV1 | encV2} enc + * @param {Enc} enc */ const checkUpdateCases = (ydoc, updates, enc) => { const cases = [] @@ -101,7 +137,7 @@ const checkUpdateCases = (ydoc, updates, enc) => { */ export const testMergeUpdates1 = tc => { encoders.forEach((enc, i) => { - t.info(`Using V${i + 1} encoder.`) + t.info(`Using encoder: ${enc.description}`) const ydoc = new Y.Doc({ gc: false }) const updates = /** @type {Array} */ ([]) ydoc.on(enc.updateEventName, update => { updates.push(update) }) @@ -121,7 +157,7 @@ export const testMergeUpdates1 = tc => { */ export const testMergeUpdates2 = tc => { encoders.forEach((enc, i) => { - t.info(`Using V${i + 1} encoder.`) + t.info(`Using encoder: ${enc.description}`) const ydoc = new Y.Doc({ gc: false }) const updates = /** @type {Array} */ ([]) ydoc.on(enc.updateEventName, update => { updates.push(update) }) From dbd1b3cb59f1feac0a19e071e12d0a26976af2f0 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Wed, 30 Dec 2020 20:21:14 +0100 Subject: [PATCH 20/23] add tests for meta decoding of updates and state vector comparison of update and ydoc approach --- src/index.js | 6 +++++- src/utils/updates.js | 45 ++++++++++++++++++++++++++++++++++++++++++ tests/updates.tests.js | 20 +++++++++++++++++++ 3 files changed, 70 insertions(+), 1 deletion(-) diff --git a/src/index.js b/src/index.js index e4ad99e1..5a2893f2 100644 --- a/src/index.js +++ b/src/index.js @@ -76,5 +76,9 @@ export { AbstractConnector, logType, mergeUpdates, - mergeUpdatesV2 + mergeUpdatesV2, + parseUpdateMeta, + parseUpdateMetaV2, + encodeStateVectorFromUpdate, + encodeStateVectorFromUpdateV2 } from './internals.js' diff --git a/src/utils/updates.js b/src/utils/updates.js index dfdd0bf0..7cb4a2fb 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -183,6 +183,51 @@ export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YD */ export const encodeStateVectorFromUpdate = update => encodeStateVectorFromUpdateV2(update, DSEncoderV1, UpdateDecoderV2) +/** + * @param {Uint8Array} update + * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder + * @return {{ from: Map, to: Map }} + */ +export const parseUpdateMetaV2 = (update, YDecoder = UpdateDecoderV2) => { + /** + * @type {Map} + */ + const from = new Map() + /** + * @type {Map} + */ + const to = new Map() + const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update))) + let curr = updateDecoder.curr + if (curr !== null) { + let currClient = curr.id.client + let currClock = curr.id.clock + // write the beginning to `from` + from.set(currClient, currClock) + for (; curr !== null; curr = updateDecoder.next()) { + if (currClient !== curr.id.client) { + // We found a new client + // write the end to `to` + to.set(currClient, currClock) + // write the beginning to `from` + from.set(curr.id.client, curr.id.clock) + // update currClient + currClient = curr.id.client + } + currClock = curr.id.clock + curr.length + } + // write the end to `to` + to.set(currClient, currClock) + } + return { from, to } +} + +/** + * @param {Uint8Array} update + * @return {{ from: Map, to: Map }} + */ +export const parseUpdateMeta = update => parseUpdateMetaV2(update, UpdateDecoderV1) + /** * This method is intended to slice any kind of struct and 
retrieve the right part. * It does not handle side-effects, so it should only be used by the lazy-encoder. diff --git a/tests/updates.tests.js b/tests/updates.tests.js index d733823c..06d708c0 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -8,6 +8,9 @@ import * as Y from '../src/index.js' * @property {function(Y.Doc):Uint8Array} Enc.encodeStateAsUpdate * @property {function(Y.Doc, Uint8Array):void} Enc.applyUpdate * @property {function(Uint8Array):void} Enc.logUpdate + * @property {function(Uint8Array):{from:Map,to:Map}} Enc.parseUpdateMeta + * @property {function(Y.Doc):Uint8Array} Enc.encodeStateVector + * @property {function(Uint8Array):Uint8Array} Enc.encodeStateVectorFromUpdate * @property {string} Enc.updateEventName * @property {string} Enc.description */ @@ -20,6 +23,9 @@ const encV1 = { encodeStateAsUpdate: Y.encodeStateAsUpdate, applyUpdate: Y.applyUpdate, logUpdate: Y.logUpdate, + parseUpdateMeta: Y.parseUpdateMeta, + encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdate, + encodeStateVector: Y.encodeStateVector, updateEventName: 'update', description: 'V1' } @@ -32,6 +38,9 @@ const encV2 = { encodeStateAsUpdate: Y.encodeStateAsUpdateV2, applyUpdate: Y.applyUpdateV2, logUpdate: Y.logUpdateV2, + parseUpdateMeta: Y.parseUpdateMetaV2, + encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2, + encodeStateVector: Y.encodeStateVectorV2, updateEventName: 'updateV2', description: 'V2' } @@ -50,6 +59,9 @@ const encDoc = { encodeStateAsUpdate: Y.encodeStateAsUpdate, applyUpdate: Y.applyUpdate, logUpdate: Y.logUpdate, + parseUpdateMeta: Y.parseUpdateMetaV2, + encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2, + encodeStateVector: Y.encodeStateVector, updateEventName: 'update', description: 'Merge via Y.Doc' } @@ -129,6 +141,14 @@ const checkUpdateCases = (ydoc, updates, enc) => { const merged = new Y.Doc() enc.applyUpdate(merged, updates) t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray()) + t.compare(enc.encodeStateVector(merged), enc.encodeStateVectorFromUpdate(updates)) + const meta = enc.parseUpdateMeta(updates) + meta.from.forEach((clock, client) => t.assert(clock === 0)) + meta.to.forEach((clock, client) => { + const structs = /** @type {Array} */ (merged.store.clients.get(client)) + const lastStruct = structs[structs.length - 1] + t.assert(lastStruct.id.clock + lastStruct.length === clock) + }) }) } From 304812fb07680f7c4c3265561bf57d7fade1cac2 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Sun, 17 Jan 2021 15:22:36 +0100 Subject: [PATCH 21/23] concept for improved implementation of pending updates --- src/index.js | 4 +- src/utils/StructStore.js | 19 ++---- src/utils/encoding.js | 129 +++++++++++++++++++-------------------- 3 files changed, 69 insertions(+), 83 deletions(-) diff --git a/src/index.js b/src/index.js index 5a2893f2..ec2fe7f9 100644 --- a/src/index.js +++ b/src/index.js @@ -80,5 +80,7 @@ export { parseUpdateMeta, parseUpdateMetaV2, encodeStateVectorFromUpdate, - encodeStateVectorFromUpdateV2 + encodeStateVectorFromUpdateV2, + encodeRelativePosition, + decodeRelativePosition } from './internals.js' diff --git a/src/utils/StructStore.js b/src/utils/StructStore.js index f2c7fd8a..4a773127 100644 --- a/src/utils/StructStore.js +++ b/src/utils/StructStore.js @@ -15,24 +15,13 @@ export class StructStore { */ this.clients = new Map() /** - * Store incompleted struct reads here - * `i` denotes to the next read operation - * We could shift the array of refs instead, but shift is incredible - * slow in Chrome for 
arrays with more than 100k elements - * @see tryResumePendingStructRefs - * @type {Map}>} + * @type {null | { missing: Map, update: Uint8Array }} */ - this.pendingClientsStructRefs = new Map() + this.pendingStructs = null /** - * Stack of pending structs waiting for struct dependencies - * Maximum length of stack is structReaders.size - * @type {Array} + * @type {null | Uint8Array} */ - this.pendingStack = [] - /** - * @type {Array} - */ - this.pendingDeleteReaders = [] + this.pendingDs = null } } diff --git a/src/utils/encoding.js b/src/utils/encoding.js index 1318cd3d..3d43e579 100644 --- a/src/utils/encoding.js +++ b/src/utils/encoding.js @@ -33,6 +33,7 @@ import { DSEncoderV2, DSDecoderV1, DSEncoderV1, + mergeUpdates, Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line } from '../internals.js' @@ -40,6 +41,7 @@ import * as encoding from 'lib0/encoding.js' import * as decoding from 'lib0/decoding.js' import * as binary from 'lib0/binary.js' import * as map from 'lib0/map.js' +import { mergeUpdatesV2 } from './updates.js' /** * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder @@ -98,14 +100,17 @@ export const writeClientsStructs = (encoder, store, _sm) => { /** * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from. - * @param {Map>} clientRefs * @param {Doc} doc - * @return {Map>} + * @return {Map>} * * @private * @function */ -export const readClientsStructRefs = (decoder, clientRefs, doc) => { +export const readClientsStructRefs = (decoder, doc) => { + /** + * @type {Map>} + */ + const clientRefs = map.create() const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder) for (let i = 0; i < numOfStateUpdates; i++) { const numberOfStructs = decoding.readVarUint(decoder.restDecoder) @@ -199,17 +204,18 @@ export const readClientsStructRefs = (decoder, clientRefs, doc) => { * * @param {Transaction} transaction * @param {StructStore} store + * @param {Map { +const integrateStructs = (transaction, store, clientsStructRefs) => { const stack = store.pendingStack // @todo don't forget to append stackhead at the end - const clientsStructRefs = store.pendingClientsStructRefs // sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user. 
const clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b) if (clientsStructRefsIds.length === 0) { - return + return null } const getNextStructTarget = () => { let nextStructsTarget = /** @type {{i:number,refs:Array}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1])) @@ -301,21 +307,6 @@ const resumeStructIntegration = (transaction, store) => { store.pendingClientsStructRefs.clear() } -/** - * @param {Transaction} transaction - * @param {StructStore} store - * - * @private - * @function - */ -export const tryResumePendingDeleteReaders = (transaction, store) => { - const pendingReaders = store.pendingDeleteReaders - store.pendingDeleteReaders = [] - for (let i = 0; i < pendingReaders.length; i++) { - readAndApplyDeleteSet(pendingReaders[i], transaction, store) - } -} - /** * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder * @param {Transaction} transaction @@ -325,46 +316,6 @@ export const tryResumePendingDeleteReaders = (transaction, store) => { */ export const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState) -/** - * @param {StructStore} store - * @param {Map>} clientsStructsRefs - * - * @private - * @function - */ -const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => { - const pendingClientsStructRefs = store.pendingClientsStructRefs - clientsStructsRefs.forEach((structRefs, client) => { - const pendingStructRefs = pendingClientsStructRefs.get(client) - if (pendingStructRefs === undefined) { - pendingClientsStructRefs.set(client, { refs: structRefs, i: 0 }) - } else { - // merge into existing structRefs - const merged = pendingStructRefs.i > 0 ? pendingStructRefs.refs.slice(pendingStructRefs.i) : pendingStructRefs.refs - for (let i = 0; i < structRefs.length; i++) { - merged.push(structRefs[i]) - } - pendingStructRefs.i = 0 - pendingStructRefs.refs = merged.sort((r1, r2) => r1.id.clock - r2.id.clock) - } - }) -} - -/** - * @param {Map,i:number}>} pendingClientsStructRefs - */ -const cleanupPendingStructs = pendingClientsStructRefs => { - // cleanup pendingClientsStructs if not fully finished - pendingClientsStructRefs.forEach((refs, client) => { - if (refs.i === refs.refs.length) { - pendingClientsStructRefs.delete(client) - } else { - refs.refs.splice(0, refs.i) - refs.i = 0 - } - }) -} - /** * Read the next Item in a Decoder and fill this Item with the read data. 
* @@ -378,23 +329,67 @@ const cleanupPendingStructs = pendingClientsStructRefs => { * @function */ export const readStructs = (decoder, transaction, store) => { - const clientsStructRefs = new Map() + let retry = false + const doc = transaction.doc // let start = performance.now() - readClientsStructRefs(decoder, clientsStructRefs, transaction.doc) + const ss = readClientsStructRefs(decoder, doc) // console.log('time to read structs: ', performance.now() - start) // @todo remove // start = performance.now() - mergeReadStructsIntoPendingReads(store, clientsStructRefs) + // console.log('time to merge: ', performance.now() - start) // @todo remove // start = performance.now() - resumeStructIntegration(transaction, store) + const { missing, ssRest } = integrateStructs(transaction, store, ss) + const pending = store.pendingStructs + if (pending) { + // check if we can apply something + for (const [clock, client] of pending.missing) { + if (clock < getState(store, client)) { + retry = true + break + } + } + } + if (missing) { + if (pending) { + pending.missing.set(missing.client, missing.clock) + // @todo support v2 as well + pending.update = mergeUpdates([pending.update, ssRest]) + } else { + store.pendingStructs = { missing: map.create(), update: ssRest } + store.pendingStructs.missing.set(missing.client, missing.clock) + } + } + + // console.log('time to integrate: ', performance.now() - start) // @todo remove // start = performance.now() - cleanupPendingStructs(store.pendingClientsStructRefs) + const dsRest = readAndApplyDeleteSet(decoder, transaction, store) + if (store.pendingDs) { + // @todo we could make a lower-bound state-vector check as we do above + const dsRest2 = readAndApplyDeleteSet(store.pendingDs, transaction, store) + if (dsRest1 && dsRest2) { + // case 1: ds1 != null && ds2 != null + store.pendingDs = mergeUpdatesV2([dsRest, dsRest2]) + } else { + // case 2: ds1 != null + // case 3: ds2 != null + // case 4: ds1 == null && ds2 == null + store.pendingDs = dsRest || dsRest2 + } + } else { + // Either dsRest == null && pendingDs == null OR dsRest != null + store.pendingDs = dsRest + } // console.log('time to cleanup: ', performance.now() - start) // @todo remove // start = performance.now() - tryResumePendingDeleteReaders(transaction, store) + // console.log('time to resume delete readers: ', performance.now() - start) // @todo remove // start = performance.now() + if (retry) { + const update = store.pendingStructs.update + store.pendingStructs = null + applyUpdate(update) + } } /** From 7edbb2485fb239c6169b1b98add437060ad94479 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Thu, 28 Jan 2021 20:28:30 +0100 Subject: [PATCH 22/23] complete refactor of update mechanism to allow encoding of pending updates - #263 --- src/utils/DeleteSet.js | 11 +- src/utils/encoding.js | 452 +++++++++++++++++++++++------------------ src/utils/updates.js | 16 +- tests/testHelper.js | 5 +- tests/updates.tests.js | 24 +-- 5 files changed, 284 insertions(+), 224 deletions(-) diff --git a/src/utils/DeleteSet.js b/src/utils/DeleteSet.js index 2c50f781..d7251faa 100644 --- a/src/utils/DeleteSet.js +++ b/src/utils/DeleteSet.js @@ -4,6 +4,7 @@ import { getState, splitItem, iterateStructs, + UpdateEncoderV2, DSDecoderV1, DSEncoderV1, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line } from '../internals.js' @@ -263,6 +264,7 @@ export const readDeleteSet = decoder => { * @param {DSDecoderV1 | DSDecoderV2} decoder * @param {Transaction} transaction * @param {StructStore} 
store + * @return {Uint8Array|null} Returns a v2 update containing all deletes that couldn't be applied yet; or null if all deletes were applied successfully. * * @private * @function @@ -315,9 +317,10 @@ export const readAndApplyDeleteSet = (decoder, transaction, store) => { } } if (unappliedDS.clients.size > 0) { - // TODO: no need for encoding+decoding ds anymore - const unappliedDSEncoder = new DSEncoderV2() - writeDeleteSet(unappliedDSEncoder, unappliedDS) - store.pendingDeleteReaders.push(new DSDecoderV2(decoding.createDecoder((unappliedDSEncoder.toUint8Array())))) + const ds = new UpdateEncoderV2() + encoding.writeVarUint(ds.restEncoder, 0) // encode 0 structs + writeDeleteSet(ds, unappliedDS) + return ds.toUint8Array() } + return null } diff --git a/src/utils/encoding.js b/src/utils/encoding.js index 3d43e579..2f0b3b40 100644 --- a/src/utils/encoding.js +++ b/src/utils/encoding.js @@ -33,15 +33,17 @@ import { DSEncoderV2, DSDecoderV1, DSEncoderV1, - mergeUpdates, - Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line + mergeUpdatesV2, + Skip, + diffUpdate, + Doc, Transaction, GC, Item, StructStore // eslint-disable-line } from '../internals.js' import * as encoding from 'lib0/encoding.js' import * as decoding from 'lib0/decoding.js' import * as binary from 'lib0/binary.js' import * as map from 'lib0/map.js' -import { mergeUpdatesV2 } from './updates.js' +import * as math from 'lib0/math.js' /** * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder @@ -53,6 +55,7 @@ import { mergeUpdatesV2 } from './updates.js' */ const writeStructs = (encoder, structs, client, clock) => { // write first id + clock = math.max(clock, structs[0].id.clock) // make sure the first id exists const startNewStructs = findIndexSS(structs, clock) // write # encoded structs encoding.writeVarUint(encoder.restEncoder, structs.length - startNewStructs) @@ -101,14 +104,14 @@ export const writeClientsStructs = (encoder, store, _sm) => { /** * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from. * @param {Doc} doc - * @return {Map>} + * @return {Map }>} * * @private * @function */ export const readClientsStructRefs = (decoder, doc) => { /** - * @type {Map>} + * @type {Map }>} */ const clientRefs = map.create() const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder) @@ -121,61 +124,72 @@ export const readClientsStructRefs = (decoder, doc) => { const client = decoder.readClient() let clock = decoding.readVarUint(decoder.restDecoder) // const start = performance.now() - clientRefs.set(client, refs) + clientRefs.set(client, { i: 0, refs }) for (let i = 0; i < numberOfStructs; i++) { const info = decoder.readInfo() - if ((binary.BITS5 & info) !== 0) { - /** - * The optimized implementation doesn't use any variables because inlining variables is faster. - * Below a non-optimized version is shown that implements the basic algorithm with - * a few comments - */ - const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 - // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` - // and we read the next string as parentYKey. - // It indicates how we store/retrieve parent from `y.share` - // @type {string|null} - const struct = new Item( - createID(client, clock), - null, // leftd - (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin - null, // right - (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin - cantCopyParentInfo ? 
(decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent - cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub - readItemContent(decoder, info) // item content - ) - /* A non-optimized implementation of the above algorithm: + switch (binary.BITS5 & info) { + case 0: { // GC + const len = decoder.readLen() + refs[i] = new GC(createID(client, clock), len) + clock += len + break + } + case 10: { // Skip Struct (nothing to apply) + // @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing. + const len = decoder.readLen() + refs[i] = new Skip(createID(client, clock), len) + clock += len + break + } + default: { // Item with content + /** + * The optimized implementation doesn't use any variables because inlining variables is faster. + * Below a non-optimized version is shown that implements the basic algorithm with + * a few comments + */ + const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 + // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` + // and we read the next string as parentYKey. + // It indicates how we store/retrieve parent from `y.share` + // @type {string|null} + const struct = new Item( + createID(client, clock), + null, // leftd + (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin + null, // right + (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin + cantCopyParentInfo ? (decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent + cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub + readItemContent(decoder, info) // item content + ) + /* A non-optimized implementation of the above algorithm: - // The item that was originally to the left of this item. - const origin = (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null - // The item that was originally to the right of this item. - const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null - const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 - const hasParentYKey = cantCopyParentInfo ? decoder.readParentInfo() : false - // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` - // and we read the next string as parentYKey. - // It indicates how we store/retrieve parent from `y.share` - // @type {string|null} - const parentYKey = cantCopyParentInfo && hasParentYKey ? decoder.readString() : null + // The item that was originally to the left of this item. + const origin = (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null + // The item that was originally to the right of this item. + const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null + const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0 + const hasParentYKey = cantCopyParentInfo ? decoder.readParentInfo() : false + // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y` + // and we read the next string as parentYKey. + // It indicates how we store/retrieve parent from `y.share` + // @type {string|null} + const parentYKey = cantCopyParentInfo && hasParentYKey ? 
decoder.readString() : null - const struct = new Item( - createID(client, clock), - null, // leftd - origin, // origin - null, // right - rightOrigin, // right origin - cantCopyParentInfo && !hasParentYKey ? decoder.readLeftID() : (parentYKey !== null ? doc.get(parentYKey) : null), // parent - cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub - readItemContent(decoder, info) // item content - ) - */ - refs[i] = struct - clock += struct.length - } else { - const len = decoder.readLen() - refs[i] = new GC(createID(client, clock), len) - clock += len + const struct = new Item( + createID(client, clock), + null, // leftd + origin, // origin + null, // right + rightOrigin, // right origin + cantCopyParentInfo && !hasParentYKey ? decoder.readLeftID() : (parentYKey !== null ? doc.get(parentYKey) : null), // parent + cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub + readItemContent(decoder, info) // item content + ) + */ + refs[i] = struct + clock += struct.length + } } } // console.log('time to read: ', performance.now() - start) // @todo remove @@ -204,27 +218,32 @@ export const readClientsStructRefs = (decoder, doc) => { * * @param {Transaction} transaction * @param {StructStore} store - * @param {Map} clientsStructRefs + * @return { null | { update: Uint8Array, missing: Map } } * * @private * @function */ const integrateStructs = (transaction, store, clientsStructRefs) => { - const stack = store.pendingStack // @todo don't forget to append stackhead at the end + /** + * @type {Array} + */ + const stack = [] // sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user. - const clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b) + let clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b) if (clientsStructRefsIds.length === 0) { return null } const getNextStructTarget = () => { + if (clientsStructRefsIds.length === 0) { + return null + } let nextStructsTarget = /** @type {{i:number,refs:Array}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1])) while (nextStructsTarget.refs.length === nextStructsTarget.i) { clientsStructRefsIds.pop() if (clientsStructRefsIds.length > 0) { nextStructsTarget = /** @type {{i:number,refs:Array}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1])) } else { - store.pendingClientsStructRefs.clear() return null } } @@ -232,79 +251,111 @@ const integrateStructs = (transaction, store, clientsStructRefs) => { } let curStructsTarget = getNextStructTarget() if (curStructsTarget === null && stack.length === 0) { - return + return null + } + + /** + * @type {StructStore} + */ + const restStructs = new StructStore() + const missingSV = new Map() + /** + * @param {number} client + * @param {number} clock + */ + const updateMissingSv = (client, clock) => { + const mclock = missingSV.get(client) + if (mclock == null || mclock > clock) { + missingSV.set(client, clock) + } } /** * @type {GC|Item} */ - let stackHead = stack.length > 0 - ? 
/** @type {GC|Item} */ (stack.pop()) - : /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++] + let stackHead = /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++] // caching the state because it is used very often const state = new Map() + + const addStackToRestSS = () => { + for (const item of stack) { + const client = item.id.client + const unapplicableItems = clientsStructRefs.get(client) + if (unapplicableItems) { + // decrement because we weren't able to apply previous operation + unapplicableItems.i-- + restStructs.clients.set(client, unapplicableItems.refs.slice(unapplicableItems.i)) + clientsStructRefs.delete(client) + unapplicableItems.i = 0 + unapplicableItems.refs = [] + } else { + // item was the last item on clientsStructRefs and the field was already cleared. Add item to restStructs and continue + restStructs.clients.set(client, [item]) + } + // remove client from clientsStructRefsIds to prevent users from applying the same update again + clientsStructRefsIds = clientsStructRefsIds.filter(c => c !== client) + } + stack.length = 0 + } + // iterate over all struct readers until we are done while (true) { - const localClock = map.setIfUndefined(state, stackHead.id.client, () => getState(store, stackHead.id.client)) - const offset = stackHead.id.clock < localClock ? localClock - stackHead.id.clock : 0 - if (stackHead.id.clock + offset !== localClock) { - // A previous message from this client is missing - // check if there is a pending structRef with a smaller clock and switch them - /** - * @type {{ refs: Array, i: number }} - */ - const structRefs = clientsStructRefs.get(stackHead.id.client) || { refs: [], i: 0 } - if (structRefs.refs.length !== structRefs.i) { - const r = structRefs.refs[structRefs.i] - if (r.id.clock < stackHead.id.clock) { - // put ref with smaller clock on stack instead and continue - structRefs.refs[structRefs.i] = stackHead - stackHead = r - // sort the set because this approach might bring the list out of order - structRefs.refs = structRefs.refs.slice(structRefs.i).sort((r1, r2) => r1.id.clock - r2.id.clock) - structRefs.i = 0 - continue - } - } - // wait until missing struct is available - stack.push(stackHead) - return - } - const missing = stackHead.getMissing(transaction, store) - if (missing === null) { - if (offset === 0 || offset < stackHead.length) { - stackHead.integrate(transaction, offset) - state.set(stackHead.id.client, stackHead.id.clock + stackHead.length) - } - // iterate to next stackHead - if (stack.length > 0) { - stackHead = /** @type {GC|Item} */ (stack.pop()) - } else if (curStructsTarget !== null && curStructsTarget.i < curStructsTarget.refs.length) { - stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]) - } else { - curStructsTarget = getNextStructTarget() - if (curStructsTarget === null) { - // we are done! - break - } else { - stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]) - } - } - } else { - // get the struct reader that has the missing struct - /** - * @type {{ refs: Array, i: number }} - */ - const structRefs = clientsStructRefs.get(missing) || { refs: [], i: 0 } - if (structRefs.refs.length === structRefs.i) { - // This update message causally depends on another update message. 
+ if (stackHead.constructor !== Skip) { + const localClock = map.setIfUndefined(state, stackHead.id.client, () => getState(store, stackHead.id.client)) + const offset = localClock - stackHead.id.clock + if (offset < 0) { + // update from the same client is missing stack.push(stackHead) - return + updateMissingSv(stackHead.id.client, stackHead.id.clock - 1) + // hid a dead wall, add all items from stack to restSS + addStackToRestSS() + } else { + const missing = stackHead.getMissing(transaction, store) + if (missing !== null) { + stack.push(stackHead) + // get the struct reader that has the missing struct + /** + * @type {{ refs: Array, i: number }} + */ + const structRefs = clientsStructRefs.get(/** @type {number} */ (missing)) || { refs: [], i: 0 } + if (structRefs.refs.length === structRefs.i) { + // This update message causally depends on another update message that doesn't exist yet + updateMissingSv(/** @type {number} */ (missing), getState(store, missing)) + addStackToRestSS() + } else { + stackHead = structRefs.refs[structRefs.i++] + continue + } + } else if (offset === 0 || offset < stackHead.length) { + // all fine, apply the stackhead + stackHead.integrate(transaction, offset) + state.set(stackHead.id.client, stackHead.id.clock + stackHead.length) + } + } + } + // iterate to next stackHead + if (stack.length > 0) { + stackHead = /** @type {GC|Item} */ (stack.pop()) + } else if (curStructsTarget !== null && curStructsTarget.i < curStructsTarget.refs.length) { + stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]) + } else { + curStructsTarget = getNextStructTarget() + if (curStructsTarget === null) { + // we are done! + break + } else { + stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++]) } - stack.push(stackHead) - stackHead = structRefs.refs[structRefs.i++] } } - store.pendingClientsStructRefs.clear() + if (restStructs.clients.size > 0) { + const encoder = new UpdateEncoderV2() + writeClientsStructs(encoder, restStructs, new Map()) + // write empty deleteset + // writeDeleteSet(encoder, new DeleteSet()) + encoding.writeVarUint(encoder.restEncoder, 0) // => no need for an extra function call, just write 0 deletes + return { missing: missingSV, update: encoder.toUint8Array() } + } + return null } /** @@ -316,82 +367,6 @@ const integrateStructs = (transaction, store, clientsStructRefs) => { */ export const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState) -/** - * Read the next Item in a Decoder and fill this Item with the read data. - * - * This is called when data is received from a remote peer. - * - * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from. 
- * @param {Transaction} transaction - * @param {StructStore} store - * - * @private - * @function - */ -export const readStructs = (decoder, transaction, store) => { - let retry = false - const doc = transaction.doc - // let start = performance.now() - const ss = readClientsStructRefs(decoder, doc) - // console.log('time to read structs: ', performance.now() - start) // @todo remove - // start = performance.now() - - // console.log('time to merge: ', performance.now() - start) // @todo remove - // start = performance.now() - const { missing, ssRest } = integrateStructs(transaction, store, ss) - const pending = store.pendingStructs - if (pending) { - // check if we can apply something - for (const [clock, client] of pending.missing) { - if (clock < getState(store, client)) { - retry = true - break - } - } - } - if (missing) { - if (pending) { - pending.missing.set(missing.client, missing.clock) - // @todo support v2 as well - pending.update = mergeUpdates([pending.update, ssRest]) - } else { - store.pendingStructs = { missing: map.create(), update: ssRest } - store.pendingStructs.missing.set(missing.client, missing.clock) - } - } - - - // console.log('time to integrate: ', performance.now() - start) // @todo remove - // start = performance.now() - const dsRest = readAndApplyDeleteSet(decoder, transaction, store) - if (store.pendingDs) { - // @todo we could make a lower-bound state-vector check as we do above - const dsRest2 = readAndApplyDeleteSet(store.pendingDs, transaction, store) - if (dsRest1 && dsRest2) { - // case 1: ds1 != null && ds2 != null - store.pendingDs = mergeUpdatesV2([dsRest, dsRest2]) - } else { - // case 2: ds1 != null - // case 3: ds2 != null - // case 4: ds1 == null && ds2 == null - store.pendingDs = dsRest || dsRest2 - } - } else { - // Either dsRest == null && pendingDs == null OR dsRest != null - store.pendingDs = dsRest - } - // console.log('time to cleanup: ', performance.now() - start) // @todo remove - // start = performance.now() - - // console.log('time to resume delete readers: ', performance.now() - start) // @todo remove - // start = performance.now() - if (retry) { - const update = store.pendingStructs.update - store.pendingStructs = null - applyUpdate(update) - } -} - /** * Read and apply a document update. 
* @@ -406,8 +381,69 @@ export const readStructs = (decoder, transaction, store) => { */ export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = new UpdateDecoderV2(decoder)) => transact(ydoc, transaction => { - readStructs(structDecoder, transaction, ydoc.store) - readAndApplyDeleteSet(structDecoder, transaction, ydoc.store) + let retry = false + const doc = transaction.doc + const store = doc.store + // let start = performance.now() + const ss = readClientsStructRefs(structDecoder, doc) + // console.log('time to read structs: ', performance.now() - start) // @todo remove + // start = performance.now() + // console.log('time to merge: ', performance.now() - start) // @todo remove + // start = performance.now() + const restStructs = integrateStructs(transaction, store, ss) + const pending = store.pendingStructs + if (pending) { + // check if we can apply something + for (const [client, clock] of pending.missing) { + if (clock < getState(store, client)) { + retry = true + break + } + } + if (restStructs) { + // merge restStructs into store.pending + for (const [client, clock] of restStructs.missing) { + const mclock = pending.missing.get(client) + if (mclock == null || mclock > clock) { + pending.missing.set(client, clock) + } + } + pending.update = mergeUpdatesV2([pending.update, restStructs.update]) + } + } else { + store.pendingStructs = restStructs + } + // console.log('time to integrate: ', performance.now() - start) // @todo remove + // start = performance.now() + const dsRest = readAndApplyDeleteSet(structDecoder, transaction, store) + if (store.pendingDs) { + // @todo we could make a lower-bound state-vector check as we do above + const pendingDSUpdate = new UpdateDecoderV2(decoding.createDecoder(store.pendingDs)) + decoding.readVarUint(pendingDSUpdate.restDecoder) // read 0 structs, because we only encode deletes in pendingdsupdate + const dsRest2 = readAndApplyDeleteSet(pendingDSUpdate, transaction, store) + if (dsRest && dsRest2) { + // case 1: ds1 != null && ds2 != null + store.pendingDs = mergeUpdatesV2([dsRest, dsRest2]) + } else { + // case 2: ds1 != null + // case 3: ds2 != null + // case 4: ds1 == null && ds2 == null + store.pendingDs = dsRest || dsRest2 + } + } else { + // Either dsRest == null && pendingDs == null OR dsRest != null + store.pendingDs = dsRest + } + // console.log('time to cleanup: ', performance.now() - start) // @todo remove + // start = performance.now() + + // console.log('time to resume delete readers: ', performance.now() - start) // @todo remove + // start = performance.now() + if (retry) { + const update = /** @type {{update: Uint8Array}} */ (store.pendingStructs).update + store.pendingStructs = null + applyUpdateV2(transaction.doc, update) + } }, transactionOrigin, false) /** @@ -484,7 +520,21 @@ export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map()) export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector, encoder = new UpdateEncoderV2()) => { const targetStateVector = encodedTargetStateVector == null ? 
new Map() : decodeStateVector(encodedTargetStateVector) writeStateAsUpdate(encoder, doc, targetStateVector) - return encoder.toUint8Array() + const updates = [encoder.toUint8Array()] + // also add the pending updates (if there are any) + // @todo support diffirent encoders + if (encoder.constructor === UpdateEncoderV2) { + if (doc.store.pendingDs) { + updates.push(doc.store.pendingDs) + } + if (doc.store.pendingStructs) { + updates.push(diffUpdate(doc.store.pendingStructs.update, encodedTargetStateVector)) + } + if (updates.length > 1) { + return mergeUpdatesV2(updates) + } + } + return updates[0] } /** diff --git a/src/utils/updates.js b/src/utils/updates.js index 7cb4a2fb..74acdb10 100644 --- a/src/utils/updates.js +++ b/src/utils/updates.js @@ -151,6 +151,7 @@ export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YD let size = 1 let currClient = curr.id.client let currClock = curr.id.clock + let stopCounting = false for (; curr !== null; curr = updateDecoder.next()) { if (currClient !== curr.id.client) { size++ @@ -159,8 +160,14 @@ export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YD encoding.writeVarUint(encoder.restEncoder, currClient) encoding.writeVarUint(encoder.restEncoder, currClock) currClient = curr.id.client + stopCounting = false + } + if (curr.constructor === Skip) { + stopCounting = true + } + if (!stopCounting) { + currClock = curr.id.clock + curr.length } - currClock = curr.id.clock + curr.length } // write what we have encoding.writeVarUint(encoder.restEncoder, currClient) @@ -181,7 +188,7 @@ export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YD * @param {Uint8Array} update * @return {Uint8Array} */ -export const encodeStateVectorFromUpdate = update => encodeStateVectorFromUpdateV2(update, DSEncoderV1, UpdateDecoderV2) +export const encodeStateVectorFromUpdate = update => encodeStateVectorFromUpdateV2(update, DSEncoderV1, UpdateDecoderV1) /** * @param {Uint8Array} update @@ -388,9 +395,10 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U /** * @param {Uint8Array} update - * @param {Uint8Array} sv + * @param {Uint8Array} [sv] */ -export const diffUpdate = (update, sv) => { +export const diffUpdate = (update, sv = new Uint8Array([0])) => { + // @todo!!! 
return update } diff --git a/tests/testHelper.js b/tests/testHelper.js index d02b5ae2..a438a31e 100644 --- a/tests/testHelper.js +++ b/tests/testHelper.js @@ -336,9 +336,8 @@ export const compare = users => { const userXmlValues = users.map(u => u.get('xml', Y.YXmlElement).toString()) const userTextValues = users.map(u => u.getText('text').toDelta()) for (const u of users) { - t.assert(u.store.pendingDeleteReaders.length === 0) - t.assert(u.store.pendingStack.length === 0) - t.assert(u.store.pendingClientsStructRefs.size === 0) + t.assert(u.store.pendingDs === null) + t.assert(u.store.pendingStructs === null) } // Test Array iterator t.compare(users[0].getArray('array').toArray(), Array.from(users[0].getArray('array'))) diff --git a/tests/updates.tests.js b/tests/updates.tests.js index 06d708c0..0fd831ef 100644 --- a/tests/updates.tests.js +++ b/tests/updates.tests.js @@ -52,17 +52,17 @@ const encDoc = { mergeUpdates: (updates) => { const ydoc = new Y.Doc() updates.forEach(update => { - Y.applyUpdate(ydoc, update) + Y.applyUpdateV2(ydoc, update) }) - return Y.encodeStateAsUpdate(ydoc) + return Y.encodeStateAsUpdateV2(ydoc) }, - encodeStateAsUpdate: Y.encodeStateAsUpdate, - applyUpdate: Y.applyUpdate, - logUpdate: Y.logUpdate, + encodeStateAsUpdate: Y.encodeStateAsUpdateV2, + applyUpdate: Y.applyUpdateV2, + logUpdate: Y.logUpdateV2, parseUpdateMeta: Y.parseUpdateMetaV2, encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2, - encodeStateVector: Y.encodeStateVector, - updateEventName: 'update', + encodeStateVector: Y.encodeStateVectorV2, + updateEventName: 'updateV2', description: 'Merge via Y.Doc' } @@ -131,13 +131,13 @@ const checkUpdateCases = (ydoc, updates, enc) => { // Case 5: overlapping with many duplicates cases.push(enc.mergeUpdates(cases)) - const targetState = enc.encodeStateAsUpdate(ydoc) - t.info('Target State: ') - enc.logUpdate(targetState) + // const targetState = enc.encodeStateAsUpdate(ydoc) + // t.info('Target State: ') + // enc.logUpdate(targetState) cases.forEach((updates, i) => { - t.info('State Case $' + i + ':') - enc.logUpdate(updates) + // t.info('State Case $' + i + ':') + // enc.logUpdate(updates) const merged = new Y.Doc() enc.applyUpdate(merged, updates) t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray()) From 275d52b19d5dd665937fa5b337cb246fdf996cd0 Mon Sep 17 00:00:00 2001 From: Kevin Jahns Date: Fri, 29 Jan 2021 18:18:09 +0100 Subject: [PATCH 23/23] implement diffUpdates with tests - #263 --- src/index.js | 6 ++-- src/structs/Skip.js | 4 ++- src/utils/encoding.js | 25 ++++++++------ src/utils/updates.js | 71 ++++++++++++++++++++++++++++++-------- tests/testHelper.js | 6 ++-- tests/updates.tests.js | 77 ++++++++++++++++++++++++++++++++++-------- 6 files changed, 144 insertions(+), 45 deletions(-) diff --git a/src/index.js b/src/index.js index ec2fe7f9..53eb066d 100644 --- a/src/index.js +++ b/src/index.js @@ -57,14 +57,12 @@ export { encodeStateAsUpdate, encodeStateAsUpdateV2, encodeStateVector, - encodeStateVectorV2, UndoManager, decodeSnapshot, encodeSnapshot, decodeSnapshotV2, encodeSnapshotV2, decodeStateVector, - decodeStateVectorV2, logUpdate, logUpdateV2, isDeleted, @@ -82,5 +80,7 @@ export { encodeStateVectorFromUpdate, encodeStateVectorFromUpdateV2, encodeRelativePosition, - decodeRelativePosition + decodeRelativePosition, + diffUpdate, + diffUpdateV2 } from './internals.js' diff --git a/src/structs/Skip.js b/src/structs/Skip.js index eaec5c06..c763f853 100644 --- a/src/structs/Skip.js +++ b/src/structs/Skip.js @@ -4,6 +4,7 
@@ import { UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line } from '../internals.js' import * as error from 'lib0/error.js' +import * as encoding from 'lib0/encoding.js' export const structSkipRefNumber = 10 @@ -44,7 +45,8 @@ export class Skip extends AbstractStruct { */ write (encoder, offset) { encoder.writeInfo(structSkipRefNumber) - encoder.writeLen(this.length - offset) + // write as VarUint because Skips can't make use of predictable length-encoding + encoding.writeVarUint(encoder.restEncoder, this.length - offset) } /** diff --git a/src/utils/encoding.js b/src/utils/encoding.js index 2f0b3b40..c81bdab8 100644 --- a/src/utils/encoding.js +++ b/src/utils/encoding.js @@ -29,14 +29,13 @@ import { UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, - DSDecoderV2, DSEncoderV2, DSDecoderV1, DSEncoderV1, mergeUpdatesV2, Skip, - diffUpdate, - Doc, Transaction, GC, Item, StructStore // eslint-disable-line + diffUpdateV2, + DSDecoderV2, Doc, Transaction, GC, Item, StructStore // eslint-disable-line } from '../internals.js' import * as encoding from 'lib0/encoding.js' @@ -136,7 +135,7 @@ export const readClientsStructRefs = (decoder, doc) => { } case 10: { // Skip Struct (nothing to apply) // @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing. - const len = decoder.readLen() + const len = decoding.readVarUint(decoder.restDecoder) refs[i] = new Skip(createID(client, clock), len) clock += len break @@ -517,8 +516,8 @@ export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map()) * * @function */ -export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector, encoder = new UpdateEncoderV2()) => { - const targetStateVector = encodedTargetStateVector == null ? new Map() : decodeStateVector(encodedTargetStateVector) +export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector = new Uint8Array([0]), encoder = new UpdateEncoderV2()) => { + const targetStateVector = decodeStateVector(encodedTargetStateVector) writeStateAsUpdate(encoder, doc, targetStateVector) const updates = [encoder.toUint8Array()] // also add the pending updates (if there are any) @@ -528,7 +527,7 @@ export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector, encoder = n updates.push(doc.store.pendingDs) } if (doc.store.pendingStructs) { - updates.push(diffUpdate(doc.store.pendingStructs.update, encodedTargetStateVector)) + updates.push(diffUpdateV2(doc.store.pendingStructs.update, encodedTargetStateVector)) } if (updates.length > 1) { return mergeUpdatesV2(updates) @@ -578,7 +577,7 @@ export const readStateVector = decoder => { * * @function */ -export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState))) +// export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState))) /** * Read decodedState and return State as Map. @@ -615,21 +614,25 @@ export const writeDocumentStateVector = (encoder, doc) => writeStateVector(encod /** * Encode State as Uint8Array. 
  *
- * @param {Doc} doc
+ * @param {Doc|Map} doc
  * @param {DSEncoderV1 | DSEncoderV2} [encoder]
  * @return {Uint8Array}
  *
  * @function
  */
 export const encodeStateVectorV2 = (doc, encoder = new DSEncoderV2()) => {
-  writeDocumentStateVector(encoder, doc)
+  if (doc instanceof Map) {
+    writeStateVector(encoder, doc)
+  } else {
+    writeDocumentStateVector(encoder, doc)
+  }
   return encoder.toUint8Array()
 }
 
 /**
  * Encode State as Uint8Array.
  *
- * @param {Doc} doc
+ * @param {Doc|Map} doc
  * @return {Uint8Array}
  *
  * @function
diff --git a/src/utils/updates.js b/src/utils/updates.js
index 74acdb10..4bac25bc 100644
--- a/src/utils/updates.js
+++ b/src/utils/updates.js
@@ -3,6 +3,7 @@ import * as binary from 'lib0/binary.js'
 import * as decoding from 'lib0/decoding.js'
 import * as encoding from 'lib0/encoding.js'
 import * as logging from 'lib0/logging.js'
+import * as math from 'lib0/math.js'
 import {
   createID,
   readItemContent,
@@ -12,6 +13,7 @@ import {
   mergeDeleteSets,
   DSEncoderV1, DSEncoderV2,
+  decodeStateVector,
   Item, GC,
   UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line
 } from '../internals.js'
@@ -28,7 +30,7 @@ function * lazyStructReaderGenerator (decoder) {
       const info = decoder.readInfo()
       // @todo use switch instead of ifs
       if (info === 10) {
-        const len = decoder.readLen()
+        const len = decoding.readVarUint(decoder.restDecoder)
         yield new Skip(createID(client, clock), len)
         clock += len
       } else if ((binary.BITS5 & info) !== 0) {
@@ -62,25 +64,27 @@ function * lazyStructReaderGenerator (decoder) {
 export class LazyStructReader {
   /**
    * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
+   * @param {boolean} filterSkips
    */
-  constructor (decoder) {
+  constructor (decoder, filterSkips) {
     this.gen = lazyStructReaderGenerator(decoder)
     /**
-     * @type {null | Item | GC}
+     * @type {null | Item | Skip | GC}
      */
     this.curr = null
     this.done = false
+    this.filterSkips = filterSkips
     this.next()
   }
 
   /**
-   * @return {Item | GC | null}
+   * @return {Item | GC | Skip |null}
    */
   next () {
     // ignore "Skip" structs
     do {
       this.curr = this.gen.next().value || null
-    } while (this.curr !== null && this.curr.constructor === Skip)
+    } while (this.filterSkips && this.curr !== null && this.curr.constructor === Skip)
     return this.curr
   }
 }
@@ -99,7 +103,7 @@ export const logUpdate = update => logUpdateV2(update, UpdateDecoderV1)
 export const logUpdateV2 = (update, YDecoder = UpdateDecoderV2) => {
   const structs = []
   const updateDecoder = new YDecoder(decoding.createDecoder(update))
-  const lazyDecoder = new LazyStructReader(updateDecoder)
+  const lazyDecoder = new LazyStructReader(updateDecoder, false)
   for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
     structs.push(curr)
   }
@@ -145,7 +149,7 @@ export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1,
  */
 export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YDecoder = UpdateDecoderV2) => {
   const encoder = new YEncoder()
-  const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)))
+  const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), true)
   let curr = updateDecoder.curr
   if (curr !== null) {
     let size = 1
@@ -204,7 +208,7 @@ export const parseUpdateMetaV2 = (update, YDecoder = UpdateDecoderV2) => {
    * @type {Map}
    */
   const to = new Map()
-  const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)))
+  const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), false)
   let curr = updateDecoder.curr
   if (curr !== null) {
     let currClient = curr.id.client
@@ -277,7 +281,7 @@ const sliceStruct = (left, diff) => {
  */
 export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
   const updateDecoders = updates.map(update => new YDecoder(decoding.createDecoder(update)))
-  let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder))
+  let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder, true))
 
   /**
    * @todo we don't need offset because we always slice before
@@ -395,13 +399,52 @@ export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = U
 
 /**
  * @param {Uint8Array} update
- * @param {Uint8Array} [sv]
+ * @param {Uint8Array} sv
+ * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
+ * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
  */
-export const diffUpdate = (update, sv = new Uint8Array([0])) => {
-  // @todo!!!
-  return update
+export const diffUpdateV2 = (update, sv, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
+  const state = decodeStateVector(sv)
+  const encoder = new YEncoder()
+  const lazyStructWriter = new LazyStructWriter(encoder)
+  const decoder = new YDecoder(decoding.createDecoder(update))
+  const reader = new LazyStructReader(decoder, false)
+  while (reader.curr) {
+    const curr = reader.curr
+    const currClient = curr.id.client
+    const svClock = state.get(currClient) || 0
+    if (reader.curr.constructor === Skip) {
+      // the first written struct shouldn't be a skip
+      reader.next()
+      continue
+    }
+    if (curr.id.clock + curr.length > svClock) {
+      writeStructToLazyStructWriter(lazyStructWriter, curr, math.max(svClock - curr.id.clock, 0))
+      reader.next()
+      while (reader.curr && reader.curr.id.client === currClient) {
+        writeStructToLazyStructWriter(lazyStructWriter, reader.curr, 0)
+        reader.next()
+      }
+    } else {
+      // read until something new comes up
+      while (reader.curr && reader.curr.id.client === currClient && reader.curr.id.clock + reader.curr.length <= svClock) {
+        reader.next()
+      }
+    }
+  }
+  finishLazyStructWriting(lazyStructWriter)
+  // write ds
+  const ds = readDeleteSet(decoder)
+  writeDeleteSet(encoder, ds)
+  return encoder.toUint8Array()
 }
 
+/**
+ * @param {Uint8Array} update
+ * @param {Uint8Array} sv
+ */
+export const diffUpdate = (update, sv) => diffUpdateV2(update, sv, UpdateDecoderV1, UpdateEncoderV1)
+
 /**
  * @param {LazyStructWriter} lazyWriter
  */
@@ -428,7 +471,7 @@ const writeStructToLazyStructWriter = (lazyWriter, struct, offset) => {
     // write next client
     lazyWriter.encoder.writeClient(struct.id.client)
     // write startClock
-    encoding.writeVarUint(lazyWriter.encoder.restEncoder, struct.id.clock)
+    encoding.writeVarUint(lazyWriter.encoder.restEncoder, struct.id.clock + offset)
   }
   struct.write(lazyWriter.encoder, offset)
   lazyWriter.written++
diff --git a/tests/testHelper.js b/tests/testHelper.js
index a438a31e..5ebb7836 100644
--- a/tests/testHelper.js
+++ b/tests/testHelper.js
@@ -34,7 +34,8 @@ export const encV1 = {
   mergeUpdates: Y.mergeUpdates,
   applyUpdate: Y.applyUpdate,
   logUpdate: Y.logUpdate,
-  updateEventName: 'update'
+  updateEventName: 'update',
+  diffUpdate: Y.diffUpdate
 }
 
 export const encV2 = {
@@ -42,7 +43,8 @@ export const encV2 = {
   mergeUpdates: Y.mergeUpdatesV2,
   applyUpdate: Y.applyUpdateV2,
   logUpdate: Y.logUpdateV2,
-  updateEventName: 'updateV2'
+  updateEventName: 'updateV2',
+  diffUpdate: Y.diffUpdateV2
 }
 
 export let enc = encV1
diff --git a/tests/updates.tests.js b/tests/updates.tests.js
index 0fd831ef..9c006642 100644
--- a/tests/updates.tests.js
+++ b/tests/updates.tests.js
@@ -1,6 +1,9 @@
 import * as t from 'lib0/testing.js'
 import { init, compare } from './testHelper.js' // eslint-disable-line
 import * as Y from '../src/index.js'
+import { readClientsStructRefs, readDeleteSet, UpdateDecoderV2, UpdateEncoderV2, writeDeleteSet } from '../src/internals.js'
+import * as encoding from 'lib0/encoding.js'
+import * as decoding from 'lib0/decoding.js'
 
 /**
  * @typedef {Object} Enc
@@ -13,6 +16,7 @@ import * as Y from '../src/index.js'
  * @property {function(Uint8Array):Uint8Array} Enc.encodeStateVectorFromUpdate
  * @property {string} Enc.updateEventName
  * @property {string} Enc.description
+ * @property {function(Uint8Array, Uint8Array):Uint8Array} Enc.diffUpdate
  */
 
 /**
@@ -27,7 +31,8 @@ const encV1 = {
   encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdate,
   encodeStateVector: Y.encodeStateVector,
   updateEventName: 'update',
-  description: 'V1'
+  description: 'V1',
+  diffUpdate: Y.diffUpdate
 }
 
 /**
@@ -40,9 +45,10 @@ const encV2 = {
   logUpdate: Y.logUpdateV2,
   parseUpdateMeta: Y.parseUpdateMetaV2,
   encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2,
-  encodeStateVector: Y.encodeStateVectorV2,
+  encodeStateVector: Y.encodeStateVector,
   updateEventName: 'updateV2',
-  description: 'V2'
+  description: 'V2',
+  diffUpdate: Y.diffUpdateV2
 }
 
 /**
@@ -50,7 +56,7 @@ const encV2 = {
  */
 const encDoc = {
   mergeUpdates: (updates) => {
-    const ydoc = new Y.Doc()
+    const ydoc = new Y.Doc({ gc: false })
     updates.forEach(update => {
       Y.applyUpdateV2(ydoc, update)
     })
@@ -61,9 +67,18 @@ const encDoc = {
   logUpdate: Y.logUpdateV2,
   parseUpdateMeta: Y.parseUpdateMetaV2,
   encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2,
-  encodeStateVector: Y.encodeStateVectorV2,
+  encodeStateVector: Y.encodeStateVector,
   updateEventName: 'updateV2',
-  description: 'Merge via Y.Doc'
+  description: 'Merge via Y.Doc',
+  /**
+   * @param {Uint8Array} update
+   * @param {Uint8Array} sv
+   */
+  diffUpdate: (update, sv) => {
+    const ydoc = new Y.Doc({ gc: false })
+    Y.applyUpdateV2(ydoc, update)
+    return Y.encodeStateAsUpdateV2(ydoc, sv)
+  }
 }
 
 const encoders = [encV1, encV2, encDoc]
@@ -101,8 +116,9 @@ export const testMergeUpdates = tc => {
  * @param {Y.Doc} ydoc
  * @param {Array} updates - expecting at least 4 updates
  * @param {Enc} enc
+ * @param {boolean} hasDeletes
  */
-const checkUpdateCases = (ydoc, updates, enc) => {
+const checkUpdateCases = (ydoc, updates, enc, hasDeletes) => {
   const cases = []
 
   // Case 1: Simple case, simply merge everything
@@ -135,14 +151,47 @@ const checkUpdateCases = (ydoc, updates, enc) => {
   // t.info('Target State: ')
   // enc.logUpdate(targetState)
 
-  cases.forEach((updates, i) => {
+  cases.forEach((mergedUpdates, i) => {
     // t.info('State Case $' + i + ':')
     // enc.logUpdate(updates)
-    const merged = new Y.Doc()
-    enc.applyUpdate(merged, updates)
+    const merged = new Y.Doc({ gc: false })
+    enc.applyUpdate(merged, mergedUpdates)
     t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray())
-    t.compare(enc.encodeStateVector(merged), enc.encodeStateVectorFromUpdate(updates))
-    const meta = enc.parseUpdateMeta(updates)
+    t.compare(enc.encodeStateVector(merged), enc.encodeStateVectorFromUpdate(mergedUpdates))
+
+    if (enc.updateEventName !== 'update') { // @todo should this also work on legacy updates?
+      for (let j = 1; j < updates.length; j++) {
+        const partMerged = enc.mergeUpdates(updates.slice(j))
+        const partMeta = enc.parseUpdateMeta(partMerged)
+        const targetSV = Y.encodeStateVectorFromUpdateV2(Y.mergeUpdatesV2(updates.slice(0, j)))
+        const diffed = enc.diffUpdate(mergedUpdates, targetSV)
+        const diffedMeta = enc.parseUpdateMeta(diffed)
+        const decDiffedSV = Y.decodeStateVector(enc.encodeStateVectorFromUpdate(diffed))
+        t.compare(partMeta, diffedMeta)
+        t.compare(decDiffedSV, partMeta.to)
+        {
+          // We can'd do the following
+          // - t.compare(diffed, mergedDeletes)
+          // because diffed contains the set of all deletes.
+          // So we add all deletes from `diffed` to `partDeletes` and compare then
+          const decoder = decoding.createDecoder(diffed)
+          const updateDecoder = new UpdateDecoderV2(decoder)
+          readClientsStructRefs(updateDecoder, new Y.Doc())
+          const ds = readDeleteSet(updateDecoder)
+          const updateEncoder = new UpdateEncoderV2()
+          encoding.writeVarUint(updateEncoder.restEncoder, 0) // 0 structs
+          writeDeleteSet(updateEncoder, ds)
+          const deletesUpdate = updateEncoder.toUint8Array()
+          const mergedDeletes = Y.mergeUpdatesV2([deletesUpdate, partMerged])
+          if (!hasDeletes || enc !== encDoc) {
+            // deletes will almost definitely lead to different encoders because of the mergeStruct feature that is present in encDoc
+            t.compare(diffed, mergedDeletes)
+          }
+        }
+      }
+    }
+
+    const meta = enc.parseUpdateMeta(mergedUpdates)
     meta.from.forEach((clock, client) => t.assert(clock === 0))
    meta.to.forEach((clock, client) => {
       const structs = /** @type {Array} */ (merged.store.clients.get(client))
@@ -168,7 +217,7 @@ export const testMergeUpdates1 = tc => {
     array.insert(0, [3])
     array.insert(0, [4])
 
-    checkUpdateCases(ydoc, updates, enc)
+    checkUpdateCases(ydoc, updates, enc, false)
   })
 }
 
@@ -188,7 +237,7 @@ export const testMergeUpdates2 = tc => {
     array.insert(0, [3, 4])
     array.delete(1, 2)
 
-    checkUpdateCases(ydoc, updates, enc)
+    checkUpdateCases(ydoc, updates, enc, true)
  })
 }
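
A rough usage sketch (illustrative only, not part of the patch) of how the update-level API added in this series -- Y.mergeUpdatesV2, Y.encodeStateVectorFromUpdateV2 and Y.diffUpdateV2 -- is expected to compose, assuming the exports shown in the diffs above:

    import * as Y from 'yjs'

    const ydoc = new Y.Doc()
    const updates = []
    // collect incremental V2 updates as they are produced
    ydoc.on('updateV2', update => { updates.push(update) })
    ydoc.getArray('arr').insert(0, [1])
    ydoc.getArray('arr').insert(0, [2])

    // merge the raw updates without instantiating another Y.Doc
    const merged = Y.mergeUpdatesV2(updates)
    // state vector of a peer that only received the first update
    const sv = Y.encodeStateVectorFromUpdateV2(updates[0])
    // compute only the part of `merged` that this peer is still missing
    const missing = Y.diffUpdateV2(merged, sv)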