Merge pull request #274 from yjs/differential-updates-263
Differential updates
commit fdf632f03e

src/index.js | 18
@@ -55,16 +55,14 @@ export {
   encodeStateAsUpdate,
   encodeStateAsUpdateV2,
   encodeStateVector,
-  encodeStateVectorV2,
   UndoManager,
   decodeSnapshot,
   encodeSnapshot,
   decodeSnapshotV2,
   encodeSnapshotV2,
   decodeStateVector,
-  decodeStateVectorV2,
-  encodeRelativePosition,
-  decodeRelativePosition,
+  logUpdate,
+  logUpdateV2,
   relativePositionToJSON,
   isDeleted,
   isParentOf,
@@ -73,5 +71,15 @@ export {
   tryGc,
   transact,
   AbstractConnector,
-  logType
+  logType,
+  mergeUpdates,
+  mergeUpdatesV2,
+  parseUpdateMeta,
+  parseUpdateMetaV2,
+  encodeStateVectorFromUpdate,
+  encodeStateVectorFromUpdateV2,
+  encodeRelativePosition,
+  decodeRelativePosition,
+  diffUpdate,
+  diffUpdateV2
 } from './internals.js'
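The exports added above form the update-level API that this PR introduces. A minimal consumer-style sketch of how they combine (the `import * as Y from 'yjs'` form and the variable names are illustrative assumptions, not part of the diff):

    import * as Y from 'yjs'

    const doc = new Y.Doc()
    doc.getText('t').insert(0, 'hello')
    const sv = Y.encodeStateVector(doc)           // state vector after the first edit
    doc.getText('t').insert(5, ' world')

    const full = Y.encodeStateAsUpdate(doc)       // whole document as a single update
    const diff = Y.diffUpdate(full, sv)           // only the part that `sv` is missing
    const merged = Y.mergeUpdates([full, diff])   // combine updates without instantiating a Y.Doc
    console.log(Y.encodeStateVectorFromUpdate(merged)) // state vector computed from the update alone
    console.log(Y.parseUpdateMeta(merged))             // per-client clock ranges ({ from, to })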
@@ -15,6 +15,7 @@ export * from './utils/Snapshot.js'
 export * from './utils/StructStore.js'
 export * from './utils/Transaction.js'
 export * from './utils/UndoManager.js'
+export * from './utils/updates.js'
 export * from './utils/YEvent.js'

 export * from './types/AbstractType.js'
@@ -39,3 +40,4 @@ export * from './structs/ContentAny.js'
 export * from './structs/ContentString.js'
 export * from './structs/ContentType.js'
 export * from './structs/Item.js'
+export * from './structs/Skip.js'
@@ -1,6 +1,6 @@

 import {
-  AbstractUpdateEncoder, ID, Transaction // eslint-disable-line
+  UpdateEncoderV1, UpdateEncoderV2, ID, Transaction // eslint-disable-line
 } from '../internals.js'

 import * as error from 'lib0/error.js'
@@ -34,7 +34,7 @@ export class AbstractStruct {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder The encoder to write data to.
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
    * @param {number} offset
    * @param {number} encodingRef
    */
@@ -1,5 +1,5 @@
 import {
-  AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, StructStore // eslint-disable-line
+  UpdateEncoderV1, UpdateEncoderV2, UpdateDecoderV1, UpdateDecoderV2, Transaction, Item, StructStore // eslint-disable-line
 } from '../internals.js'

 export class ContentAny {
@@ -74,7 +74,7 @@ export class ContentAny {
    */
   gc (store) {}
   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -95,7 +95,7 @@ export class ContentAny {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {ContentAny}
  */
 export const readContentAny = decoder => {
@@ -1,5 +1,5 @@
 import {
-  AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
 } from '../internals.js'

 import * as error from 'lib0/error.js'
@@ -70,7 +70,7 @@ export class ContentBinary {
    */
   gc (store) {}
   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -86,7 +86,7 @@ export class ContentBinary {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder
  * @return {ContentBinary}
  */
 export const readContentBinary = decoder => new ContentBinary(decoder.readBuf())
@@ -1,7 +1,7 @@

 import {
   addToDeleteSet,
-  AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
 } from '../internals.js'

 export class ContentDeleted {
@@ -77,7 +77,7 @@ export class ContentDeleted {
    */
   gc (store) {}
   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -95,7 +95,7 @@ export class ContentDeleted {
 /**
  * @private
  *
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder
  * @return {ContentDeleted}
  */
 export const readContentDeleted = decoder => new ContentDeleted(decoder.readLen())
@@ -1,6 +1,6 @@

 import {
-  Doc, AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Transaction, Item // eslint-disable-line
+  Doc, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item // eslint-disable-line
 } from '../internals.js'

 import * as error from 'lib0/error.js'
@@ -110,7 +110,7 @@ export class ContentDoc {
   gc (store) { }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -129,7 +129,7 @@ export class ContentDoc {
 /**
  * @private
  *
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {ContentDoc}
  */
 export const readContentDoc = decoder => new ContentDoc(new Doc({ guid: decoder.readString(), ...decoder.readAny() }))
@@ -1,6 +1,6 @@

 import {
-  AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Item, Transaction // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
 } from '../internals.js'

 import * as error from 'lib0/error.js'
@@ -74,7 +74,7 @@ export class ContentEmbed {
    */
   gc (store) {}
   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -92,7 +92,7 @@ export class ContentEmbed {
 /**
  * @private
  *
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {ContentEmbed}
  */
 export const readContentEmbed = decoder => new ContentEmbed(decoder.readJSON())
@@ -1,6 +1,6 @@

 import {
-  AbstractType, AbstractUpdateDecoder, AbstractUpdateEncoder, Item, StructStore, Transaction // eslint-disable-line
+  AbstractType, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Item, StructStore, Transaction // eslint-disable-line
 } from '../internals.js'

 import * as error from 'lib0/error.js'
@@ -80,7 +80,7 @@ export class ContentFormat {
    */
   gc (store) {}
   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -97,7 +97,7 @@ export class ContentFormat {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {ContentFormat}
  */
 export const readContentFormat = decoder => new ContentFormat(decoder.readString(), decoder.readJSON())
@@ -1,5 +1,5 @@
 import {
-  AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, StructStore // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line
 } from '../internals.js'

 /**
@@ -77,7 +77,7 @@ export class ContentJSON {
    */
   gc (store) {}
   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -100,7 +100,7 @@ export class ContentJSON {
 /**
  * @private
  *
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {ContentJSON}
  */
 export const readContentJSON = decoder => {
@@ -1,5 +1,5 @@
 import {
-  AbstractUpdateDecoder, AbstractUpdateEncoder, Transaction, Item, StructStore // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line
 } from '../internals.js'

 /**
@@ -88,7 +88,7 @@ export class ContentString {
    */
   gc (store) {}
   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -106,7 +106,7 @@ export class ContentString {
 /**
  * @private
  *
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {ContentString}
  */
 export const readContentString = decoder => new ContentString(decoder.readString())
@@ -7,13 +7,13 @@ import {
   readYXmlFragment,
   readYXmlHook,
   readYXmlText,
-  AbstractUpdateDecoder, AbstractUpdateEncoder, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line
 } from '../internals.js'

 import * as error from 'lib0/error.js'

 /**
- * @type {Array<function(AbstractUpdateDecoder):AbstractType<any>>}
+ * @type {Array<function(UpdateDecoderV1 | UpdateDecoderV2):AbstractType<any>>}
  * @private
  */
 export const typeRefs = [
@@ -148,7 +148,7 @@ export class ContentType {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -166,7 +166,7 @@ export class ContentType {
 /**
  * @private
  *
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {ContentType}
  */
 export const readContentType = decoder => new ContentType(typeRefs[decoder.readTypeRef()](decoder))
@@ -2,7 +2,7 @@
 import {
   AbstractStruct,
   addStruct,
-  AbstractUpdateEncoder, StructStore, Transaction, ID // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line
 } from '../internals.js'

 export const structGCRefNumber = 0
@@ -22,6 +22,9 @@ export class GC extends AbstractStruct {
    * @return {boolean}
    */
   mergeWith (right) {
+    if (this.constructor !== right.constructor) {
+      return false
+    }
     this.length += right.length
     return true
   }
@@ -39,7 +42,7 @@ export class GC extends AbstractStruct {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -22,7 +22,7 @@ import {
   readContentFormat,
   readContentType,
   addChangedTypeToTransaction,
-  AbstractUpdateDecoder, AbstractUpdateEncoder, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line
 } from '../internals.js'

 import * as error from 'lib0/error.js'
@@ -554,6 +554,7 @@ export class Item extends AbstractStruct {
    */
   mergeWith (right) {
     if (
+      this.constructor === right.constructor &&
       compareIDs(right.origin, this.lastId) &&
       this.right === right &&
       compareIDs(this.rightOrigin, right.rightOrigin) &&
@@ -619,7 +620,7 @@ export class Item extends AbstractStruct {
    *
    * This is called when this Item is sent to a remote peer.
    *
-   * @param {AbstractUpdateEncoder} encoder The encoder to write data to.
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
    * @param {number} offset
    */
   write (encoder, offset) {
@@ -639,6 +640,7 @@ export class Item extends AbstractStruct {
     }
     if (origin === null && rightOrigin === null) {
       const parent = /** @type {AbstractType<any>} */ (this.parent)
+      if (parent._item !== undefined) {
        const parentItem = parent._item
        if (parentItem === null) {
          // parent type on y._map
@@ -650,6 +652,15 @@ export class Item extends AbstractStruct {
          encoder.writeParentInfo(false) // write parent id
          encoder.writeLeftID(parentItem.id)
        }
+      } else if (parent.constructor === String) { // this edge case was added by differential updates
+        encoder.writeParentInfo(true) // write parentYKey
+        encoder.writeString(parent)
+      } else if (parent.constructor === ID) {
+        encoder.writeParentInfo(false) // write parent id
+        encoder.writeLeftID(parent)
+      } else {
+        error.unexpectedCase()
+      }
       if (parentSub !== null) {
         encoder.writeString(parentSub)
       }
@@ -659,7 +670,7 @@ export class Item extends AbstractStruct {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @param {number} info
  */
 export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS5](decoder)
@@ -667,10 +678,10 @@ export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS
 /**
  * A lookup map for reading Item content.
  *
- * @type {Array<function(AbstractUpdateDecoder):AbstractContent>}
+ * @type {Array<function(UpdateDecoderV1 | UpdateDecoderV2):AbstractContent>}
  */
 export const contentRefs = [
-  () => { throw error.unexpectedCase() }, // GC is not ItemContent
+  () => { error.unexpectedCase() }, // GC is not ItemContent
   readContentDeleted, // 1
   readContentJSON, // 2
   readContentBinary, // 3
@@ -679,7 +690,8 @@ export const contentRefs = [
   readContentFormat, // 6
   readContentType, // 7
   readContentAny, // 8
-  readContentDoc // 9
+  readContentDoc, // 9
+  () => { error.unexpectedCase() } // 10 - Skip is not ItemContent
 ]

 /**
@@ -759,7 +771,7 @@ export class AbstractContent {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    * @param {number} offset
    */
   write (encoder, offset) {
src/structs/Skip.js | 60 (new file)
@@ -0,0 +1,60 @@
+
+import {
+  AbstractStruct,
+  UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line
+} from '../internals.js'
+import * as error from 'lib0/error.js'
+import * as encoding from 'lib0/encoding.js'
+
+export const structSkipRefNumber = 10
+
+/**
+ * @private
+ */
+export class Skip extends AbstractStruct {
+  get deleted () {
+    return true
+  }
+
+  delete () {}
+
+  /**
+   * @param {Skip} right
+   * @return {boolean}
+   */
+  mergeWith (right) {
+    if (this.constructor !== right.constructor) {
+      return false
+    }
+    this.length += right.length
+    return true
+  }
+
+  /**
+   * @param {Transaction} transaction
+   * @param {number} offset
+   */
+  integrate (transaction, offset) {
+    // skip structs cannot be integrated
+    error.unexpectedCase()
+  }
+
+  /**
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
+   * @param {number} offset
+   */
+  write (encoder, offset) {
+    encoder.writeInfo(structSkipRefNumber)
+    // write as VarUint because Skips can't make use of predictable length-encoding
+    encoding.writeVarUint(encoder.restEncoder, this.length - offset)
+  }
+
+  /**
+   * @param {Transaction} transaction
+   * @param {StructStore} store
+   * @return {null | number}
+   */
+  getMissing (transaction, store) {
+    return null
+  }
+}
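As I read the design, Skip is what lets a single update describe a non-contiguous clock range: a Skip occupies a length of clocks but carries no content, so updates produced by diffing or merging stay well-formed even when something in the middle is missing. A hedged sketch using the new public API (variable names are mine, and the Skip behavior is my reading, not asserted by the diff):

    import * as Y from 'yjs'

    const doc = new Y.Doc()
    doc.getArray('a').insert(0, [1])
    const u1 = Y.encodeStateAsUpdateV2(doc)                       // revision 1
    doc.getArray('a').insert(1, [2])
    const sv2 = Y.encodeStateVector(doc)
    doc.getArray('a').insert(2, [3])
    const u3 = Y.diffUpdateV2(Y.encodeStateAsUpdateV2(doc), sv2)  // revision 3 only

    // u1 and u3 leave out revision 2; merging them should still yield one
    // well-formed update because the gap can be encoded as a Skip struct.
    const merged = Y.mergeUpdatesV2([u1, u3])
    Y.logUpdateV2(merged)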
@@ -11,7 +11,7 @@ import {
   ContentAny,
   ContentBinary,
   getItemCleanStart,
-  ContentDoc, YText, YArray, AbstractUpdateEncoder, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line
+  ContentDoc, YText, YArray, UpdateEncoderV1, UpdateEncoderV2, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line
 } from '../internals.js'

 import * as map from 'lib0/map.js'
@@ -324,7 +324,7 @@ export class AbstractType {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    */
   _write (encoder) { }

@@ -15,7 +15,7 @@ import {
   YArrayRefID,
   callTypeObservers,
   transact,
-  ArraySearchMarker, AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, Transaction, Item // eslint-disable-line
+  ArraySearchMarker, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line
 } from '../internals.js'
 import { typeListSlice } from './AbstractType.js'

@@ -241,7 +241,7 @@ export class YArray extends AbstractType {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    */
   _write (encoder) {
     encoder.writeTypeRef(YArrayRefID)
@@ -249,7 +249,7 @@ export class YArray extends AbstractType {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  *
  * @private
  * @function
@@ -14,7 +14,7 @@ import {
   YMapRefID,
   callTypeObservers,
   transact,
-  AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, Transaction, Item // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line
 } from '../internals.js'

 import * as iterator from 'lib0/iterator.js'
@@ -238,7 +238,7 @@ export class YMap extends AbstractType {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    */
   _write (encoder) {
     encoder.writeTypeRef(YMapRefID)
@@ -246,7 +246,7 @@ export class YMap extends AbstractType {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  *
  * @private
  * @function
@@ -26,7 +26,7 @@ import {
   typeMapGet,
   typeMapGetAll,
   updateMarkerChanges,
-  ArraySearchMarker, AbstractUpdateDecoder, AbstractUpdateEncoder, ID, Doc, Item, Snapshot, Transaction // eslint-disable-line
+  ArraySearchMarker, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ID, Doc, Item, Snapshot, Transaction // eslint-disable-line
 } from '../internals.js'

 import * as object from 'lib0/object.js'
@@ -1205,7 +1205,7 @@ export class YText extends AbstractType {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    */
   _write (encoder) {
     encoder.writeTypeRef(YTextRefID)
@@ -1213,7 +1213,7 @@ export class YText extends AbstractType {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {YText}
  *
  * @private
@@ -8,7 +8,7 @@ import {
   typeMapGetAll,
   typeListForEach,
   YXmlElementRefID,
-  YXmlText, ContentType, AbstractType, AbstractUpdateDecoder, AbstractUpdateEncoder, Snapshot, Doc, Item // eslint-disable-line
+  YXmlText, ContentType, AbstractType, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Snapshot, Doc, Item // eslint-disable-line
 } from '../internals.js'

 /**
@@ -208,7 +208,7 @@ export class YXmlElement extends YXmlFragment {
    *
    * This is called when this Item is sent to a remote peer.
    *
-   * @param {AbstractUpdateEncoder} encoder The encoder to write data to.
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
    */
   _write (encoder) {
     encoder.writeTypeRef(YXmlElementRefID)
@@ -217,7 +217,7 @@ export class YXmlElement extends YXmlFragment {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {YXmlElement}
  *
  * @function
@@ -17,7 +17,7 @@ import {
   transact,
   typeListGet,
   typeListSlice,
-  AbstractUpdateDecoder, AbstractUpdateEncoder, Doc, ContentType, Transaction, Item, YXmlText, YXmlHook, Snapshot // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, ContentType, Transaction, Item, YXmlText, YXmlHook, Snapshot // eslint-disable-line
 } from '../internals.js'

 import * as error from 'lib0/error.js'
@@ -410,7 +410,7 @@ export class YXmlFragment extends AbstractType {
    *
    * This is called when this Item is sent to a remote peer.
    *
-   * @param {AbstractUpdateEncoder} encoder The encoder to write data to.
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
    */
   _write (encoder) {
     encoder.writeTypeRef(YXmlFragmentRefID)
@@ -418,7 +418,7 @@ export class YXmlFragment extends AbstractType {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {YXmlFragment}
  *
  * @private
@@ -2,7 +2,7 @@
 import {
   YMap,
   YXmlHookRefID,
-  AbstractUpdateDecoder, AbstractUpdateEncoder // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line
 } from '../internals.js'

 /**
@@ -76,7 +76,7 @@ export class YXmlHook extends YMap {
    *
    * This is called when this Item is sent to a remote peer.
    *
-   * @param {AbstractUpdateEncoder} encoder The encoder to write data to.
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
    */
   _write (encoder) {
     encoder.writeTypeRef(YXmlHookRefID)
@@ -85,7 +85,7 @@ export class YXmlHook extends YMap {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {YXmlHook}
  *
  * @private
@@ -2,7 +2,7 @@
 import {
   YText,
   YXmlTextRefID,
-  ContentType, YXmlElement, AbstractUpdateDecoder, AbstractUpdateEncoder // eslint-disable-line
+  ContentType, YXmlElement, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, // eslint-disable-line
 } from '../internals.js'

 /**
@@ -104,7 +104,7 @@ export class YXmlText extends YText {
   }

   /**
-   * @param {AbstractUpdateEncoder} encoder
+   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
    */
   _write (encoder) {
     encoder.writeTypeRef(YXmlTextRefID)
@@ -112,7 +112,7 @@ export class YXmlText extends YText {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
  * @return {YXmlText}
  *
  * @private
@@ -4,7 +4,8 @@ import {
   getState,
   splitItem,
   iterateStructs,
-  AbstractUpdateDecoder, AbstractDSDecoder, AbstractDSEncoder, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line
+  UpdateEncoderV2,
+  DSDecoderV1, DSEncoderV1, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line
 } from '../internals.js'

 import * as array from 'lib0/array.js'
@@ -121,8 +122,8 @@ export const sortAndMergeDeleteSet = ds => {
     for (i = 1, j = 1; i < dels.length; i++) {
       const left = dels[j - 1]
       const right = dels[i]
-      if (left.clock + left.len === right.clock) {
-        left.len += right.len
+      if (left.clock + left.len >= right.clock) {
+        left.len = math.max(left.len, right.clock + right.len - left.clock)
       } else {
         if (j < i) {
           dels[j] = right
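The two changed lines above switch sortAndMergeDeleteSet from merging only exactly adjacent ranges to also folding overlapping or contained ranges. A standalone sketch of that merge rule (plain Math.max instead of lib0's math.max, data invented for illustration):

    const mergeRanges = dels => {
      dels.sort((a, b) => a.clock - b.clock)
      let j = 1
      for (let i = 1; i < dels.length; i++) {
        const left = dels[j - 1]
        const right = dels[i]
        if (left.clock + left.len >= right.clock) {
          // extend `left` so it also covers `right`, even when they overlap
          left.len = Math.max(left.len, right.clock + right.len - left.clock)
        } else {
          dels[j++] = right
        }
      }
      dels.length = j
      return dels
    }
    console.log(mergeRanges([{ clock: 0, len: 3 }, { clock: 2, len: 4 }, { clock: 10, len: 1 }]))
    // -> [ { clock: 0, len: 6 }, { clock: 10, len: 1 } ]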
@@ -210,7 +211,7 @@ export const createDeleteSetFromStructStore = ss => {
 }

 /**
- * @param {AbstractDSEncoder} encoder
+ * @param {DSEncoderV1 | DSEncoderV2} encoder
  * @param {DeleteSet} ds
  *
  * @private
@@ -232,7 +233,7 @@ export const writeDeleteSet = (encoder, ds) => {
 }

 /**
- * @param {AbstractDSDecoder} decoder
+ * @param {DSDecoderV1 | DSDecoderV2} decoder
  * @return {DeleteSet}
  *
  * @private
@@ -260,9 +261,10 @@ export const readDeleteSet = decoder => {
  */

 /**
- * @param {AbstractDSDecoder} decoder
+ * @param {DSDecoderV1 | DSDecoderV2} decoder
  * @param {Transaction} transaction
  * @param {StructStore} store
+ * @return {Uint8Array|null} Returns a v2 update containing all deletes that couldn't be applied yet; or null if all deletes were applied successfully.
  *
  * @private
  * @function
@@ -315,9 +317,10 @@ export const readAndApplyDeleteSet = (decoder, transaction, store) => {
       }
     }
     if (unappliedDS.clients.size > 0) {
-      // TODO: no need for encoding+decoding ds anymore
-      const unappliedDSEncoder = new DSEncoderV2()
-      writeDeleteSet(unappliedDSEncoder, unappliedDS)
-      store.pendingDeleteReaders.push(new DSDecoderV2(decoding.createDecoder((unappliedDSEncoder.toUint8Array()))))
+      const ds = new UpdateEncoderV2()
+      encoding.writeVarUint(ds.restEncoder, 0) // encode 0 structs
+      writeDeleteSet(ds, unappliedDS)
+      return ds.toUint8Array()
     }
+    return null
 }
@@ -14,9 +14,8 @@ import {
   getState,
   findIndexSS,
   UpdateEncoderV2,
-  DefaultDSEncoder,
   applyUpdateV2,
-  AbstractDSDecoder, AbstractDSEncoder, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line
+  DSEncoderV1, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item // eslint-disable-line
 } from '../internals.js'

 import * as map from 'lib0/map.js'
@@ -78,7 +77,7 @@ export const equalSnapshots = (snap1, snap2) => {

 /**
  * @param {Snapshot} snapshot
- * @param {AbstractDSEncoder} [encoder]
+ * @param {DSEncoderV1 | DSEncoderV2} [encoder]
  * @return {Uint8Array}
  */
 export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => {
@@ -91,11 +90,11 @@ export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => {
  * @param {Snapshot} snapshot
  * @return {Uint8Array}
  */
-export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DefaultDSEncoder())
+export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DSEncoderV1())

 /**
  * @param {Uint8Array} buf
- * @param {AbstractDSDecoder} [decoder]
+ * @param {DSDecoderV1 | DSDecoderV2} [decoder]
  * @return {Snapshot}
  */
 export const decodeSnapshotV2 = (buf, decoder = new DSDecoderV2(decoding.createDecoder(buf))) => {
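The public snapshot codecs touched here keep their existing shape; the change is only that the V1 encoder is now named explicitly instead of going through a switchable default. A consumer-style usage sketch (assuming the usual `yjs` package import):

    import * as Y from 'yjs'

    const doc = new Y.Doc({ gc: false })
    doc.getText('t').insert(0, 'hi')
    const snap = Y.snapshot(doc)
    const encoded = Y.encodeSnapshot(snap)        // DSEncoderV1 under the hood
    const decoded = Y.decodeSnapshot(encoded)
    console.log(Y.equalSnapshots(snap, decoded))  // true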
@@ -15,24 +15,13 @@ export class StructStore {
      */
     this.clients = new Map()
     /**
-     * Store incompleted struct reads here
-     * `i` denotes to the next read operation
-     * We could shift the array of refs instead, but shift is incredible
-     * slow in Chrome for arrays with more than 100k elements
-     * @see tryResumePendingStructRefs
-     * @type {Map<number,{i:number,refs:Array<GC|Item>}>}
+     * @type {null | { missing: Map<number, number>, update: Uint8Array }}
      */
-    this.pendingClientsStructRefs = new Map()
+    this.pendingStructs = null
     /**
-     * Stack of pending structs waiting for struct dependencies
-     * Maximum length of stack is structReaders.size
-     * @type {Array<GC|Item>}
+     * @type {null | Uint8Array}
      */
-    this.pendingStack = []
-    /**
-     * @type {Array<DSDecoderV2>}
-     */
-    this.pendingDeleteReaders = []
+    this.pendingDs = null
   }
 }

@@ -11,7 +11,7 @@ import {
   Item,
   generateNewClientId,
   createID,
-  AbstractUpdateEncoder, GC, StructStore, UpdateEncoderV2, DefaultUpdateEncoder, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line
+  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, GC, StructStore, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line
 } from '../internals.js'

 import * as map from 'lib0/map.js'
@@ -118,7 +118,7 @@ export class Transaction {
 }

 /**
- * @param {AbstractUpdateEncoder} encoder
+ * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
  * @param {Transaction} transaction
  * @return {boolean} Whether data was written.
  */
@@ -337,7 +337,7 @@ const cleanupTransactions = (transactionCleanups, i) => {
         // @todo Merge all the transactions into one and provide send the data as a single update message
         doc.emit('afterTransactionCleanup', [transaction, doc])
         if (doc._observers.has('update')) {
-          const encoder = new DefaultUpdateEncoder()
+          const encoder = new UpdateEncoderV1()
           const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
           if (hasContent) {
             doc.emit('update', [encoder.toUint8Array(), transaction.origin, doc, transaction])
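The 'update' event is therefore always V1-encoded. A usage sketch of the two document-level update events (the 'updateV2' counterpart also exists in yjs; it is not part of this hunk):

    import * as Y from 'yjs'

    const doc = new Y.Doc()
    doc.on('update', (update, origin) => {
      // V1-encoded, as produced by UpdateEncoderV1 above; apply on a peer with Y.applyUpdate
    })
    doc.on('updateV2', (update, origin) => {
      // V2-encoded; apply on a peer with Y.applyUpdateV2
    })
    doc.getText('t').insert(0, 'hi') // triggers the handlers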
@ -1,129 +1,9 @@
|
|||||||
import * as buffer from 'lib0/buffer.js'
|
import * as buffer from 'lib0/buffer.js'
|
||||||
import * as error from 'lib0/error.js'
|
|
||||||
import * as decoding from 'lib0/decoding.js'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
import {
|
import {
|
||||||
ID, createID
|
ID, createID
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
export class AbstractDSDecoder {
|
|
||||||
/**
|
|
||||||
* @param {decoding.Decoder} decoder
|
|
||||||
*/
|
|
||||||
constructor (decoder) {
|
|
||||||
this.restDecoder = decoder
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
resetDsCurVal () { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
readDsClock () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
readDsLen () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class AbstractUpdateDecoder extends AbstractDSDecoder {
|
|
||||||
/**
|
|
||||||
* @return {ID}
|
|
||||||
*/
|
|
||||||
readLeftID () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {ID}
|
|
||||||
*/
|
|
||||||
readRightID () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Read the next client id.
|
|
||||||
* Use this in favor of readID whenever possible to reduce the number of objects created.
|
|
||||||
*
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
readClient () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
readInfo () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {string}
|
|
||||||
*/
|
|
||||||
readString () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {boolean} isKey
|
|
||||||
*/
|
|
||||||
readParentInfo () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
readTypeRef () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write len of a struct - well suited for Opt RLE encoder.
|
|
||||||
*
|
|
||||||
* @return {number} len
|
|
||||||
*/
|
|
||||||
readLen () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {any}
|
|
||||||
*/
|
|
||||||
readAny () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
readBuf () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Legacy implementation uses JSON parse. We use any-decoding in v2.
|
|
||||||
*
|
|
||||||
* @return {any}
|
|
||||||
*/
|
|
||||||
readJSON () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {string}
|
|
||||||
*/
|
|
||||||
readKey () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class DSDecoderV1 {
|
export class DSDecoderV1 {
|
||||||
/**
|
/**
|
||||||
* @param {decoding.Decoder} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
@ -247,6 +127,9 @@ export class DSDecoderV2 {
|
|||||||
* @param {decoding.Decoder} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
*/
|
*/
|
||||||
constructor (decoder) {
|
constructor (decoder) {
|
||||||
|
/**
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
this.dsCurrVal = 0
|
this.dsCurrVal = 0
|
||||||
this.restDecoder = decoder
|
this.restDecoder = decoder
|
||||||
}
|
}
|
||||||
@ -255,11 +138,17 @@ export class DSDecoderV2 {
|
|||||||
this.dsCurrVal = 0
|
this.dsCurrVal = 0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @return {number}
|
||||||
|
*/
|
||||||
readDsClock () {
|
readDsClock () {
|
||||||
this.dsCurrVal += decoding.readVarUint(this.restDecoder)
|
this.dsCurrVal += decoding.readVarUint(this.restDecoder)
|
||||||
return this.dsCurrVal
|
return this.dsCurrVal
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @return {number}
|
||||||
|
*/
|
||||||
readDsLen () {
|
readDsLen () {
|
||||||
const diff = decoding.readVarUint(this.restDecoder) + 1
|
const diff = decoding.readVarUint(this.restDecoder) + 1
|
||||||
this.dsCurrVal += diff
|
this.dsCurrVal += diff
|
||||||
@ -280,7 +169,7 @@ export class UpdateDecoderV2 extends DSDecoderV2 {
|
|||||||
* @type {Array<string>}
|
* @type {Array<string>}
|
||||||
*/
|
*/
|
||||||
this.keys = []
|
this.keys = []
|
||||||
decoding.readUint8(decoder) // read feature flag - currently unused
|
decoding.readVarUint(decoder) // read feature flag - currently unused
|
||||||
this.keyClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
this.keyClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||||
this.clientDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder))
|
this.clientDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||||
this.leftClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
this.leftClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||||
|
@ -6,110 +6,9 @@ import {
|
|||||||
ID // eslint-disable-line
|
ID // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
export class AbstractDSEncoder {
|
|
||||||
constructor () {
|
|
||||||
this.restEncoder = encoding.createEncoder()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
toUint8Array () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Resets the ds value to 0.
|
|
||||||
* The v2 encoder uses this information to reset the initial diff value.
|
|
||||||
*/
|
|
||||||
resetDsCurVal () { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} clock
|
|
||||||
*/
|
|
||||||
writeDsClock (clock) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} len
|
|
||||||
*/
|
|
||||||
writeDsLen (len) { }
|
|
||||||
}
|
|
||||||
|
|
||||||
export class AbstractUpdateEncoder extends AbstractDSEncoder {
|
|
||||||
/**
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
toUint8Array () {
|
|
||||||
error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ID} id
|
|
||||||
*/
|
|
||||||
writeLeftID (id) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ID} id
|
|
||||||
*/
|
|
||||||
writeRightID (id) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Use writeClient and writeClock instead of writeID if possible.
|
|
||||||
* @param {number} client
|
|
||||||
*/
|
|
||||||
writeClient (client) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
writeInfo (info) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {string} s
|
|
||||||
*/
|
|
||||||
writeString (s) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {boolean} isYKey
|
|
||||||
*/
|
|
||||||
writeParentInfo (isYKey) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
writeTypeRef (info) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write len of a struct - well suited for Opt RLE encoder.
|
|
||||||
*
|
|
||||||
* @param {number} len
|
|
||||||
*/
|
|
||||||
writeLen (len) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {any} any
|
|
||||||
*/
|
|
||||||
writeAny (any) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} buf
|
|
||||||
*/
|
|
||||||
writeBuf (buf) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {any} embed
|
|
||||||
*/
|
|
||||||
writeJSON (embed) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {string} key
|
|
||||||
*/
|
|
||||||
writeKey (key) { }
|
|
||||||
}
|
|
||||||
|
|
||||||
export class DSEncoderV1 {
|
export class DSEncoderV1 {
|
||||||
constructor () {
|
constructor () {
|
||||||
this.restEncoder = new encoding.Encoder()
|
this.restEncoder = encoding.createEncoder()
|
||||||
}
|
}
|
||||||
|
|
||||||
toUint8Array () {
|
toUint8Array () {
|
||||||
@ -228,7 +127,7 @@ export class UpdateEncoderV1 extends DSEncoderV1 {
|
|||||||
|
|
||||||
export class DSEncoderV2 {
|
export class DSEncoderV2 {
|
||||||
constructor () {
|
constructor () {
|
||||||
this.restEncoder = new encoding.Encoder() // encodes all the rest / non-optimized
|
this.restEncoder = encoding.createEncoder() // encodes all the rest / non-optimized
|
||||||
this.dsCurrVal = 0
|
this.dsCurrVal = 0
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -288,7 +187,7 @@ export class UpdateEncoderV2 extends DSEncoderV2 {
|
|||||||
|
|
||||||
toUint8Array () {
|
toUint8Array () {
|
||||||
const encoder = encoding.createEncoder()
|
const encoder = encoding.createEncoder()
|
||||||
encoding.writeUint8(encoder, 0) // this is a feature flag that we might use in the future
|
encoding.writeVarUint(encoder, 0) // this is a feature flag that we might use in the future
|
||||||
encoding.writeVarUint8Array(encoder, this.keyClockEncoder.toUint8Array())
|
encoding.writeVarUint8Array(encoder, this.keyClockEncoder.toUint8Array())
|
||||||
encoding.writeVarUint8Array(encoder, this.clientEncoder.toUint8Array())
|
encoding.writeVarUint8Array(encoder, this.clientEncoder.toUint8Array())
|
||||||
encoding.writeVarUint8Array(encoder, this.leftClockEncoder.toUint8Array())
|
encoding.writeVarUint8Array(encoder, this.leftClockEncoder.toUint8Array())
|
||||||
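Both ends of the V2 format now treat the leading feature flag as a VarUint (the matching UpdateDecoderV2 change is further up). A tiny round-trip sketch using the lib0 primitives that appear here:

    import * as encoding from 'lib0/encoding.js'
    import * as decoding from 'lib0/decoding.js'

    const enc = encoding.createEncoder()
    encoding.writeVarUint(enc, 0) // feature flag, currently always 0
    const dec = decoding.createDecoder(encoding.toUint8Array(enc))
    console.log(decoding.readVarUint(dec)) // 0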
|
@ -29,39 +29,23 @@ import {
|
|||||||
UpdateDecoderV2,
|
UpdateDecoderV2,
|
||||||
UpdateEncoderV1,
|
UpdateEncoderV1,
|
||||||
UpdateEncoderV2,
|
UpdateEncoderV2,
|
||||||
DSDecoderV2,
|
|
||||||
DSEncoderV2,
|
DSEncoderV2,
|
||||||
DSDecoderV1,
|
DSDecoderV1,
|
||||||
DSEncoderV1,
|
DSEncoderV1,
|
||||||
AbstractDSEncoder, AbstractDSDecoder, AbstractUpdateEncoder, AbstractUpdateDecoder, AbstractContent, Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line
|
mergeUpdatesV2,
|
||||||
|
Skip,
|
||||||
|
diffUpdateV2,
|
||||||
|
DSDecoderV2, Doc, Transaction, GC, Item, StructStore // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as encoding from 'lib0/encoding.js'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as decoding from 'lib0/decoding.js'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
import * as binary from 'lib0/binary.js'
|
import * as binary from 'lib0/binary.js'
|
||||||
import * as map from 'lib0/map.js'
|
import * as map from 'lib0/map.js'
|
||||||
|
import * as math from 'lib0/math.js'
|
||||||
export let DefaultDSEncoder = DSEncoderV1
|
|
||||||
export let DefaultDSDecoder = DSDecoderV1
|
|
||||||
export let DefaultUpdateEncoder = UpdateEncoderV1
|
|
||||||
export let DefaultUpdateDecoder = UpdateDecoderV1
|
|
||||||
|
|
||||||
export const useV1Encoding = () => {
|
|
||||||
DefaultDSEncoder = DSEncoderV1
|
|
||||||
DefaultDSDecoder = DSDecoderV1
|
|
||||||
DefaultUpdateEncoder = UpdateEncoderV1
|
|
||||||
DefaultUpdateDecoder = UpdateDecoderV1
|
|
||||||
}
|
|
||||||
|
|
||||||
export const useV2Encoding = () => {
|
|
||||||
DefaultDSEncoder = DSEncoderV2
|
|
||||||
DefaultDSDecoder = DSDecoderV2
|
|
||||||
DefaultUpdateEncoder = UpdateEncoderV2
|
|
||||||
DefaultUpdateDecoder = UpdateDecoderV2
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {AbstractUpdateEncoder} encoder
|
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||||
* @param {Array<GC|Item>} structs All structs by `client`
|
* @param {Array<GC|Item>} structs All structs by `client`
|
||||||
* @param {number} client
|
* @param {number} client
|
||||||
* @param {number} clock write structs starting with `ID(client,clock)`
|
* @param {number} clock write structs starting with `ID(client,clock)`
|
||||||
@ -70,6 +54,7 @@ export const useV2Encoding = () => {
|
|||||||
*/
|
*/
|
||||||
const writeStructs = (encoder, structs, client, clock) => {
|
const writeStructs = (encoder, structs, client, clock) => {
|
||||||
// write first id
|
// write first id
|
||||||
|
clock = math.max(clock, structs[0].id.clock) // make sure the first id exists
|
||||||
const startNewStructs = findIndexSS(structs, clock)
|
const startNewStructs = findIndexSS(structs, clock)
|
||||||
// write # encoded structs
|
// write # encoded structs
|
||||||
encoding.writeVarUint(encoder.restEncoder, structs.length - startNewStructs)
|
encoding.writeVarUint(encoder.restEncoder, structs.length - startNewStructs)
|
||||||
@@ -84,7 +69,7 @@ const writeStructs = (encoder, structs, client, clock) => {
 }

 /**
- * @param {AbstractUpdateEncoder} encoder
+ * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
  * @param {StructStore} store
  * @param {Map<number,number>} _sm
  *
@@ -116,15 +101,18 @@ export const writeClientsStructs = (encoder, store, _sm) => {
 }

 /**
- * @param {AbstractUpdateDecoder} decoder The decoder object to read data from.
- * @param {Map<number,Array<GC|Item>>} clientRefs
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from.
  * @param {Doc} doc
- * @return {Map<number,Array<GC|Item>>}
+ * @return {Map<number, { i: number, refs: Array<Item | GC> }>}
  *
  * @private
  * @function
  */
-export const readClientsStructRefs = (decoder, clientRefs, doc) => {
+export const readClientsStructRefs = (decoder, doc) => {
+  /**
+   * @type {Map<number, { i: number, refs: Array<Item | GC> }>}
+   */
+  const clientRefs = map.create()
   const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder)
   for (let i = 0; i < numOfStateUpdates; i++) {
     const numberOfStructs = decoding.readVarUint(decoder.restDecoder)
|
|||||||
const client = decoder.readClient()
|
const client = decoder.readClient()
|
||||||
let clock = decoding.readVarUint(decoder.restDecoder)
|
let clock = decoding.readVarUint(decoder.restDecoder)
|
||||||
// const start = performance.now()
|
// const start = performance.now()
|
||||||
clientRefs.set(client, refs)
|
clientRefs.set(client, { i: 0, refs })
|
||||||
for (let i = 0; i < numberOfStructs; i++) {
|
for (let i = 0; i < numberOfStructs; i++) {
|
||||||
const info = decoder.readInfo()
|
const info = decoder.readInfo()
|
||||||
if ((binary.BITS5 & info) !== 0) {
|
switch (binary.BITS5 & info) {
|
||||||
|
case 0: { // GC
|
||||||
|
const len = decoder.readLen()
|
||||||
|
refs[i] = new GC(createID(client, clock), len)
|
||||||
|
clock += len
|
||||||
|
break
|
||||||
|
}
|
||||||
|
case 10: { // Skip Struct (nothing to apply)
|
||||||
|
// @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing.
|
||||||
|
const len = decoding.readVarUint(decoder.restDecoder)
|
||||||
|
refs[i] = new Skip(createID(client, clock), len)
|
||||||
|
clock += len
|
||||||
|
break
|
||||||
|
}
|
||||||
|
default: { // Item with content
|
||||||
/**
|
/**
|
||||||
* The optimized implementation doesn't use any variables because inlining variables is faster.
|
* The optimized implementation doesn't use any variables because inlining variables is faster.
|
||||||
* Below a non-optimized version is shown that implements the basic algorithm with
|
* Below a non-optimized version is shown that implements the basic algorithm with
|
||||||
@ -186,10 +188,7 @@ export const readClientsStructRefs = (decoder, clientRefs, doc) => {
|
|||||||
*/
|
*/
|
||||||
refs[i] = struct
|
refs[i] = struct
|
||||||
clock += struct.length
|
clock += struct.length
|
||||||
} else {
|
}
|
||||||
const len = decoder.readLen()
|
|
||||||
refs[i] = new GC(createID(client, clock), len)
|
|
||||||
clock += len
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// console.log('time to read: ', performance.now() - start) // @todo remove
|
// console.log('time to read: ', performance.now() - start) // @todo remove
|
||||||
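For orientation, the low five bits of the info byte select which kind of struct `readClientsStructRefs` materializes. A minimal sketch of that dispatch (names taken from the diff above; this illustration is not part of the commit):

const structKind = info => {
  switch (binary.BITS5 & info) {
    case 0: return 'GC' // tombstoned run, only a length is encoded
    case 10: return 'Skip' // placeholder for a clock range that this update deliberately leaves out
    default: return 'Item' // regular item, content follows
  }
}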
@ -218,26 +217,32 @@ export const readClientsStructRefs = (decoder, clientRefs, doc) => {
  *
  * @param {Transaction} transaction
  * @param {StructStore} store
+ * @param {Map<number, { i: number, refs: (GC | Item)[] }>} clientsStructRefs
+ * @return { null | { update: Uint8Array, missing: Map<number,number> } }
  *
  * @private
  * @function
  */
-const resumeStructIntegration = (transaction, store) => {
+const integrateStructs = (transaction, store, clientsStructRefs) => {
-  const stack = store.pendingStack // @todo don't forget to append stackhead at the end
-  const clientsStructRefs = store.pendingClientsStructRefs
+  /**
+   * @type {Array<Item | GC>}
+   */
+  const stack = []
   // sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user.
-  const clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b)
+  let clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b)
   if (clientsStructRefsIds.length === 0) {
-    return
+    return null
   }
   const getNextStructTarget = () => {
+    if (clientsStructRefsIds.length === 0) {
+      return null
+    }
     let nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
     while (nextStructsTarget.refs.length === nextStructsTarget.i) {
       clientsStructRefsIds.pop()
       if (clientsStructRefsIds.length > 0) {
         nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
       } else {
-        store.pendingClientsStructRefs.clear()
         return null
       }
     }
@ -245,49 +250,87 @@ const resumeStructIntegration = (transaction, store) => {
   }
   let curStructsTarget = getNextStructTarget()
   if (curStructsTarget === null && stack.length === 0) {
-    return
+    return null
+  }

+  /**
+   * @type {StructStore}
+   */
+  const restStructs = new StructStore()
+  const missingSV = new Map()
+  /**
+   * @param {number} client
+   * @param {number} clock
+   */
+  const updateMissingSv = (client, clock) => {
+    const mclock = missingSV.get(client)
+    if (mclock == null || mclock > clock) {
+      missingSV.set(client, clock)
+    }
   }
   /**
    * @type {GC|Item}
    */
-  let stackHead = stack.length > 0
-    ? /** @type {GC|Item} */ (stack.pop())
-    : /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++]
+  let stackHead = /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++]
   // caching the state because it is used very often
   const state = new Map()

+  const addStackToRestSS = () => {
+    for (const item of stack) {
+      const client = item.id.client
+      const unapplicableItems = clientsStructRefs.get(client)
+      if (unapplicableItems) {
+        // decrement because we weren't able to apply previous operation
+        unapplicableItems.i--
+        restStructs.clients.set(client, unapplicableItems.refs.slice(unapplicableItems.i))
+        clientsStructRefs.delete(client)
+        unapplicableItems.i = 0
+        unapplicableItems.refs = []
+      } else {
+        // item was the last item on clientsStructRefs and the field was already cleared. Add item to restStructs and continue
+        restStructs.clients.set(client, [item])
+      }
+      // remove client from clientsStructRefsIds to prevent users from applying the same update again
+      clientsStructRefsIds = clientsStructRefsIds.filter(c => c !== client)
+    }
+    stack.length = 0
+  }

   // iterate over all struct readers until we are done
   while (true) {
+    if (stackHead.constructor !== Skip) {
       const localClock = map.setIfUndefined(state, stackHead.id.client, () => getState(store, stackHead.id.client))
-      const offset = stackHead.id.clock < localClock ? localClock - stackHead.id.clock : 0
+      const offset = localClock - stackHead.id.clock
-      if (stackHead.id.clock + offset !== localClock) {
+      if (offset < 0) {
-        // A previous message from this client is missing
+        // update from the same client is missing
-        // check if there is a pending structRef with a smaller clock and switch them
+        stack.push(stackHead)
+        updateMissingSv(stackHead.id.client, stackHead.id.clock - 1)
+        // hid a dead wall, add all items from stack to restSS
+        addStackToRestSS()
+      } else {
+        const missing = stackHead.getMissing(transaction, store)
+        if (missing !== null) {
+          stack.push(stackHead)
+          // get the struct reader that has the missing struct
          /**
           * @type {{ refs: Array<GC|Item>, i: number }}
           */
-          const structRefs = clientsStructRefs.get(stackHead.id.client) || { refs: [], i: 0 }
+          const structRefs = clientsStructRefs.get(/** @type {number} */ (missing)) || { refs: [], i: 0 }
-          if (structRefs.refs.length !== structRefs.i) {
+          if (structRefs.refs.length === structRefs.i) {
-            const r = structRefs.refs[structRefs.i]
+            // This update message causally depends on another update message that doesn't exist yet
-            if (r.id.clock < stackHead.id.clock) {
+            updateMissingSv(/** @type {number} */ (missing), getState(store, missing))
-              // put ref with smaller clock on stack instead and continue
+            addStackToRestSS()
-              structRefs.refs[structRefs.i] = stackHead
+          } else {
-              stackHead = r
+            stackHead = structRefs.refs[structRefs.i++]
-              // sort the set because this approach might bring the list out of order
-              structRefs.refs = structRefs.refs.slice(structRefs.i).sort((r1, r2) => r1.id.clock - r2.id.clock)
-              structRefs.i = 0
            continue
          }
-        }
+        } else if (offset === 0 || offset < stackHead.length) {
-          // wait until missing struct is available
+          // all fine, apply the stackhead
-          stack.push(stackHead)
-          return
-        }
-        const missing = stackHead.getMissing(transaction, store)
-        if (missing === null) {
-          if (offset === 0 || offset < stackHead.length) {
          stackHead.integrate(transaction, offset)
          state.set(stackHead.id.client, stackHead.id.clock + stackHead.length)
        }
+      }
+    }
    // iterate to next stackHead
    if (stack.length > 0) {
      stackHead = /** @type {GC|Item} */ (stack.pop())
@ -302,41 +345,20 @@ const resumeStructIntegration = (transaction, store) => {
      stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++])
    }
  }
-  } else {
-    // get the struct reader that has the missing struct
-    /**
-     * @type {{ refs: Array<GC|Item>, i: number }}
-     */
-    const structRefs = clientsStructRefs.get(missing) || { refs: [], i: 0 }
-    if (structRefs.refs.length === structRefs.i) {
-      // This update message causally depends on another update message.
-      stack.push(stackHead)
-      return
  }
-  stack.push(stackHead)
+  if (restStructs.clients.size > 0) {
-  stackHead = structRefs.refs[structRefs.i++]
+    const encoder = new UpdateEncoderV2()
+    writeClientsStructs(encoder, restStructs, new Map())
+    // write empty deleteset
+    // writeDeleteSet(encoder, new DeleteSet())
+    encoding.writeVarUint(encoder.restEncoder, 0) // => no need for an extra function call, just write 0 deletes
+    return { missing: missingSV, update: encoder.toUint8Array() }
  }
-  }
+  return null
-  store.pendingClientsStructRefs.clear()
}
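`integrateStructs` now returns its leftovers instead of stashing them on the store: `null` when everything applied, otherwise a V2-encoded `update` with the structs that could not be integrated plus a `missing` map holding the lowest missing clock per client. A hedged sketch of the caller's side (it mirrors what `readUpdateV2` does further down in this diff; not additional code in the commit):

const rest = integrateStructs(transaction, store, clientRefs)
if (rest !== null) {
  // rest.update  – V2 update holding the structs we could not integrate yet
  // rest.missing – Map<client, clock>: structs from `client` at `clock` must arrive first
  store.pendingStructs = rest
}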

 /**
- * @param {Transaction} transaction
+ * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
- * @param {StructStore} store
- *
- * @private
- * @function
- */
-export const tryResumePendingDeleteReaders = (transaction, store) => {
-  const pendingReaders = store.pendingDeleteReaders
-  store.pendingDeleteReaders = []
-  for (let i = 0; i < pendingReaders.length; i++) {
-    readAndApplyDeleteSet(pendingReaders[i], transaction, store)
-  }
-}
-
-/**
- * @param {AbstractUpdateEncoder} encoder
  * @param {Transaction} transaction
  *
  * @private
@ -344,78 +366,6 @@ export const tryResumePendingDeleteReaders = (transaction, store) => {
  */
 export const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState)

-/**
- * @param {StructStore} store
- * @param {Map<number, Array<GC|Item>>} clientsStructsRefs
- *
- * @private
- * @function
- */
-const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => {
-  const pendingClientsStructRefs = store.pendingClientsStructRefs
-  clientsStructsRefs.forEach((structRefs, client) => {
-    const pendingStructRefs = pendingClientsStructRefs.get(client)
-    if (pendingStructRefs === undefined) {
-      pendingClientsStructRefs.set(client, { refs: structRefs, i: 0 })
-    } else {
-      // merge into existing structRefs
-      const merged = pendingStructRefs.i > 0 ? pendingStructRefs.refs.slice(pendingStructRefs.i) : pendingStructRefs.refs
-      for (let i = 0; i < structRefs.length; i++) {
-        merged.push(structRefs[i])
-      }
-      pendingStructRefs.i = 0
-      pendingStructRefs.refs = merged.sort((r1, r2) => r1.id.clock - r2.id.clock)
-    }
-  })
-}
-
-/**
- * @param {Map<number,{refs:Array<GC|Item>,i:number}>} pendingClientsStructRefs
- */
-const cleanupPendingStructs = pendingClientsStructRefs => {
-  // cleanup pendingClientsStructs if not fully finished
-  pendingClientsStructRefs.forEach((refs, client) => {
-    if (refs.i === refs.refs.length) {
-      pendingClientsStructRefs.delete(client)
-    } else {
-      refs.refs.splice(0, refs.i)
-      refs.i = 0
-    }
-  })
-}
-
-/**
- * Read the next Item in a Decoder and fill this Item with the read data.
- *
- * This is called when data is received from a remote peer.
- *
- * @param {AbstractUpdateDecoder} decoder The decoder object to read data from.
- * @param {Transaction} transaction
- * @param {StructStore} store
- *
- * @private
- * @function
- */
-export const readStructs = (decoder, transaction, store) => {
-  const clientsStructRefs = new Map()
-  // let start = performance.now()
-  readClientsStructRefs(decoder, clientsStructRefs, transaction.doc)
-  // console.log('time to read structs: ', performance.now() - start) // @todo remove
-  // start = performance.now()
-  mergeReadStructsIntoPendingReads(store, clientsStructRefs)
-  // console.log('time to merge: ', performance.now() - start) // @todo remove
-  // start = performance.now()
-  resumeStructIntegration(transaction, store)
-  // console.log('time to integrate: ', performance.now() - start) // @todo remove
-  // start = performance.now()
-  cleanupPendingStructs(store.pendingClientsStructRefs)
-  // console.log('time to cleanup: ', performance.now() - start) // @todo remove
-  // start = performance.now()
-  tryResumePendingDeleteReaders(transaction, store)
-  // console.log('time to resume delete readers: ', performance.now() - start) // @todo remove
-  // start = performance.now()
-}

 /**
  * Read and apply a document update.
  *
@ -424,14 +374,75 @@ export const readStructs = (decoder, transaction, store) => {
  * @param {decoding.Decoder} decoder
  * @param {Doc} ydoc
  * @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
- * @param {AbstractUpdateDecoder} [structDecoder]
+ * @param {UpdateDecoderV1 | UpdateDecoderV2} [structDecoder]
  *
  * @function
  */
 export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = new UpdateDecoderV2(decoder)) =>
   transact(ydoc, transaction => {
-    readStructs(structDecoder, transaction, ydoc.store)
+    let retry = false
-    readAndApplyDeleteSet(structDecoder, transaction, ydoc.store)
+    const doc = transaction.doc
+    const store = doc.store
+    // let start = performance.now()
+    const ss = readClientsStructRefs(structDecoder, doc)
+    // console.log('time to read structs: ', performance.now() - start) // @todo remove
+    // start = performance.now()
+    // console.log('time to merge: ', performance.now() - start) // @todo remove
+    // start = performance.now()
+    const restStructs = integrateStructs(transaction, store, ss)
+    const pending = store.pendingStructs
+    if (pending) {
+      // check if we can apply something
+      for (const [client, clock] of pending.missing) {
+        if (clock < getState(store, client)) {
+          retry = true
+          break
+        }
+      }
+      if (restStructs) {
+        // merge restStructs into store.pending
+        for (const [client, clock] of restStructs.missing) {
+          const mclock = pending.missing.get(client)
+          if (mclock == null || mclock > clock) {
+            pending.missing.set(client, clock)
+          }
+        }
+        pending.update = mergeUpdatesV2([pending.update, restStructs.update])
+      }
+    } else {
+      store.pendingStructs = restStructs
+    }
+    // console.log('time to integrate: ', performance.now() - start) // @todo remove
+    // start = performance.now()
+    const dsRest = readAndApplyDeleteSet(structDecoder, transaction, store)
+    if (store.pendingDs) {
+      // @todo we could make a lower-bound state-vector check as we do above
+      const pendingDSUpdate = new UpdateDecoderV2(decoding.createDecoder(store.pendingDs))
+      decoding.readVarUint(pendingDSUpdate.restDecoder) // read 0 structs, because we only encode deletes in pendingdsupdate
+      const dsRest2 = readAndApplyDeleteSet(pendingDSUpdate, transaction, store)
+      if (dsRest && dsRest2) {
+        // case 1: ds1 != null && ds2 != null
+        store.pendingDs = mergeUpdatesV2([dsRest, dsRest2])
+      } else {
+        // case 2: ds1 != null
+        // case 3: ds2 != null
+        // case 4: ds1 == null && ds2 == null
+        store.pendingDs = dsRest || dsRest2
+      }
+    } else {
+      // Either dsRest == null && pendingDs == null OR dsRest != null
+      store.pendingDs = dsRest
+    }
+    // console.log('time to cleanup: ', performance.now() - start) // @todo remove
+    // start = performance.now()
+
+    // console.log('time to resume delete readers: ', performance.now() - start) // @todo remove
+    // start = performance.now()
+    if (retry) {
+      const update = /** @type {{update: Uint8Array}} */ (store.pendingStructs).update
+      store.pendingStructs = null
+      applyUpdateV2(transaction.doc, update)
+    }
   }, transactionOrigin, false)

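The net effect of the `pendingStructs` / `pendingDs` bookkeeping above is that updates can now arrive in any order: whatever cannot be integrated yet is buffered on the store and retried once its dependencies show up. A hedged usage sketch with the public API (illustrative, not part of the diff; `u1` and `u2` are assumed to be two consecutive V2 updates captured from `source.on('updateV2', u => ...)`):

const target = new Y.Doc()
Y.applyUpdateV2(target, u2) // depends on u1 → kept in target.store.pendingStructs
Y.applyUpdateV2(target, u1) // u1 integrates and the buffered update is applied as well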
 /**
@ -445,7 +456,7 @@ export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = n
  *
  * @function
  */
-export const readUpdate = (decoder, ydoc, transactionOrigin) => readUpdateV2(decoder, ydoc, transactionOrigin, new DefaultUpdateDecoder(decoder))
+export const readUpdate = (decoder, ydoc, transactionOrigin) => readUpdateV2(decoder, ydoc, transactionOrigin, new UpdateDecoderV1(decoder))

 /**
  * Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
@ -475,13 +486,13 @@ export const applyUpdateV2 = (ydoc, update, transactionOrigin, YDecoder = Update
  *
  * @function
  */
-export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, DefaultUpdateDecoder)
+export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, UpdateDecoderV1)

 /**
  * Write all the document as a single update message. If you specify the state of the remote client (`targetStateVector`) it will
  * only write the operations that are missing.
  *
- * @param {AbstractUpdateEncoder} encoder
+ * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
  * @param {Doc} doc
  * @param {Map<number,number>} [targetStateVector] The state of the target that receives the update. Leave empty to write all known structs
  *
@ -500,15 +511,29 @@ export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map())
  *
  * @param {Doc} doc
  * @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
- * @param {AbstractUpdateEncoder} [encoder]
+ * @param {UpdateEncoderV1 | UpdateEncoderV2} [encoder]
  * @return {Uint8Array}
  *
  * @function
  */
-export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector, encoder = new UpdateEncoderV2()) => {
+export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector = new Uint8Array([0]), encoder = new UpdateEncoderV2()) => {
-  const targetStateVector = encodedTargetStateVector == null ? new Map() : decodeStateVector(encodedTargetStateVector)
+  const targetStateVector = decodeStateVector(encodedTargetStateVector)
   writeStateAsUpdate(encoder, doc, targetStateVector)
-  return encoder.toUint8Array()
+  const updates = [encoder.toUint8Array()]
+  // also add the pending updates (if there are any)
+  // @todo support diffirent encoders
+  if (encoder.constructor === UpdateEncoderV2) {
+    if (doc.store.pendingDs) {
+      updates.push(doc.store.pendingDs)
+    }
+    if (doc.store.pendingStructs) {
+      updates.push(diffUpdateV2(doc.store.pendingStructs.update, encodedTargetStateVector))
+    }
+    if (updates.length > 1) {
+      return mergeUpdatesV2(updates)
+    }
+  }
+  return updates[0]
 }

 /**
@ -523,12 +548,12 @@ export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector, encoder = n
  *
  * @function
  */
-export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStateAsUpdateV2(doc, encodedTargetStateVector, new DefaultUpdateEncoder())
+export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStateAsUpdateV2(doc, encodedTargetStateVector, new UpdateEncoderV1())

 /**
  * Read state vector from Decoder and return as Map
  *
- * @param {AbstractDSDecoder} decoder
+ * @param {DSDecoderV1 | DSDecoderV2} decoder
  * @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
  *
  * @function
@ -552,7 +577,7 @@ export const readStateVector = decoder => {
  *
  * @function
  */
-export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState)))
+// export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState)))

 /**
  * Read decodedState and return State as Map.
@ -562,10 +587,10 @@ export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoder
  *
  * @function
  */
-export const decodeStateVector = decodedState => readStateVector(new DefaultDSDecoder(decoding.createDecoder(decodedState)))
+export const decodeStateVector = decodedState => readStateVector(new DSDecoderV1(decoding.createDecoder(decodedState)))

 /**
- * @param {AbstractDSEncoder} encoder
+ * @param {DSEncoderV1 | DSEncoderV2} encoder
  * @param {Map<number,number>} sv
  * @function
  */
@ -579,7 +604,7 @@ export const writeStateVector = (encoder, sv) => {
 }

 /**
- * @param {AbstractDSEncoder} encoder
+ * @param {DSEncoderV1 | DSEncoderV2} encoder
  * @param {Doc} doc
  *
  * @function
@ -589,23 +614,27 @@ export const writeDocumentStateVector = (encoder, doc) => writeStateVector(encod
 /**
  * Encode State as Uint8Array.
  *
- * @param {Doc} doc
+ * @param {Doc|Map<number,number>} doc
- * @param {AbstractDSEncoder} [encoder]
+ * @param {DSEncoderV1 | DSEncoderV2} [encoder]
  * @return {Uint8Array}
  *
  * @function
  */
 export const encodeStateVectorV2 = (doc, encoder = new DSEncoderV2()) => {
+  if (doc instanceof Map) {
+    writeStateVector(encoder, doc)
+  } else {
     writeDocumentStateVector(encoder, doc)
+  }
   return encoder.toUint8Array()
 }

 /**
  * Encode State as Uint8Array.
  *
- * @param {Doc} doc
+ * @param {Doc|Map<number,number>} doc
  * @return {Uint8Array}
  *
  * @function
  */
-export const encodeStateVector = doc => encodeStateVectorV2(doc, new DefaultDSEncoder())
+export const encodeStateVector = doc => encodeStateVectorV2(doc, new DSEncoderV1())
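With these changes, `encodeStateAsUpdateV2` also folds a peer's still-pending updates into the encoded result, and `encodeStateVectorV2` accepts either a `Y.Doc` or an already-decoded `Map<client, clock>`. A hedged sketch of the intended call pattern (standard Yjs API, variable names are illustrative):

const sv = Y.encodeStateVector(remoteDoc)           // Uint8Array state vector of the receiver
const diff = Y.encodeStateAsUpdateV2(localDoc, sv)  // only the operations the receiver is missing
Y.applyUpdateV2(remoteDoc, diff)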
510
src/utils/updates.js (new file)
@ -0,0 +1,510 @@
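The new module below implements update-level operations that work directly on encoded updates, without instantiating a Y.Doc. A hedged round-trip sketch of the API it exports (illustrative only; `updateA` and `updateB` are assumed to be V2-encoded updates of the same document):

const merged = Y.mergeUpdatesV2([updateA, updateB])   // one combined update
const sv = Y.encodeStateVectorFromUpdateV2(merged)    // state vector described by that update
const meta = Y.parseUpdateMetaV2(merged)              // { from, to }: clock range per client
const diff = Y.diffUpdateV2(merged, sv)               // ops in `merged` not covered by `sv`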
import * as binary from 'lib0/binary.js'
import * as decoding from 'lib0/decoding.js'
import * as encoding from 'lib0/encoding.js'
import * as logging from 'lib0/logging.js'
import * as math from 'lib0/math.js'
import {
  createID,
  readItemContent,
  readDeleteSet,
  writeDeleteSet,
  Skip,
  mergeDeleteSets,
  DSEncoderV1,
  DSEncoderV2,
  decodeStateVector,
  Item, GC, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line
} from '../internals.js'

/**
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 */
function * lazyStructReaderGenerator (decoder) {
  const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder)
  for (let i = 0; i < numOfStateUpdates; i++) {
    const numberOfStructs = decoding.readVarUint(decoder.restDecoder)
    const client = decoder.readClient()
    let clock = decoding.readVarUint(decoder.restDecoder)
    for (let i = 0; i < numberOfStructs; i++) {
      const info = decoder.readInfo()
      // @todo use switch instead of ifs
      if (info === 10) {
        const len = decoding.readVarUint(decoder.restDecoder)
        yield new Skip(createID(client, clock), len)
        clock += len
      } else if ((binary.BITS5 & info) !== 0) {
        const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
        // If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
        // and we read the next string as parentYKey.
        // It indicates how we store/retrieve parent from `y.share`
        // @type {string|null}
        const struct = new Item(
          createID(client, clock),
          null, // left
          (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin
          null, // right
          (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin
          // @ts-ignore Force writing a string here.
          cantCopyParentInfo ? (decoder.readParentInfo() ? decoder.readString() : decoder.readLeftID()) : null, // parent
          cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
          readItemContent(decoder, info) // item content
        )
        yield struct
        clock += struct.length
      } else {
        const len = decoder.readLen()
        yield new GC(createID(client, clock), len)
        clock += len
      }
    }
  }
}

export class LazyStructReader {
  /**
   * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
   * @param {boolean} filterSkips
   */
  constructor (decoder, filterSkips) {
    this.gen = lazyStructReaderGenerator(decoder)
    /**
     * @type {null | Item | Skip | GC}
     */
    this.curr = null
    this.done = false
    this.filterSkips = filterSkips
    this.next()
  }

  /**
   * @return {Item | GC | Skip |null}
   */
  next () {
    // ignore "Skip" structs
    do {
      this.curr = this.gen.next().value || null
    } while (this.filterSkips && this.curr !== null && this.curr.constructor === Skip)
    return this.curr
  }
}

/**
 * @param {Uint8Array} update
 *
 */
export const logUpdate = update => logUpdateV2(update, UpdateDecoderV1)

/**
 * @param {Uint8Array} update
 * @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder]
 *
 */
export const logUpdateV2 = (update, YDecoder = UpdateDecoderV2) => {
  const structs = []
  const updateDecoder = new YDecoder(decoding.createDecoder(update))
  const lazyDecoder = new LazyStructReader(updateDecoder, false)
  for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
    structs.push(curr)
  }
  logging.print('Structs: ', structs)
  const ds = readDeleteSet(updateDecoder)
  logging.print('DeleteSet: ', ds)
}

export class LazyStructWriter {
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   */
  constructor (encoder) {
    this.currClient = 0
    this.startClock = 0
    this.written = 0
    this.encoder = encoder
    /**
     * We want to write operations lazily, but also we need to know beforehand how many operations we want to write for each client.
     *
     * This kind of meta-information (#clients, #structs-per-client-written) is written to the restEncoder.
     *
     * We fragment the restEncoder and store a slice of it per-client until we know how many clients there are.
     * When we flush (toUint8Array) we write the restEncoder using the fragments and the meta-information.
     *
     * @type {Array<{ written: number, restEncoder: Uint8Array }>}
     */
    this.clientStructs = []
  }
}

/**
 * @param {Array<Uint8Array>} updates
 * @return {Uint8Array}
 */
export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV1)

/**
 * @param {Uint8Array} update
 * @param {typeof DSEncoderV1 | typeof DSEncoderV2} YEncoder
 * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder
 * @return {Uint8Array}
 */
export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YDecoder = UpdateDecoderV2) => {
  const encoder = new YEncoder()
  const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), true)
  let curr = updateDecoder.curr
  if (curr !== null) {
    let size = 1
    let currClient = curr.id.client
    let currClock = curr.id.clock
    let stopCounting = false
    for (; curr !== null; curr = updateDecoder.next()) {
      if (currClient !== curr.id.client) {
        size++
        // We found a new client
        // write what we have to the encoder
        encoding.writeVarUint(encoder.restEncoder, currClient)
        encoding.writeVarUint(encoder.restEncoder, currClock)
        currClient = curr.id.client
        stopCounting = false
      }
      if (curr.constructor === Skip) {
        stopCounting = true
      }
      if (!stopCounting) {
        currClock = curr.id.clock + curr.length
      }
    }
    // write what we have
    encoding.writeVarUint(encoder.restEncoder, currClient)
    encoding.writeVarUint(encoder.restEncoder, currClock)
    // prepend the size of the state vector
    const enc = encoding.createEncoder()
    encoding.writeVarUint(enc, size)
    encoding.writeBinaryEncoder(enc, encoder.restEncoder)
    encoder.restEncoder = enc
    return encoder.toUint8Array()
  } else {
    encoding.writeVarUint(encoder.restEncoder, 0)
    return encoder.toUint8Array()
  }
}

/**
 * @param {Uint8Array} update
 * @return {Uint8Array}
 */
export const encodeStateVectorFromUpdate = update => encodeStateVectorFromUpdateV2(update, DSEncoderV1, UpdateDecoderV1)

/**
 * @param {Uint8Array} update
 * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder
 * @return {{ from: Map<number,number>, to: Map<number,number> }}
 */
export const parseUpdateMetaV2 = (update, YDecoder = UpdateDecoderV2) => {
  /**
   * @type {Map<number, number>}
   */
  const from = new Map()
  /**
   * @type {Map<number, number>}
   */
  const to = new Map()
  const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), false)
  let curr = updateDecoder.curr
  if (curr !== null) {
    let currClient = curr.id.client
    let currClock = curr.id.clock
    // write the beginning to `from`
    from.set(currClient, currClock)
    for (; curr !== null; curr = updateDecoder.next()) {
      if (currClient !== curr.id.client) {
        // We found a new client
        // write the end to `to`
        to.set(currClient, currClock)
        // write the beginning to `from`
        from.set(curr.id.client, curr.id.clock)
        // update currClient
        currClient = curr.id.client
      }
      currClock = curr.id.clock + curr.length
    }
    // write the end to `to`
    to.set(currClient, currClock)
  }
  return { from, to }
}

/**
 * @param {Uint8Array} update
 * @return {{ from: Map<number,number>, to: Map<number,number> }}
 */
export const parseUpdateMeta = update => parseUpdateMetaV2(update, UpdateDecoderV1)

/**
 * This method is intended to slice any kind of struct and retrieve the right part.
 * It does not handle side-effects, so it should only be used by the lazy-encoder.
 *
 * @param {Item | GC | Skip} left
 * @param {number} diff
 * @return {Item | GC}
 */
const sliceStruct = (left, diff) => {
  if (left.constructor === GC) {
    const { client, clock } = left.id
    return new GC(createID(client, clock + diff), left.length - diff)
  } else if (left.constructor === Skip) {
    const { client, clock } = left.id
    return new Skip(createID(client, clock + diff), left.length - diff)
  } else {
    const leftItem = /** @type {Item} */ (left)
    const { client, clock } = leftItem.id
    return new Item(
      createID(client, clock + diff),
      null,
      createID(client, clock + diff - 1),
      null,
      leftItem.rightOrigin,
      leftItem.parent,
      leftItem.parentSub,
      leftItem.content.splice(diff)
    )
  }
}

/**
 *
 * This function works similarly to `readUpdateV2`.
 *
 * @param {Array<Uint8Array>} updates
 * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
 * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
 * @return {Uint8Array}
 */
export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
  const updateDecoders = updates.map(update => new YDecoder(decoding.createDecoder(update)))
  let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder, true))

  /**
   * @todo we don't need offset because we always slice before
   * @type {null | { struct: Item | GC | Skip, offset: number }}
   */
  let currWrite = null

  const updateEncoder = new YEncoder()
  // write structs lazily
  const lazyStructEncoder = new LazyStructWriter(updateEncoder)

  // Note: We need to ensure that all lazyStructDecoders are fully consumed
  // Note: Should merge document updates whenever possible - even from different updates
  // Note: Should handle that some operations cannot be applied yet ()

  while (true) {
    // Write higher clients first ⇒ sort by clientID & clock and remove decoders without content
    lazyStructDecoders = lazyStructDecoders.filter(dec => dec.curr !== null)
    lazyStructDecoders.sort(
      /** @type {function(any,any):number} */ (dec1, dec2) => {
        if (dec1.curr.id.client === dec2.curr.id.client) {
          const clockDiff = dec1.curr.id.clock - dec2.curr.id.clock
          if (clockDiff === 0) {
            return dec1.curr.constructor === dec2.curr.constructor ? 0 : (
              dec1.curr.constructor === Skip ? 1 : -1
            )
          } else {
            return clockDiff
          }
        } else {
          return dec2.curr.id.client - dec1.curr.id.client
        }
      }
    )
    if (lazyStructDecoders.length === 0) {
      break
    }
    const currDecoder = lazyStructDecoders[0]
    // write from currDecoder until the next operation is from another client or if filler-struct
    // then we need to reorder the decoders and find the next operation to write
    const firstClient = /** @type {Item | GC} */ (currDecoder.curr).id.client

    if (currWrite !== null) {
      let curr = /** @type {Item | GC | null} */ (currDecoder.curr)

      // iterate until we find something that we haven't written already
      // remember: first the high client-ids are written
      while (curr !== null && curr.id.clock + curr.length <= currWrite.struct.id.clock + currWrite.struct.length && curr.id.client >= currWrite.struct.id.client) {
        curr = currDecoder.next()
      }
      if (curr === null || curr.id.client !== firstClient) {
        continue
      }

      if (firstClient !== currWrite.struct.id.client) {
        writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
        currWrite = { struct: curr, offset: 0 }
        currDecoder.next()
      } else {
        if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) {
          // @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock)
          if (currWrite.struct.constructor === Skip) {
            // extend existing skip
            currWrite.struct.length = curr.id.clock + curr.length - currWrite.struct.id.clock
          } else {
            writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
            const diff = curr.id.clock - currWrite.struct.id.clock - currWrite.struct.length
            /**
             * @type {Skip}
             */
            const struct = new Skip(createID(firstClient, currWrite.struct.id.clock + currWrite.struct.length), diff)
            currWrite = { struct, offset: 0 }
          }
        } else { // if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) {
          const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock
          if (diff > 0) {
            if (currWrite.struct.constructor === Skip) {
              // prefer to slice Skip because the other struct might contain more information
              currWrite.struct.length -= diff
            } else {
              curr = sliceStruct(curr, diff)
            }
          }
          if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) {
            writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
            currWrite = { struct: curr, offset: 0 }
            currDecoder.next()
          }
        }
      }
    } else {
      currWrite = { struct: /** @type {Item | GC} */ (currDecoder.curr), offset: 0 }
      currDecoder.next()
    }
    for (
      let next = currDecoder.curr;
      next !== null && next.id.client === firstClient && next.id.clock === currWrite.struct.id.clock + currWrite.struct.length && next.constructor !== Skip;
      next = currDecoder.next()
    ) {
      writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
      currWrite = { struct: next, offset: 0 }
    }
  }
  if (currWrite !== null) {
    writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
    currWrite = null
  }
  finishLazyStructWriting(lazyStructEncoder)

  const dss = updateDecoders.map(decoder => readDeleteSet(decoder))
  const ds = mergeDeleteSets(dss)
  writeDeleteSet(updateEncoder, ds)
  return updateEncoder.toUint8Array()
}

/**
 * @param {Uint8Array} update
 * @param {Uint8Array} sv
 * @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
 * @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
 */
export const diffUpdateV2 = (update, sv, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
  const state = decodeStateVector(sv)
  const encoder = new YEncoder()
  const lazyStructWriter = new LazyStructWriter(encoder)
  const decoder = new YDecoder(decoding.createDecoder(update))
  const reader = new LazyStructReader(decoder, false)
  while (reader.curr) {
    const curr = reader.curr
    const currClient = curr.id.client
    const svClock = state.get(currClient) || 0
    if (reader.curr.constructor === Skip) {
      // the first written struct shouldn't be a skip
      reader.next()
      continue
    }
    if (curr.id.clock + curr.length > svClock) {
      writeStructToLazyStructWriter(lazyStructWriter, curr, math.max(svClock - curr.id.clock, 0))
      reader.next()
      while (reader.curr && reader.curr.id.client === currClient) {
        writeStructToLazyStructWriter(lazyStructWriter, reader.curr, 0)
        reader.next()
      }
    } else {
      // read until something new comes up
      while (reader.curr && reader.curr.id.client === currClient && reader.curr.id.clock + reader.curr.length <= svClock) {
        reader.next()
      }
    }
  }
  finishLazyStructWriting(lazyStructWriter)
  // write ds
  const ds = readDeleteSet(decoder)
  writeDeleteSet(encoder, ds)
  return encoder.toUint8Array()
}

/**
 * @param {Uint8Array} update
 * @param {Uint8Array} sv
 */
export const diffUpdate = (update, sv) => diffUpdateV2(update, sv, UpdateDecoderV1, UpdateEncoderV1)

/**
 * @param {LazyStructWriter} lazyWriter
 */
const flushLazyStructWriter = lazyWriter => {
  if (lazyWriter.written > 0) {
    lazyWriter.clientStructs.push({ written: lazyWriter.written, restEncoder: encoding.toUint8Array(lazyWriter.encoder.restEncoder) })
    lazyWriter.encoder.restEncoder = encoding.createEncoder()
    lazyWriter.written = 0
  }
}

/**
 * @param {LazyStructWriter} lazyWriter
 * @param {Item | GC} struct
 * @param {number} offset
 */
const writeStructToLazyStructWriter = (lazyWriter, struct, offset) => {
  // flush curr if we start another client
  if (lazyWriter.written > 0 && lazyWriter.currClient !== struct.id.client) {
    flushLazyStructWriter(lazyWriter)
  }
  if (lazyWriter.written === 0) {
    lazyWriter.currClient = struct.id.client
    // write next client
    lazyWriter.encoder.writeClient(struct.id.client)
    // write startClock
    encoding.writeVarUint(lazyWriter.encoder.restEncoder, struct.id.clock + offset)
  }
  struct.write(lazyWriter.encoder, offset)
  lazyWriter.written++
}

/**
 * Call this function when we collected all parts and want to
 * put all the parts together. After calling this method,
 * you can continue using the UpdateEncoder.
 *
 * @param {LazyStructWriter} lazyWriter
 */
const finishLazyStructWriting = (lazyWriter) => {
  flushLazyStructWriter(lazyWriter)

  // this is a fresh encoder because we called flushCurr
  const restEncoder = lazyWriter.encoder.restEncoder

  /**
   * Now we put all the fragments together.
   * This works similarly to `writeClientsStructs`
   */

  // write # states that were updated - i.e. the clients
  encoding.writeVarUint(restEncoder, lazyWriter.clientStructs.length)

  for (let i = 0; i < lazyWriter.clientStructs.length; i++) {
    const partStructs = lazyWriter.clientStructs[i]
    /**
     * Works similarly to `writeStructs`
     */
    // write # encoded structs
    encoding.writeVarUint(restEncoder, partStructs.written)
    // write the rest of the fragment
    encoding.writeUint8Array(restEncoder, partStructs.restEncoder)
  }
}
@ -22,7 +22,7 @@ import {
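One motivating use case for the module above is server-side storage: a provider can append incoming updates to a log and periodically compact it without ever loading a Y.Doc. A hedged sketch (illustrative, not from this commit):

// storedUpdates: Array<Uint8Array> collected from clients over time
const compacted = Y.mergeUpdatesV2(storedUpdates)
// answer a sync request that carries the client's state vector `clientSv`
const reply = Y.diffUpdateV2(compacted, clientSv)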
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testStructReferences = tc => {
|
export const testStructReferences = tc => {
|
||||||
t.assert(contentRefs.length === 10)
|
t.assert(contentRefs.length === 11)
|
||||||
t.assert(contentRefs[1] === readContentDeleted)
|
t.assert(contentRefs[1] === readContentDeleted)
|
||||||
t.assert(contentRefs[2] === readContentJSON) // TODO: deprecate content json?
|
t.assert(contentRefs[2] === readContentJSON) // TODO: deprecate content json?
|
||||||
t.assert(contentRefs[3] === readContentBinary)
|
t.assert(contentRefs[3] === readContentBinary)
|
||||||
@ -32,6 +32,7 @@ export const testStructReferences = tc => {
|
|||||||
t.assert(contentRefs[7] === readContentType)
|
t.assert(contentRefs[7] === readContentType)
|
||||||
t.assert(contentRefs[8] === readContentAny)
|
t.assert(contentRefs[8] === readContentAny)
|
||||||
t.assert(contentRefs[9] === readContentDoc)
|
t.assert(contentRefs[9] === readContentDoc)
|
||||||
|
// contentRefs[10] is reserved for Skip structs
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -8,6 +8,7 @@ import * as undoredo from './undo-redo.tests.js'
|
|||||||
import * as compatibility from './compatibility.tests.js'
|
import * as compatibility from './compatibility.tests.js'
|
||||||
import * as doc from './doc.tests.js'
|
import * as doc from './doc.tests.js'
|
||||||
import * as snapshot from './snapshot.tests.js'
|
import * as snapshot from './snapshot.tests.js'
|
||||||
|
import * as updates from './updates.tests.js'
|
||||||
import * as relativePositions from './relativePositions.tests.js'
|
import * as relativePositions from './relativePositions.tests.js'
|
||||||
|
|
||||||
import { runTests } from 'lib0/testing.js'
|
import { runTests } from 'lib0/testing.js'
|
||||||
@ -18,7 +19,7 @@ if (isBrowser) {
|
|||||||
log.createVConsole(document.body)
|
log.createVConsole(document.body)
|
||||||
}
|
}
|
||||||
runTests({
|
runTests({
|
||||||
doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, relativePositions
|
doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, updates, relativePositions
|
||||||
}).then(success => {
|
}).then(success => {
|
||||||
/* istanbul ignore next */
|
/* istanbul ignore next */
|
||||||
if (isNode) {
|
if (isNode) {
|
||||||
|
@ -27,6 +27,39 @@ const broadcastMessage = (y, m) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export let useV2 = false
|
||||||
|
|
||||||
|
export const encV1 = {
|
||||||
|
encodeStateAsUpdate: Y.encodeStateAsUpdate,
|
||||||
|
mergeUpdates: Y.mergeUpdates,
|
||||||
|
applyUpdate: Y.applyUpdate,
|
||||||
|
logUpdate: Y.logUpdate,
|
||||||
|
updateEventName: 'update',
|
||||||
|
diffUpdate: Y.diffUpdate
|
||||||
|
}
|
||||||
|
|
||||||
|
export const encV2 = {
|
||||||
|
encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
|
||||||
|
mergeUpdates: Y.mergeUpdatesV2,
|
||||||
|
applyUpdate: Y.applyUpdateV2,
|
||||||
|
logUpdate: Y.logUpdateV2,
|
||||||
|
updateEventName: 'updateV2',
|
||||||
|
diffUpdate: Y.diffUpdateV2
|
||||||
|
}
|
||||||
|
|
||||||
|
export let enc = encV1
|
||||||
|
|
||||||
|
const useV1Encoding = () => {
|
||||||
|
useV2 = false
|
||||||
|
enc = encV1
|
||||||
|
}
|
||||||
|
|
||||||
|
const useV2Encoding = () => {
|
||||||
|
console.error('sync protocol doesnt support v2 protocol yet, fallback to v1 encoding') // @Todo
|
||||||
|
useV2 = false
|
||||||
|
enc = encV1
|
||||||
|
}
|
||||||
|
|
||||||
export class TestYInstance extends Y.Doc {
|
export class TestYInstance extends Y.Doc {
|
||||||
/**
|
/**
|
||||||
* @param {TestConnector} testConnector
|
* @param {TestConnector} testConnector
|
||||||
@ -44,12 +77,19 @@ export class TestYInstance extends Y.Doc {
|
|||||||
*/
|
*/
|
||||||
this.receiving = new Map()
|
this.receiving = new Map()
|
||||||
testConnector.allConns.add(this)
|
testConnector.allConns.add(this)
|
||||||
|
/**
|
||||||
|
* The list of received updates.
|
||||||
|
* We are going to merge them later using Y.mergeUpdates and check if the resulting document is correct.
|
||||||
|
* @type {Array<Uint8Array>}
|
||||||
|
*/
|
||||||
|
this.updates = []
|
||||||
// set up observe on local model
|
// set up observe on local model
|
||||||
this.on('update', /** @param {Uint8Array} update @param {any} origin */ (update, origin) => {
|
this.on(enc.updateEventName, /** @param {Uint8Array} update @param {any} origin */ (update, origin) => {
|
||||||
if (origin !== testConnector) {
|
if (origin !== testConnector) {
|
||||||
const encoder = encoding.createEncoder()
|
const encoder = encoding.createEncoder()
|
||||||
syncProtocol.writeUpdate(encoder, update)
|
syncProtocol.writeUpdate(encoder, update)
|
||||||
broadcastMessage(this, encoding.toUint8Array(encoder))
|
broadcastMessage(this, encoding.toUint8Array(encoder))
|
||||||
|
this.updates.push(update)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
this.connect()
|
this.connect()
|
||||||
@ -162,6 +202,17 @@ export class TestConnector {
|
|||||||
// send reply message
|
// send reply message
|
||||||
sender._receive(encoding.toUint8Array(encoder), receiver)
|
sender._receive(encoding.toUint8Array(encoder), receiver)
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
// If update message, add the received message to the list of received messages
|
||||||
|
const decoder = decoding.createDecoder(m)
|
||||||
|
const messageType = decoding.readVarUint(decoder)
|
||||||
|
switch (messageType) {
|
||||||
|
case syncProtocol.messageYjsUpdate:
|
||||||
|
case syncProtocol.messageYjsSyncStep2:
|
||||||
|
receiver.updates.push(decoding.readVarUint8Array(decoder))
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
return false
|
return false
|
||||||
@ -240,9 +291,9 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => {
|
|||||||
const gen = tc.prng
|
const gen = tc.prng
|
||||||
// choose an encoding approach at random
|
// choose an encoding approach at random
|
||||||
if (prng.bool(gen)) {
|
if (prng.bool(gen)) {
|
||||||
Y.useV2Encoding()
|
useV2Encoding()
|
||||||
} else {
|
} else {
|
||||||
Y.useV1Encoding()
|
useV1Encoding()
|
||||||
}
|
}
|
||||||
|
|
||||||
const testConnector = new TestConnector(gen)
|
const testConnector = new TestConnector(gen)
|
||||||
@@ -258,7 +309,7 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => {
   }
   testConnector.syncAll()
   result.testObjects = result.users.map(initTestObject || (() => null))
-  Y.useV1Encoding()
+  useV1Encoding()
   return /** @type {any} */ (result)
 }
 
@@ -274,14 +325,21 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => {
 export const compare = users => {
   users.forEach(u => u.connect())
   while (users[0].tc.flushAllMessages()) {}
+  // For each document, merge all received document updates with Y.mergeUpdates and create a new document which will be added to the list of "users"
+  // This ensures that mergeUpdates works correctly
+  const mergedDocs = users.map(user => {
+    const ydoc = new Y.Doc()
+    enc.applyUpdate(ydoc, enc.mergeUpdates(user.updates))
+    return ydoc
+  })
+  users.push(.../** @type {any} */(mergedDocs))
   const userArrayValues = users.map(u => u.getArray('array').toJSON())
   const userMapValues = users.map(u => u.getMap('map').toJSON())
   const userXmlValues = users.map(u => u.get('xml', Y.YXmlElement).toString())
   const userTextValues = users.map(u => u.getText('text').toDelta())
   for (const u of users) {
-    t.assert(u.store.pendingDeleteReaders.length === 0)
-    t.assert(u.store.pendingStack.length === 0)
-    t.assert(u.store.pendingClientsStructRefs.size === 0)
+    t.assert(u.store.pendingDs === null)
+    t.assert(u.store.pendingStructs === null)
   }
   // Test Array iterator
   t.compare(users[0].getArray('array').toArray(), Array.from(users[0].getArray('array')))
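
compare now additionally rebuilds every user from its captured updates via enc.mergeUpdates and adds the rebuilt docs to the comparison set, so a divergence introduced by merging would fail the same equality checks as a sync bug. The underlying property, sketched standalone (illustrative names, 'yjs' import path; not part of the commit):

import * as Y from 'yjs'

// Two peers produce independent edits; we capture their full states as updates.
const a = new Y.Doc()
const b = new Y.Doc()
a.getText('text').insert(0, 'hello')
b.getText('text').insert(0, 'world')

const updates = [Y.encodeStateAsUpdate(a), Y.encodeStateAsUpdate(b)]

// Merging the updates offline and applying the result should be equivalent
// to applying the updates one by one.
const viaMerge = new Y.Doc()
Y.applyUpdate(viaMerge, Y.mergeUpdates(updates))

const viaSequence = new Y.Doc()
updates.forEach(u => Y.applyUpdate(viaSequence, u))

console.log(viaMerge.getText('text').toString() === viaSequence.getText('text').toString()) // true
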
tests/updates.tests.js (new file, +246 lines)
@@ -0,0 +1,246 @@
+import * as t from 'lib0/testing.js'
+import { init, compare } from './testHelper.js' // eslint-disable-line
+import * as Y from '../src/index.js'
+import { readClientsStructRefs, readDeleteSet, UpdateDecoderV2, UpdateEncoderV2, writeDeleteSet } from '../src/internals.js'
+import * as encoding from 'lib0/encoding.js'
+import * as decoding from 'lib0/decoding.js'
+
+/**
+ * @typedef {Object} Enc
+ * @property {function(Array<Uint8Array>):Uint8Array} Enc.mergeUpdates
+ * @property {function(Y.Doc):Uint8Array} Enc.encodeStateAsUpdate
+ * @property {function(Y.Doc, Uint8Array):void} Enc.applyUpdate
+ * @property {function(Uint8Array):void} Enc.logUpdate
+ * @property {function(Uint8Array):{from:Map<number,number>,to:Map<number,number>}} Enc.parseUpdateMeta
+ * @property {function(Y.Doc):Uint8Array} Enc.encodeStateVector
+ * @property {function(Uint8Array):Uint8Array} Enc.encodeStateVectorFromUpdate
+ * @property {string} Enc.updateEventName
+ * @property {string} Enc.description
+ * @property {function(Uint8Array, Uint8Array):Uint8Array} Enc.diffUpdate
+ */
+
+/**
+ * @type {Enc}
+ */
+const encV1 = {
+  mergeUpdates: Y.mergeUpdates,
+  encodeStateAsUpdate: Y.encodeStateAsUpdate,
+  applyUpdate: Y.applyUpdate,
+  logUpdate: Y.logUpdate,
+  parseUpdateMeta: Y.parseUpdateMeta,
+  encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdate,
+  encodeStateVector: Y.encodeStateVector,
+  updateEventName: 'update',
+  description: 'V1',
+  diffUpdate: Y.diffUpdate
+}
+
+/**
+ * @type {Enc}
+ */
+const encV2 = {
+  mergeUpdates: Y.mergeUpdatesV2,
+  encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
+  applyUpdate: Y.applyUpdateV2,
+  logUpdate: Y.logUpdateV2,
+  parseUpdateMeta: Y.parseUpdateMetaV2,
+  encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2,
+  encodeStateVector: Y.encodeStateVector,
+  updateEventName: 'updateV2',
+  description: 'V2',
+  diffUpdate: Y.diffUpdateV2
+}
+
+/**
+ * @type {Enc}
+ */
+const encDoc = {
+  mergeUpdates: (updates) => {
+    const ydoc = new Y.Doc({ gc: false })
+    updates.forEach(update => {
+      Y.applyUpdateV2(ydoc, update)
+    })
+    return Y.encodeStateAsUpdateV2(ydoc)
+  },
+  encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
+  applyUpdate: Y.applyUpdateV2,
+  logUpdate: Y.logUpdateV2,
+  parseUpdateMeta: Y.parseUpdateMetaV2,
+  encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2,
+  encodeStateVector: Y.encodeStateVector,
+  updateEventName: 'updateV2',
+  description: 'Merge via Y.Doc',
+  /**
+   * @param {Uint8Array} update
+   * @param {Uint8Array} sv
+   */
+  diffUpdate: (update, sv) => {
+    const ydoc = new Y.Doc({ gc: false })
+    Y.applyUpdateV2(ydoc, update)
+    return Y.encodeStateAsUpdateV2(ydoc, sv)
+  }
+}
+
+const encoders = [encV1, encV2, encDoc]
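
The three Enc adapters above run the same assertions through the V1 update format, the V2 format, and a reference implementation that round-trips through a Y.Doc; the point of the new top-level functions is that merging and diffing can happen directly on encoded updates. A hedged sketch of the server-side use case this enables (illustrative names, 'yjs' import path; not part of the commit):

import * as Y from 'yjs'

// The "server" only stores raw update blobs, never a Y.Doc instance.
const source = new Y.Doc()
source.getMap('map').set('a', 1)
const firstUpdate = Y.encodeStateAsUpdate(source)   // what a client synced earlier
source.getMap('map').set('b', 2)
const fullUpdate = Y.encodeStateAsUpdate(source)    // everything the server has stored

// The client reports what it already knows via the state vector of its old update.
const clientStateVector = Y.encodeStateVectorFromUpdate(firstUpdate)

// diffUpdate answers with only the missing part, computed directly on the blobs.
const missing = Y.diffUpdate(fullUpdate, clientStateVector)

const client = new Y.Doc()
Y.applyUpdate(client, firstUpdate)
Y.applyUpdate(client, missing)
console.log(client.getMap('map').toJSON()) // { a: 1, b: 2 }
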
+
+/**
+ * @param {Array<Y.Doc>} users
+ * @param {Enc} enc
+ */
+const fromUpdates = (users, enc) => {
+  const updates = users.map(user =>
+    enc.encodeStateAsUpdate(user)
+  )
+  const ydoc = new Y.Doc()
+  enc.applyUpdate(ydoc, enc.mergeUpdates(updates))
+  return ydoc
+}
+
+/**
+ * @param {t.TestCase} tc
+ */
+export const testMergeUpdates = tc => {
+  const { users, array0, array1 } = init(tc, { users: 3 })
+
+  array0.insert(0, [1])
+  array1.insert(0, [2])
+
+  compare(users)
+  encoders.forEach(enc => {
+    const merged = fromUpdates(users, enc)
+    t.compareArrays(array0.toArray(), merged.getArray('array').toArray())
+  })
+}
+
+/**
+ * @param {Y.Doc} ydoc
+ * @param {Array<Uint8Array>} updates - expecting at least 4 updates
+ * @param {Enc} enc
+ * @param {boolean} hasDeletes
+ */
+const checkUpdateCases = (ydoc, updates, enc, hasDeletes) => {
+  const cases = []
+
+  // Case 1: Simple case, simply merge everything
+  cases.push(enc.mergeUpdates(updates))
+
+  // Case 2: Overlapping updates
+  cases.push(enc.mergeUpdates([
+    enc.mergeUpdates(updates.slice(2)),
+    enc.mergeUpdates(updates.slice(0, 2))
+  ]))
+
+  // Case 3: Overlapping updates
+  cases.push(enc.mergeUpdates([
+    enc.mergeUpdates(updates.slice(2)),
+    enc.mergeUpdates(updates.slice(1, 3)),
+    updates[0]
+  ]))
+
+  // Case 4: Separated updates (containing skips)
+  cases.push(enc.mergeUpdates([
+    enc.mergeUpdates([updates[0], updates[2]]),
+    enc.mergeUpdates([updates[1], updates[3]]),
+    enc.mergeUpdates(updates.slice(4))
+  ]))
+
+  // Case 5: overlapping with many duplicates
+  cases.push(enc.mergeUpdates(cases))
+
+  // const targetState = enc.encodeStateAsUpdate(ydoc)
+  // t.info('Target State: ')
+  // enc.logUpdate(targetState)
+
+  cases.forEach((mergedUpdates, i) => {
+    // t.info('State Case $' + i + ':')
+    // enc.logUpdate(updates)
+    const merged = new Y.Doc({ gc: false })
+    enc.applyUpdate(merged, mergedUpdates)
+    t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray())
+    t.compare(enc.encodeStateVector(merged), enc.encodeStateVectorFromUpdate(mergedUpdates))
+
+    if (enc.updateEventName !== 'update') { // @todo should this also work on legacy updates?
+      for (let j = 1; j < updates.length; j++) {
+        const partMerged = enc.mergeUpdates(updates.slice(j))
+        const partMeta = enc.parseUpdateMeta(partMerged)
+        const targetSV = Y.encodeStateVectorFromUpdateV2(Y.mergeUpdatesV2(updates.slice(0, j)))
+        const diffed = enc.diffUpdate(mergedUpdates, targetSV)
+        const diffedMeta = enc.parseUpdateMeta(diffed)
+        const decDiffedSV = Y.decodeStateVector(enc.encodeStateVectorFromUpdate(diffed))
+        t.compare(partMeta, diffedMeta)
+        t.compare(decDiffedSV, partMeta.to)
+        {
+          // We can't do the following
+          // - t.compare(diffed, mergedDeletes)
+          // because diffed contains the set of all deletes.
+          // So we add all deletes from `diffed` to `partDeletes` and compare them
+          const decoder = decoding.createDecoder(diffed)
+          const updateDecoder = new UpdateDecoderV2(decoder)
+          readClientsStructRefs(updateDecoder, new Y.Doc())
+          const ds = readDeleteSet(updateDecoder)
+          const updateEncoder = new UpdateEncoderV2()
+          encoding.writeVarUint(updateEncoder.restEncoder, 0) // 0 structs
+          writeDeleteSet(updateEncoder, ds)
+          const deletesUpdate = updateEncoder.toUint8Array()
+          const mergedDeletes = Y.mergeUpdatesV2([deletesUpdate, partMerged])
+          if (!hasDeletes || enc !== encDoc) {
+            // deletes will almost definitely lead to different encoders because of the mergeStruct feature that is present in encDoc
+            t.compare(diffed, mergedDeletes)
+          }
+        }
+      }
+    }
+
+    const meta = enc.parseUpdateMeta(mergedUpdates)
+    meta.from.forEach((clock, client) => t.assert(clock === 0))
+    meta.to.forEach((clock, client) => {
+      const structs = /** @type {Array<Y.Item>} */ (merged.store.clients.get(client))
+      const lastStruct = structs[structs.length - 1]
+      t.assert(lastStruct.id.clock + lastStruct.length === clock)
+    })
+  })
+}
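
checkUpdateCases leans on parseUpdateMeta, whose shape is given by the Enc typedef: from maps each client id to the clock at which the update's data starts, to maps it to the clock just past the last contained struct. A small illustrative sketch of those values for a single-client document (assumes the 'yjs' import path; the printed clocks follow from one length-1 item per transaction):

import * as Y from 'yjs'

const doc = new Y.Doc()
const updates = []
doc.on('updateV2', update => updates.push(update))

doc.getArray('array').insert(0, ['a']) // occupies clock 0
doc.getArray('array').insert(1, ['b']) // occupies clock 1

// Merge only the second transaction's update: it does not start at clock 0.
const tail = Y.mergeUpdatesV2(updates.slice(1))
const meta = Y.parseUpdateMetaV2(tail)

meta.from.forEach((clock, client) => console.log('starts at clock', clock)) // 1
meta.to.forEach((clock, client) => console.log('ends before clock', clock)) // 2
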
+
+/**
+ * @param {t.TestCase} tc
+ */
+export const testMergeUpdates1 = tc => {
+  encoders.forEach((enc, i) => {
+    t.info(`Using encoder: ${enc.description}`)
+    const ydoc = new Y.Doc({ gc: false })
+    const updates = /** @type {Array<Uint8Array>} */ ([])
+    ydoc.on(enc.updateEventName, update => { updates.push(update) })
+
+    const array = ydoc.getArray()
+    array.insert(0, [1])
+    array.insert(0, [2])
+    array.insert(0, [3])
+    array.insert(0, [4])
+
+    checkUpdateCases(ydoc, updates, enc, false)
+  })
+}
+
+/**
+ * @param {t.TestCase} tc
+ */
+export const testMergeUpdates2 = tc => {
+  encoders.forEach((enc, i) => {
+    t.info(`Using encoder: ${enc.description}`)
+    const ydoc = new Y.Doc({ gc: false })
+    const updates = /** @type {Array<Uint8Array>} */ ([])
+    ydoc.on(enc.updateEventName, update => { updates.push(update) })
+
+    const array = ydoc.getArray()
+    array.insert(0, [1, 2])
+    array.delete(1, 1)
+    array.insert(0, [3, 4])
+    array.delete(1, 2)
+
+    checkUpdateCases(ydoc, updates, enc, true)
+  })
+}
+
+/**
+ * @todo be able to apply Skip structs to Yjs docs
+ */
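
Case 4 in checkUpdateCases ('separated updates') is the scenario the new Skip struct addresses: merging updates whose clock ranges are not contiguous keeps both sides and records the gap instead of silently dropping data; per the @todo above, such a gapped update is not meant to be applied directly to a doc yet. A hedged sketch of the observable behaviour (illustrative names, 'yjs' import path):

import * as Y from 'yjs'

const src = new Y.Doc()
const updates = []
src.on('updateV2', u => updates.push(u))
src.getArray('a').insert(0, [1]) // update 0
src.getArray('a').insert(0, [2]) // update 1
src.getArray('a').insert(0, [3]) // update 2

// updates[0] and updates[2] are not adjacent; the merge keeps both and encodes
// the missing range in between as a gap rather than discarding data.
// (Applying `gappy` directly is not supported yet, per the @todo above.)
const gappy = Y.mergeUpdatesV2([updates[0], updates[2]])

// Once the missing middle update is merged in as well, the result is complete again.
const complete = Y.mergeUpdatesV2([gappy, updates[1]])
const doc = new Y.Doc()
Y.applyUpdateV2(doc, complete)
console.log(doc.getArray('a').toJSON()) // [3, 2, 1]
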
@@ -64,7 +64,7 @@ export const testInsertThreeElementsTryRegetProperty = tc => {
  * @param {t.TestCase} tc
  */
 export const testConcurrentInsertWithThreeConflicts = tc => {
-  var { users, array0, array1, array2 } = init(tc, { users: 3 })
+  const { users, array0, array1, array2 } = init(tc, { users: 3 })
   array0.insert(0, [0])
   array1.insert(0, [1])
   array2.insert(0, [2])
@@ -107,7 +107,7 @@ export const testInsertionsInLateSync = tc => {
  * @param {t.TestCase} tc
  */
 export const testDisconnectReallyPreventsSendingMessages = tc => {
-  var { testConnector, users, array0, array1 } = init(tc, { users: 3 })
+  const { testConnector, users, array0, array1 } = init(tc, { users: 3 })
   array0.insert(0, ['x', 'y'])
   testConnector.flushAllMessages()
   users[1].disconnect()
@@ -388,13 +388,13 @@ const getUniqueNumber = () => _uniqueNumber++
 const arrayTransactions = [
   function insert (user, gen) {
     const yarray = user.getArray('array')
-    var uniqueNumber = getUniqueNumber()
-    var content = []
-    var len = prng.int32(gen, 1, 4)
-    for (var i = 0; i < len; i++) {
+    const uniqueNumber = getUniqueNumber()
+    const content = []
+    const len = prng.int32(gen, 1, 4)
+    for (let i = 0; i < len; i++) {
       content.push(uniqueNumber)
     }
-    var pos = prng.int32(gen, 0, yarray.length)
+    const pos = prng.int32(gen, 0, yarray.length)
     const oldContent = yarray.toArray()
     yarray.insert(pos, content)
     oldContent.splice(pos, 0, ...content)
@@ -402,28 +402,28 @@ const arrayTransactions = [
   },
   function insertTypeArray (user, gen) {
     const yarray = user.getArray('array')
-    var pos = prng.int32(gen, 0, yarray.length)
+    const pos = prng.int32(gen, 0, yarray.length)
     yarray.insert(pos, [new Y.Array()])
-    var array2 = yarray.get(pos)
+    const array2 = yarray.get(pos)
     array2.insert(0, [1, 2, 3, 4])
   },
   function insertTypeMap (user, gen) {
     const yarray = user.getArray('array')
-    var pos = prng.int32(gen, 0, yarray.length)
+    const pos = prng.int32(gen, 0, yarray.length)
     yarray.insert(pos, [new Y.Map()])
-    var map = yarray.get(pos)
+    const map = yarray.get(pos)
     map.set('someprop', 42)
     map.set('someprop', 43)
     map.set('someprop', 44)
   },
   function _delete (user, gen) {
     const yarray = user.getArray('array')
-    var length = yarray.length
+    const length = yarray.length
     if (length > 0) {
-      var somePos = prng.int32(gen, 0, length - 1)
-      var delLength = prng.int32(gen, 1, math.min(2, length - somePos))
+      let somePos = prng.int32(gen, 0, length - 1)
+      let delLength = prng.int32(gen, 1, math.min(2, length - somePos))
       if (prng.bool(gen)) {
-        var type = yarray.get(somePos)
+        const type = yarray.get(somePos)
         if (type.length > 0) {
           somePos = prng.int32(gen, 0, type.length - 1)
           delLength = prng.int32(gen, 0, math.min(2, type.length - somePos))
@@ -138,7 +138,7 @@ export const testGetAndSetOfMapPropertySyncs = tc => {
   t.compare(map0.get('stuff'), 'stuffy')
   testConnector.flushAllMessages()
   for (const user of users) {
-    var u = user.getMap('map')
+    const u = user.getMap('map')
     t.compare(u.get('stuff'), 'stuffy')
   }
   compare(users)
@@ -153,7 +153,7 @@ export const testGetAndSetOfMapPropertyWithConflict = tc => {
   map1.set('stuff', 'c1')
   testConnector.flushAllMessages()
   for (const user of users) {
-    var u = user.getMap('map')
+    const u = user.getMap('map')
     t.compare(u.get('stuff'), 'c1')
   }
   compare(users)
@@ -183,7 +183,7 @@ export const testGetAndSetAndDeleteOfMapProperty = tc => {
   map1.delete('stuff')
   testConnector.flushAllMessages()
   for (const user of users) {
-    var u = user.getMap('map')
+    const u = user.getMap('map')
     t.assert(u.get('stuff') === undefined)
   }
   compare(users)
@@ -200,7 +200,7 @@ export const testGetAndSetOfMapPropertyWithThreeConflicts = tc => {
   map2.set('stuff', 'c3')
   testConnector.flushAllMessages()
   for (const user of users) {
-    var u = user.getMap('map')
+    const u = user.getMap('map')
     t.compare(u.get('stuff'), 'c3')
   }
   compare(users)
@@ -223,7 +223,7 @@ export const testGetAndSetAndDeleteOfMapPropertyWithThreeConflicts = tc => {
   map3.delete('stuff')
   testConnector.flushAllMessages()
   for (const user of users) {
-    var u = user.getMap('map')
+    const u = user.getMap('map')
     t.assert(u.get('stuff') === undefined)
   }
   compare(users)
@@ -296,7 +296,7 @@ export const testObserversUsingObservedeep = tc => {
  * @param {Object<string,any>} should
  */
 const compareEvent = (is, should) => {
-  for (var key in should) {
+  for (const key in should) {
     t.compare(should[key], is[key])
   }
 }
@@ -474,12 +474,12 @@ export const testYmapEventHasCorrectValueWhenSettingAPrimitiveFromOtherUser = tc
 const mapTransactions = [
   function set (user, gen) {
     const key = prng.oneOf(gen, ['one', 'two'])
-    var value = prng.utf16String(gen)
+    const value = prng.utf16String(gen)
     user.getMap('map').set(key, value)
   },
   function setType (user, gen) {
     const key = prng.oneOf(gen, ['one', 'two'])
-    var type = prng.oneOf(gen, [new Y.Array(), new Y.Map()])
+    const type = prng.oneOf(gen, [new Y.Array(), new Y.Map()])
     user.getMap('map').set(key, type)
     if (type instanceof Y.Array) {
       type.insert(0, [1, 2, 3, 4])