read struct refs as array

Kevin Jahns 2019-04-10 18:52:22 +02:00
parent 52ec698635
commit 654510f3ff
8 changed files with 161 additions and 177 deletions

View File

@ -22,7 +22,6 @@ export {
   ID,
   createID,
   compareIDs,
-  writeStructs,
   writeStructsFromTransaction,
   readStructs,
   getState,

View File

@ -131,22 +131,6 @@ export class AbstractItem extends AbstractStruct {
     const parent = this.parent
     const parentSub = this.parentSub
     const length = this.length
-    /*
-    # $this has to find a unique position between origin and the next known character
-    # case 1: $origin equals $o.origin: the $creator parameter decides if left or right
-    # let $OL= [o1,o2,o3,o4], whereby $this is to be inserted between o1 and o4
-    # o2,o3 and o4 origin is 1 (the position of o2)
-    # there is the case that $this.creator < o2.creator, but o3.creator < $this.creator
-    # then o2 knows o3. Since on another client $OL could be [o1,o3,o4] the problem is complex
-    # therefore $this would be always to the right of o3
-    # case 2: $origin < $o.origin
-    # if current $this insert_position > $o origin: $this ins
-    # else $insert_position will not change
-    # (maybe we encounter case 1 later, then this will be to the right of $o)
-    # case 3: $origin > $o.origin
-    # $this insert_position is to the left of $o (forever!)
-    */
-    // handle conflicts
     /**
      * @type {AbstractItem|null}
      */

View File

@ -173,7 +173,7 @@ export class YXmlFragment extends AbstractType {
    * Returns all YXmlElements that match the query.
    * Similar to Dom's {@link querySelectorAll}.
    *
-   * TODO: Does not yet support all queries. Currently only query by tagName.
+   * @todo Does not yet support all queries. Currently only query by tagName.
    *
    * @param {CSS_Selector} query The query on the children
    * @return {Array<YXmlElement|YXmlText|YXmlHook|null>} The elements that match this query.

View File

@ -1,3 +1,4 @@
+// todo rename AbstractRef to abstractStructRef
 import {
   GC,
@ -16,11 +17,20 @@ export class StructStore {
      */
     this.clients = new Map()
     /**
-     * Store uncompleted struct readers here
-     * @see tryResumePendingReaders
-     * @type {Set<{stack:Array<AbstractRef>,structReaders:Map<number,IterableIterator<AbstractRef>>,missing:ID,structReaderIterator:IterableIterator<IterableIterator<AbstractRef>>,structReaderIteratorResult:IteratorResult<IterableIterator<AbstractRef>>}>}
+     * Store incompleted struct reads here
+     * `i` denotes to the next read operation
+     * We could shift the array of refs instead, but shift is incredible
+     * slow in Chrome for arrays with more than 100k elements
+     * @see tryResumePendingStructRefs
+     * @type {Map<number,{i:number,refs:Array<AbstractRef>}>}
      */
-    this.pendingStructReaders = new Set()
+    this.pendingClientsStructRefs = new Map()
+    /**
+     * Stack of pending structs waiting for struct dependencies
+     * Maximum length of stack is structReaders.size
+     * @type {Array<AbstractRef>}
+     */
+    this.pendingStack = []
     /**
      * @type {Array<decoding.Decoder>}
      */
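
Aside: a minimal sketch, not from this commit, of the bookkeeping behind `pendingClientsStructRefs`: consumed refs stay in the array and the cursor `i` moves past them instead of shifting the array. Plain objects stand in for real AbstractRef instances:

// plain-object stand-ins for AbstractRef instances (illustration only)
const refA = { id: { client: 42, clock: 0 }, length: 1 }
const refB = { id: { client: 42, clock: 1 }, length: 1 }

// one entry per client; `i` is the read cursor into `refs`
const pendingClientsStructRefs = new Map()
pendingClientsStructRefs.set(42, { refs: [refA, refB], i: 0 })

// consuming the next ref advances the cursor instead of mutating the array
const entry = pendingClientsStructRefs.get(42)
const next = entry.refs[entry.i++]
console.log(next.id.clock) // 0
if (entry.i === entry.refs.length) {
  // every ref for this client has been consumed; drop the whole entry
  pendingClientsStructRefs.delete(42)
}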

View File

@ -115,6 +115,8 @@ export const nextID = transaction => {
 }

 /**
+ * Implements the functionality of `y.transact(()=>{..})`
+ *
  * @param {Y} y
  * @param {function(Transaction):void} f
  */

View File

@ -53,8 +53,6 @@ export class Y extends Observable {
    * other peers.
    *
    * @param {function(Transaction):void} f The function that should be executed as a transaction
-   *
-   * @todo separate this into a separate function
    */
   transact (f) {
     transact(this, f)

View File

@ -23,9 +23,7 @@ import {
 import * as encoding from 'lib0/encoding.js'
 import * as decoding from 'lib0/decoding.js'
-import * as map from 'lib0/map.js'
 import * as binary from 'lib0/binary.js'
-import * as iterator from 'lib0/iterator.js'

 /**
  * @typedef {Map<number, number>} StateMap
@ -43,48 +41,53 @@ export const structRefs = [
 ]

 /**
- * @param {decoding.Decoder} decoder
- * @param {number} structsLen
- * @param {ID} nextID
- * @param {number} localState next expected clock by nextID.client
- * @return {IterableIterator<AbstractRef>}
+ * @param {encoding.Encoder} encoder
+ * @param {Array<AbstractStruct>} structs All structs by `client`
+ * @param {number} client
+ * @param {number} clock write structs starting with `ID(client,clock)`
  */
-const createStructReaderIterator = (decoder, structsLen, nextID, localState) => iterator.createIterator(() => {
-  let done = false
-  let value
-  do {
-    if (structsLen === 0) {
-      done = true
-      value = undefined
-      break
-    }
-    const info = decoding.readUint8(decoder)
-    value = new structRefs[binary.BITS5 & info](decoder, nextID, info)
-    nextID = createID(nextID.client, nextID.clock + value.length)
-    structsLen--
-  } while (nextID.clock <= localState) // read until we find something new (check nextID.clock instead because it equals `clock+len`)
-  return { done, value }
-})
+const writeStructs = (encoder, structs, client, clock) => {
+  // write first id
+  const startNewStructs = findIndexSS(structs, clock)
+  // write # encoded structs
+  encoding.writeVarUint(encoder, structs.length - startNewStructs)
+  writeID(encoder, createID(client, clock))
+  const firstStruct = structs[startNewStructs]
+  // write first struct with an offset
+  firstStruct.write(encoder, clock - firstStruct.id.clock, 0)
+  for (let i = startNewStructs + 1; i < structs.length; i++) {
+    structs[i].write(encoder, 0, 0)
+  }
+}

 /**
- * @param {encoding.Encoder} encoder
- * @param {Transaction} transaction
+ * @param {decoding.Decoder} decoder
+ * @param {number} numOfStructs
+ * @param {ID} nextID
+ * @return {Array<AbstractRef>}
  */
-export const writeStructsFromTransaction = (encoder, transaction) => writeStructs(encoder, transaction.y.store, transaction.beforeState)
+const readStructRefs = (decoder, numOfStructs, nextID) => {
+  /**
+   * @type {Array<AbstractRef>}
+   */
+  const refs = []
+  for (let i = 0; i < numOfStructs; i++) {
+    const info = decoding.readUint8(decoder)
+    const ref = new structRefs[binary.BITS5 & info](decoder, nextID, info)
+    nextID = createID(nextID.client, nextID.clock + ref.length)
+    refs.push(ref)
+  }
+  return refs
+}

 /**
  * @param {encoding.Encoder} encoder
  * @param {StructStore} store
  * @param {StateMap} _sm
  */
-export const writeStructs = (encoder, store, _sm) => {
+export const writeClientsStructs = (encoder, store, _sm) => {
   // we filter all valid _sm entries into sm
   const sm = new Map()
-  const encoderUserPosMap = map.create()
-  const startMessagePos = encoding.length(encoder)
-  // write diff to pos of end of this message
-  // we use it in readStructs to jump ahead to the end of the message
-  encoding.writeUint32(encoder, 0)
   _sm.forEach((clock, client) => {
     // only write if new structs are available
     if (getState(store, client) > clock) {
@ -99,59 +102,28 @@ export const writeStructs = (encoder, store, _sm) => {
   // write # states that were updated
   encoding.writeVarUint(encoder, sm.size)
   sm.forEach((clock, client) => {
-    // write first id
-    writeID(encoder, createID(client, clock))
-    encoderUserPosMap.set(client, encoding.length(encoder))
-    // write diff to pos where structs are written
-    encoding.writeUint32(encoder, 0)
-  })
-  sm.forEach((clock, client) => {
-    const decPos = encoderUserPosMap.get(client)
-    // fill out diff to pos where structs are written
-    encoding.setUint32(encoder, decPos, encoding.length(encoder) - decPos)
-    /**
-     * @type {Array<AbstractStruct>}
-     */
     // @ts-ignore
-    const structs = store.clients.get(client)
-    const startNewStructs = findIndexSS(structs, clock)
-    // write # encoded structs
-    encoding.writeVarUint(encoder, structs.length - startNewStructs)
-    const firstStruct = structs[startNewStructs]
-    // write first struct with an offset (may be 0)
-    firstStruct.write(encoder, clock - firstStruct.id.clock, 0)
-    for (let i = startNewStructs + 1; i < structs.length; i++) {
-      structs[i].write(encoder, 0, 0)
-    }
+    writeStructs(encoder, store.clients.get(client), client, clock)
   })
-  // fill out diff to pos of end of message
-  encoding.setUint32(encoder, startMessagePos, encoding.length(encoder) - startMessagePos)
 }

 /**
  * @param {decoding.Decoder} decoder The decoder object to read data from.
- * @param {Map<number,number>} localState
- * @return {Map<number,IterableIterator<AbstractRef>>}
+ * @return {Map<number,Array<AbstractRef>>}
  */
-const readStructReaders = (decoder, localState) => {
-  /**
-   * @type {Map<number,IterableIterator<AbstractRef>>}
-   */
-  const structReaders = new Map()
-  const endOfMessagePos = decoder.pos + decoding.readUint32(decoder)
-  const clientbeforeState = decoding.readVarUint(decoder)
-  for (let i = 0; i < clientbeforeState; i++) {
-    const nextID = readID(decoder)
-    const decoderPos = decoder.pos + decoding.readUint32(decoder)
-    const structReaderDecoder = decoding.clone(decoder, decoderPos)
-    const numberOfStructs = decoding.readVarUint(structReaderDecoder)
-    structReaders.set(nextID.client, createStructReaderIterator(structReaderDecoder, numberOfStructs, nextID, localState.get(nextID.client) || 0))
-  }
-  // Decoder is still stuck at creating struct readers.
-  // Jump ahead to end of message so that reading can continue.
-  // We will use the created struct readers for the remaining part of this workflow.
-  decoder.pos = endOfMessagePos
-  return structReaders
-}
+export const readClientsStructRefs = decoder => {
+  /**
+   * @type {Map<number,Array<AbstractRef>>}
+   */
+  const clientRefs = new Map()
+  const numOfStateUpdates = decoding.readVarUint(decoder)
+  for (let i = 0; i < numOfStateUpdates; i++) {
+    const numberOfStructs = decoding.readVarUint(decoder)
+    const nextID = readID(decoder)
+    const refs = readStructRefs(decoder, numberOfStructs, nextID)
+    clientRefs.set(nextID.client, refs)
+  }
+  return clientRefs
+}

 /**
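
Aside: taken together, `writeStructs` and `readClientsStructRefs`/`readStructRefs` define a strictly sequential per-client layout (struct count, first ID, then the structs, the first one offset-adjusted). A sketch of reading such an update back, not from the commit; `update` is a hypothetical Uint8Array produced by `writeClientsStructs`, and `readClientsStructRefs` is assumed to be in scope from this module:

import * as decoding from 'lib0/decoding.js'

// Per client the encoder writes: numOfStructs (varUint), the first ID, then the
// structs themselves, the first one offset-adjusted so it starts exactly at `clock`.
// The old uint32 jump offsets (endOfMessagePos, per-client decoder clones) are gone.
const inspectUpdate = update => {
  const decoder = decoding.createDecoder(update)
  const clientRefs = readClientsStructRefs(decoder) // Map<client, Array<AbstractRef>>
  clientRefs.forEach((refs, client) => {
    // refs are in clock order; refs[0].id.clock is the first clock contained in the update
    console.log(`client ${client}: ${refs.length} refs starting at clock ${refs[0].id.clock}`)
  })
}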
@ -175,86 +147,69 @@ const readStructReaders = (decoder, localState) => {
  *
  * @param {Transaction} transaction
  * @param {StructStore} store
- * @param {Map<number,number>} localState
- * @param {Map<number,IterableIterator<AbstractRef>>} structReaders
- * @param {Array<AbstractRef>} stack Stack of pending structs waiting for struct dependencies.
- *        Maximum length of stack is structReaders.size.
- * @param {IterableIterator<IterableIterator<AbstractRef>>} structReaderIterator
- * @param {IteratorResult<IterableIterator<AbstractRef>>} structReaderIteratorResult
  *
  * @todo reimplement without iterators - read everything in arrays instead
  */
-const execStructReaders = (transaction, store, localState, structReaders, stack, structReaderIterator, structReaderIteratorResult) => {
+const resumeStructIntegration = (transaction, store) => {
+  const stack = store.pendingStack
+  const clientsStructRefs = store.pendingClientsStructRefs
   // iterate over all struct readers until we are done
-  while (stack.length !== 0 || !structReaderIteratorResult.done) {
+  while (stack.length !== 0 || clientsStructRefs.size !== 0) {
     if (stack.length === 0) {
-      // stack is empty. We know that there there are more structReaders to be processed
-      const nextStructRes = structReaderIteratorResult.value.next()
-      if (nextStructRes.done) {
-        // current structReaderIteratorResult is empty, use next one
-        structReaderIteratorResult = structReaderIterator.next()
-      } else {
-        stack.push(nextStructRes.value)
-      }
-    } else {
-      const ref = stack[stack.length - 1]
-      const m = ref._missing
-      while (m.length > 0) {
-        const missing = m[m.length - 1]
-        if (!exists(store, missing)) {
-          // get the struct reader that has the missing struct
-          const reader = structReaders.get(missing.client)
-          const nextRef = reader === undefined ? undefined : reader.next().value
-          if (nextRef === undefined) {
-            // This update message causally depends on another update message.
-            // Store current stack and readers in StructStore and resume the computation at another time
-            store.pendingStructReaders.add({ stack, structReaders, missing, structReaderIterator, structReaderIteratorResult })
-            return
-          }
-          stack.push(nextRef)
-          break
-        }
-        ref._missing.pop()
-      }
-      if (m.length === 0) {
-        const localClock = (localState.get(ref.id.client) || 0)
-        const offset = ref.id.clock < localClock ? localClock - ref.id.clock : 0
-        if (offset < ref.length) {
-          if (ref.id.clock + offset !== localClock) {
-            // A previous message from this client is missing
-            // Store current stack and readers in StructStore and resume the computation at another time
-            store.pendingStructReaders.add({ stack, structReaders, missing: createID(ref.id.client, localClock), structReaderIterator, structReaderIteratorResult })
-            return
-          }
-          ref.toStruct(transaction.y, store, offset).integrate(transaction)
-        }
-        stack.pop()
-      }
-    }
-  }
-  if (stack.length > 0) {
-    store.pendingStructReaders.add({ stack, structReaders, missing: stack[stack.length - 1].id, structReaderIterator, structReaderIteratorResult })
-  }
-}
-
-/**
- * Try to resume pending struct readers in `store.pendingReaders` while `pendingReaders.nextMissing`
- * exists.
- *
- * @param {Transaction} transaction
- * @param {StructStore} store
- */
-const tryResumePendingStructReaders = (transaction, store) => {
-  let resume = true
-  const pendingReaders = store.pendingStructReaders
-  while (resume) {
-    resume = false
-    for (const pendingReader of pendingReaders) {
-      if (exists(store, pendingReader.missing)) {
-        resume = true // found at least one more reader to execute
-        pendingReaders.delete(pendingReader)
-        execStructReaders(transaction, store, getStates(store), pendingReader.structReaders, pendingReader.stack, pendingReader.structReaderIterator, pendingReader.structReaderIteratorResult)
-      }
-    }
-  }
-}
+      // take any first struct from clientsStructRefs and put it on the stack
+      const [client, structRefs] = clientsStructRefs.entries().next().value
+      stack.push(structRefs.refs[structRefs.i++])
+      if (structRefs.refs.length === structRefs.i) {
+        clientsStructRefs.delete(client)
+      }
+    }
+    const ref = stack[stack.length - 1]
+    const m = ref._missing
+    const client = ref.id.client
+    const localClock = getState(store, client)
+    const offset = ref.id.clock < localClock ? localClock - ref.id.clock : 0
+    if (ref.id.clock + offset !== localClock) {
+      // A previous message from this client is missing
+      // check if there is a pending structRef with a smaller clock and switch them
+      const structRefs = clientsStructRefs.get(client)
+      if (structRefs !== undefined) {
+        const r = structRefs.refs[structRefs.i]
+        if (r.id.clock < ref.id.clock) {
+          // put ref with smaller clock on stack instead and continue
+          structRefs.refs[structRefs.i] = ref
+          stack[stack.length - 1] = r
+          // sort the set because this approach might bring the list out of order
+          structRefs.refs = structRefs.refs.slice(structRefs.i).sort((r1, r2) => r1.id.client - r2.id.client)
+          structRefs.i = 0
+          continue
+        }
+      }
+      // wait until missing struct is available
+      return
+    }
+    while (m.length > 0) {
+      const missing = m[m.length - 1]
+      if (!exists(store, missing)) {
+        const client = missing.client
+        // get the struct reader that has the missing struct
+        const structRefs = clientsStructRefs.get(client)
+        if (structRefs === undefined) {
+          // This update message causally depends on another update message.
+          return
+        }
+        stack.push(structRefs.refs[structRefs.i++])
+        if (structRefs.i === structRefs.refs.length) {
+          clientsStructRefs.delete(client)
+        }
+        break
+      }
+      ref._missing.pop()
+    }
+    if (m.length === 0) {
+      if (offset < ref.length) {
+        ref.toStruct(transaction.y, store, offset).integrate(transaction)
+      }
+      stack.pop()
+    }
+  }
+}
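
Aside: the clock arithmetic above in a tiny worked example with plain numbers (not from the commit):

// localClock is what getState(store, client) would return: the next expected clock
const localClock = 10
// this ref overlaps what is already known: it covers clocks 7..11, clocks 7..9 are known
const ref = { id: { clock: 7 }, length: 5 }
const offset = ref.id.clock < localClock ? localClock - ref.id.clock : 0
console.log(offset) // 3
console.log(ref.id.clock + offset === localClock) // true -> integrate with offset 3
// a ref starting at clock 12 would yield offset 0 and 12 !== 10: there is a gap,
// so integration pauses (or a pending ref with a smaller clock is swapped onto the stack)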
@ -271,6 +226,43 @@ export const tryResumePendingDeleteReaders = (transaction, store) => {
   }
 }

+/**
+ * @param {Map<number,{refs:Array<AbstractRef>,i:number}>} pendingClientsStructRefs
+ * @param {number} client
+ * @param {Array<AbstractRef>} refs
+ */
+const setPendingClientsStructRefs = (pendingClientsStructRefs, client, refs) => {
+  pendingClientsStructRefs.set(client, { refs, i: 0 })
+}
+
+/**
+ * @param {encoding.Encoder} encoder
+ * @param {Transaction} transaction
+ */
+export const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.y.store, transaction.beforeState)
+
+/**
+ * @param {StructStore} store
+ * @param {Map<number, Array<AbstractRef>>} clientsStructsRefs
+ */
+const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => {
+  const pendingClientsStructRefs = store.pendingClientsStructRefs
+  for (const [client, structRefs] of clientsStructsRefs) {
+    const pendingStructRefs = pendingClientsStructRefs.get(client)
+    if (pendingStructRefs === undefined) {
+      setPendingClientsStructRefs(pendingClientsStructRefs, client, structRefs)
+    } else {
+      // merge into existing structRefs
+      const merged = pendingStructRefs.i > 0 ? pendingStructRefs.refs.slice(pendingStructRefs.i) : pendingStructRefs.refs
+      for (let i = 0; i < structRefs.length; i++) {
+        merged.push(structRefs[i])
+      }
+      pendingStructRefs.i = 0
+      pendingStructRefs.refs = merged.sort((r1, r2) => r1.id.clock - r2.id.clock)
+    }
+  }
+}
+
 /**
  * Read the next Item in a Decoder and fill this Item with the read data.
  *
@ -283,11 +275,9 @@ export const tryResumePendingDeleteReaders = (transaction, store) => {
  * @private
  */
 export const readStructs = (decoder, transaction, store) => {
-  const localState = getStates(store)
-  const readers = readStructReaders(decoder, localState)
-  const structReaderIterator = readers.values()
-  execStructReaders(transaction, store, localState, readers, [], structReaderIterator, structReaderIterator.next())
-  tryResumePendingStructReaders(transaction, store)
+  const clientsStructRefs = readClientsStructRefs(decoder)
+  mergeReadStructsIntoPendingReads(store, clientsStructRefs)
+  resumeStructIntegration(transaction, store)
   tryResumePendingDeleteReaders(transaction, store)
 }
@ -307,6 +297,6 @@ export const readModel = (decoder, transaction, store) => {
  * @param {Map<number,number>} [targetState] The state of the target that receives the update. Leave empty to write all known structs
  */
 export const writeModel = (encoder, store, targetState = new Map()) => {
-  writeStructs(encoder, store, targetState)
+  writeClientsStructs(encoder, store, targetState)
   writeDeleteSet(encoder, createDeleteSetFromStructStore(store))
 }
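
Aside: a worked example, not from the commit, of how `mergeReadStructsIntoPendingReads` folds freshly read refs into refs still pending for the same client. Already-consumed refs (everything before `i`) are dropped first and the result is re-sorted by clock; plain objects stand in for AbstractRef instances:

// clock 5 was already consumed (i === 1); clocks 7 is still pending
const pending = { refs: [{ id: { clock: 5 } }, { id: { clock: 7 } }], i: 1 }
// a new update delivers refs with clocks 2 and 3
const incoming = [{ id: { clock: 2 } }, { id: { clock: 3 } }]

const merged = pending.i > 0 ? pending.refs.slice(pending.i) : pending.refs
for (const ref of incoming) {
  merged.push(ref)
}
pending.i = 0
pending.refs = merged.sort((r1, r2) => r1.id.clock - r2.id.clock)
console.log(pending.refs.map(r => r.id.clock)) // [ 2, 3, 7 ]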

View File

@ -276,7 +276,8 @@ export const compare = users => {
   const userTextValues = users.map(u => u.getText('text').toDelta())
   for (const u of users) {
     t.assert(u.store.pendingDeleteReaders.length === 0)
-    t.assert(u.store.pendingStructReaders.size === 0)
+    t.assert(u.store.pendingStack.length === 0)
+    t.assert(u.store.pendingClientsStructRefs.size === 0)
   }
   for (let i = 0; i < users.length - 1; i++) {
     t.compare(userArrayValues[i].length, users[i].getArray('array').length)