Merge 29bcb2683ef782c5e74810b335456c9e33e4948c into 00ef472d68545cb260abd35c2de4b3b78719c9e4
commit 331a719d8f
@@ -239,10 +239,10 @@ export const tryGc = (ds, store, gcFilter) => {
 
 /**
  * @param {Array<Transaction>} transactionCleanups
- * @param {number} i
  */
-const cleanupTransactions = (transactionCleanups, i) => {
-  if (i < transactionCleanups.length) {
+const cleanupTransactions = (transactionCleanups) => {
+  let lastError = null;
+  for (let i = 0; i < transactionCleanups.length; i++) {
     const transaction = transactionCleanups[i]
     const doc = transaction.doc
     const store = doc.store
@@ -295,81 +295,83 @@ const cleanupTransactions = (transactionCleanups, i) => {
         fs.push(() => doc.emit('afterTransaction', [transaction, doc]))
       })
       callAll(fs, [])
-    } finally {
-      // Replace deleted items with ItemDeleted / GC.
-      // This is where content is actually remove from the Yjs Doc.
-      if (doc.gc) {
-        tryGcDeleteSet(ds, store, doc.gcFilter)
-      }
-      tryMergeDeleteSet(ds, store)
-
-      // on all affected store.clients props, try to merge
-      transaction.afterState.forEach((clock, client) => {
-        const beforeClock = transaction.beforeState.get(client) || 0
-        if (beforeClock !== clock) {
-          const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
-          // we iterate from right to left so we can safely remove entries
-          const firstChangePos = math.max(findIndexSS(structs, beforeClock), 1)
-          for (let i = structs.length - 1; i >= firstChangePos; i--) {
-            tryToMergeWithLeft(structs, i)
-          }
-        }
-      })
-      // try to merge mergeStructs
-      // @todo: it makes more sense to transform mergeStructs to a DS, sort it, and merge from right to left
-      //        but at the moment DS does not handle duplicates
-      for (let i = 0; i < mergeStructs.length; i++) {
-        const { client, clock } = mergeStructs[i].id
-        const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
-        const replacedStructPos = findIndexSS(structs, clock)
-        if (replacedStructPos + 1 < structs.length) {
-          tryToMergeWithLeft(structs, replacedStructPos + 1)
-        }
-        if (replacedStructPos > 0) {
-          tryToMergeWithLeft(structs, replacedStructPos)
-        }
-      }
-      if (!transaction.local && transaction.afterState.get(doc.clientID) !== transaction.beforeState.get(doc.clientID)) {
-        logging.print(logging.ORANGE, logging.BOLD, '[yjs] ', logging.UNBOLD, logging.RED, 'Changed the client-id because another client seems to be using it.')
-        doc.clientID = generateNewClientId()
-      }
-      // @todo Merge all the transactions into one and provide send the data as a single update message
-      doc.emit('afterTransactionCleanup', [transaction, doc])
-      if (doc._observers.has('update')) {
-        const encoder = new UpdateEncoderV1()
-        const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
-        if (hasContent) {
-          doc.emit('update', [encoder.toUint8Array(), transaction.origin, doc, transaction])
-        }
-      }
-      if (doc._observers.has('updateV2')) {
-        const encoder = new UpdateEncoderV2()
-        const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
-        if (hasContent) {
-          doc.emit('updateV2', [encoder.toUint8Array(), transaction.origin, doc, transaction])
-        }
-      }
-      const { subdocsAdded, subdocsLoaded, subdocsRemoved } = transaction
-      if (subdocsAdded.size > 0 || subdocsRemoved.size > 0 || subdocsLoaded.size > 0) {
-        subdocsAdded.forEach(subdoc => {
-          subdoc.clientID = doc.clientID
-          if (subdoc.collectionid == null) {
-            subdoc.collectionid = doc.collectionid
-          }
-          doc.subdocs.add(subdoc)
-        })
-        subdocsRemoved.forEach(subdoc => doc.subdocs.delete(subdoc))
-        doc.emit('subdocs', [{ loaded: subdocsLoaded, added: subdocsAdded, removed: subdocsRemoved }, doc, transaction])
-        subdocsRemoved.forEach(subdoc => subdoc.destroy())
-      }
-
-      if (transactionCleanups.length <= i + 1) {
-        doc._transactionCleanups = []
-        doc.emit('afterAllTransactions', [doc, transactionCleanups])
-      } else {
-        cleanupTransactions(transactionCleanups, i + 1)
-      }
-    }
-  }
+    } catch (e) {
+      lastError = e
+    }
+    // Replace deleted items with ItemDeleted / GC.
+    // This is where content is actually remove from the Yjs Doc.
+    if (doc.gc) {
+      tryGcDeleteSet(ds, store, doc.gcFilter)
+    }
+    tryMergeDeleteSet(ds, store)
+
+    // on all affected store.clients props, try to merge
+    transaction.afterState.forEach((clock, client) => {
+      const beforeClock = transaction.beforeState.get(client) || 0
+      if (beforeClock !== clock) {
+        const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
+        // we iterate from right to left so we can safely remove entries
+        const firstChangePos = math.max(findIndexSS(structs, beforeClock), 1)
+        for (let i = structs.length - 1; i >= firstChangePos; i--) {
+          tryToMergeWithLeft(structs, i)
+        }
+      }
+    })
+    // try to merge mergeStructs
+    // @todo: it makes more sense to transform mergeStructs to a DS, sort it, and merge from right to left
+    //        but at the moment DS does not handle duplicates
+    for (let i = 0; i < mergeStructs.length; i++) {
+      const { client, clock } = mergeStructs[i].id
+      const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
+      const replacedStructPos = findIndexSS(structs, clock)
+      if (replacedStructPos + 1 < structs.length) {
+        tryToMergeWithLeft(structs, replacedStructPos + 1)
+      }
+      if (replacedStructPos > 0) {
+        tryToMergeWithLeft(structs, replacedStructPos)
+      }
+    }
+    if (!transaction.local && transaction.afterState.get(doc.clientID) !== transaction.beforeState.get(doc.clientID)) {
+      logging.print(logging.ORANGE, logging.BOLD, '[yjs] ', logging.UNBOLD, logging.RED, 'Changed the client-id because another client seems to be using it.')
+      doc.clientID = generateNewClientId()
+    }
+    // @todo Merge all the transactions into one and provide send the data as a single update message
+    doc.emit('afterTransactionCleanup', [transaction, doc])
+    if (doc._observers.has('update')) {
+      const encoder = new UpdateEncoderV1()
+      const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
+      if (hasContent) {
+        doc.emit('update', [encoder.toUint8Array(), transaction.origin, doc, transaction])
+      }
+    }
+    if (doc._observers.has('updateV2')) {
+      const encoder = new UpdateEncoderV2()
+      const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
+      if (hasContent) {
+        doc.emit('updateV2', [encoder.toUint8Array(), transaction.origin, doc, transaction])
+      }
+    }
+    const { subdocsAdded, subdocsLoaded, subdocsRemoved } = transaction
+    if (subdocsAdded.size > 0 || subdocsRemoved.size > 0 || subdocsLoaded.size > 0) {
+      subdocsAdded.forEach(subdoc => {
+        subdoc.clientID = doc.clientID
+        if (subdoc.collectionid == null) {
+          subdoc.collectionid = doc.collectionid
+        }
+        doc.subdocs.add(subdoc)
+      })
+      subdocsRemoved.forEach(subdoc => doc.subdocs.delete(subdoc))
+      doc.emit('subdocs', [{ loaded: subdocsLoaded, added: subdocsAdded, removed: subdocsRemoved }, doc, transaction])
+      subdocsRemoved.forEach(subdoc => subdoc.destroy())
+    }
+
+    if (transactionCleanups.length <= i + 1) {
+      doc._transactionCleanups = []
+      doc.emit('afterAllTransactions', [doc, transactionCleanups])
+    }
+  }
+  if (lastError) {
+    throw lastError
+  }
 }
 
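In short, this hunk turns the recursive, finally-based cleanup into a flat loop: errors from observer callbacks are caught per transaction, the cleanup work (GC, struct merging, update events) still runs for every queued transaction, and the last recorded error is rethrown once the loop finishes. A minimal, self-contained sketch of that pattern (hypothetical names, not the Yjs API):

// Sketch only: `notify` stands in for the observer calls above, `release`
// for the GC/merge cleanup. An error in `notify` no longer skips `release`
// for the remaining items, and it still reaches the caller afterwards.
const cleanupAll = (items, notify, release) => {
  let lastError = null
  for (let i = 0; i < items.length; i++) {
    try {
      notify(items[i]) // user callbacks; may throw
    } catch (e) {
      lastError = e // remember the error, keep going
    }
    release(items[i]) // always runs
  }
  if (lastError) {
    throw lastError // surface the failure only after all cleanups ran
  }
}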
@@ -415,7 +417,7 @@ export const transact = (doc, f, origin = null, local = true) => {
         // observes throw errors.
         // This file is full of hacky try {} finally {} blocks to ensure that an
         // event can throw errors and also that the cleanup is called.
-        cleanupTransactions(transactionCleanups, 0)
+        cleanupTransactions(transactionCleanups)
       }
     }
   }
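The call site in transact is updated to the new one-argument signature; the index bookkeeping now lives entirely inside cleanupTransactions.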
@@ -210,3 +210,21 @@ export const testFormattingBug = _tc => {
   yxml.applyDelta(delta)
   t.compare(yxml.toDelta(), delta)
 }
+
+/**
+ * @param {t.TestCase} tc
+ */
+export const testCleanupTransactions = tc => {
+  const ydoc = new Y.Doc()
+  const yxml = ydoc.getXmlFragment('')
+  ydoc.on('afterTransaction', tr => {
+    if (tr.origin === 'test') {
+      yxml.toJSON()
+    }
+  })
+  ydoc.transact(tr => {
+    for (let i = 0; i < 100000; i++) {
+      yxml.push([new Y.XmlText('a')])
+    }
+  }, 'test')
+}
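The new test drives the cleanup path hard: a single transaction inserts 100000 nodes while an 'afterTransaction' observer reads the tree. A small hedged example (assuming the published yjs package) of the behaviour the refactor makes explicit, namely that an error thrown by an observer still reaches the caller, and only after the per-transaction cleanup has run:

import * as Y from 'yjs'

const ydoc = new Y.Doc()
const ytext = ydoc.getText('t')
ydoc.on('afterTransaction', () => {
  // a misbehaving user observer
  throw new Error('observer failed')
})
try {
  ydoc.transact(() => {
    ytext.insert(0, 'hello')
  })
} catch (e) {
  // the error still surfaces, but GC / struct merging for the
  // transaction has already completed by the time it is rethrown
  console.log(e.message) // -> 'observer failed'
}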