Compare commits
No commits in common. "dist" and "main" have entirely different histories.
.github/workflows/node.js.yml (vendored, new file, 31 lines)
@@ -0,0 +1,31 @@
# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions

name: Node.js CI

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:

    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [16.x, 20.x]

    steps:
    - uses: actions/checkout@v4
    - name: Use Node.js ${{ matrix.node-version }}
      uses: actions/setup-node@v3
      with:
        node-version: ${{ matrix.node-version }}
    - run: npm ci
    - run: npm run lint
    - run: npm run test
      env:
        CI: true

.gitignore (vendored, 4 changes)
@@ -1,2 +1,4 @@
node_modules
bower_components
dist
.vscode
docs

.jsdoc.json (new file, 52 lines)
@@ -0,0 +1,52 @@
{
  "sourceType": "module",
  "tags": {
    "allowUnknownTags": true,
    "dictionaries": ["jsdoc"]
  },
  "source": {
    "include": ["./src"],
    "includePattern": ".js$"
  },
  "plugins": [
    "plugins/markdown"
  ],
  "templates": {
    "referenceTitle": "Yjs",
    "disableSort": false,
    "useCollapsibles": true,
    "collapse": true,
    "resources": {
      "yjs.dev": "Website",
      "docs.yjs.dev": "Docs",
      "discuss.yjs.dev": "Forum",
      "https://gitter.im/Yjs/community": "Chat"
    },
    "logo": {
      "url": "https://yjs.dev/images/logo/yjs-512x512.png",
      "width": "162px",
      "height": "162px",
      "link": "/"
    },
    "tabNames": {
      "api": "API",
      "tutorials": "Examples"
    },
    "footerText": "Shared Editing",
    "css": [
      "./style.css"
    ],
    "default": {
      "staticFiles": {
        "include": []
      }
    }
  },
  "opts": {
    "destination": "./docs/",
    "encoding": "utf8",
    "private": false,
    "recurse": true,
    "template": "./node_modules/tui-jsdoc-template"
  }
}

.markdownlint.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "default": true,
  "no-inline-html": false
}
@ -1,32 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<style type="text/css" media="screen">
|
||||
#aceContainer {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
}
|
||||
.inserted {
|
||||
position:absolute;
|
||||
z-index:20;
|
||||
background-color: #FFC107;
|
||||
}
|
||||
.deleted {
|
||||
position:absolute;
|
||||
z-index:20;
|
||||
background-color: #FFC107;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<div id="aceContainer"></div>
|
||||
<script src="../bower_components/yjs/y.es6"></script>
|
||||
<script src="../bower_components/ace-builds/src/ace.js"></script>
|
||||
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,25 +0,0 @@
|
||||
/* global Y, ace */
|
||||
|
||||
// initialize a shared object. This function call returns a promise!
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'ace-example'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
ace: 'Text' // y.share.textarea is of type Y.Text
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yAce = y
|
||||
|
||||
// bind the textarea to a shared text element
|
||||
var editor = ace.edit('aceContainer')
|
||||
editor.setTheme('ace/theme/chrome')
|
||||
editor.getSession().setMode('ace/mode/javascript')
|
||||
|
||||
y.share.ace.bindAce(editor)
|
||||
})
|
@ -1,18 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<style>
|
||||
#chat p span {
|
||||
color: blue;
|
||||
}
|
||||
</style>
|
||||
<div id="chat"></div>
|
||||
<form id="chatform">
|
||||
<input name="username" type="text" style="width:15%;">
|
||||
<input name="message" type="text" style="width:60%;">
|
||||
<input type="submit" value="Send">
|
||||
</form>
|
||||
<script src="../bower_components/yjs/y.js"></script>
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,75 +0,0 @@
|
||||
/* @flow */
|
||||
/* global Y */
|
||||
|
||||
// initialize a shared object. This function call returns a promise!
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'chat-example'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
chat: 'Array'
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yChat = y
|
||||
// This functions inserts a message at the specified position in the DOM
|
||||
function appendMessage(message, position) {
|
||||
var p = document.createElement('p')
|
||||
var uname = document.createElement('span')
|
||||
uname.appendChild(document.createTextNode(message.username + ": "))
|
||||
p.appendChild(uname)
|
||||
p.appendChild(document.createTextNode(message.message))
|
||||
document.querySelector('#chat').insertBefore(p, chat.children[position] || null)
|
||||
}
|
||||
// This function makes sure that only 7 messages exist in the chat history.
|
||||
// The rest is deleted
|
||||
function cleanupChat () {
|
||||
var len
|
||||
while ((len = y.share.chat.length) > 7) {
|
||||
y.share.chat.delete(0)
|
||||
}
|
||||
}
|
||||
// Insert the initial content
|
||||
y.share.chat.toArray().forEach(appendMessage)
|
||||
cleanupChat()
|
||||
|
||||
// whenever content changes, make sure to reflect the changes in the DOM
|
||||
y.share.chat.observe(function (event) {
|
||||
if (event.type === 'insert') {
|
||||
for (var i = 0; i < event.length; i++) {
|
||||
appendMessage(event.values[i], event.index + i)
|
||||
}
|
||||
} else if (event.type === 'delete') {
|
||||
for (var i = 0; i < event.length; i++) {
|
||||
chat.children[event.index].remove()
|
||||
}
|
||||
}
|
||||
// concurrent insertions may result in a history > 7, so cleanup here
|
||||
cleanupChat()
|
||||
})
|
||||
document.querySelector('#chatform').onsubmit = function (event) {
|
||||
// the form is submitted
|
||||
var message = {
|
||||
username: this.querySelector("[name=username]").value,
|
||||
message: this.querySelector("[name=message]").value
|
||||
}
|
||||
if (message.username.length > 0 && message.message.length > 0) {
|
||||
if (y.share.chat.length > 6) {
|
||||
// If we are goint to insert the 8th element, make sure to delete first.
|
||||
y.share.chat.delete(0)
|
||||
}
|
||||
// Here we insert a message in the shared chat type.
|
||||
// This will call the observe function (see line 40)
|
||||
// and reflect the change in the DOM
|
||||
y.share.chat.push([message])
|
||||
this.querySelector("[name=message]").value = ""
|
||||
}
|
||||
// Do not send this form!
|
||||
event.preventDefault()
|
||||
return false
|
||||
}
|
||||
})
|
@ -1,23 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
</head>
|
||||
<body>
|
||||
<div id="codeMirrorContainer"></div>
|
||||
|
||||
<script src="../bower_components/yjs/y.es6"></script>
|
||||
<script src="../bower_components/codemirror/lib/codemirror.js"></script>
|
||||
<script src="../bower_components/codemirror/mode/javascript/javascript.js"></script>
|
||||
<link rel="stylesheet" href="../bower_components/codemirror/lib/codemirror.css">
|
||||
<style>
|
||||
.CodeMirror {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
</style>
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,24 +0,0 @@
|
||||
/* global Y, CodeMirror */
|
||||
|
||||
// initialize a shared object. This function call returns a promise!
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'codemirror-example'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
codemirror: 'Text' // y.share.codemirror is of type Y.Text
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yCodeMirror = y
|
||||
|
||||
var editor = CodeMirror(document.querySelector('#codeMirrorContainer'), {
|
||||
mode: 'javascript',
|
||||
lineNumbers: true
|
||||
})
|
||||
y.share.codemirror.bindCodeMirror(editor)
|
||||
})
|
@ -1,19 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<style>
|
||||
path {
|
||||
fill: none;
|
||||
stroke: blue;
|
||||
stroke-width: 1px;
|
||||
stroke-linejoin: round;
|
||||
stroke-linecap: round;
|
||||
}
|
||||
</style>
|
||||
<button type="button" id="clearDrawingCanvas">Clear Drawing</button>
|
||||
<svg id="drawingCanvas" viewbox="0 0 100 100" width="100%"></svg>
|
||||
<script src="../bower_components/yjs/y.js"></script>
|
||||
<script src="../bower_components/d3/d3.js"></script>
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,84 +0,0 @@
|
||||
/* globals Y, d3 */
|
||||
'strict mode'
|
||||
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'drawing-example'
|
||||
// url: 'localhost:1234'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
drawing: 'Array'
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yDrawing = y
|
||||
var drawing = y.share.drawing
|
||||
var renderPath = d3.svg.line()
|
||||
.x(function (d) { return d[0] })
|
||||
.y(function (d) { return d[1] })
|
||||
.interpolate('basis')
|
||||
|
||||
var svg = d3.select('#drawingCanvas')
|
||||
.call(d3.behavior.drag()
|
||||
.on('dragstart', dragstart)
|
||||
.on('drag', drag)
|
||||
.on('dragend', dragend))
|
||||
|
||||
// create line from a shared array object and update the line when the array changes
|
||||
function drawLine (yarray) {
|
||||
var line = svg.append('path').datum(yarray.toArray())
|
||||
line.attr('d', renderPath)
|
||||
yarray.observe(function (event) {
|
||||
// we only implement insert events that are appended to the end of the array
|
||||
event.values.forEach(function (value) {
|
||||
line.datum().push(value)
|
||||
})
|
||||
line.attr('d', renderPath)
|
||||
})
|
||||
}
|
||||
// call drawLine every time an array is appended
|
||||
y.share.drawing.observe(function (event) {
|
||||
if (event.type === 'insert') {
|
||||
event.values.forEach(drawLine)
|
||||
} else {
|
||||
// just remove all elements (that's what we do anyway)
|
||||
svg.selectAll('path').remove()
|
||||
}
|
||||
})
|
||||
// draw all existing content
|
||||
for (var i = 0; i < drawing.length; i++) {
|
||||
drawLine(drawing.get(i))
|
||||
}
|
||||
|
||||
// clear canvas on request
|
||||
document.querySelector('#clearDrawingCanvas').onclick = function () {
|
||||
drawing.delete(0, drawing.length)
|
||||
}
|
||||
|
||||
var sharedLine = null
|
||||
function dragstart () {
|
||||
drawing.insert(drawing.length, [Y.Array])
|
||||
sharedLine = drawing.get(drawing.length - 1)
|
||||
}
|
||||
|
||||
// After one dragged event is recognized, we ignore them for 33ms.
|
||||
var ignoreDrag = null
|
||||
function drag () {
|
||||
if (sharedLine != null && ignoreDrag == null) {
|
||||
ignoreDrag = window.setTimeout(function () {
|
||||
ignoreDrag = null
|
||||
}, 33)
|
||||
sharedLine.push([d3.mouse(this)])
|
||||
}
|
||||
}
|
||||
|
||||
function dragend () {
|
||||
sharedLine = null
|
||||
window.clearTimeout(ignoreDrag)
|
||||
ignoreDrag = null
|
||||
}
|
||||
})
|
@ -1,23 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<style type="text/css">
|
||||
.draggable {
|
||||
cursor: move;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<svg id="puzzle-example" width="100%" viewBox="0 0 800 800">
|
||||
<g>
|
||||
<path d="M 311.76636,154.23389 C 312.14136,171.85693 318.14087,184.97998 336.13843,184.23047 C 354.13647,183.48047 351.88647,180.48096 354.88599,178.98096 C 357.8855,177.48096 368.38452,170.35693 380.00806,169.98193 C 424.61841,168.54297 419.78296,223.6001 382.25757,223.6001 C 377.75806,223.6001 363.51001,219.10107 356.38599,211.97656 C 349.26196,204.85254 310.64185,207.10254 314.76636,236.34863 C 316.34888,247.5708 324.08374,267.90723 324.84595,286.23486 C 325.29321,296.99414 323.17603,307.00635 321.58911,315.6377 C 360.11353,305.4585 367.73462,304.30518 404.00513,312.83936 C 410.37915,314.33887 436.62573,310.21436 421.25269,290.3418 C 405.87964,270.46924 406.25464,248.34717 417.12817,240.84814 C 428.00171,233.34912 446.74976,228.84961 457.99829,234.09912 C 469.24683,239.34814 484.61987,255.84619 475.24585,271.59424 C 465.87231,287.34229 452.74878,290.7168 456.49829,303.84033 C 460.2478,316.96387 479.74536,320.33838 500.74292,321.83789 C 509.70142,322.47803 527.97192,323.28467 542.10864,320.12939 C 549.91821,318.38672 556.92212,315.89502 562.46753,313.56396 C 561.40796,277.80664 560.84888,245.71729 560.3606,241.97314 C 558.85278,230.41455 542.49536,217.28564 525.86499,223.2251 C 520.61548,225.1001 519.86548,231.84912 505.24243,232.59912 C 444.92798,235.69238 462.06958,143.26709 525.86499,180.48096 C 539.52759,188.45068 575.19409,190.7583 570.10913,156.85889 C 567.85962,141.86035 553.98608,102.86523 553.98608,102.86523 C 553.98608,102.86523 477.23755,111.82227 451.99878,91.991699 C 441.50024,83.74292 444.87476,69.494629 449.37427,61.245605 C 453.87378,52.996582 465.12231,46.622559 464.74731,36.123779 C 463.02563,-12.086426 392.96704,-10.902832 396.5061,36.873535 C 397.25562,46.997314 406.62964,52.621582 410.75415,60.495605 C 420.00757,78.161377 405.50024,96.073486 384.50757,99.490723 C 377.36206,100.65381 349.17505,102.65332 320.39429,102.23486 C 319.677,102.22461 318.95923,102.21143 318.24194,102.19775 C 315.08423,120.9751 311.55688,144.39697 311.76636,154.23389 z " style="fill:#f2c569;stroke:#000000" id="path2502"/>
|
||||
<path d="M 500.74292,321.83789 C 479.74536,320.33838 460.2478,316.96387 456.49829,303.84033 C 452.74878,290.7168 465.87231,287.34229 475.24585,271.59424 C 484.61987,255.84619 469.24683,239.34814 457.99829,234.09912 C 446.74976,228.84961 428.00171,233.34912 417.12817,240.84814 C 406.25464,248.34717 405.87964,270.46924 421.25269,290.3418 C 436.62573,310.21436 410.37915,314.33887 404.00513,312.83936 C 367.73462,304.30518 360.11353,305.4585 321.58911,315.6377 C 320.56372,321.21484 319.75854,326.2207 320.01538,330.46191 C 320.76538,342.83545 329.3894,385.95508 327.8894,392.7041 C 326.3894,399.45312 313.64136,418.20117 297.89331,407.32715 C 282.14526,396.45361 276.52075,393.4541 265.27222,394.5791 C 254.02368,395.70361 239.77563,402.07812 239.77563,419.32568 C 239.77563,436.57373 250.27417,449.69727 268.64673,447.82227 C 287.36353,445.9126 317.92163,423.11035 325.63989,452.69678 C 330.1394,469.94434 330.51392,487.19238 330.1394,498.44092 C 329.95825,503.87646 326.09985,518.06592 322.16089,531.28125 C 353.2854,532.73682 386.47095,531.26611 394.2561,529.93701 C 430.30933,523.78174 429.31909,496.09766 412.62866,477.44385 C 406.25464,470.31934 401.75513,455.32129 405.87964,444.82275 C 414.07056,423.97314 458.8064,422.17773 473.37134,438.82324 C 483.86987,450.82178 475.99585,477.44385 468.49683,482.69287 C 453.52222,493.17529 457.22485,516.83008 473.37134,528.06201 C 504.79126,549.91943 572.35913,535.56152 572.35913,535.56152 C 572.35913,535.56152 567.85962,498.06592 567.48462,471.81934 C 567.10962,445.57275 589.60669,450.07227 593.3562,450.07227 C 597.10571,450.07227 604.22974,455.32129 609.47925,459.4458 C 614.72876,463.57031 618.85327,469.94434 630.85181,470.69434 C 677.43726,473.60596 674.58813,420.7373 631.97632,413.32666 C 623.35229,411.82666 614.72876,416.32617 603.10522,424.57519 C 591.48169,432.82422 577.23315,425.32519 570.10913,417.45117 C 566.07788,412.99561 563.8479,360.16406 562.46753,313.56396 C 556.92212,315.89502 549.91821,318.38672 542.10864,320.12939 C 527.97192,323.28467 509.70142,322.47803 500.74292,321.83789 z " style="fill:#f3f3d6;stroke:#000000" id="path2504"/>
|
||||
<path d="M 240.52563,141.86035 C 257.60327,159.6499 243.94507,188.68799 214.65356,190.22949 C 185.09448,191.78516 164.66675,157.17822 190.28589,136.61621 C 200.49585,128.42139 198.05786,114.12158 179.78296,106.98975 C 154.4187,97.091553 90.54419,107.73975 90.54419,107.73975 C 90.54419,107.73975 100.88794,135.11328 101.41772,168.48242 C 101.79272,192.104 68.796875,189.47949 63.172607,186.85498 C 57.54834,184.23047 45.924805,173.73145 37.675781,173.73145 C -14.411865,173.73145 -10.013184,245.84375 39.925537,232.22412 C 48.174316,229.97461 56.42334,220.97559 68.796875,222.47559 C 81.17041,223.9751 87.544434,232.59912 87.544434,246.09766 C 87.544434,252.51709 87.0354,281.24268 86.340576,312.87012 C 119.15894,313.67676 160.60962,314.46582 170.03442,313.58887 C 186.15698,312.08936 195.90601,301.59033 188.40698,293.3418 C 180.90796,285.09277 156.16089,256.59619 179.03296,239.34814 C 201.90503,222.10059 235.65112,231.84912 239.77563,247.22217 C 243.90015,262.59521 240.52563,273.46924 234.90112,279.09326 C 229.27661,284.71777 210.52905,298.96582 221.40259,308.71484 C 232.27661,318.46338 263.77222,330.83691 302.39282,320.71338 C 309.58862,318.82715 315.92114,317.13525 321.58911,315.6377 C 323.17603,307.00635 325.29321,296.99414 324.84595,286.23486 C 324.08374,267.90723 316.34888,247.5708 314.76636,236.34863 C 310.64185,207.10254 349.26196,204.85254 356.38599,211.97656 C 363.51001,219.10107 377.75806,223.6001 382.25757,223.6001 C 419.78296,223.6001 424.61841,168.54297 380.00806,169.98193 C 368.38452,170.35693 357.8855,177.48096 354.88599,178.98096 C 351.88647,180.48096 354.13647,183.48047 336.13843,184.23047 C 318.14087,184.97998 312.14136,171.85693 311.76636,154.23389 C 311.55688,144.39697 315.08423,120.9751 318.24194,102.19775 C 290.37524,101.67725 262.46069,98.968262 254.39868,97.991211 C 233.38013,95.443359 217.17456,117.53662 240.52563,141.86035 z " style="fill:#bebcdb;stroke:#000000" id="path2506"/>
|
||||
<path d="M 325.63989,452.69678 C 317.92163,423.11035 287.36353,445.9126 268.64673,447.82227 C 250.27417,449.69727 239.77563,436.57373 239.77563,419.32568 C 239.77563,402.07812 254.02368,395.70361 265.27222,394.5791 C 276.52075,393.4541 282.14526,396.45361 297.89331,407.32715 C 313.64136,418.20117 326.3894,399.45313 327.8894,392.7041 C 329.3894,385.95508 320.76538,342.83545 320.01538,330.46191 C 319.75855,326.2207 320.56372,321.21484 321.58911,315.6377 C 315.92114,317.13525 309.58862,318.82715 302.39282,320.71338 C 263.77222,330.83691 232.27661,318.46338 221.40259,308.71484 C 210.52905,298.96582 229.27661,284.71777 234.90112,279.09326 C 240.52563,273.46924 243.90015,262.59521 239.77563,247.22217 C 235.65112,231.84912 201.90503,222.10059 179.03296,239.34814 C 156.16089,256.59619 180.90796,285.09277 188.40698,293.3418 C 195.90601,301.59033 186.15698,312.08936 170.03442,313.58887 C 160.60962,314.46582 119.15894,313.67676 86.340576,312.87012 C 85.573975,347.74561 84.581299,386.15088 83.794922,402.07812 C 82.295166,432.44922 109.29175,422.32568 115.66577,420.82568 C 122.04028,419.32568 126.16479,409.57715 143.03735,408.45215 C 185.9231,405.59326 186.09985,466.69629 144.16235,467.69482 C 128.41431,468.06982 113.79126,451.19678 108.16675,447.44727 C 102.54272,443.69775 87.919433,442.94775 83.794922,457.9458 C 82.01709,464.41113 78.118652,481.65137 78.098144,496.18994 C 78.071045,515.38037 82.295166,531.81201 82.295166,531.81201 C 82.295166,531.81201 105.54224,526.5625 149.41187,526.5625 C 193.28149,526.5625 199.65552,547.93506 194.78101,558.80859 C 189.90649,569.68213 181.28296,568.93213 179.40796,583.18066 C 172.7063,634.11133 253.34106,631.08203 249.14917,584.68018 C 247.96948,571.62354 237.16528,571.66699 232.27661,557.68359 C 222.17944,528.80273 244.64966,523.56299 257.39819,524.68799 C 263.59351,525.23437 290.95679,529.73389 320.75757,531.21582 C 321.22437,531.23877 321.69312,531.25928 322.16089,531.28125 C 326.09985,518.06592 329.95825,503.87646 330.1394,498.44092 C 330.51392,487.19238 330.1394,469.94434 325.63989,452.69678 z " style="fill:#d3ea9d;stroke:#000000" id="path2508"/>
|
||||
</g>
|
||||
</svg>
|
||||
<script src="../bower_components/yjs/y.js"></script>
|
||||
<script src="../bower_components/d3/d3.js"></script>
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,69 +0,0 @@
|
||||
/* @flow */
|
||||
/* global Y, d3 */
|
||||
|
||||
// initialize a shared object. This function call returns a promise!
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'Puzzle-example'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
piece1: 'Map',
|
||||
piece2: 'Map',
|
||||
piece3: 'Map',
|
||||
piece4: 'Map'
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yJigsaw = y
|
||||
var origin // mouse start position - translation of piece
|
||||
var drag = d3.behavior.drag()
|
||||
.on('dragstart', function (params) {
|
||||
// get the translation of the element
|
||||
var translation = d3.select(this).attr('transform').slice(10,-1).split(',').map(Number)
|
||||
// mouse coordinates
|
||||
var mouse = d3.mouse(this.parentNode)
|
||||
origin = {
|
||||
x: mouse[0] - translation[0],
|
||||
y: mouse[1] - translation[1]
|
||||
}
|
||||
})
|
||||
.on("drag", function(){
|
||||
var mouse = d3.mouse(this.parentNode)
|
||||
var x = mouse[0] - origin.x // =^= mouse - mouse at dragstart + translation at dragstart
|
||||
var y = mouse[1] - origin.y
|
||||
d3.select(this).attr("transform", "translate(" + x + "," + y + ")")
|
||||
})
|
||||
.on('dragend', function (piece, i) {
|
||||
// save the current translation of the puzzle piece
|
||||
var mouse = d3.mouse(this.parentNode)
|
||||
var x = mouse[0] - origin.x
|
||||
var y = mouse[1] - origin.y
|
||||
piece.set('translation', {x: x, y: y})
|
||||
})
|
||||
|
||||
var data = [y.share.piece1, y.share.piece2, y.share.piece3, y.share.piece4]
|
||||
var pieces = d3.select(document.querySelector("#puzzle-example")).selectAll("path").data(data)
|
||||
|
||||
pieces
|
||||
.classed('draggable', true)
|
||||
.attr("transform", function (piece) {
|
||||
var translation = piece.get('translation') || {x: 0, y: 0}
|
||||
return "translate(" + translation.x + "," + translation.y + ")"
|
||||
}).call(drag)
|
||||
|
||||
data.forEach(function(piece){
|
||||
piece.observe(function () {
|
||||
// whenever a property of a piece changes, update the translation of the pieces
|
||||
pieces
|
||||
.transition()
|
||||
.attr("transform", function (piece) {
|
||||
var translation = piece.get('translation') || {x: 0, y: 0}
|
||||
return "translate(" + translation.x + "," + translation.y + ")"
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
@ -1,24 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
</head>
|
||||
<body>
|
||||
<div id="monacoContainer"></div>
|
||||
<style>
|
||||
#monacoContainer {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
</style>
|
||||
<script src="../bower_components/yjs/y.es6"></script>
|
||||
<script src="../bower_components/y-array/y-array.es6"></script>
|
||||
<script src="../bower_components/y-text/y-text.es6"></script>
|
||||
<script src="../bower_components/y-websockets-client/y-websockets-client.es6"></script>
|
||||
<script src="../bower_components/y-memory/y-memory.es6"></script>
|
||||
<script src="../node_modules/monaco-editor/min/vs/loader.js"></script>
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,31 +0,0 @@
|
||||
/* global Y */
|
||||
|
||||
require.config({ paths: { 'vs': '../node_modules/monaco-editor/min/vs' }})
|
||||
require(['vs/editor/editor.main'], function() {
|
||||
|
||||
// Initialize a shared object. This function call returns a promise!
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'monaco-example'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
monaco: 'Text' // y.share.monaco is of type Y.Text
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yMonaco = y
|
||||
|
||||
// Create Monaco editor
|
||||
var editor = monaco.editor.create(document.getElementById('monacoContainer'), {
|
||||
language: 'javascript'
|
||||
})
|
||||
|
||||
// Bind to y.share.monaco
|
||||
y.share.monaco.bindMonaco(editor)
|
||||
})
|
||||
})
|
||||
|
@ -1,31 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<!-- quill does not include dist files! We are using the hosted version instead -->
|
||||
<!--link rel="stylesheet" href="../bower_components/quill/dist/quill.snow.css" /-->
|
||||
<link href="https://cdn.quilljs.com/1.0.4/quill.snow.css" rel="stylesheet">
|
||||
<link href="//cdnjs.cloudflare.com/ajax/libs/KaTeX/0.5.1/katex.min.css" rel="stylesheet">
|
||||
<link href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.2.0/styles/monokai-sublime.min.css" rel="stylesheet">
|
||||
<style>
|
||||
#quill-container {
|
||||
border: 1px solid gray;
|
||||
box-shadow: 0px 0px 10px gray;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="quill-container">
|
||||
<div id="quill">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="//cdnjs.cloudflare.com/ajax/libs/KaTeX/0.5.1/katex.min.js" type="text/javascript"></script>
|
||||
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.2.0/highlight.min.js" type="text/javascript"></script>
|
||||
<script src="https://cdn.quilljs.com/1.0.4/quill.js"></script>
|
||||
<!-- quill does not include dist files! We are using the hosted version instead (see above)
|
||||
<script src="../bower_components/quill/dist/quill.js"></script>
|
||||
-->
|
||||
<script src="../bower_components/yjs/y.es6"></script>
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,39 +0,0 @@
|
||||
/* global Y, Quill */
|
||||
|
||||
// initialize a shared object. This function call returns a promise!
|
||||
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'richtext-example-quill-1.0-test'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
richtext: 'Richtext' // y.share.richtext is of type Y.Richtext
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yQuill = y
|
||||
|
||||
// create quill element
|
||||
window.quill = new Quill('#quill', {
|
||||
modules: {
|
||||
formula: true,
|
||||
syntax: true,
|
||||
toolbar: [
|
||||
[{ size: ['small', false, 'large', 'huge'] }],
|
||||
['bold', 'italic', 'underline'],
|
||||
[{ color: [] }, { background: [] }], // Snow theme fills in values
|
||||
[{ script: 'sub' }, { script: 'super' }],
|
||||
['link', 'image'],
|
||||
['link', 'code-block'],
|
||||
[{list: 'ordered' }]
|
||||
]
|
||||
},
|
||||
theme: 'snow'
|
||||
});
|
||||
// bind quill to richtext type
|
||||
y.share.richtext.bind(window.quill)
|
||||
})
|
@ -1,31 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<!-- quill does not include dist files! We are using the hosted version instead -->
|
||||
<!--link rel="stylesheet" href="../bower_components/quill/dist/quill.snow.css" /-->
|
||||
<link href="https://cdn.quilljs.com/1.0.4/quill.snow.css" rel="stylesheet">
|
||||
<link href="//cdnjs.cloudflare.com/ajax/libs/KaTeX/0.5.1/katex.min.css" rel="stylesheet">
|
||||
<link href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.2.0/styles/monokai-sublime.min.css" rel="stylesheet">
|
||||
<style>
|
||||
#quill-container {
|
||||
border: 1px solid gray;
|
||||
box-shadow: 0px 0px 10px gray;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="quill-container">
|
||||
<div id="quill">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="//cdnjs.cloudflare.com/ajax/libs/KaTeX/0.5.1/katex.min.js" type="text/javascript"></script>
|
||||
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.2.0/highlight.min.js" type="text/javascript"></script>
|
||||
<script src="https://cdn.quilljs.com/1.0.4/quill.js"></script>
|
||||
<!-- quill does not include dist files! We are using the hosted version instead (see above)
|
||||
<script src="../bower_components/quill/dist/quill.js"></script>
|
||||
-->
|
||||
<script src="../bower_components/yjs/y.es6"></script>
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,49 +0,0 @@
|
||||
/* global Y, Quill */
|
||||
|
||||
// register yjs service worker
|
||||
if('serviceWorker' in navigator){
|
||||
// Register service worker
|
||||
// it is important to copy yjs-sw-template to the root directory!
|
||||
navigator.serviceWorker.register('./yjs-sw-template.js').then(function(reg){
|
||||
console.log("Yjs service worker registration succeeded. Scope is " + reg.scope);
|
||||
}).catch(function(err){
|
||||
console.error("Yjs service worker registration failed with error " + err);
|
||||
})
|
||||
}
|
||||
|
||||
// initialize a shared object. This function call returns a promise!
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'serviceworker',
|
||||
room: 'ServiceWorkerExample2'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
richtext: 'Richtext' // y.share.richtext is of type Y.Richtext
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yServiceWorker = y
|
||||
|
||||
// create quill element
|
||||
window.quill = new Quill('#quill', {
|
||||
modules: {
|
||||
formula: true,
|
||||
syntax: true,
|
||||
toolbar: [
|
||||
[{ size: ['small', false, 'large', 'huge'] }],
|
||||
['bold', 'italic', 'underline'],
|
||||
[{ color: [] }, { background: [] }], // Snow theme fills in values
|
||||
[{ script: 'sub' }, { script: 'super' }],
|
||||
['link', 'image'],
|
||||
['link', 'code-block'],
|
||||
[{list: 'ordered' }]
|
||||
]
|
||||
},
|
||||
theme: 'snow'
|
||||
})
|
||||
// bind quill to richtext type
|
||||
y.share.richtext.bind(window.quill)
|
||||
})
|
@ -1,22 +0,0 @@
|
||||
/* eslint-env worker */
|
||||
|
||||
// copy and modify this file
|
||||
|
||||
self.DBConfig = {
|
||||
name: 'indexeddb'
|
||||
}
|
||||
self.ConnectorConfig = {
|
||||
name: 'websockets-client',
|
||||
// url: '..',
|
||||
options: {
|
||||
jsonp: false
|
||||
}
|
||||
}
|
||||
|
||||
importScripts(
|
||||
'/bower_components/yjs/y.js',
|
||||
'/bower_components/y-memory/y-memory.js',
|
||||
'/bower_components/y-indexeddb/y-indexeddb.js',
|
||||
'/bower_components/y-websockets-client/y-websockets-client.js',
|
||||
'/bower_components/y-serviceworker/yjs-sw-include.js'
|
||||
)
|
@ -1,8 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<textarea style="width:80%;" rows=40 id="textfield" autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false"></textarea>
|
||||
<script src="../bower_components/yjs/y.es6"></script>
|
||||
<script src="./index.js"></script>
|
||||
</body>
|
||||
</html>
|
@ -1,23 +0,0 @@
|
||||
/* global Y */
|
||||
|
||||
// initialize a shared object. This function call returns a promise!
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'Textarea-example'
|
||||
// url: '127.0.0.1:1234'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
textarea: 'Text' // y.share.textarea is of type Y.Text
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yTextarea = y
|
||||
|
||||
// bind the textarea to a shared text element
|
||||
y.share.textarea.bind(document.getElementById('textfield'))
|
||||
// that's it.
|
||||
})
|
@ -1,39 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
</head>
|
||||
<script src="../bower_components/yjs/y.es6"></script>
|
||||
<script src="../bower_components/jquery/dist/jquery.min.js"></script>
|
||||
<script src="./index.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<h1> Shared DOM Example </h1>
|
||||
<p> Use native DOM function or jQuery to manipulate the shared DOM (window.sharedDom). </p>
|
||||
<div class="command">
|
||||
<button type="button">Execute</button>
|
||||
<input type="text" value='$(sharedDom).append("<h3>Appended headline</h3>")' size="40"/>
|
||||
</div>
|
||||
<div class="command">
|
||||
<button type="button">Execute</button>
|
||||
<input type="text" value='$(sharedDom).attr("align","right")' size="40"/>
|
||||
</div>
|
||||
<div class="command">
|
||||
<button type="button">Execute</button>
|
||||
<input type="text" value='$(sharedDom).attr("style","color:blue;")' size="40"/>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
var commands = document.querySelectorAll(".command");
|
||||
Array.prototype.forEach.call(document.querySelectorAll('.command'), function (command) {
|
||||
var execute = function(){
|
||||
eval(command.querySelector("input").value);
|
||||
}
|
||||
command.querySelector("button").onclick = execute
|
||||
$(command.querySelector("input")).keyup(function (e) {
|
||||
if (e.keyCode == 13) {
|
||||
execute()
|
||||
}
|
||||
})
|
||||
})
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
@ -1,21 +0,0 @@
|
||||
/* global Y */
|
||||
|
||||
// initialize a shared object. This function call returns a promise!
|
||||
Y({
|
||||
db: {
|
||||
name: 'memory'
|
||||
},
|
||||
connector: {
|
||||
name: 'websockets-client',
|
||||
room: 'Xml-example'
|
||||
},
|
||||
sourceDir: '/bower_components',
|
||||
share: {
|
||||
xml: 'Xml("p")' // y.share.xml is of type Y.Xml with tagname "p"
|
||||
}
|
||||
}).then(function (y) {
|
||||
window.yXml = y
|
||||
// bind xml type to a dom, and put it in body
|
||||
window.sharedDom = y.share.xml.getDom()
|
||||
document.body.appendChild(window.sharedDom)
|
||||
})
|
@ -1,29 +0,0 @@
|
||||
{
|
||||
"name": "yjs-examples",
|
||||
"version": "0.0",
|
||||
"homepage": "y-js.org",
|
||||
"authors": [
|
||||
"Kevin Jahns <kevin.jahns@rwth-aachen.de>"
|
||||
],
|
||||
"description": "Examples for yjs",
|
||||
"license": "MIT",
|
||||
"ignore": [],
|
||||
"dependencies": {
|
||||
"yjs": "latest",
|
||||
"y-array": "latest",
|
||||
"y-map": "latest",
|
||||
"y-memory": "latest",
|
||||
"y-richtext": "latest",
|
||||
"y-webrtc": "latest",
|
||||
"y-websockets-client": "latest",
|
||||
"y-text": "latest",
|
||||
"y-indexeddb": "latest",
|
||||
"y-xml": "latest",
|
||||
"quill": "^1.0.0-rc.2",
|
||||
"ace": "~1.2.3",
|
||||
"ace-builds": "~1.2.3",
|
||||
"jquery": "~2.2.2",
|
||||
"d3": "^3.5.16",
|
||||
"codemirror": "^5.25.0"
|
||||
}
|
||||
}
|
@ -1,10 +0,0 @@
|
||||
{
|
||||
"name": "examples",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"author": "Kevin Jahns",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"monaco-editor": "^0.8.3"
|
||||
}
|
||||
}
|

INTERNALS.md (new file, 179 lines)
@@ -0,0 +1,179 @@
# Yjs Internals

This document roughly explains how Yjs works internally. There is a complete
walkthrough of the Yjs codebase available as a recording:
https://youtu.be/0l5XgnQ6rB4

The Yjs CRDT algorithm is described in the [YATA
paper](https://www.researchgate.net/publication/310212186_Near_Real-Time_Peer-to-Peer_Shared_Editing_on_Extensible_Data_Types)
from 2016. For an algorithmic view of how it works, the paper is a reasonable
place to start. There are a handful of small improvements implemented in Yjs
which aren't described in the paper. The most notable is that items have an
`originRight` as well as an `origin` property, which improves performance when
many concurrent inserts happen after the same character.

At its heart, Yjs is a list CRDT. Everything is squeezed into a list in order to
reuse the CRDT resolution algorithm (see the sketch after this list):

- Arrays are easy - they're lists of arbitrary items.
- Text is a list of characters, optionally punctuated by formatting markers and
  embeds for rich text support. Several characters can be wrapped in a single
  linked list `Item` (this is also known as the compound representation of
  CRDTs). More information about this in [this blog
  article](https://blog.kevinjahns.de/are-crdts-suitable-for-shared-editing/).
- Maps are lists of entries. The last inserted entry for each key is used, and
  all other duplicates for each key are flagged as deleted.
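
A minimal sketch of how these three shared types are used through the public
API (assuming an ESM environment with `yjs` installed); the list representation
described above stays entirely internal:

```js
import * as Y from 'yjs'

const doc = new Y.Doc()

// An array of arbitrary items.
const yarray = doc.getArray('my-array')
yarray.insert(0, ['a', 'b', 'c'])

// Text: internally a list of characters (plus formatting markers and embeds).
const ytext = doc.getText('my-text')
ytext.insert(0, 'hello')

// A map: internally a list of entries, where the last write per key wins.
const ymap = doc.getMap('my-map')
ymap.set('key', 'value')

console.log(yarray.toArray(), ytext.toString(), ymap.get('key'))
```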

Each client is assigned a unique *clientID* property on first insert. This is a
random 53-bit integer (53 bits because that fits in the JavaScript safe integer
range \[JavaScript uses IEEE 754 floats\]).

## List items

Each item in a Yjs list is made up of two objects:

- An `Item` (*src/structs/Item.js*). This is used to relate the item to other
  adjacent items.
- An object in the `AbstractType` hierarchy (subclasses of
  *src/types/AbstractType.js* - eg `YText`). This stores the actual content in
  the Yjs document.

The item and type object pair have a 1-1 mapping. The item's `content` field
references the AbstractType object and the AbstractType object's `_item` field
references the item.

Everything inserted in a Yjs document is given a unique ID, formed from an
*ID(clientID, clock)* pair (also known as a [Lamport
Timestamp](https://en.wikipedia.org/wiki/Lamport_timestamp)). The clock counts
up from 0 with the first inserted character or item a client makes. This is
similar to automerge's operation IDs, but note that the clock is only
incremented by inserts. Deletes are handled in a very different way (see
below).

If a run of characters is inserted into a document (eg `"abc"`), the clock will
be incremented for each character (eg 3 times here). But Yjs will only add a
single `Item` into the list. This has no effect on the core CRDT algorithm, but
the optimization dramatically decreases the number of JavaScript objects
created during normal text editing. This optimization only applies if the
characters share the same clientID, they're inserted in order, and the
characters have either all been deleted or all not been deleted. The item
will be split if the run is interrupted for any reason (eg a character in the
middle of the run is deleted).
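
The clock behaviour can be observed through the public API. A minimal sketch
(assuming `yjs` is installed; the single-`Item` optimization itself is internal
and not visible at this level):

```js
import * as Y from 'yjs'

const doc = new Y.Doc()
doc.getText('t').insert(0, 'abc')

// The state vector maps clientID -> next expected clock value.
// Inserting "abc" advanced this client's clock by 3, even though the run
// is stored internally as a single Item.
const sv = Y.decodeStateVector(Y.encodeStateVector(doc))
console.log(doc.clientID, sv.get(doc.clientID)) // <clientID> 3
```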

When an item is created, it stores a reference to the IDs of the preceding and
succeeding item. These are stored in the item's `origin` and `originRight`
fields, respectively. These are used when peers concurrently insert at the same
location in a document. Though quite rare in practice, Yjs needs to make sure
the list items always resolve to the same order on all peers. The actual logic
is relatively simple - it's only a couple dozen lines of code and it lives in
the `Item#integrate()` method. The YATA paper has much more detail on this
algorithm.

### Item Storage

The items themselves are stored in two data structures and a cache:

- The items are stored in a tree of doubly-linked lists in *document order*.
  Each item has `left` and `right` properties linking to its siblings in the
  document. Items also have a `parent` property to reference their parent in the
  document tree (null at the root). (And you can access an item's children, if
  any, through `item.content`).
- All items are referenced in *insertion order* inside the struct store
  (*src/utils/StructStore.js*). This references the list of items inserted by
  each client, in chronological order. This is used to find an item in the
  tree with a given ID (using a binary search). It is also used to efficiently
  gather the operations a peer is missing during sync (more on this below).

When a local insert happens, Yjs needs to map the insert position in the
document (eg position 1000) to an ID. With just the linked list, this would
require a slow O(n) linear scan of the list. But when editing a document, most
inserts are either at the same position as the last insert, or nearby. To
improve performance, Yjs stores a cache of the 80 most recently looked up
insert positions in the document. This is consulted and updated when a position
is looked up to improve performance in the average case. The cache is updated
using a heuristic that is still changing (currently, it is updated when a new
position significantly diverges from existing markers in the cache). Internally
this is referred to as the skip list / fast search marker.
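
The public counterpart of this position-to-ID mapping is the relative position
API exported from *src/index.js*. A small sketch (assuming `yjs` is installed):

```js
import * as Y from 'yjs'

const doc = new Y.Doc()
const ytext = doc.getText('t')
ytext.insert(0, 'hello world')

// Pin a location to an item ID instead of a plain index.
const rel = Y.createRelativePositionFromTypeIndex(ytext, 5)

// An edit in front of the pinned location...
ytext.insert(0, 'say: ')

// ...does not invalidate it: resolving it again yields the shifted index.
const abs = Y.createAbsolutePositionFromRelativePosition(rel, doc)
console.log(abs && abs.index) // 10
```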

### Deletions

Deletions in Yjs are treated very differently from insertions. Insertions are
implemented as a sequential operation based CRDT, but deletions are treated as
a simpler state based CRDT.

When an item has been deleted by any peer, at any point in history, it is
flagged as deleted on the item. (Internally Yjs uses the `info` bitfield.) Yjs
does not record metadata about a deletion (see the sketch after this list):

- No data is kept on *when* an item was deleted, or which user deleted it.
- The struct store does not contain deletion records.
- The clientID's clock is not incremented.
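
A small sketch showing the last point through the public API (assuming `yjs`
is installed): the state vector is unchanged by a delete.

```js
import * as Y from 'yjs'

const doc = new Y.Doc()
const ytext = doc.getText('t')
ytext.insert(0, 'abc')

const before = Y.decodeStateVector(Y.encodeStateVector(doc)).get(doc.clientID)
ytext.delete(0, 3) // remove all three characters

const after = Y.decodeStateVector(Y.encodeStateVector(doc)).get(doc.clientID)
console.log(before, after) // 3 3 - deletes do not advance the clock
```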

If garbage collection is enabled in Yjs, when an object is deleted its content
is discarded. If a deleted object contains children (eg a field is deleted in
an object), the content is replaced with a `GC` object (*src/structs/GC.js*).
This is a very lightweight structure - it only stores the length of the removed
content.

Yjs has some special logic to share which content in a document has been
deleted:

- When a delete happens, as well as marking the item, the deleted IDs are
  listed locally within the transaction. (See below for more information about
  transactions.) When a transaction has been committed locally, the set of
  deleted items is appended to a transaction's update message.
- A snapshot (a marked point in time in the Yjs history) is specified using
  both the set of (clientID, clock) pairs *and* the set of all deleted item
  IDs. The deleted set is O(n), but because deletions usually happen in runs,
  this data set is usually tiny in practice. (The real world editing trace from
  the B4 benchmark document contains 182k inserts and 77k deleted characters. The
  deleted set size in a snapshot is only 4.5Kb).

## Transactions

All updates in Yjs happen within a *transaction*. (Defined in
*src/utils/Transaction.js*.)

The transaction collects a set of updates to the Yjs document to be applied on
remote peers atomically. Once a transaction has been committed locally, it
generates a compressed *update message* which is broadcast to synchronized
remote peers to notify them of the local change. The update message contains
(see the sketch after this list):

- The set of newly inserted items
- The set of items deleted within the transaction.
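
A minimal sketch of transactions and the resulting update message, using the
documented `Doc.transact` method and `'update'` event (assuming `yjs` is
installed):

```js
import * as Y from 'yjs'

const doc = new Y.Doc()

// One compressed update message is emitted per committed transaction.
doc.on('update', (update, origin) => {
  console.log('update:', update.byteLength, 'bytes, origin:', origin)
  // broadcast `update` to remote peers here
})

doc.transact(() => {
  doc.getText('t').insert(0, 'hello')
  doc.getMap('meta').set('title', 'notes')
}, 'local-ui') // the second argument becomes the transaction origin
```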

## Network protocol

The network protocol is not really a part of Yjs. There are a few relevant
concepts that can be used to create a custom network protocol:

* `update`: The Yjs document can be encoded to an *update* object that can be
  parsed to reconstruct the document. Also every change on the document fires
  an incremental document update that allows clients to sync with each other.
  The update object is a Uint8Array that efficiently encodes `Item` objects and
  the delete set.
* `state vector`: A state vector defines the known state of each user (a set of
  tuples `(client, clock)`). This object is also efficiently encoded as a
  Uint8Array.

The client can ask a remote client for missing document updates by sending
their state vector (often referred to as *sync step 1*). The remote peer can
compute the missing `Item` objects using the `clocks` of the respective clients
and compute a minimal update message that reflects all missing updates (sync
step 2).
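
A minimal sketch of these two sync steps using the exported
`encodeStateVector`, `encodeStateAsUpdate` and `applyUpdate` functions
(two in-process documents stand in for remote peers here):

```js
import * as Y from 'yjs'

const doc1 = new Y.Doc()
const doc2 = new Y.Doc()
doc1.getText('t').insert(0, 'hello')

// Sync step 1: doc2 advertises what it already has.
const stateVector2 = Y.encodeStateVector(doc2)

// Sync step 2: doc1 answers with a minimal update containing only the missing items.
const missing = Y.encodeStateAsUpdate(doc1, stateVector2)
Y.applyUpdate(doc2, missing)

console.log(doc2.getText('t').toString()) // 'hello'
```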

An implementation of the syncing process is in
[y-protocols](https://github.com/yjs/y-protocols).

## Snapshots

A snapshot can be used to restore an old document state. It is a `state vector`
\+ `delete set`. A client can restore an old document state by iterating through
the sequence CRDT and ignoring all Items that have an `id.clock >
stateVector[id.client].clock`. Instead of using `item.deleted` the client will
use the delete set to find out if an item was deleted or not.

It is not recommended to restore an old document state using snapshots,
although that would certainly be possible. Instead, the old state should be
computed by iterating through the newest state and using the additional
information from the state vector.
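
A rough sketch of the snapshot helpers exported above (`snapshot`,
`encodeSnapshot`, `decodeSnapshot`, `createDocFromSnapshot`); it assumes
garbage collection is disabled on the source document so that deleted content
is still available for reconstruction:

```js
import * as Y from 'yjs'

// gc: false keeps deleted content, so older states stay reconstructable.
const doc = new Y.Doc({ gc: false })
const ytext = doc.getText('t')
ytext.insert(0, 'hello')

// A snapshot is a state vector plus a delete set at this point in time.
const snap = Y.snapshot(doc)
const encoded = Y.encodeSnapshot(snap) // Uint8Array, e.g. for persistence

ytext.insert(5, ' world')

// Materialize the old state described by the snapshot into a fresh Doc.
const oldDoc = Y.createDocFromSnapshot(doc, Y.decodeSnapshot(encoded))
console.log(oldDoc.getText('t').toString()) // 'hello'
```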

LICENSE (4 changes)
@@ -1,7 +1,7 @@
The MIT License (MIT)

Copyright (c) 2014
  - Kevin Jahns <kevin.jahns@rwth-aachen.de>.
Copyright (c) 2023
  - Kevin Jahns <kevin.jahns@protonmail.com>.
  - Chair of Computer Science 5 (Databases & Information Systems), RWTH Aachen University, Germany

Permission is hereby granted, free of charge, to any person obtaining a copy

bower.json (removed, 21 lines)
@@ -1,21 +0,0 @@
{
  "name": "yjs",
  "version": "12.3.3",
  "homepage": "y-js.org",
  "authors": [
    "Kevin Jahns <kevin.jahns@rwth-aachen.de>"
  ],
  "description": "A Framework for shared editing on any data",
  "main": "./y.js",
  "keywords": [
    "Yjs",
    "OT",
    "Collaboration",
    "Synchronization",
    "ShareJS",
    "Coweb",
    "Concurrency"
  ],
  "license": "MIT",
  "ignore": []
}

funding.json (new file, 142 lines)
@@ -0,0 +1,142 @@
|
||||
{
|
||||
"version": "v1.0.0",
|
||||
"entity": {
|
||||
"type": "group",
|
||||
"role": "steward",
|
||||
"name": "Kevin Jahns",
|
||||
"email": "kevin.jahns@protonmail.com",
|
||||
"phone": "",
|
||||
"description": "OSS Developer",
|
||||
"webpageUrl": {
|
||||
"url": "https://github.com/yjs"
|
||||
}
|
||||
},
|
||||
"projects": [
|
||||
{
|
||||
"guid": "yjs",
|
||||
"name": "Yjs",
|
||||
"description": "A library for building collaborative applications. #p2p #local-first #CRDT Funding this project will also enable me to maintain the other Yjs-related technologies.",
|
||||
"webpageUrl": {
|
||||
"url": "https://github.com/yjs/yjs"
|
||||
},
|
||||
"repositoryUrl": {
|
||||
"url": "https://github.com/yjs/yjs"
|
||||
},
|
||||
"licenses": [
|
||||
"spdx:MIT"
|
||||
],
|
||||
"tags": [
|
||||
"collaboration",
|
||||
"p2p",
|
||||
"CRDT",
|
||||
"rich-text",
|
||||
"real-time"
|
||||
]
|
||||
},
|
||||
{
|
||||
"guid": "Titanic",
|
||||
"name": "Y/Titanic",
|
||||
"description": "A provider for syncing millions of docs efficiently with other peers. This will become the foundation for building real local-first apps with Yjs.",
|
||||
"webpageUrl": {
|
||||
"url": "https://github.com/yjs/titanic",
|
||||
"wellKnown": "https://github.com/yjs/titanic/blob/main/.well-known/funding-manifest-urls"
|
||||
},
|
||||
"repositoryUrl": {
|
||||
"url": "https://github.com/yjs/titanic",
|
||||
"wellKnown": "https://github.com/yjs/titanic/blob/main/.well-known/funding-manifest-urls"
|
||||
},
|
||||
"licenses": [
|
||||
"spdx:MIT"
|
||||
],
|
||||
"tags": [
|
||||
"privacy",
|
||||
"collaboration",
|
||||
"p2p",
|
||||
"CRDT",
|
||||
"rich-text",
|
||||
"real-time",
|
||||
"web-development"
|
||||
]
|
||||
}
|
||||
],
|
||||
"funding": {
|
||||
"channels": [
|
||||
{
|
||||
"guid": "github-sponsors",
|
||||
"type": "payment-provider",
|
||||
"address": "",
|
||||
"description": "For funding of the Yjs project"
|
||||
},
|
||||
{
|
||||
"guid": "y-collective",
|
||||
"type": "payment-provider",
|
||||
"address": "https://opencollective.com/y-collective",
|
||||
"description": "For funding the Y-CRDT - the Rust implementation of Yjs and other listed projects."
|
||||
}
|
||||
],
|
||||
"plans": [
|
||||
{
|
||||
"guid": "supporter",
|
||||
"status": "active",
|
||||
"name": "Supporter",
|
||||
"description": "",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": [
|
||||
"github-sponsors",
|
||||
"y-collective"
|
||||
]
|
||||
},
|
||||
{
|
||||
"guid": "titanic-funding",
|
||||
"status": "active",
|
||||
"name": "Titanic Funding",
|
||||
"description": "Fund the next generation of local-first providers.",
|
||||
"amount": 30000,
|
||||
"currency": "USD",
|
||||
"frequency": "one-time",
|
||||
"channels": [
|
||||
"github-sponsors"
|
||||
]
|
||||
},
|
||||
{
|
||||
"guid": "bronze-sponsor",
|
||||
"status": "active",
|
||||
"name": "Bronze Sponsor",
|
||||
"description": "This is the recommended plan for companies that use Yjs.",
|
||||
"amount": 500,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": [
|
||||
"github-sponsors"
|
||||
]
|
||||
},
|
||||
{
|
||||
"guid": "silver-sponsor",
|
||||
"status": "active",
|
||||
"name": "Silver Sponsor",
|
||||
"description": "This is the recommended plan for large/successfull companies that use Yjs.",
|
||||
"amount": 1000,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": [
|
||||
"github-sponsors"
|
||||
]
|
||||
},
|
||||
{
|
||||
"guid": "gold-sponsor",
|
||||
"status": "active",
|
||||
"name": "Gold Sponsor",
|
||||
"description": "This is the recommended plan for successful companies that build their entire product around Yjs-related technologies.",
|
||||
"amount": 3000,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": [
|
||||
"github-sponsors"
|
||||
]
|
||||
}
|
||||
],
|
||||
"history": null
|
||||
}
|
||||
}
|

package-lock.json (generated, new file, 4987 lines)
File diff suppressed because it is too large.

package.json (new file, 99 lines)
@@ -0,0 +1,99 @@
{
  "name": "yjs",
  "version": "13.6.24",
  "description": "Shared Editing Library",
  "main": "./dist/yjs.cjs",
  "module": "./dist/yjs.mjs",
  "types": "./dist/src/index.d.ts",
  "type": "module",
  "sideEffects": false,
  "funding": {
    "type": "GitHub Sponsors ❤",
    "url": "https://github.com/sponsors/dmonad"
  },
  "scripts": {
    "clean": "rm -rf dist docs",
    "test": "npm run dist && NODE_ENV=development node ./dist/tests.cjs --repetition-time 50",
    "test-extensive": "npm run lint && npm run dist && node ./dist/tests.cjs --production --repetition-time 10000",
    "dist": "npm run clean && rollup -c && tsc",
    "watch": "rollup -wc",
    "lint": "markdownlint README.md && standard && tsc",
    "docs": "rm -rf docs; jsdoc --configure ./.jsdoc.json --verbose --readme ./README.md --package ./package.json || true",
    "serve-docs": "npm run docs && http-server ./docs/",
    "preversion": "npm run lint && PRODUCTION=1 npm run dist && npm run docs && node ./dist/tests.cjs --repetition-time 1000 && test -e dist/src/index.d.ts && test -e dist/yjs.cjs && test -e dist/yjs.cjs",
    "debug": "concurrently 'http-server -o test.html' 'npm run watch'",
    "trace-deopt": "clear && rollup -c && node --trace-deopt dist/test.cjs",
    "trace-opt": "clear && rollup -c && node --trace-opt dist/test.cjs"
  },
  "exports": {
    ".": {
      "types": "./dist/src/index.d.ts",
      "module": "./dist/yjs.mjs",
      "import": "./dist/yjs.mjs",
      "require": "./dist/yjs.cjs"
    },
    "./src/index.js": "./src/index.js",
    "./tests/testHelper.js": "./tests/testHelper.js",
    "./testHelper": "./dist/testHelper.mjs",
    "./package.json": "./package.json"
  },
  "files": [
    "dist/yjs.*",
    "dist/src",
    "src",
    "tests/testHelper.js",
    "dist/testHelper.mjs",
    "sponsor-y.js"
  ],
  "dictionaries": {
    "test": "tests"
  },
  "standard": {
    "ignore": [
      "/dist",
      "/node_modules",
      "/docs"
    ]
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/yjs/yjs.git"
  },
  "keywords": [
    "Yjs",
    "CRDT",
    "offline",
    "offline-first",
    "shared-editing",
    "concurrency",
    "collaboration"
  ],
  "author": "Kevin Jahns",
  "email": "kevin.jahns@protonmail.com",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/yjs/yjs/issues"
  },
  "homepage": "https://docs.yjs.dev",
  "dependencies": {
    "lib0": "^0.2.99"
  },
  "devDependencies": {
    "@rollup/plugin-commonjs": "^24.0.1",
    "@rollup/plugin-node-resolve": "^15.0.1",
    "@types/node": "^18.15.5",
    "concurrently": "^3.6.1",
    "http-server": "^0.12.3",
    "jsdoc": "^3.6.7",
    "markdownlint-cli": "^0.41.0",
    "rollup": "^3.20.0",
    "standard": "^16.0.4",
    "tui-jsdoc-template": "^1.2.2",
    "typescript": "^4.9.5",
    "y-protocols": "^1.0.5"
  },
  "engines": {
    "npm": ">=8.0.0",
    "node": ">=16.0.0"
  }
}

rollup.config.js (new file, 106 lines)
@@ -0,0 +1,106 @@
|
||||
import nodeResolve from '@rollup/plugin-node-resolve'
|
||||
import commonjs from '@rollup/plugin-commonjs'
|
||||
|
||||
const localImports = process.env.LOCALIMPORTS
|
||||
|
||||
const customModules = new Set([
|
||||
'y-websocket',
|
||||
'y-codemirror',
|
||||
'y-ace',
|
||||
'y-textarea',
|
||||
'y-quill',
|
||||
'y-dom',
|
||||
'y-prosemirror'
|
||||
])
|
||||
/**
|
||||
* @type {Set<any>}
|
||||
*/
|
||||
const customLibModules = new Set([
|
||||
'lib0',
|
||||
'y-protocols'
|
||||
])
|
||||
const debugResolve = {
|
||||
resolveId (importee) {
|
||||
if (importee === 'yjs') {
|
||||
return `${process.cwd()}/src/index.js`
|
||||
}
|
||||
if (localImports) {
|
||||
if (customModules.has(importee.split('/')[0])) {
|
||||
return `${process.cwd()}/../${importee}/src/${importee}.js`
|
||||
}
|
||||
if (customLibModules.has(importee.split('/')[0])) {
|
||||
return `${process.cwd()}/../${importee}`
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export default [{
|
||||
input: './src/index.js',
|
||||
output: {
|
||||
name: 'Y',
|
||||
file: 'dist/yjs.cjs',
|
||||
format: 'cjs',
|
||||
sourcemap: true
|
||||
},
|
||||
external: id => /^lib0\//.test(id)
|
||||
}, {
|
||||
input: './src/index.js',
|
||||
output: {
|
||||
name: 'Y',
|
||||
file: 'dist/yjs.mjs',
|
||||
format: 'esm',
|
||||
sourcemap: true
|
||||
},
|
||||
external: id => /^lib0\//.test(id)
|
||||
}, {
|
||||
input: './tests/testHelper.js',
|
||||
output: {
|
||||
name: 'Y',
|
||||
file: 'dist/testHelper.mjs',
|
||||
format: 'esm',
|
||||
sourcemap: true
|
||||
},
|
||||
external: id => /^lib0\//.test(id) || id === 'yjs',
|
||||
plugins: [{
|
||||
resolveId (importee) {
|
||||
if (importee === '../src/index.js') {
|
||||
return 'yjs'
|
||||
}
|
||||
return null
|
||||
}
|
||||
}]
|
||||
}, {
|
||||
input: './tests/index.js',
|
||||
output: {
|
||||
name: 'test',
|
||||
file: 'dist/tests.js',
|
||||
format: 'iife',
|
||||
sourcemap: true
|
||||
},
|
||||
plugins: [
|
||||
debugResolve,
|
||||
nodeResolve({
|
||||
mainFields: ['browser', 'module', 'main']
|
||||
}),
|
||||
commonjs()
|
||||
]
|
||||
}, {
|
||||
input: './tests/index.js',
|
||||
output: {
|
||||
name: 'test',
|
||||
file: 'dist/tests.cjs',
|
||||
format: 'cjs',
|
||||
sourcemap: true
|
||||
},
|
||||
plugins: [
|
||||
debugResolve,
|
||||
nodeResolve({
|
||||
mainFields: ['node', 'module', 'main'],
|
||||
exportConditions: ['node', 'module', 'import', 'default']
|
||||
}),
|
||||
commonjs()
|
||||
],
|
||||
external: id => /^lib0\//.test(id)
|
||||
}]
|

src/index.js (new file, 135 lines)
@@ -0,0 +1,135 @@
/** eslint-env browser */

export {
  Doc,
  Transaction,
  YArray as Array,
  YMap as Map,
  YText as Text,
  YXmlText as XmlText,
  YXmlHook as XmlHook,
  YXmlElement as XmlElement,
  YXmlFragment as XmlFragment,
  YXmlEvent,
  YMapEvent,
  YArrayEvent,
  YTextEvent,
  YEvent,
  Item,
  AbstractStruct,
  GC,
  Skip,
  ContentBinary,
  ContentDeleted,
  ContentDoc,
  ContentEmbed,
  ContentFormat,
  ContentJSON,
  ContentAny,
  ContentString,
  ContentType,
  AbstractType,
  getTypeChildren,
  createRelativePositionFromTypeIndex,
  createRelativePositionFromJSON,
  createAbsolutePositionFromRelativePosition,
  compareRelativePositions,
  AbsolutePosition,
  RelativePosition,
  ID,
  createID,
  compareIDs,
  getState,
  Snapshot,
  createSnapshot,
  createDeleteSet,
  createDeleteSetFromStructStore,
  cleanupYTextFormatting,
  snapshot,
  emptySnapshot,
  findRootTypeKey,
  findIndexSS,
  getItem,
  getItemCleanStart,
  getItemCleanEnd,
  typeListToArraySnapshot,
  typeMapGetSnapshot,
  typeMapGetAllSnapshot,
  createDocFromSnapshot,
  iterateDeletedStructs,
  applyUpdate,
  applyUpdateV2,
  readUpdate,
  readUpdateV2,
  encodeStateAsUpdate,
  encodeStateAsUpdateV2,
  encodeStateVector,
  UndoManager,
  decodeSnapshot,
  encodeSnapshot,
  decodeSnapshotV2,
  encodeSnapshotV2,
  decodeStateVector,
  logUpdate,
  logUpdateV2,
  decodeUpdate,
  decodeUpdateV2,
  relativePositionToJSON,
  isDeleted,
  isParentOf,
  equalSnapshots,
  PermanentUserData, // @TODO experimental
  tryGc,
  transact,
  AbstractConnector,
  logType,
  mergeUpdates,
  mergeUpdatesV2,
  parseUpdateMeta,
  parseUpdateMetaV2,
  encodeStateVectorFromUpdate,
  encodeStateVectorFromUpdateV2,
  encodeRelativePosition,
  decodeRelativePosition,
  diffUpdate,
  diffUpdateV2,
  convertUpdateFormatV1ToV2,
  convertUpdateFormatV2ToV1,
  obfuscateUpdate,
  obfuscateUpdateV2,
  UpdateEncoderV1,
  UpdateEncoderV2,
  UpdateDecoderV1,
  UpdateDecoderV2,
  equalDeleteSets,
  mergeDeleteSets,
  snapshotContainsUpdate
} from './internals.js'

const glo = /** @type {any} */ (typeof globalThis !== 'undefined'
  ? globalThis
  : typeof window !== 'undefined'
    ? window
    // @ts-ignore
    : typeof global !== 'undefined' ? global : {})

const importIdentifier = '__ $YJS$ __'

if (glo[importIdentifier] === true) {
  /**
   * Dear reader of this message. Please take this seriously.
   *
   * If you see this message, make sure that you only import one version of Yjs. In many cases,
   * your package manager installs two versions of Yjs that are used by different packages within your project.
   * Another reason for this message is that some parts of your project use the commonjs version of Yjs
   * and others use the EcmaScript version of Yjs.
   *
   * This often leads to issues that are hard to debug. We often need to perform constructor checks,
   * e.g. `struct instanceof GC`. If you imported different versions of Yjs, it is impossible for us to
   * do the constructor checks anymore - which might break the CRDT algorithm.
   *
   * https://github.com/yjs/yjs/issues/438
   */
  console.error('Yjs was already imported. This breaks constructor checks and will lead to issues! - https://github.com/yjs/yjs/issues/438')
}
glo[importIdentifier] = true
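The guard above matters because two bundled copies of the library produce distinct class objects, so constructor checks such as `struct instanceof GC` stop working across copies. A self-contained sketch of that failure mode, using hypothetical class names rather than Yjs internals:

// Two structurally identical classes are still different constructors, so instanceof
// fails across them - the same thing happens when a bundler pulls in two copies of Yjs
// and one copy's structs are checked against the other copy's classes.
class GCCopyA {}
class GCCopyB {}
const struct = new GCCopyA()
console.log(struct instanceof GCCopyA) // true
console.log(struct instanceof GCCopyB) // false, even though the classes look identical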
42
src/internals.js
Normal file
@@ -0,0 +1,42 @@
export * from './utils/AbstractConnector.js'
export * from './utils/DeleteSet.js'
export * from './utils/Doc.js'
export * from './utils/UpdateDecoder.js'
export * from './utils/UpdateEncoder.js'
export * from './utils/encoding.js'
export * from './utils/EventHandler.js'
export * from './utils/ID.js'
export * from './utils/isParentOf.js'
export * from './utils/logging.js'
export * from './utils/PermanentUserData.js'
export * from './utils/RelativePosition.js'
export * from './utils/Snapshot.js'
export * from './utils/StructStore.js'
export * from './utils/Transaction.js'
export * from './utils/UndoManager.js'
export * from './utils/updates.js'
export * from './utils/YEvent.js'

export * from './types/AbstractType.js'
export * from './types/YArray.js'
export * from './types/YMap.js'
export * from './types/YText.js'
export * from './types/YXmlFragment.js'
export * from './types/YXmlElement.js'
export * from './types/YXmlEvent.js'
export * from './types/YXmlHook.js'
export * from './types/YXmlText.js'

export * from './structs/AbstractStruct.js'
export * from './structs/GC.js'
export * from './structs/ContentBinary.js'
export * from './structs/ContentDeleted.js'
export * from './structs/ContentDoc.js'
export * from './structs/ContentEmbed.js'
export * from './structs/ContentFormat.js'
export * from './structs/ContentJSON.js'
export * from './structs/ContentAny.js'
export * from './structs/ContentString.js'
export * from './structs/ContentType.js'
export * from './structs/Item.js'
export * from './structs/Skip.js'
51
src/structs/AbstractStruct.js
Normal file
@@ -0,0 +1,51 @@
import {
  UpdateEncoderV1, UpdateEncoderV2, ID, Transaction // eslint-disable-line
} from '../internals.js'

import * as error from 'lib0/error'

export class AbstractStruct {
  /**
   * @param {ID} id
   * @param {number} length
   */
  constructor (id, length) {
    this.id = id
    this.length = length
  }

  /**
   * @type {boolean}
   */
  get deleted () {
    throw error.methodUnimplemented()
  }

  /**
   * Merge this struct with the item to the right.
   * This method already assumes that `this.id.clock + this.length === right.id.clock`.
   * Note that this method does *not* remove right from StructStore!
   * @param {AbstractStruct} right
   * @return {boolean} whether this merged with right
   */
  mergeWith (right) {
    return false
  }

  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
   * @param {number} offset
   * @param {number} encodingRef
   */
  write (encoder, offset, encodingRef) {
    throw error.methodUnimplemented()
  }

  /**
   * @param {Transaction} transaction
   * @param {number} offset
   */
  integrate (transaction, offset) {
    throw error.methodUnimplemented()
  }
}
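To make the merge precondition above concrete, here is a minimal sketch (plain objects standing in for structs; not repository code) of the adjacency check that callers of mergeWith are expected to have verified before merging two runs from the same client:

// The left run must end exactly where the right run begins on the same client's clock.
const left = { client: 1, clock: 10, length: 3 }
const right = { client: 1, clock: 13, length: 2 }
const adjacent = left.client === right.client && left.clock + left.length === right.clock
console.log(adjacent) // true -> the merged run would cover clocks 10..14 with length 5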
114
src/structs/ContentAny.js
Normal file
@@ -0,0 +1,114 @@
|
||||
import {
|
||||
UpdateEncoderV1, UpdateEncoderV2, UpdateDecoderV1, UpdateDecoderV2, Transaction, Item, StructStore // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as env from 'lib0/environment'
|
||||
import * as object from 'lib0/object'
|
||||
|
||||
const isDevMode = env.getVariable('node_env') === 'development'
|
||||
|
||||
export class ContentAny {
|
||||
/**
|
||||
* @param {Array<any>} arr
|
||||
*/
|
||||
constructor (arr) {
|
||||
/**
|
||||
* @type {Array<any>}
|
||||
*/
|
||||
this.arr = arr
|
||||
isDevMode && object.deepFreeze(arr)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
return this.arr.length
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
return this.arr
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ContentAny}
|
||||
*/
|
||||
copy () {
|
||||
return new ContentAny(this.arr)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} offset
|
||||
* @return {ContentAny}
|
||||
*/
|
||||
splice (offset) {
|
||||
const right = new ContentAny(this.arr.slice(offset))
|
||||
this.arr = this.arr.slice(0, offset)
|
||||
return right
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ContentAny} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
this.arr = this.arr.concat(right.arr)
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Item} item
|
||||
*/
|
||||
integrate (transaction, item) {}
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {}
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
gc (store) {}
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
const len = this.arr.length
|
||||
encoder.writeLen(len - offset)
|
||||
for (let i = offset; i < len; i++) {
|
||||
const c = this.arr[i]
|
||||
encoder.writeAny(c)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
return 8
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {ContentAny}
|
||||
*/
|
||||
export const readContentAny = decoder => {
|
||||
const len = decoder.readLen()
|
||||
const cs = []
|
||||
for (let i = 0; i < len; i++) {
|
||||
cs.push(decoder.readAny())
|
||||
}
|
||||
return new ContentAny(cs)
|
||||
}
|
92
src/structs/ContentBinary.js
Normal file
@@ -0,0 +1,92 @@
|
||||
import {
|
||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
export class ContentBinary {
|
||||
/**
|
||||
* @param {Uint8Array} content
|
||||
*/
|
||||
constructor (content) {
|
||||
this.content = content
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
return 1
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
return [this.content]
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ContentBinary}
|
||||
*/
|
||||
copy () {
|
||||
return new ContentBinary(this.content)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} offset
|
||||
* @return {ContentBinary}
|
||||
*/
|
||||
splice (offset) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ContentBinary} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Item} item
|
||||
*/
|
||||
integrate (transaction, item) {}
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {}
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
gc (store) {}
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
encoder.writeBuf(this.content)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
return 3
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder
|
||||
* @return {ContentBinary}
|
||||
*/
|
||||
export const readContentBinary = decoder => new ContentBinary(decoder.readBuf())
|
100
src/structs/ContentDeleted.js
Normal file
@@ -0,0 +1,100 @@
|
||||
import {
|
||||
addToDeleteSet,
|
||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
export class ContentDeleted {
|
||||
/**
|
||||
* @param {number} len
|
||||
*/
|
||||
constructor (len) {
|
||||
this.len = len
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
return this.len
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ContentDeleted}
|
||||
*/
|
||||
copy () {
|
||||
return new ContentDeleted(this.len)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} offset
|
||||
* @return {ContentDeleted}
|
||||
*/
|
||||
splice (offset) {
|
||||
const right = new ContentDeleted(this.len - offset)
|
||||
this.len = offset
|
||||
return right
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ContentDeleted} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
this.len += right.len
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Item} item
|
||||
*/
|
||||
integrate (transaction, item) {
|
||||
addToDeleteSet(transaction.deleteSet, item.id.client, item.id.clock, this.len)
|
||||
item.markDeleted()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {}
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
gc (store) {}
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
encoder.writeLen(this.len - offset)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
return 1
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder
|
||||
* @return {ContentDeleted}
|
||||
*/
|
||||
export const readContentDeleted = decoder => new ContentDeleted(decoder.readLen())
|
140
src/structs/ContentDoc.js
Normal file
@@ -0,0 +1,140 @@
|
||||
import {
|
||||
Doc, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
/**
|
||||
* @param {string} guid
|
||||
* @param {Object<string, any>} opts
|
||||
*/
|
||||
const createDocFromOpts = (guid, opts) => new Doc({ guid, ...opts, shouldLoad: opts.shouldLoad || opts.autoLoad || false })
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
export class ContentDoc {
|
||||
/**
|
||||
* @param {Doc} doc
|
||||
*/
|
||||
constructor (doc) {
|
||||
if (doc._item) {
|
||||
console.error('This document was already integrated as a sub-document. You should create a second instance instead with the same guid.')
|
||||
}
|
||||
/**
|
||||
* @type {Doc}
|
||||
*/
|
||||
this.doc = doc
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
const opts = {}
|
||||
this.opts = opts
|
||||
if (!doc.gc) {
|
||||
opts.gc = false
|
||||
}
|
||||
if (doc.autoLoad) {
|
||||
opts.autoLoad = true
|
||||
}
|
||||
if (doc.meta !== null) {
|
||||
opts.meta = doc.meta
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
return 1
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
return [this.doc]
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ContentDoc}
|
||||
*/
|
||||
copy () {
|
||||
return new ContentDoc(createDocFromOpts(this.doc.guid, this.opts))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} offset
|
||||
* @return {ContentDoc}
|
||||
*/
|
||||
splice (offset) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ContentDoc} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Item} item
|
||||
*/
|
||||
integrate (transaction, item) {
|
||||
// this needs to be reflected in doc.destroy as well
|
||||
this.doc._item = item
|
||||
transaction.subdocsAdded.add(this.doc)
|
||||
if (this.doc.shouldLoad) {
|
||||
transaction.subdocsLoaded.add(this.doc)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {
|
||||
if (transaction.subdocsAdded.has(this.doc)) {
|
||||
transaction.subdocsAdded.delete(this.doc)
|
||||
} else {
|
||||
transaction.subdocsRemoved.add(this.doc)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
gc (store) { }
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
encoder.writeString(this.doc.guid)
|
||||
encoder.writeAny(this.opts)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
return 9
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {ContentDoc}
|
||||
*/
|
||||
export const readContentDoc = decoder => new ContentDoc(createDocFromOpts(decoder.readString(), decoder.readAny()))
|
97
src/structs/ContentEmbed.js
Normal file
@@ -0,0 +1,97 @@
|
||||
import {
|
||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
export class ContentEmbed {
|
||||
/**
|
||||
* @param {Object} embed
|
||||
*/
|
||||
constructor (embed) {
|
||||
this.embed = embed
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
return 1
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
return [this.embed]
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ContentEmbed}
|
||||
*/
|
||||
copy () {
|
||||
return new ContentEmbed(this.embed)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} offset
|
||||
* @return {ContentEmbed}
|
||||
*/
|
||||
splice (offset) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ContentEmbed} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Item} item
|
||||
*/
|
||||
integrate (transaction, item) {}
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {}
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
gc (store) {}
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
encoder.writeJSON(this.embed)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
return 5
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {ContentEmbed}
|
||||
*/
|
||||
export const readContentEmbed = decoder => new ContentEmbed(decoder.readJSON())
|
104
src/structs/ContentFormat.js
Normal file
@@ -0,0 +1,104 @@
|
||||
import {
|
||||
YText, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Item, StructStore, Transaction // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
export class ContentFormat {
|
||||
/**
|
||||
* @param {string} key
|
||||
* @param {Object} value
|
||||
*/
|
||||
constructor (key, value) {
|
||||
this.key = key
|
||||
this.value = value
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
return 1
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ContentFormat}
|
||||
*/
|
||||
copy () {
|
||||
return new ContentFormat(this.key, this.value)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} _offset
|
||||
* @return {ContentFormat}
|
||||
*/
|
||||
splice (_offset) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ContentFormat} _right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (_right) {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} _transaction
|
||||
* @param {Item} item
|
||||
*/
|
||||
integrate (_transaction, item) {
|
||||
// @todo searchmarker are currently unsupported for rich text documents
|
||||
const p = /** @type {YText} */ (item.parent)
|
||||
p._searchMarker = null
|
||||
p._hasFormatting = true
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {}
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
gc (store) {}
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
encoder.writeKey(this.key)
|
||||
encoder.writeJSON(this.value)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
return 6
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {ContentFormat}
|
||||
*/
|
||||
export const readContentFormat = decoder => new ContentFormat(decoder.readKey(), decoder.readJSON())
|
118
src/structs/ContentJSON.js
Normal file
@@ -0,0 +1,118 @@
|
||||
import {
|
||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
export class ContentJSON {
|
||||
/**
|
||||
* @param {Array<any>} arr
|
||||
*/
|
||||
constructor (arr) {
|
||||
/**
|
||||
* @type {Array<any>}
|
||||
*/
|
||||
this.arr = arr
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
return this.arr.length
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
return this.arr
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ContentJSON}
|
||||
*/
|
||||
copy () {
|
||||
return new ContentJSON(this.arr)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} offset
|
||||
* @return {ContentJSON}
|
||||
*/
|
||||
splice (offset) {
|
||||
const right = new ContentJSON(this.arr.slice(offset))
|
||||
this.arr = this.arr.slice(0, offset)
|
||||
return right
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ContentJSON} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
this.arr = this.arr.concat(right.arr)
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Item} item
|
||||
*/
|
||||
integrate (transaction, item) {}
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {}
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
gc (store) {}
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
const len = this.arr.length
|
||||
encoder.writeLen(len - offset)
|
||||
for (let i = offset; i < len; i++) {
|
||||
const c = this.arr[i]
|
||||
encoder.writeString(c === undefined ? 'undefined' : JSON.stringify(c))
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
return 2
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {ContentJSON}
|
||||
*/
|
||||
export const readContentJSON = decoder => {
|
||||
const len = decoder.readLen()
|
||||
const cs = []
|
||||
for (let i = 0; i < len; i++) {
|
||||
const c = decoder.readString()
|
||||
if (c === 'undefined') {
|
||||
cs.push(undefined)
|
||||
} else {
|
||||
cs.push(JSON.parse(c))
|
||||
}
|
||||
}
|
||||
return new ContentJSON(cs)
|
||||
}
|
112
src/structs/ContentString.js
Normal file
@@ -0,0 +1,112 @@
import {
  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line
} from '../internals.js'

/**
 * @private
 */
export class ContentString {
  /**
   * @param {string} str
   */
  constructor (str) {
    /**
     * @type {string}
     */
    this.str = str
  }

  /**
   * @return {number}
   */
  getLength () {
    return this.str.length
  }

  /**
   * @return {Array<any>}
   */
  getContent () {
    return this.str.split('')
  }

  /**
   * @return {boolean}
   */
  isCountable () {
    return true
  }

  /**
   * @return {ContentString}
   */
  copy () {
    return new ContentString(this.str)
  }

  /**
   * @param {number} offset
   * @return {ContentString}
   */
  splice (offset) {
    const right = new ContentString(this.str.slice(offset))
    this.str = this.str.slice(0, offset)

    // Prevent encoding invalid documents because of splitting of surrogate pairs: https://github.com/yjs/yjs/issues/248
    const firstCharCode = this.str.charCodeAt(offset - 1)
    if (firstCharCode >= 0xD800 && firstCharCode <= 0xDBFF) {
      // Last character of the left split is the start of a surrogate utf16/ucs2 pair.
      // We don't support splitting of surrogate pairs because this may lead to invalid documents.
      // Replace the invalid character with a unicode replacement character (� / U+FFFD)
      this.str = this.str.slice(0, offset - 1) + '�'
      // replace right as well
      right.str = '�' + right.str.slice(1)
    }
    return right
  }

  /**
   * @param {ContentString} right
   * @return {boolean}
   */
  mergeWith (right) {
    this.str += right.str
    return true
  }

  /**
   * @param {Transaction} transaction
   * @param {Item} item
   */
  integrate (transaction, item) {}
  /**
   * @param {Transaction} transaction
   */
  delete (transaction) {}
  /**
   * @param {StructStore} store
   */
  gc (store) {}
  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeString(offset === 0 ? this.str : this.str.slice(offset))
  }

  /**
   * @return {number}
   */
  getRef () {
    return 4
  }
}

/**
 * @private
 *
 * @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
 * @return {ContentString}
 */
export const readContentString = decoder => new ContentString(decoder.readString())
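The surrogate-pair handling in splice above can be reproduced with plain strings. A small sketch (not repository code) of what a naive split of an emoji does, and why the replacement character is used instead:

// An emoji occupies two UTF-16 code units, so a naive slice in the middle leaves a
// lone surrogate on each side - an invalid string that cannot be round-tripped
// through most encoders.
const s = 'a😀b' // '😀' is U+1F600, stored as the surrogate pair \uD83D\uDE00
console.log(s.length) // 4 code units, not 3 characters
const left = s.slice(0, 2) // 'a\uD83D' - ends in a lone high surrogate
const right = s.slice(2) // '\uDE00b' - starts with a lone low surrogate
console.log(left.charCodeAt(1).toString(16)) // 'd83d'
console.log([...left].length, [...right].length) // 2 2 - each lone surrogate still counts as a "character"
// The splice above detects this case and replaces the broken code unit on each side with U+FFFD.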
171
src/structs/ContentType.js
Normal file
@@ -0,0 +1,171 @@
|
||||
import {
|
||||
readYArray,
|
||||
readYMap,
|
||||
readYText,
|
||||
readYXmlElement,
|
||||
readYXmlFragment,
|
||||
readYXmlHook,
|
||||
readYXmlText,
|
||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
/**
|
||||
* @type {Array<function(UpdateDecoderV1 | UpdateDecoderV2):AbstractType<any>>}
|
||||
* @private
|
||||
*/
|
||||
export const typeRefs = [
|
||||
readYArray,
|
||||
readYMap,
|
||||
readYText,
|
||||
readYXmlElement,
|
||||
readYXmlFragment,
|
||||
readYXmlHook,
|
||||
readYXmlText
|
||||
]
|
||||
|
||||
export const YArrayRefID = 0
|
||||
export const YMapRefID = 1
|
||||
export const YTextRefID = 2
|
||||
export const YXmlElementRefID = 3
|
||||
export const YXmlFragmentRefID = 4
|
||||
export const YXmlHookRefID = 5
|
||||
export const YXmlTextRefID = 6
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
export class ContentType {
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
*/
|
||||
constructor (type) {
|
||||
/**
|
||||
* @type {AbstractType<any>}
|
||||
*/
|
||||
this.type = type
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
return 1
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
return [this.type]
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ContentType}
|
||||
*/
|
||||
copy () {
|
||||
return new ContentType(this.type._copy())
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} offset
|
||||
* @return {ContentType}
|
||||
*/
|
||||
splice (offset) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ContentType} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Item} item
|
||||
*/
|
||||
integrate (transaction, item) {
|
||||
this.type._integrate(transaction.doc, item)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {
|
||||
let item = this.type._start
|
||||
while (item !== null) {
|
||||
if (!item.deleted) {
|
||||
item.delete(transaction)
|
||||
} else if (item.id.clock < (transaction.beforeState.get(item.id.client) || 0)) {
|
||||
// This will be gc'd later and we want to merge it if possible
|
||||
// We try to merge all deleted items after each transaction,
|
||||
// but we have no knowledge about that this needs to be merged
|
||||
// since it is not in transaction.ds. Hence we add it to transaction._mergeStructs
|
||||
transaction._mergeStructs.push(item)
|
||||
}
|
||||
item = item.right
|
||||
}
|
||||
this.type._map.forEach(item => {
|
||||
if (!item.deleted) {
|
||||
item.delete(transaction)
|
||||
} else if (item.id.clock < (transaction.beforeState.get(item.id.client) || 0)) {
|
||||
// same as above
|
||||
transaction._mergeStructs.push(item)
|
||||
}
|
||||
})
|
||||
transaction.changed.delete(this.type)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
gc (store) {
|
||||
let item = this.type._start
|
||||
while (item !== null) {
|
||||
item.gc(store, true)
|
||||
item = item.right
|
||||
}
|
||||
this.type._start = null
|
||||
this.type._map.forEach(/** @param {Item | null} item */ (item) => {
|
||||
while (item !== null) {
|
||||
item.gc(store, true)
|
||||
item = item.left
|
||||
}
|
||||
})
|
||||
this.type._map = new Map()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
this.type._write(encoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
return 7
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @private
|
||||
*
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {ContentType}
|
||||
*/
|
||||
export const readContentType = decoder => new ContentType(typeRefs[decoder.readTypeRef()](decoder))
|
60
src/structs/GC.js
Normal file
@@ -0,0 +1,60 @@
import {
  AbstractStruct,
  addStruct,
  UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line
} from '../internals.js'

export const structGCRefNumber = 0

/**
 * @private
 */
export class GC extends AbstractStruct {
  get deleted () {
    return true
  }

  delete () {}

  /**
   * @param {GC} right
   * @return {boolean}
   */
  mergeWith (right) {
    if (this.constructor !== right.constructor) {
      return false
    }
    this.length += right.length
    return true
  }

  /**
   * @param {Transaction} transaction
   * @param {number} offset
   */
  integrate (transaction, offset) {
    if (offset > 0) {
      this.id.clock += offset
      this.length -= offset
    }
    addStruct(transaction.doc.store, this)
  }

  /**
   * @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
   * @param {number} offset
   */
  write (encoder, offset) {
    encoder.writeInfo(structGCRefNumber)
    encoder.writeLen(this.length - offset)
  }

  /**
   * @param {Transaction} transaction
   * @param {StructStore} store
   * @return {null | number}
   */
  getMissing (transaction, store) {
    return null
  }
}
812
src/structs/Item.js
Normal file
@@ -0,0 +1,812 @@
|
||||
import {
|
||||
GC,
|
||||
getState,
|
||||
AbstractStruct,
|
||||
replaceStruct,
|
||||
addStruct,
|
||||
addToDeleteSet,
|
||||
findRootTypeKey,
|
||||
compareIDs,
|
||||
getItem,
|
||||
getItemCleanEnd,
|
||||
getItemCleanStart,
|
||||
readContentDeleted,
|
||||
readContentBinary,
|
||||
readContentJSON,
|
||||
readContentAny,
|
||||
readContentString,
|
||||
readContentEmbed,
|
||||
readContentDoc,
|
||||
createID,
|
||||
readContentFormat,
|
||||
readContentType,
|
||||
addChangedTypeToTransaction,
|
||||
isDeleted,
|
||||
StackItem, DeleteSet, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as error from 'lib0/error'
|
||||
import * as binary from 'lib0/binary'
|
||||
import * as array from 'lib0/array'
|
||||
|
||||
/**
|
||||
* @todo This should return several items
|
||||
*
|
||||
* @param {StructStore} store
|
||||
* @param {ID} id
|
||||
* @return {{item:Item, diff:number}}
|
||||
*/
|
||||
export const followRedone = (store, id) => {
|
||||
/**
|
||||
* @type {ID|null}
|
||||
*/
|
||||
let nextID = id
|
||||
let diff = 0
|
||||
let item
|
||||
do {
|
||||
if (diff > 0) {
|
||||
nextID = createID(nextID.client, nextID.clock + diff)
|
||||
}
|
||||
item = getItem(store, nextID)
|
||||
diff = nextID.clock - item.id.clock
|
||||
nextID = item.redone
|
||||
} while (nextID !== null && item instanceof Item)
|
||||
return {
|
||||
item, diff
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Make sure that neither item nor any of its parents is ever deleted.
|
||||
*
|
||||
* This property does not persist when storing it into a database or when
|
||||
* sending it to other peers
|
||||
*
|
||||
* @param {Item|null} item
|
||||
* @param {boolean} keep
|
||||
*/
|
||||
export const keepItem = (item, keep) => {
|
||||
while (item !== null && item.keep !== keep) {
|
||||
item.keep = keep
|
||||
item = /** @type {AbstractType<any>} */ (item.parent)._item
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Split leftItem into two items
|
||||
* @param {Transaction} transaction
|
||||
* @param {Item} leftItem
|
||||
* @param {number} diff
|
||||
* @return {Item}
|
||||
*
|
||||
* @function
|
||||
* @private
|
||||
*/
|
||||
export const splitItem = (transaction, leftItem, diff) => {
|
||||
// create rightItem
|
||||
const { client, clock } = leftItem.id
|
||||
const rightItem = new Item(
|
||||
createID(client, clock + diff),
|
||||
leftItem,
|
||||
createID(client, clock + diff - 1),
|
||||
leftItem.right,
|
||||
leftItem.rightOrigin,
|
||||
leftItem.parent,
|
||||
leftItem.parentSub,
|
||||
leftItem.content.splice(diff)
|
||||
)
|
||||
if (leftItem.deleted) {
|
||||
rightItem.markDeleted()
|
||||
}
|
||||
if (leftItem.keep) {
|
||||
rightItem.keep = true
|
||||
}
|
||||
if (leftItem.redone !== null) {
|
||||
rightItem.redone = createID(leftItem.redone.client, leftItem.redone.clock + diff)
|
||||
}
|
||||
// update left (do not set leftItem.rightOrigin as it will lead to problems when syncing)
|
||||
leftItem.right = rightItem
|
||||
// update right
|
||||
if (rightItem.right !== null) {
|
||||
rightItem.right.left = rightItem
|
||||
}
|
||||
// right is more specific.
|
||||
transaction._mergeStructs.push(rightItem)
|
||||
// update parent._map
|
||||
if (rightItem.parentSub !== null && rightItem.right === null) {
|
||||
/** @type {AbstractType<any>} */ (rightItem.parent)._map.set(rightItem.parentSub, rightItem)
|
||||
}
|
||||
leftItem.length = diff
|
||||
return rightItem
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Array<StackItem>} stack
|
||||
* @param {ID} id
|
||||
*/
|
||||
const isDeletedByUndoStack = (stack, id) => array.some(stack, /** @param {StackItem} s */ s => isDeleted(s.deletions, id))
|
||||
|
||||
/**
|
||||
* Redoes the effect of this operation.
|
||||
*
|
||||
* @param {Transaction} transaction The Yjs instance.
|
||||
* @param {Item} item
|
||||
* @param {Set<Item>} redoitems
|
||||
* @param {DeleteSet} itemsToDelete
|
||||
* @param {boolean} ignoreRemoteMapChanges
|
||||
* @param {import('../utils/UndoManager.js').UndoManager} um
|
||||
*
|
||||
* @return {Item|null}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
export const redoItem = (transaction, item, redoitems, itemsToDelete, ignoreRemoteMapChanges, um) => {
|
||||
const doc = transaction.doc
|
||||
const store = doc.store
|
||||
const ownClientID = doc.clientID
|
||||
const redone = item.redone
|
||||
if (redone !== null) {
|
||||
return getItemCleanStart(transaction, redone)
|
||||
}
|
||||
let parentItem = /** @type {AbstractType<any>} */ (item.parent)._item
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let left = null
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let right
|
||||
// make sure that parent is redone
|
||||
if (parentItem !== null && parentItem.deleted === true) {
|
||||
// try to undo parent if it will be undone anyway
|
||||
if (parentItem.redone === null && (!redoitems.has(parentItem) || redoItem(transaction, parentItem, redoitems, itemsToDelete, ignoreRemoteMapChanges, um) === null)) {
|
||||
return null
|
||||
}
|
||||
while (parentItem.redone !== null) {
|
||||
parentItem = getItemCleanStart(transaction, parentItem.redone)
|
||||
}
|
||||
}
|
||||
const parentType = parentItem === null ? /** @type {AbstractType<any>} */ (item.parent) : /** @type {ContentType} */ (parentItem.content).type
|
||||
|
||||
if (item.parentSub === null) {
|
||||
// Is an array item. Insert at the old position
|
||||
left = item.left
|
||||
right = item
|
||||
// find next cloned_redo items
|
||||
while (left !== null) {
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let leftTrace = left
|
||||
// trace redone until parent matches
|
||||
while (leftTrace !== null && /** @type {AbstractType<any>} */ (leftTrace.parent)._item !== parentItem) {
|
||||
leftTrace = leftTrace.redone === null ? null : getItemCleanStart(transaction, leftTrace.redone)
|
||||
}
|
||||
if (leftTrace !== null && /** @type {AbstractType<any>} */ (leftTrace.parent)._item === parentItem) {
|
||||
left = leftTrace
|
||||
break
|
||||
}
|
||||
left = left.left
|
||||
}
|
||||
while (right !== null) {
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let rightTrace = right
|
||||
// trace redone until parent matches
|
||||
while (rightTrace !== null && /** @type {AbstractType<any>} */ (rightTrace.parent)._item !== parentItem) {
|
||||
rightTrace = rightTrace.redone === null ? null : getItemCleanStart(transaction, rightTrace.redone)
|
||||
}
|
||||
if (rightTrace !== null && /** @type {AbstractType<any>} */ (rightTrace.parent)._item === parentItem) {
|
||||
right = rightTrace
|
||||
break
|
||||
}
|
||||
right = right.right
|
||||
}
|
||||
} else {
|
||||
right = null
|
||||
if (item.right && !ignoreRemoteMapChanges) {
|
||||
left = item
|
||||
// Iterate right while right is in itemsToDelete
|
||||
// If it is intended to delete right while item is redone, we can expect that item should replace right.
|
||||
while (left !== null && left.right !== null && (left.right.redone || isDeleted(itemsToDelete, left.right.id) || isDeletedByUndoStack(um.undoStack, left.right.id) || isDeletedByUndoStack(um.redoStack, left.right.id))) {
|
||||
left = left.right
|
||||
// follow redone
|
||||
while (left.redone) left = getItemCleanStart(transaction, left.redone)
|
||||
}
|
||||
if (left && left.right !== null) {
|
||||
// It is not possible to redo this item because it conflicts with a
|
||||
// change from another client
|
||||
return null
|
||||
}
|
||||
} else {
|
||||
left = parentType._map.get(item.parentSub) || null
|
||||
}
|
||||
}
|
||||
const nextClock = getState(store, ownClientID)
|
||||
const nextId = createID(ownClientID, nextClock)
|
||||
const redoneItem = new Item(
|
||||
nextId,
|
||||
left, left && left.lastId,
|
||||
right, right && right.id,
|
||||
parentType,
|
||||
item.parentSub,
|
||||
item.content.copy()
|
||||
)
|
||||
item.redone = nextId
|
||||
keepItem(redoneItem, true)
|
||||
redoneItem.integrate(transaction, 0)
|
||||
return redoneItem
|
||||
}
|
||||
|
||||
/**
|
||||
* Abstract class that represents any content.
|
||||
*/
|
||||
export class Item extends AbstractStruct {
|
||||
/**
|
||||
* @param {ID} id
|
||||
* @param {Item | null} left
|
||||
* @param {ID | null} origin
|
||||
* @param {Item | null} right
|
||||
* @param {ID | null} rightOrigin
|
||||
* @param {AbstractType<any>|ID|null} parent Is a type if integrated, is null if it is possible to copy parent from left or right, is ID before integration to search for it.
|
||||
* @param {string | null} parentSub
|
||||
* @param {AbstractContent} content
|
||||
*/
|
||||
constructor (id, left, origin, right, rightOrigin, parent, parentSub, content) {
|
||||
super(id, content.getLength())
|
||||
/**
|
||||
* The item that was originally to the left of this item.
|
||||
* @type {ID | null}
|
||||
*/
|
||||
this.origin = origin
|
||||
/**
|
||||
* The item that is currently to the left of this item.
|
||||
* @type {Item | null}
|
||||
*/
|
||||
this.left = left
|
||||
/**
|
||||
* The item that is currently to the right of this item.
|
||||
* @type {Item | null}
|
||||
*/
|
||||
this.right = right
|
||||
/**
|
||||
* The item that was originally to the right of this item.
|
||||
* @type {ID | null}
|
||||
*/
|
||||
this.rightOrigin = rightOrigin
|
||||
/**
|
||||
* @type {AbstractType<any>|ID|null}
|
||||
*/
|
||||
this.parent = parent
|
||||
/**
|
||||
* If the parent refers to this item with some kind of key (e.g. YMap, the
|
||||
* key is specified here. The key is then used to refer to the list in which
|
||||
* to insert this item. If `parentSub = null` type._start is the list in
|
||||
* which to insert to. Otherwise it is `parent._map`.
|
||||
* @type {String | null}
|
||||
*/
|
||||
this.parentSub = parentSub
|
||||
/**
|
||||
* If this type's effect is redone this type refers to the type that undid
|
||||
* this operation.
|
||||
* @type {ID | null}
|
||||
*/
|
||||
this.redone = null
|
||||
/**
|
||||
* @type {AbstractContent}
|
||||
*/
|
||||
this.content = content
|
||||
/**
|
||||
* bit1: keep
|
||||
* bit2: countable
|
||||
* bit3: deleted
|
||||
* bit4: mark - mark node as fast-search-marker
|
||||
* @type {number} byte
|
||||
*/
|
||||
this.info = this.content.isCountable() ? binary.BIT2 : 0
|
||||
}
|
||||
|
||||
/**
|
||||
* This is used to mark the item as an indexed fast-search marker
|
||||
*
|
||||
* @type {boolean}
|
||||
*/
|
||||
set marker (isMarked) {
|
||||
if (((this.info & binary.BIT4) > 0) !== isMarked) {
|
||||
this.info ^= binary.BIT4
|
||||
}
|
||||
}
|
||||
|
||||
get marker () {
|
||||
return (this.info & binary.BIT4) > 0
|
||||
}
|
||||
|
||||
/**
|
||||
* If true, do not garbage collect this Item.
|
||||
*/
|
||||
get keep () {
|
||||
return (this.info & binary.BIT1) > 0
|
||||
}
|
||||
|
||||
set keep (doKeep) {
|
||||
if (this.keep !== doKeep) {
|
||||
this.info ^= binary.BIT1
|
||||
}
|
||||
}
|
||||
|
||||
get countable () {
|
||||
return (this.info & binary.BIT2) > 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether this item was deleted or not.
|
||||
* @type {Boolean}
|
||||
*/
|
||||
get deleted () {
|
||||
return (this.info & binary.BIT3) > 0
|
||||
}
|
||||
|
||||
set deleted (doDelete) {
|
||||
if (this.deleted !== doDelete) {
|
||||
this.info ^= binary.BIT3
|
||||
}
|
||||
}
|
||||
|
||||
markDeleted () {
|
||||
this.info |= binary.BIT3
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the creator clientID of the missing op or define missing items and return null.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {StructStore} store
|
||||
* @return {null | number}
|
||||
*/
|
||||
getMissing (transaction, store) {
|
||||
if (this.origin && this.origin.client !== this.id.client && this.origin.clock >= getState(store, this.origin.client)) {
|
||||
return this.origin.client
|
||||
}
|
||||
if (this.rightOrigin && this.rightOrigin.client !== this.id.client && this.rightOrigin.clock >= getState(store, this.rightOrigin.client)) {
|
||||
return this.rightOrigin.client
|
||||
}
|
||||
if (this.parent && this.parent.constructor === ID && this.id.client !== this.parent.client && this.parent.clock >= getState(store, this.parent.client)) {
|
||||
return this.parent.client
|
||||
}
|
||||
|
||||
// We have all missing ids, now find the items
|
||||
|
||||
if (this.origin) {
|
||||
this.left = getItemCleanEnd(transaction, store, this.origin)
|
||||
this.origin = this.left.lastId
|
||||
}
|
||||
if (this.rightOrigin) {
|
||||
this.right = getItemCleanStart(transaction, this.rightOrigin)
|
||||
this.rightOrigin = this.right.id
|
||||
}
|
||||
if ((this.left && this.left.constructor === GC) || (this.right && this.right.constructor === GC)) {
|
||||
this.parent = null
|
||||
} else if (!this.parent) {
|
||||
// only set parent if this shouldn't be garbage collected
|
||||
if (this.left && this.left.constructor === Item) {
|
||||
this.parent = this.left.parent
|
||||
this.parentSub = this.left.parentSub
|
||||
} else if (this.right && this.right.constructor === Item) {
|
||||
this.parent = this.right.parent
|
||||
this.parentSub = this.right.parentSub
|
||||
}
|
||||
} else if (this.parent.constructor === ID) {
|
||||
const parentItem = getItem(store, this.parent)
|
||||
if (parentItem.constructor === GC) {
|
||||
this.parent = null
|
||||
} else {
|
||||
this.parent = /** @type {ContentType} */ (parentItem.content).type
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {number} offset
|
||||
*/
|
||||
integrate (transaction, offset) {
|
||||
if (offset > 0) {
|
||||
this.id.clock += offset
|
||||
this.left = getItemCleanEnd(transaction, transaction.doc.store, createID(this.id.client, this.id.clock - 1))
|
||||
this.origin = this.left.lastId
|
||||
this.content = this.content.splice(offset)
|
||||
this.length -= offset
|
||||
}
|
||||
|
||||
if (this.parent) {
|
||||
if ((!this.left && (!this.right || this.right.left !== null)) || (this.left && this.left.right !== this.right)) {
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let left = this.left
|
||||
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let o
|
||||
// set o to the first conflicting item
|
||||
if (left !== null) {
|
||||
o = left.right
|
||||
} else if (this.parentSub !== null) {
|
||||
o = /** @type {AbstractType<any>} */ (this.parent)._map.get(this.parentSub) || null
|
||||
while (o !== null && o.left !== null) {
|
||||
o = o.left
|
||||
}
|
||||
} else {
|
||||
o = /** @type {AbstractType<any>} */ (this.parent)._start
|
||||
}
|
||||
// TODO: use something like DeleteSet here (a tree implementation would be best)
|
||||
// @todo use global set definitions
|
||||
/**
|
||||
* @type {Set<Item>}
|
||||
*/
|
||||
const conflictingItems = new Set()
|
||||
/**
|
||||
* @type {Set<Item>}
|
||||
*/
|
||||
const itemsBeforeOrigin = new Set()
|
||||
// Let c in conflictingItems, b in itemsBeforeOrigin
|
||||
// ***{origin}bbbb{this}{c,b}{c,b}{o}***
|
||||
// Note that conflictingItems is a subset of itemsBeforeOrigin
|
||||
while (o !== null && o !== this.right) {
|
||||
itemsBeforeOrigin.add(o)
|
||||
conflictingItems.add(o)
|
||||
if (compareIDs(this.origin, o.origin)) {
|
||||
// case 1
|
||||
if (o.id.client < this.id.client) {
|
||||
left = o
|
||||
conflictingItems.clear()
|
||||
} else if (compareIDs(this.rightOrigin, o.rightOrigin)) {
|
||||
// this and o are conflicting and point to the same integration points. The id decides which item comes first.
|
||||
// Since this is to the left of o, we can break here
|
||||
break
|
||||
} // else, o might be integrated before an item that this conflicts with. If so, we will find it in the next iterations
|
||||
} else if (o.origin !== null && itemsBeforeOrigin.has(getItem(transaction.doc.store, o.origin))) { // use getItem instead of getItemCleanEnd because we don't want / need to split items.
|
||||
// case 2
|
||||
if (!conflictingItems.has(getItem(transaction.doc.store, o.origin))) {
|
||||
left = o
|
||||
conflictingItems.clear()
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
o = o.right
|
||||
}
|
||||
this.left = left
|
||||
}
|
||||
// reconnect left/right + update parent map/start if necessary
|
||||
if (this.left !== null) {
|
||||
const right = this.left.right
|
||||
this.right = right
|
||||
this.left.right = this
|
||||
} else {
|
||||
let r
|
||||
if (this.parentSub !== null) {
|
||||
r = /** @type {AbstractType<any>} */ (this.parent)._map.get(this.parentSub) || null
|
||||
while (r !== null && r.left !== null) {
|
||||
r = r.left
|
||||
}
|
||||
} else {
|
||||
r = /** @type {AbstractType<any>} */ (this.parent)._start
|
||||
;/** @type {AbstractType<any>} */ (this.parent)._start = this
|
||||
}
|
||||
this.right = r
|
||||
}
|
||||
if (this.right !== null) {
|
||||
this.right.left = this
|
||||
} else if (this.parentSub !== null) {
|
||||
// set as current parent value if right === null and this is parentSub
|
||||
/** @type {AbstractType<any>} */ (this.parent)._map.set(this.parentSub, this)
|
||||
if (this.left !== null) {
|
||||
// this is the current attribute value of parent. delete right
|
||||
this.left.delete(transaction)
|
||||
}
|
||||
}
|
||||
// adjust length of parent
|
||||
if (this.parentSub === null && this.countable && !this.deleted) {
|
||||
/** @type {AbstractType<any>} */ (this.parent)._length += this.length
|
||||
}
|
||||
addStruct(transaction.doc.store, this)
|
||||
this.content.integrate(transaction, this)
|
||||
// add parent to transaction.changed
|
||||
addChangedTypeToTransaction(transaction, /** @type {AbstractType<any>} */ (this.parent), this.parentSub)
|
||||
if ((/** @type {AbstractType<any>} */ (this.parent)._item !== null && /** @type {AbstractType<any>} */ (this.parent)._item.deleted) || (this.parentSub !== null && this.right !== null)) {
|
||||
// delete if parent is deleted or if this is not the current attribute value of parent
|
||||
this.delete(transaction)
|
||||
}
|
||||
} else {
|
||||
// parent is not defined. Integrate GC struct instead
|
||||
new GC(this.id, this.length).integrate(transaction, 0)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the next non-deleted item
|
||||
*/
|
||||
get next () {
|
||||
let n = this.right
|
||||
while (n !== null && n.deleted) {
|
||||
n = n.right
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the previous non-deleted item
|
||||
*/
|
||||
get prev () {
|
||||
let n = this.left
|
||||
while (n !== null && n.deleted) {
|
||||
n = n.left
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the last content address of this Item.
|
||||
*/
|
||||
get lastId () {
|
||||
// allocating ids is pretty costly because of the amount of ids created, so we try to reuse whenever possible
|
||||
return this.length === 1 ? this.id : createID(this.id.client, this.id.clock + this.length - 1)
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to merge two items
|
||||
*
|
||||
* @param {Item} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
if (
|
||||
this.constructor === right.constructor &&
|
||||
compareIDs(right.origin, this.lastId) &&
|
||||
this.right === right &&
|
||||
compareIDs(this.rightOrigin, right.rightOrigin) &&
|
||||
this.id.client === right.id.client &&
|
||||
this.id.clock + this.length === right.id.clock &&
|
||||
this.deleted === right.deleted &&
|
||||
this.redone === null &&
|
||||
right.redone === null &&
|
||||
this.content.constructor === right.content.constructor &&
|
||||
this.content.mergeWith(right.content)
|
||||
) {
|
||||
const searchMarker = /** @type {AbstractType<any>} */ (this.parent)._searchMarker
|
||||
if (searchMarker) {
|
||||
searchMarker.forEach(marker => {
|
||||
if (marker.p === right) {
|
||||
// right is going to be "forgotten" so we need to update the marker
|
||||
marker.p = this
|
||||
// adjust marker index
|
||||
if (!this.deleted && this.countable) {
|
||||
marker.index -= this.length
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
if (right.keep) {
|
||||
this.keep = true
|
||||
}
|
||||
this.right = right.right
|
||||
if (this.right !== null) {
|
||||
this.right.left = this
|
||||
}
|
||||
this.length += right.length
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark this Item as deleted.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
delete (transaction) {
|
||||
if (!this.deleted) {
|
||||
const parent = /** @type {AbstractType<any>} */ (this.parent)
|
||||
// adjust the length of parent
|
||||
if (this.countable && this.parentSub === null) {
|
||||
parent._length -= this.length
|
||||
}
|
||||
this.markDeleted()
|
||||
addToDeleteSet(transaction.deleteSet, this.id.client, this.id.clock, this.length)
|
||||
addChangedTypeToTransaction(transaction, parent, this.parentSub)
|
||||
this.content.delete(transaction)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
* @param {boolean} parentGCd
|
||||
*/
|
||||
gc (store, parentGCd) {
|
||||
if (!this.deleted) {
|
||||
throw error.unexpectedCase()
|
||||
}
|
||||
this.content.gc(store)
|
||||
if (parentGCd) {
|
||||
replaceStruct(store, this, new GC(this.id, this.length))
|
||||
} else {
|
||||
this.content = new ContentDeleted(this.length)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform the properties of this type to binary and write it to an
|
||||
* BinaryEncoder.
|
||||
*
|
||||
* This is called when this Item is sent to a remote peer.
|
||||
*
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
const origin = offset > 0 ? createID(this.id.client, this.id.clock + offset - 1) : this.origin
|
||||
const rightOrigin = this.rightOrigin
|
||||
const parentSub = this.parentSub
|
||||
const info = (this.content.getRef() & binary.BITS5) |
|
||||
(origin === null ? 0 : binary.BIT8) | // origin is defined
|
||||
(rightOrigin === null ? 0 : binary.BIT7) | // right origin is defined
|
||||
(parentSub === null ? 0 : binary.BIT6) // parentSub is non-null
|
||||
encoder.writeInfo(info)
|
||||
if (origin !== null) {
|
||||
encoder.writeLeftID(origin)
|
||||
}
|
||||
if (rightOrigin !== null) {
|
||||
encoder.writeRightID(rightOrigin)
|
||||
}
|
||||
if (origin === null && rightOrigin === null) {
|
||||
const parent = /** @type {AbstractType<any>} */ (this.parent)
|
||||
if (parent._item !== undefined) {
|
||||
const parentItem = parent._item
|
||||
if (parentItem === null) {
|
||||
// parent type on y._map
|
||||
// find the correct key
|
||||
const ykey = findRootTypeKey(parent)
|
||||
encoder.writeParentInfo(true) // write parentYKey
|
||||
encoder.writeString(ykey)
|
||||
} else {
|
||||
encoder.writeParentInfo(false) // write parent id
|
||||
encoder.writeLeftID(parentItem.id)
|
||||
}
|
||||
} else if (parent.constructor === String) { // this edge case was added by differential updates
|
||||
encoder.writeParentInfo(true) // write parentYKey
|
||||
encoder.writeString(parent)
|
||||
} else if (parent.constructor === ID) {
|
||||
encoder.writeParentInfo(false) // write parent id
|
||||
encoder.writeLeftID(parent)
|
||||
} else {
|
||||
error.unexpectedCase()
|
||||
}
|
||||
if (parentSub !== null) {
|
||||
encoder.writeString(parentSub)
|
||||
}
|
||||
}
|
||||
this.content.write(encoder, offset)
|
||||
}
|
||||
}
|
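// Illustrative sketch (not part of the source): how the info byte composed by Item.write above
// can be unpacked again. It assumes the lib0/binary constants BIT8 = 0x80, BIT7 = 0x40,
// BIT6 = 0x20 and BITS5 = 0x1f; `decodeItemInfo` is a hypothetical helper, named here only for illustration.
const decodeItemInfo = info => ({
  contentRef: info & 0x1f, // lower 5 bits select the content reader (see contentRefs below)
  hasOrigin: (info & 0x80) !== 0, // BIT8: a left origin id follows
  hasRightOrigin: (info & 0x40) !== 0, // BIT7: a right origin id follows
  hasParentSub: (info & 0x20) !== 0 // BIT6: the item lives under a map key (parentSub)
})
// e.g. decodeItemInfo(0x80 | 4) -> { contentRef: 4, hasOrigin: true, hasRightOrigin: false, hasParentSub: false }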
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @param {number} info
|
||||
*/
|
||||
export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS5](decoder)
|
||||
|
||||
/**
|
||||
* A lookup map for reading Item content.
|
||||
*
|
||||
* @type {Array<function(UpdateDecoderV1 | UpdateDecoderV2):AbstractContent>}
|
||||
*/
|
||||
export const contentRefs = [
|
||||
() => { error.unexpectedCase() }, // GC is not ItemContent
|
||||
readContentDeleted, // 1
|
||||
readContentJSON, // 2
|
||||
readContentBinary, // 3
|
||||
readContentString, // 4
|
||||
readContentEmbed, // 5
|
||||
readContentFormat, // 6
|
||||
readContentType, // 7
|
||||
readContentAny, // 8
|
||||
readContentDoc, // 9
|
||||
() => { error.unexpectedCase() } // 10 - Skip is not ItemContent
|
||||
]
|
||||
|
||||
/**
|
||||
* Do not implement this class!
|
||||
*/
|
||||
export class AbstractContent {
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getLength () {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
getContent () {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* Should return false if this Item is some kind of meta information
|
||||
* (e.g. format information).
|
||||
*
|
||||
* * Whether this Item should be addressable via `yarray.get(i)`
|
||||
* * Whether this Item should be counted when computing yarray.length
|
||||
*
|
||||
* @return {boolean}
|
||||
*/
|
||||
isCountable () {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {AbstractContent}
|
||||
*/
|
||||
copy () {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} _offset
|
||||
* @return {AbstractContent}
|
||||
*/
|
||||
splice (_offset) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractContent} _right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (_right) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} _transaction
|
||||
* @param {Item} _item
|
||||
*/
|
||||
integrate (_transaction, _item) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} _transaction
|
||||
*/
|
||||
delete (_transaction) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {StructStore} _store
|
||||
*/
|
||||
gc (_store) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} _encoder
|
||||
* @param {number} _offset
|
||||
*/
|
||||
write (_encoder, _offset) {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
getRef () {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
}
|
59
src/structs/Skip.js
Normal file
@ -0,0 +1,59 @@
|
||||
import {
|
||||
AbstractStruct,
|
||||
UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
import * as error from 'lib0/error'
|
||||
import * as encoding from 'lib0/encoding'
|
||||
|
||||
export const structSkipRefNumber = 10
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
export class Skip extends AbstractStruct {
|
||||
get deleted () {
|
||||
return true
|
||||
}
|
||||
|
||||
delete () {}
|
||||
|
||||
/**
|
||||
* @param {Skip} right
|
||||
* @return {boolean}
|
||||
*/
|
||||
mergeWith (right) {
|
||||
if (this.constructor !== right.constructor) {
|
||||
return false
|
||||
}
|
||||
this.length += right.length
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {number} offset
|
||||
*/
|
||||
integrate (transaction, offset) {
|
||||
// skip structs cannot be integrated
|
||||
error.unexpectedCase()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {number} offset
|
||||
*/
|
||||
write (encoder, offset) {
|
||||
encoder.writeInfo(structSkipRefNumber)
|
||||
// write as VarUint because Skips can't make use of predictable length-encoding
|
||||
encoding.writeVarUint(encoder.restEncoder, this.length - offset)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {StructStore} store
|
||||
* @return {null | number}
|
||||
*/
|
||||
getMissing (transaction, store) {
|
||||
return null
|
||||
}
|
||||
}
|
983
src/types/AbstractType.js
Normal file
@ -0,0 +1,983 @@
|
||||
import {
|
||||
removeEventHandlerListener,
|
||||
callEventHandlerListeners,
|
||||
addEventHandlerListener,
|
||||
createEventHandler,
|
||||
getState,
|
||||
isVisible,
|
||||
ContentType,
|
||||
createID,
|
||||
ContentAny,
|
||||
ContentBinary,
|
||||
getItemCleanStart,
|
||||
ContentDoc, YText, YArray, UpdateEncoderV1, UpdateEncoderV2, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as map from 'lib0/map'
|
||||
import * as iterator from 'lib0/iterator'
|
||||
import * as error from 'lib0/error'
|
||||
import * as math from 'lib0/math'
|
||||
import * as log from 'lib0/logging'
|
||||
|
||||
/**
|
||||
* https://docs.yjs.dev/getting-started/working-with-shared-types#caveats
|
||||
*/
|
||||
export const warnPrematureAccess = () => { log.warn('Invalid access: Add Yjs type to a document before reading data.') }
|
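// Usage sketch (assumes the public Yjs API, imported from 'yjs'): reading from a type that has
// not been added to a Y.Doc yet triggers the warning above, because `doc` is still null.
import * as Y from 'yjs'
const orphan = new Y.Array()
orphan.length // logs the premature-access warning and returns 0
const doc = new Y.Doc()
const yarray = doc.getArray('list') // integrated into a document
yarray.length // 0, no warning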
||||
|
||||
const maxSearchMarker = 80
|
||||
|
||||
/**
|
||||
* A unique timestamp that identifies each marker.
|
||||
*
|
||||
* Time is relative; this is more like an ever-increasing clock.
|
||||
*
|
||||
* @type {number}
|
||||
*/
|
||||
let globalSearchMarkerTimestamp = 0
|
||||
|
||||
export class ArraySearchMarker {
|
||||
/**
|
||||
* @param {Item} p
|
||||
* @param {number} index
|
||||
*/
|
||||
constructor (p, index) {
|
||||
p.marker = true
|
||||
this.p = p
|
||||
this.index = index
|
||||
this.timestamp = globalSearchMarkerTimestamp++
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ArraySearchMarker} marker
|
||||
*/
|
||||
const refreshMarkerTimestamp = marker => { marker.timestamp = globalSearchMarkerTimestamp++ }
|
||||
|
||||
/**
|
||||
* This is rather complex, so this function is the only thing that should overwrite a marker
|
||||
*
|
||||
* @param {ArraySearchMarker} marker
|
||||
* @param {Item} p
|
||||
* @param {number} index
|
||||
*/
|
||||
const overwriteMarker = (marker, p, index) => {
|
||||
marker.p.marker = false
|
||||
marker.p = p
|
||||
p.marker = true
|
||||
marker.index = index
|
||||
marker.timestamp = globalSearchMarkerTimestamp++
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Array<ArraySearchMarker>} searchMarker
|
||||
* @param {Item} p
|
||||
* @param {number} index
|
||||
*/
|
||||
const markPosition = (searchMarker, p, index) => {
|
||||
if (searchMarker.length >= maxSearchMarker) {
|
||||
// override oldest marker (we don't want to create more objects)
|
||||
const marker = searchMarker.reduce((a, b) => a.timestamp < b.timestamp ? a : b)
|
||||
overwriteMarker(marker, p, index)
|
||||
return marker
|
||||
} else {
|
||||
// create new marker
|
||||
const pm = new ArraySearchMarker(p, index)
|
||||
searchMarker.push(pm)
|
||||
return pm
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search markers help us find positions in the associative array faster.
|
||||
*
|
||||
* They speed up the process of finding a position without much bookkeeping.
|
||||
*
|
||||
* A maximum of `maxSearchMarker` objects are created.
|
||||
*
|
||||
* This function always returns a refreshed marker (updated timestamp)
|
||||
*
|
||||
* @param {AbstractType<any>} yarray
|
||||
* @param {number} index
|
||||
*/
|
||||
export const findMarker = (yarray, index) => {
|
||||
if (yarray._start === null || index === 0 || yarray._searchMarker === null) {
|
||||
return null
|
||||
}
|
||||
const marker = yarray._searchMarker.length === 0 ? null : yarray._searchMarker.reduce((a, b) => math.abs(index - a.index) < math.abs(index - b.index) ? a : b)
|
||||
let p = yarray._start
|
||||
let pindex = 0
|
||||
if (marker !== null) {
|
||||
p = marker.p
|
||||
pindex = marker.index
|
||||
refreshMarkerTimestamp(marker) // we used it, we might need to use it again
|
||||
}
|
||||
// iterate to right if possible
|
||||
while (p.right !== null && pindex < index) {
|
||||
if (!p.deleted && p.countable) {
|
||||
if (index < pindex + p.length) {
|
||||
break
|
||||
}
|
||||
pindex += p.length
|
||||
}
|
||||
p = p.right
|
||||
}
|
||||
// iterate to left if necessary (might be that pindex > index)
|
||||
while (p.left !== null && pindex > index) {
|
||||
p = p.left
|
||||
if (!p.deleted && p.countable) {
|
||||
pindex -= p.length
|
||||
}
|
||||
}
|
||||
// we want to make sure that p can't be merged with left, because that would screw up everything
|
||||
// in that case just return what we have (it is most likely the best marker anyway)
|
||||
// iterate to left until p can't be merged with left
|
||||
while (p.left !== null && p.left.id.client === p.id.client && p.left.id.clock + p.left.length === p.id.clock) {
|
||||
p = p.left
|
||||
if (!p.deleted && p.countable) {
|
||||
pindex -= p.length
|
||||
}
|
||||
}
|
||||
|
||||
// @todo remove!
|
||||
// assure position
|
||||
// {
|
||||
// let start = yarray._start
|
||||
// let pos = 0
|
||||
// while (start !== p) {
|
||||
// if (!start.deleted && start.countable) {
|
||||
// pos += start.length
|
||||
// }
|
||||
// start = /** @type {Item} */ (start.right)
|
||||
// }
|
||||
// if (pos !== pindex) {
|
||||
// debugger
|
||||
// throw new Error('Gotcha position fail!')
|
||||
// }
|
||||
// }
|
||||
// if (marker) {
|
||||
// if (window.lengths == null) {
|
||||
// window.lengths = []
|
||||
// window.getLengths = () => window.lengths.sort((a, b) => a - b)
|
||||
// }
|
||||
// window.lengths.push(marker.index - pindex)
|
||||
// console.log('distance', marker.index - pindex, 'len', p && p.parent.length)
|
||||
// }
|
||||
if (marker !== null && math.abs(marker.index - pindex) < /** @type {YText|YArray<any>} */ (p.parent).length / maxSearchMarker) {
|
||||
// adjust existing marker
|
||||
overwriteMarker(marker, p, pindex)
|
||||
return marker
|
||||
} else {
|
||||
// create new marker
|
||||
return markPosition(yarray._searchMarker, p, pindex)
|
||||
}
|
||||
}
|
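// Minimal sketch of the idea behind findMarker (not the real implementation): keep a handful of
// (node, index) bookmarks and start the list walk from the closest one instead of from the head.
// It assumes a simplified singly linked list of countable nodes and, for brevity, only walks to
// the right; the real findMarker above also iterates left when the bookmark overshoots.
const findWithMarkers = (head, markers, targetIndex) => {
  // pick the bookmark whose index is closest to the target position
  const m = markers.reduce(
    (a, b) => Math.abs(targetIndex - a.index) < Math.abs(targetIndex - b.index) ? a : b,
    { node: head, index: 0 }
  )
  let node = m.node
  let pos = m.index
  // advance until the node containing targetIndex is reached
  while (node !== null && pos + node.length <= targetIndex) {
    pos += node.length
    node = node.right
  }
  return { node, offsetInNode: targetIndex - pos }
}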
||||
|
||||
/**
|
||||
* Update markers when a change happened.
|
||||
*
|
||||
* This should be called before doing a deletion!
|
||||
*
|
||||
* @param {Array<ArraySearchMarker>} searchMarker
|
||||
* @param {number} index
|
||||
* @param {number} len If insertion, len is positive. If deletion, len is negative.
|
||||
*/
|
||||
export const updateMarkerChanges = (searchMarker, index, len) => {
|
||||
for (let i = searchMarker.length - 1; i >= 0; i--) {
|
||||
const m = searchMarker[i]
|
||||
if (len > 0) {
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let p = m.p
|
||||
p.marker = false
|
||||
// Ideally we just want to do a simple position comparison, but this will only work if
|
||||
// search markers don't point to deleted items for formats.
|
||||
// Iterate marker to prev undeleted countable position so we know what to do when updating a position
|
||||
while (p && (p.deleted || !p.countable)) {
|
||||
p = p.left
|
||||
if (p && !p.deleted && p.countable) {
|
||||
// adjust position. the loop should break now
|
||||
m.index -= p.length
|
||||
}
|
||||
}
|
||||
if (p === null || p.marker === true) {
|
||||
// remove search marker if updated position is null or if position is already marked
|
||||
searchMarker.splice(i, 1)
|
||||
continue
|
||||
}
|
||||
m.p = p
|
||||
p.marker = true
|
||||
}
|
||||
if (index < m.index || (len > 0 && index === m.index)) { // a simple index <= m.index check would actually suffice
|
||||
m.index = math.max(index, m.index + len)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Accumulate all (list) children of a type and return them as an Array.
|
||||
*
|
||||
* @param {AbstractType<any>} t
|
||||
* @return {Array<Item>}
|
||||
*/
|
||||
export const getTypeChildren = t => {
|
||||
t.doc ?? warnPrematureAccess()
|
||||
let s = t._start
|
||||
const arr = []
|
||||
while (s) {
|
||||
arr.push(s)
|
||||
s = s.right
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
/**
|
||||
* Call event listeners with an event. This will also add an event to all
|
||||
* parents (for `.observeDeep` handlers).
|
||||
*
|
||||
* @template EventType
|
||||
* @param {AbstractType<EventType>} type
|
||||
* @param {Transaction} transaction
|
||||
* @param {EventType} event
|
||||
*/
|
||||
export const callTypeObservers = (type, transaction, event) => {
|
||||
const changedType = type
|
||||
const changedParentTypes = transaction.changedParentTypes
|
||||
while (true) {
|
||||
// @ts-ignore
|
||||
map.setIfUndefined(changedParentTypes, type, () => []).push(event)
|
||||
if (type._item === null) {
|
||||
break
|
||||
}
|
||||
type = /** @type {AbstractType<any>} */ (type._item.parent)
|
||||
}
|
||||
callEventHandlerListeners(changedType._eH, event, transaction)
|
||||
}
|
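// Usage sketch (assumes the public Yjs API, imported from 'yjs'): callTypeObservers adds the event
// to every ancestor's entry in changedParentTypes, which is what makes observeDeep on a parent
// fire for changes made deep inside nested types.
import * as Y from 'yjs'
const doc = new Y.Doc()
const root = doc.getMap('root')
root.observeDeep(events => {
  // fires once per transaction that changes `root` or anything nested below it
  console.log(events.length, 'event(s) bubbled up to root')
})
const inner = new Y.Array()
root.set('list', inner)
inner.insert(0, ['hello']) // the nested insert still reaches the observer on `root`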
||||
|
||||
/**
|
||||
* @template EventType
|
||||
* Abstract Yjs Type class
|
||||
*/
|
||||
export class AbstractType {
|
||||
constructor () {
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
this._item = null
|
||||
/**
|
||||
* @type {Map<string,Item>}
|
||||
*/
|
||||
this._map = new Map()
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
this._start = null
|
||||
/**
|
||||
* @type {Doc|null}
|
||||
*/
|
||||
this.doc = null
|
||||
this._length = 0
|
||||
/**
|
||||
* Event handlers
|
||||
* @type {EventHandler<EventType,Transaction>}
|
||||
*/
|
||||
this._eH = createEventHandler()
|
||||
/**
|
||||
* Deep event handlers
|
||||
* @type {EventHandler<Array<YEvent<any>>,Transaction>}
|
||||
*/
|
||||
this._dEH = createEventHandler()
|
||||
/**
|
||||
* @type {null | Array<ArraySearchMarker>}
|
||||
*/
|
||||
this._searchMarker = null
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {AbstractType<any>|null}
|
||||
*/
|
||||
get parent () {
|
||||
return this._item ? /** @type {AbstractType<any>} */ (this._item.parent) : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Integrate this type into the Yjs instance.
|
||||
*
|
||||
* * Save this struct in the os
|
||||
* * This type is sent to other clients
|
||||
* * Observer functions are fired
|
||||
*
|
||||
* @param {Doc} y The Yjs instance
|
||||
* @param {Item|null} item
|
||||
*/
|
||||
_integrate (y, item) {
|
||||
this.doc = y
|
||||
this._item = item
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {AbstractType<EventType>}
|
||||
*/
|
||||
_copy () {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a copy of this data type that can be included somewhere else.
|
||||
*
|
||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
||||
*
|
||||
* @return {AbstractType<EventType>}
|
||||
*/
|
||||
clone () {
|
||||
throw error.methodUnimplemented()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} _encoder
|
||||
*/
|
||||
_write (_encoder) { }
|
||||
|
||||
/**
|
||||
* The first non-deleted item
|
||||
*/
|
||||
get _first () {
|
||||
let n = this._start
|
||||
while (n !== null && n.deleted) {
|
||||
n = n.right
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates YEvent and calls all type observers.
|
||||
* Must be implemented by each type.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {Set<null|string>} _parentSubs Keys changed on this type. `null` if list was modified.
|
||||
*/
|
||||
_callObserver (transaction, _parentSubs) {
|
||||
if (!transaction.local && this._searchMarker) {
|
||||
this._searchMarker.length = 0
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Observe all events that are created on this type.
|
||||
*
|
||||
* @param {function(EventType, Transaction):void} f Observer function
|
||||
*/
|
||||
observe (f) {
|
||||
addEventHandlerListener(this._eH, f)
|
||||
}
|
||||
|
||||
/**
|
||||
* Observe all events that are created by this type and its children.
|
||||
*
|
||||
* @param {function(Array<YEvent<any>>,Transaction):void} f Observer function
|
||||
*/
|
||||
observeDeep (f) {
|
||||
addEventHandlerListener(this._dEH, f)
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregister an observer function.
|
||||
*
|
||||
* @param {function(EventType,Transaction):void} f Observer function
|
||||
*/
|
||||
unobserve (f) {
|
||||
removeEventHandlerListener(this._eH, f)
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregister an observer function.
|
||||
*
|
||||
* @param {function(Array<YEvent<any>>,Transaction):void} f Observer function
|
||||
*/
|
||||
unobserveDeep (f) {
|
||||
removeEventHandlerListener(this._dEH, f)
|
||||
}
|
||||
|
||||
/**
|
||||
* @abstract
|
||||
* @return {any}
|
||||
*/
|
||||
toJSON () {}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {number} start
|
||||
* @param {number} end
|
||||
* @return {Array<any>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListSlice = (type, start, end) => {
|
||||
type.doc ?? warnPrematureAccess()
|
||||
if (start < 0) {
|
||||
start = type._length + start
|
||||
}
|
||||
if (end < 0) {
|
||||
end = type._length + end
|
||||
}
|
||||
let len = end - start
|
||||
const cs = []
|
||||
let n = type._start
|
||||
while (n !== null && len > 0) {
|
||||
if (n.countable && !n.deleted) {
|
||||
const c = n.content.getContent()
|
||||
if (c.length <= start) {
|
||||
start -= c.length
|
||||
} else {
|
||||
for (let i = start; i < c.length && len > 0; i++) {
|
||||
cs.push(c[i])
|
||||
len--
|
||||
}
|
||||
start = 0
|
||||
}
|
||||
}
|
||||
n = n.right
|
||||
}
|
||||
return cs
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
* @return {Array<any>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListToArray = type => {
|
||||
type.doc ?? warnPrematureAccess()
|
||||
const cs = []
|
||||
let n = type._start
|
||||
while (n !== null) {
|
||||
if (n.countable && !n.deleted) {
|
||||
const c = n.content.getContent()
|
||||
for (let i = 0; i < c.length; i++) {
|
||||
cs.push(c[i])
|
||||
}
|
||||
}
|
||||
n = n.right
|
||||
}
|
||||
return cs
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {Snapshot} snapshot
|
||||
* @return {Array<any>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListToArraySnapshot = (type, snapshot) => {
|
||||
const cs = []
|
||||
let n = type._start
|
||||
while (n !== null) {
|
||||
if (n.countable && isVisible(n, snapshot)) {
|
||||
const c = n.content.getContent()
|
||||
for (let i = 0; i < c.length; i++) {
|
||||
cs.push(c[i])
|
||||
}
|
||||
}
|
||||
n = n.right
|
||||
}
|
||||
return cs
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a provided function once on every element of this YArray.
|
||||
*
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {function(any,number,any):void} f A function to execute on every element of this YArray.
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListForEach = (type, f) => {
|
||||
let index = 0
|
||||
let n = type._start
|
||||
type.doc ?? warnPrematureAccess()
|
||||
while (n !== null) {
|
||||
if (n.countable && !n.deleted) {
|
||||
const c = n.content.getContent()
|
||||
for (let i = 0; i < c.length; i++) {
|
||||
f(c[i], index++, type)
|
||||
}
|
||||
}
|
||||
n = n.right
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @template C,R
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {function(C,number,AbstractType<any>):R} f
|
||||
* @return {Array<R>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListMap = (type, f) => {
|
||||
/**
|
||||
* @type {Array<any>}
|
||||
*/
|
||||
const result = []
|
||||
typeListForEach(type, (c, i) => {
|
||||
result.push(f(c, i, type))
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
* @return {IterableIterator<any>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListCreateIterator = type => {
|
||||
let n = type._start
|
||||
/**
|
||||
* @type {Array<any>|null}
|
||||
*/
|
||||
let currentContent = null
|
||||
let currentContentIndex = 0
|
||||
return {
|
||||
[Symbol.iterator] () {
|
||||
return this
|
||||
},
|
||||
next: () => {
|
||||
// find some content
|
||||
if (currentContent === null) {
|
||||
while (n !== null && n.deleted) {
|
||||
n = n.right
|
||||
}
|
||||
// check if we reached the end, no need to check currentContent, because it does not exist
|
||||
if (n === null) {
|
||||
return {
|
||||
done: true,
|
||||
value: undefined
|
||||
}
|
||||
}
|
||||
// we found n, so we can set currentContent
|
||||
currentContent = n.content.getContent()
|
||||
currentContentIndex = 0
|
||||
n = n.right // we used the content of n, now iterate to next
|
||||
}
|
||||
const value = currentContent[currentContentIndex++]
|
||||
// check if we need to empty currentContent
|
||||
if (currentContent.length <= currentContentIndex) {
|
||||
currentContent = null
|
||||
}
|
||||
return {
|
||||
done: false,
|
||||
value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a provided function once on every element of this YArray.
|
||||
* Operates on a snapshotted state of the document.
|
||||
*
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {function(any,number,AbstractType<any>):void} f A function to execute on every element of this YArray.
|
||||
* @param {Snapshot} snapshot
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListForEachSnapshot = (type, f, snapshot) => {
|
||||
let index = 0
|
||||
let n = type._start
|
||||
while (n !== null) {
|
||||
if (n.countable && isVisible(n, snapshot)) {
|
||||
const c = n.content.getContent()
|
||||
for (let i = 0; i < c.length; i++) {
|
||||
f(c[i], index++, type)
|
||||
}
|
||||
}
|
||||
n = n.right
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {number} index
|
||||
* @return {any}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListGet = (type, index) => {
|
||||
type.doc ?? warnPrematureAccess()
|
||||
const marker = findMarker(type, index)
|
||||
let n = type._start
|
||||
if (marker !== null) {
|
||||
n = marker.p
|
||||
index -= marker.index
|
||||
}
|
||||
for (; n !== null; n = n.right) {
|
||||
if (!n.deleted && n.countable) {
|
||||
if (index < n.length) {
|
||||
return n.content.getContent()[index]
|
||||
}
|
||||
index -= n.length
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {Item?} referenceItem
|
||||
* @param {Array<Object<string,any>|Array<any>|boolean|number|null|string|Uint8Array>} content
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListInsertGenericsAfter = (transaction, parent, referenceItem, content) => {
|
||||
let left = referenceItem
|
||||
const doc = transaction.doc
|
||||
const ownClientId = doc.clientID
|
||||
const store = doc.store
|
||||
const right = referenceItem === null ? parent._start : referenceItem.right
|
||||
/**
|
||||
* @type {Array<Object|Array<any>|number|null>}
|
||||
*/
|
||||
let jsonContent = []
|
||||
const packJsonContent = () => {
|
||||
if (jsonContent.length > 0) {
|
||||
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentAny(jsonContent))
|
||||
left.integrate(transaction, 0)
|
||||
jsonContent = []
|
||||
}
|
||||
}
|
||||
content.forEach(c => {
|
||||
if (c === null) {
|
||||
jsonContent.push(c)
|
||||
} else {
|
||||
switch (c.constructor) {
|
||||
case Number:
|
||||
case Object:
|
||||
case Boolean:
|
||||
case Array:
|
||||
case String:
|
||||
jsonContent.push(c)
|
||||
break
|
||||
default:
|
||||
packJsonContent()
|
||||
switch (c.constructor) {
|
||||
case Uint8Array:
|
||||
case ArrayBuffer:
|
||||
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentBinary(new Uint8Array(/** @type {Uint8Array} */ (c))))
|
||||
left.integrate(transaction, 0)
|
||||
break
|
||||
case Doc:
|
||||
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentDoc(/** @type {Doc} */ (c)))
|
||||
left.integrate(transaction, 0)
|
||||
break
|
||||
default:
|
||||
if (c instanceof AbstractType) {
|
||||
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentType(c))
|
||||
left.integrate(transaction, 0)
|
||||
} else {
|
||||
throw new Error('Unexpected content type in insert operation')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
packJsonContent()
|
||||
}
|
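// Usage sketch (assumes the public Yjs API, imported from 'yjs'): when inserting mixed content,
// consecutive JSON-encodable values are packed into a single ContentAny item by the function
// above, while binary data, subdocuments and shared types each become their own Item.
import * as Y from 'yjs'
const doc = new Y.Doc()
const yarray = doc.getArray('items')
yarray.insert(0, [1, 'two', { three: 3 }]) // packed into one ContentAny
yarray.insert(3, [new Uint8Array([1, 2, 3])]) // stored as ContentBinary
yarray.insert(4, [new Y.Map()]) // stored as ContentType
console.log(yarray.length) // 5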
||||
|
||||
const lengthExceeded = () => error.create('Length exceeded!')
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {number} index
|
||||
* @param {Array<Object<string,any>|Array<any>|number|null|string|Uint8Array>} content
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListInsertGenerics = (transaction, parent, index, content) => {
|
||||
if (index > parent._length) {
|
||||
throw lengthExceeded()
|
||||
}
|
||||
if (index === 0) {
|
||||
if (parent._searchMarker) {
|
||||
updateMarkerChanges(parent._searchMarker, index, content.length)
|
||||
}
|
||||
return typeListInsertGenericsAfter(transaction, parent, null, content)
|
||||
}
|
||||
const startIndex = index
|
||||
const marker = findMarker(parent, index)
|
||||
let n = parent._start
|
||||
if (marker !== null) {
|
||||
n = marker.p
|
||||
index -= marker.index
|
||||
// we need to iterate one to the left so that the algorithm works
|
||||
if (index === 0) {
|
||||
// @todo refactor this as it actually doesn't consider formats
|
||||
n = n.prev // important! get the left undeleted item so that we can actually decrease index
|
||||
index += (n && n.countable && !n.deleted) ? n.length : 0
|
||||
}
|
||||
}
|
||||
for (; n !== null; n = n.right) {
|
||||
if (!n.deleted && n.countable) {
|
||||
if (index <= n.length) {
|
||||
if (index < n.length) {
|
||||
// insert in-between
|
||||
getItemCleanStart(transaction, createID(n.id.client, n.id.clock + index))
|
||||
}
|
||||
break
|
||||
}
|
||||
index -= n.length
|
||||
}
|
||||
}
|
||||
if (parent._searchMarker) {
|
||||
updateMarkerChanges(parent._searchMarker, startIndex, content.length)
|
||||
}
|
||||
return typeListInsertGenericsAfter(transaction, parent, n, content)
|
||||
}
|
||||
|
||||
/**
|
||||
* Pushing content is special as we generally want to push after the last item. So we don't have to update
|
||||
* the search marker.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {Array<Object<string,any>|Array<any>|number|null|string|Uint8Array>} content
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListPushGenerics = (transaction, parent, content) => {
|
||||
// Use the marker with the highest index and iterate to the right.
|
||||
const marker = (parent._searchMarker || []).reduce((maxMarker, currMarker) => currMarker.index > maxMarker.index ? currMarker : maxMarker, { index: 0, p: parent._start })
|
||||
let n = marker.p
|
||||
if (n) {
|
||||
while (n.right) {
|
||||
n = n.right
|
||||
}
|
||||
}
|
||||
return typeListInsertGenericsAfter(transaction, parent, n, content)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {number} index
|
||||
* @param {number} length
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeListDelete = (transaction, parent, index, length) => {
|
||||
if (length === 0) { return }
|
||||
const startIndex = index
|
||||
const startLength = length
|
||||
const marker = findMarker(parent, index)
|
||||
let n = parent._start
|
||||
if (marker !== null) {
|
||||
n = marker.p
|
||||
index -= marker.index
|
||||
}
|
||||
// compute the first item to be deleted
|
||||
for (; n !== null && index > 0; n = n.right) {
|
||||
if (!n.deleted && n.countable) {
|
||||
if (index < n.length) {
|
||||
getItemCleanStart(transaction, createID(n.id.client, n.id.clock + index))
|
||||
}
|
||||
index -= n.length
|
||||
}
|
||||
}
|
||||
// delete all items until done
|
||||
while (length > 0 && n !== null) {
|
||||
if (!n.deleted) {
|
||||
if (length < n.length) {
|
||||
getItemCleanStart(transaction, createID(n.id.client, n.id.clock + length))
|
||||
}
|
||||
n.delete(transaction)
|
||||
length -= n.length
|
||||
}
|
||||
n = n.right
|
||||
}
|
||||
if (length > 0) {
|
||||
throw lengthExceeded()
|
||||
}
|
||||
if (parent._searchMarker) {
|
||||
updateMarkerChanges(parent._searchMarker, startIndex, -startLength + length /* in case we remove the above exception */)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {string} key
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeMapDelete = (transaction, parent, key) => {
|
||||
const c = parent._map.get(key)
|
||||
if (c !== undefined) {
|
||||
c.delete(transaction)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {string} key
|
||||
* @param {Object|number|null|Array<any>|string|Uint8Array|AbstractType<any>} value
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeMapSet = (transaction, parent, key, value) => {
|
||||
const left = parent._map.get(key) || null
|
||||
const doc = transaction.doc
|
||||
const ownClientId = doc.clientID
|
||||
let content
|
||||
if (value == null) {
|
||||
content = new ContentAny([value])
|
||||
} else {
|
||||
switch (value.constructor) {
|
||||
case Number:
|
||||
case Object:
|
||||
case Boolean:
|
||||
case Array:
|
||||
case String:
|
||||
content = new ContentAny([value])
|
||||
break
|
||||
case Uint8Array:
|
||||
content = new ContentBinary(/** @type {Uint8Array} */ (value))
|
||||
break
|
||||
case Doc:
|
||||
content = new ContentDoc(/** @type {Doc} */ (value))
|
||||
break
|
||||
default:
|
||||
if (value instanceof AbstractType) {
|
||||
content = new ContentType(value)
|
||||
} else {
|
||||
throw new Error('Unexpected content type')
|
||||
}
|
||||
}
|
||||
}
|
||||
new Item(createID(ownClientId, getState(doc.store, ownClientId)), left, left && left.lastId, null, null, parent, key, content).integrate(transaction, 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {string} key
|
||||
* @return {Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeMapGet = (parent, key) => {
|
||||
parent.doc ?? warnPrematureAccess()
|
||||
const val = parent._map.get(key)
|
||||
return val !== undefined && !val.deleted ? val.content.getContent()[val.length - 1] : undefined
|
||||
}
|
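// Usage sketch (assumes the public Yjs API, imported from 'yjs'): setting a key creates a new Item
// whose `left` is the previous entry for that key, and typeMapGet above reads the last value of the
// newest non-deleted item, so the latest write wins locally.
import * as Y from 'yjs'
const doc = new Y.Doc()
const ymap = doc.getMap('settings')
ymap.set('theme', 'light')
ymap.set('theme', 'dark') // overwrites by appending a newer Item for the same key
console.log(ymap.get('theme')) // 'dark'
console.log(ymap.has('theme')) // true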
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} parent
|
||||
* @return {Object<string,Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeMapGetAll = (parent) => {
|
||||
/**
|
||||
* @type {Object<string,any>}
|
||||
*/
|
||||
const res = {}
|
||||
parent.doc ?? warnPrematureAccess()
|
||||
parent._map.forEach((value, key) => {
|
||||
if (!value.deleted) {
|
||||
res[key] = value.content.getContent()[value.length - 1]
|
||||
}
|
||||
})
|
||||
return res
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {string} key
|
||||
* @return {boolean}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeMapHas = (parent, key) => {
|
||||
parent.doc ?? warnPrematureAccess()
|
||||
const val = parent._map.get(key)
|
||||
return val !== undefined && !val.deleted
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {string} key
|
||||
* @param {Snapshot} snapshot
|
||||
* @return {Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeMapGetSnapshot = (parent, key, snapshot) => {
|
||||
let v = parent._map.get(key) || null
|
||||
while (v !== null && (!snapshot.sv.has(v.id.client) || v.id.clock >= (snapshot.sv.get(v.id.client) || 0))) {
|
||||
v = v.left
|
||||
}
|
||||
return v !== null && isVisible(v, snapshot) ? v.content.getContent()[v.length - 1] : undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {Snapshot} snapshot
|
||||
* @return {Object<string,Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const typeMapGetAllSnapshot = (parent, snapshot) => {
|
||||
/**
|
||||
* @type {Object<string,any>}
|
||||
*/
|
||||
const res = {}
|
||||
parent._map.forEach((value, key) => {
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let v = value
|
||||
while (v !== null && (!snapshot.sv.has(v.id.client) || v.id.clock >= (snapshot.sv.get(v.id.client) || 0))) {
|
||||
v = v.left
|
||||
}
|
||||
if (v !== null && isVisible(v, snapshot)) {
|
||||
res[key] = v.content.getContent()[v.length - 1]
|
||||
}
|
||||
})
|
||||
return res
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any> & { _map: Map<string, Item> }} type
|
||||
* @return {IterableIterator<Array<any>>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const createMapIterator = type => {
|
||||
type.doc ?? warnPrematureAccess()
|
||||
return iterator.iteratorFilter(type._map.entries(), /** @param {any} entry */ entry => !entry[1].deleted)
|
||||
}
|
274
src/types/YArray.js
Normal file
@ -0,0 +1,274 @@
|
||||
/**
|
||||
* @module YArray
|
||||
*/
|
||||
|
||||
import {
|
||||
YEvent,
|
||||
AbstractType,
|
||||
typeListGet,
|
||||
typeListToArray,
|
||||
typeListForEach,
|
||||
typeListCreateIterator,
|
||||
typeListInsertGenerics,
|
||||
typeListPushGenerics,
|
||||
typeListDelete,
|
||||
typeListMap,
|
||||
YArrayRefID,
|
||||
callTypeObservers,
|
||||
transact,
|
||||
warnPrematureAccess,
|
||||
ArraySearchMarker, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
import { typeListSlice } from './AbstractType.js'
|
||||
|
||||
/**
|
||||
* Event that describes the changes on a YArray
|
||||
* @template T
|
||||
* @extends YEvent<YArray<T>>
|
||||
*/
|
||||
export class YArrayEvent extends YEvent {}
|
||||
|
||||
/**
|
||||
* A shared Array implementation.
|
||||
* @template T
|
||||
* @extends AbstractType<YArrayEvent<T>>
|
||||
* @implements {Iterable<T>}
|
||||
*/
|
||||
export class YArray extends AbstractType {
|
||||
constructor () {
|
||||
super()
|
||||
/**
|
||||
* @type {Array<any>?}
|
||||
* @private
|
||||
*/
|
||||
this._prelimContent = []
|
||||
/**
|
||||
* @type {Array<ArraySearchMarker>}
|
||||
*/
|
||||
this._searchMarker = []
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new YArray containing the specified items.
|
||||
* @template {Object<string,any>|Array<any>|number|null|string|Uint8Array} T
|
||||
* @param {Array<T>} items
|
||||
* @return {YArray<T>}
|
||||
*/
|
||||
static from (items) {
|
||||
/**
|
||||
* @type {YArray<T>}
|
||||
*/
|
||||
const a = new YArray()
|
||||
a.push(items)
|
||||
return a
|
||||
}
|
||||
|
||||
/**
|
||||
* Integrate this type into the Yjs instance.
|
||||
*
|
||||
* * Save this struct in the os
|
||||
* * This type is sent to other clients
|
||||
* * Observer functions are fired
|
||||
*
|
||||
* @param {Doc} y The Yjs instance
|
||||
* @param {Item} item
|
||||
*/
|
||||
_integrate (y, item) {
|
||||
super._integrate(y, item)
|
||||
this.insert(0, /** @type {Array<any>} */ (this._prelimContent))
|
||||
this._prelimContent = null
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {YArray<T>}
|
||||
*/
|
||||
_copy () {
|
||||
return new YArray()
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a copy of this data type that can be included somewhere else.
|
||||
*
|
||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
||||
*
|
||||
* @return {YArray<T>}
|
||||
*/
|
||||
clone () {
|
||||
/**
|
||||
* @type {YArray<T>}
|
||||
*/
|
||||
const arr = new YArray()
|
||||
arr.insert(0, this.toArray().map(el =>
|
||||
el instanceof AbstractType ? /** @type {typeof el} */ (el.clone()) : el
|
||||
))
|
||||
return arr
|
||||
}
|
||||
|
||||
get length () {
|
||||
this.doc ?? warnPrematureAccess()
|
||||
return this._length
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates YArrayEvent and calls observers.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
|
||||
*/
|
||||
_callObserver (transaction, parentSubs) {
|
||||
super._callObserver(transaction, parentSubs)
|
||||
callTypeObservers(this, transaction, new YArrayEvent(this, transaction))
|
||||
}
|
||||
|
||||
/**
|
||||
* Inserts new content at an index.
|
||||
*
|
||||
* Important: This function expects an array of content. Not just a content
|
||||
* object. The reason for this "weirdness" is that inserting several elements
|
||||
* is very efficient when it is done as a single operation.
|
||||
*
|
||||
* @example
|
||||
* // Insert character 'a' at position 0
|
||||
* yarray.insert(0, ['a'])
|
||||
* // Insert numbers 1, 2 at position 1
|
||||
* yarray.insert(1, [1, 2])
|
||||
*
|
||||
* @param {number} index The index to insert content at.
|
||||
* @param {Array<T>} content The array of content
|
||||
*/
|
||||
insert (index, content) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeListInsertGenerics(transaction, this, index, /** @type {any} */ (content))
|
||||
})
|
||||
} else {
|
||||
/** @type {Array<any>} */ (this._prelimContent).splice(index, 0, ...content)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends content to this YArray.
|
||||
*
|
||||
* @param {Array<T>} content Array of content to append.
|
||||
*
|
||||
* @todo Use the following implementation in all types.
|
||||
*/
|
||||
push (content) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeListPushGenerics(transaction, this, /** @type {any} */ (content))
|
||||
})
|
||||
} else {
|
||||
/** @type {Array<any>} */ (this._prelimContent).push(...content)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepends content to this YArray.
|
||||
*
|
||||
* @param {Array<T>} content Array of content to prepend.
|
||||
*/
|
||||
unshift (content) {
|
||||
this.insert(0, content)
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes elements starting from an index.
|
||||
*
|
||||
* @param {number} index Index at which to start deleting elements
|
||||
* @param {number} length The number of elements to remove. Defaults to 1.
|
||||
*/
|
||||
delete (index, length = 1) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeListDelete(transaction, this, index, length)
|
||||
})
|
||||
} else {
|
||||
/** @type {Array<any>} */ (this._prelimContent).splice(index, length)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the i-th element from a YArray.
|
||||
*
|
||||
* @param {number} index The index of the element to return from the YArray
|
||||
* @return {T}
|
||||
*/
|
||||
get (index) {
|
||||
return typeListGet(this, index)
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms this YArray to a JavaScript Array.
|
||||
*
|
||||
* @return {Array<T>}
|
||||
*/
|
||||
toArray () {
|
||||
return typeListToArray(this)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a portion of this YArray as a JavaScript Array, selected
|
||||
* from start to end (end not included).
|
||||
*
|
||||
* @param {number} [start]
|
||||
* @param {number} [end]
|
||||
* @return {Array<T>}
|
||||
*/
|
||||
slice (start = 0, end = this.length) {
|
||||
return typeListSlice(this, start, end)
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms this Shared Type to a JSON object.
|
||||
*
|
||||
* @return {Array<any>}
|
||||
*/
|
||||
toJSON () {
|
||||
return this.map(c => c instanceof AbstractType ? c.toJSON() : c)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an Array with the result of calling a provided function on every
|
||||
* element of this YArray.
|
||||
*
|
||||
* @template M
|
||||
* @param {function(T,number,YArray<T>):M} f Function that produces an element of the new Array
|
||||
* @return {Array<M>} A new array with each element being the result of the
|
||||
* callback function
|
||||
*/
|
||||
map (f) {
|
||||
return typeListMap(this, /** @type {any} */ (f))
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a provided function once on every element of this YArray.
|
||||
*
|
||||
* @param {function(T,number,YArray<T>):void} f A function to execute on every element of this YArray.
|
||||
*/
|
||||
forEach (f) {
|
||||
typeListForEach(this, f)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {IterableIterator<T>}
|
||||
*/
|
||||
[Symbol.iterator] () {
|
||||
return typeListCreateIterator(this)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
*/
|
||||
_write (encoder) {
|
||||
encoder.writeTypeRef(YArrayRefID)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} _decoder
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readYArray = _decoder => new YArray()
|
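// Usage sketch for the YArray type defined above (assumes the public Yjs API, imported from 'yjs').
import * as Y from 'yjs'
const doc = new Y.Doc()
const yarray = doc.getArray('todos')
yarray.push(['write docs', 'review PR'])
yarray.insert(1, ['fix CI'])
yarray.delete(0, 1)
console.log(yarray.toArray()) // ['fix CI', 'review PR']
console.log(yarray.slice(-1)) // ['review PR']
yarray.observe(event => {
  // event.changes.delta describes insertions/deletions made in the last transaction
  console.log(event.changes.delta)
})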
281
src/types/YMap.js
Normal file
@ -0,0 +1,281 @@
|
||||
/**
|
||||
* @module YMap
|
||||
*/
|
||||
|
||||
import {
|
||||
YEvent,
|
||||
AbstractType,
|
||||
typeMapDelete,
|
||||
typeMapSet,
|
||||
typeMapGet,
|
||||
typeMapHas,
|
||||
createMapIterator,
|
||||
YMapRefID,
|
||||
callTypeObservers,
|
||||
transact,
|
||||
warnPrematureAccess,
|
||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as iterator from 'lib0/iterator'
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @extends YEvent<YMap<T>>
|
||||
* Event that describes the changes on a YMap.
|
||||
*/
|
||||
export class YMapEvent extends YEvent {
|
||||
/**
|
||||
* @param {YMap<T>} ymap The YMap that changed.
|
||||
* @param {Transaction} transaction
|
||||
* @param {Set<any>} subs The keys that changed.
|
||||
*/
|
||||
constructor (ymap, transaction, subs) {
|
||||
super(ymap, transaction)
|
||||
this.keysChanged = subs
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @template MapType
|
||||
* A shared Map implementation.
|
||||
*
|
||||
* @extends AbstractType<YMapEvent<MapType>>
|
||||
* @implements {Iterable<[string, MapType]>}
|
||||
*/
|
||||
export class YMap extends AbstractType {
|
||||
/**
|
||||
*
|
||||
* @param {Iterable<readonly [string, any]>=} entries - an optional iterable to initialize the YMap
|
||||
*/
|
||||
constructor (entries) {
|
||||
super()
|
||||
/**
|
||||
* @type {Map<string,any>?}
|
||||
* @private
|
||||
*/
|
||||
this._prelimContent = null
|
||||
|
||||
if (entries === undefined) {
|
||||
this._prelimContent = new Map()
|
||||
} else {
|
||||
this._prelimContent = new Map(entries)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Integrate this type into the Yjs instance.
|
||||
*
|
||||
* * Save this struct in the os
|
||||
* * This type is sent to other clients
|
||||
* * Observer functions are fired
|
||||
*
|
||||
* @param {Doc} y The Yjs instance
|
||||
* @param {Item} item
|
||||
*/
|
||||
_integrate (y, item) {
|
||||
super._integrate(y, item)
|
||||
;/** @type {Map<string, any>} */ (this._prelimContent).forEach((value, key) => {
|
||||
this.set(key, value)
|
||||
})
|
||||
this._prelimContent = null
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {YMap<MapType>}
|
||||
*/
|
||||
_copy () {
|
||||
return new YMap()
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a copy of this data type that can be included somewhere else.
|
||||
*
|
||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
||||
*
|
||||
* @return {YMap<MapType>}
|
||||
*/
|
||||
clone () {
|
||||
/**
|
||||
* @type {YMap<MapType>}
|
||||
*/
|
||||
const map = new YMap()
|
||||
this.forEach((value, key) => {
|
||||
map.set(key, value instanceof AbstractType ? /** @type {typeof value} */ (value.clone()) : value)
|
||||
})
|
||||
return map
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates YMapEvent and calls observers.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
|
||||
*/
|
||||
_callObserver (transaction, parentSubs) {
|
||||
callTypeObservers(this, transaction, new YMapEvent(this, transaction, parentSubs))
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms this Shared Type to a JSON object.
|
||||
*
|
||||
* @return {Object<string,any>}
|
||||
*/
|
||||
toJSON () {
|
||||
this.doc ?? warnPrematureAccess()
|
||||
/**
|
||||
* @type {Object<string,MapType>}
|
||||
*/
|
||||
const map = {}
|
||||
this._map.forEach((item, key) => {
|
||||
if (!item.deleted) {
|
||||
const v = item.content.getContent()[item.length - 1]
|
||||
map[key] = v instanceof AbstractType ? v.toJSON() : v
|
||||
}
|
||||
})
|
||||
return map
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the size of the YMap (count of key/value pairs)
|
||||
*
|
||||
* @return {number}
|
||||
*/
|
||||
get size () {
|
||||
return [...createMapIterator(this)].length
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the keys for each element in the YMap Type.
|
||||
*
|
||||
* @return {IterableIterator<string>}
|
||||
*/
|
||||
keys () {
|
||||
return iterator.iteratorMap(createMapIterator(this), /** @param {any} v */ v => v[0])
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the values for each element in the YMap Type.
|
||||
*
|
||||
* @return {IterableIterator<MapType>}
|
||||
*/
|
||||
values () {
|
||||
return iterator.iteratorMap(createMapIterator(this), /** @param {any} v */ v => v[1].content.getContent()[v[1].length - 1])
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an Iterator of [key, value] pairs
|
||||
*
|
||||
* @return {IterableIterator<[string, MapType]>}
|
||||
*/
|
||||
entries () {
|
||||
return iterator.iteratorMap(createMapIterator(this), /** @param {any} v */ v => /** @type {any} */ ([v[0], v[1].content.getContent()[v[1].length - 1]]))
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a provided function once on every key-value pair.
|
||||
*
|
||||
* @param {function(MapType,string,YMap<MapType>):void} f A function to execute on every key-value pair of this YMap.
|
||||
*/
|
||||
forEach (f) {
|
||||
this.doc ?? warnPrematureAccess()
|
||||
this._map.forEach((item, key) => {
|
||||
if (!item.deleted) {
|
||||
f(item.content.getContent()[item.length - 1], key, this)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an Iterator of [key, value] pairs
|
||||
*
|
||||
* @return {IterableIterator<[string, MapType]>}
|
||||
*/
|
||||
[Symbol.iterator] () {
|
||||
return this.entries()
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a specified element from this YMap.
|
||||
*
|
||||
* @param {string} key The key of the element to remove.
|
||||
*/
|
||||
delete (key) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeMapDelete(transaction, this, key)
|
||||
})
|
||||
} else {
|
||||
/** @type {Map<string, any>} */ (this._prelimContent).delete(key)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds or updates an element with a specified key and value.
|
||||
* @template {MapType} VAL
|
||||
*
|
||||
* @param {string} key The key of the element to add to this YMap
|
||||
* @param {VAL} value The value of the element to add
|
||||
* @return {VAL}
|
||||
*/
|
||||
set (key, value) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeMapSet(transaction, this, key, /** @type {any} */ (value))
|
||||
})
|
||||
} else {
|
||||
/** @type {Map<string, any>} */ (this._prelimContent).set(key, value)
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a specified element from this YMap.
|
||||
*
|
||||
* @param {string} key
|
||||
* @return {MapType|undefined}
|
||||
*/
|
||||
get (key) {
|
||||
return /** @type {any} */ (typeMapGet(this, key))
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a boolean indicating whether the specified key exists or not.
|
||||
*
|
||||
* @param {string} key The key to test.
|
||||
* @return {boolean}
|
||||
*/
|
||||
has (key) {
|
||||
return typeMapHas(this, key)
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes all elements from this YMap.
|
||||
*/
|
||||
clear () {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
this.forEach(function (_value, key, map) {
|
||||
typeMapDelete(transaction, map, key)
|
||||
})
|
||||
})
|
||||
} else {
|
||||
/** @type {Map<string, any>} */ (this._prelimContent).clear()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
*/
|
||||
_write (encoder) {
|
||||
encoder.writeTypeRef(YMapRefID)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} _decoder
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readYMap = _decoder => new YMap()
|
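// Usage sketch for the YMap type defined above (assumes the public Yjs API, imported from 'yjs').
import * as Y from 'yjs'
const doc = new Y.Doc()
const ymap = doc.getMap('profile')
ymap.observe(event => {
  console.log([...event.keysChanged]) // keys touched in the last transaction
})
ymap.set('name', 'Ada')
ymap.set('tags', new Y.Array()) // shared types can be nested
for (const [key, value] of ymap) {
  console.log(key, value)
}
console.log(ymap.toJSON()) // { name: 'Ada', tags: [] }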
1298
src/types/YText.js
Normal file
File diff suppressed because it is too large
262
src/types/YXmlElement.js
Normal file
@ -0,0 +1,262 @@
|
||||
import * as object from 'lib0/object'
|
||||
|
||||
import {
|
||||
YXmlFragment,
|
||||
transact,
|
||||
typeMapDelete,
|
||||
typeMapHas,
|
||||
typeMapSet,
|
||||
typeMapGet,
|
||||
typeMapGetAll,
|
||||
typeMapGetAllSnapshot,
|
||||
typeListForEach,
|
||||
YXmlElementRefID,
|
||||
Snapshot, YXmlText, ContentType, AbstractType, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Item // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
/**
|
||||
* @typedef {Object|number|null|Array<any>|string|Uint8Array|AbstractType<any>} ValueTypes
|
||||
*/
|
||||
|
||||
/**
|
||||
* A YXmlElement imitates the behavior of a
|
||||
* https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element
|
||||
*
|
||||
* * A YXmlElement has attributes (key-value pairs)
|
||||
* * A YXmlElement has childElements that must inherit from YXmlElement
|
||||
*
|
||||
* @template {{ [key: string]: ValueTypes }} [KV={ [key: string]: string }]
|
||||
*/
|
||||
export class YXmlElement extends YXmlFragment {
|
||||
constructor (nodeName = 'UNDEFINED') {
|
||||
super()
|
||||
this.nodeName = nodeName
|
||||
/**
|
||||
* @type {Map<string, any>|null}
|
||||
*/
|
||||
this._prelimAttrs = new Map()
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {YXmlElement|YXmlText|null}
|
||||
*/
|
||||
get nextSibling () {
|
||||
const n = this._item ? this._item.next : null
|
||||
return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {YXmlElement|YXmlText|null}
|
||||
*/
|
||||
get prevSibling () {
|
||||
const n = this._item ? this._item.prev : null
|
||||
return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Integrate this type into the Yjs instance.
|
||||
*
|
||||
* * Save this struct in the os
|
||||
* * This type is sent to other clients
|
||||
* * Observer functions are fired
|
||||
*
|
||||
* @param {Doc} y The Yjs instance
|
||||
* @param {Item} item
|
||||
*/
|
||||
_integrate (y, item) {
|
||||
super._integrate(y, item)
|
||||
;(/** @type {Map<string, any>} */ (this._prelimAttrs)).forEach((value, key) => {
|
||||
this.setAttribute(key, value)
|
||||
})
|
||||
this._prelimAttrs = null
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an Item with the same effect as this Item (without position effect)
|
||||
*
|
||||
* @return {YXmlElement}
|
||||
*/
|
||||
_copy () {
|
||||
return new YXmlElement(this.nodeName)
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a copy of this data type that can be included somewhere else.
|
||||
*
|
||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
||||
*
|
||||
* @return {YXmlElement<KV>}
|
||||
*/
|
||||
clone () {
|
||||
/**
|
||||
* @type {YXmlElement<KV>}
|
||||
*/
|
||||
const el = new YXmlElement(this.nodeName)
|
||||
const attrs = this.getAttributes()
|
||||
object.forEach(attrs, (value, key) => {
|
||||
if (typeof value === 'string') {
|
||||
el.setAttribute(key, value)
|
||||
}
|
||||
})
|
||||
// @ts-ignore
|
||||
el.insert(0, this.toArray().map(item => item instanceof AbstractType ? item.clone() : item))
|
||||
return el
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the XML serialization of this YXmlElement.
|
||||
* The attributes are ordered by attribute-name, so you can easily use this
|
||||
* method to compare YXmlElements
|
||||
*
|
||||
* @return {string} The string representation of this type.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
toString () {
|
||||
const attrs = this.getAttributes()
|
||||
const stringBuilder = []
|
||||
const keys = []
|
||||
for (const key in attrs) {
|
||||
keys.push(key)
|
||||
}
|
||||
keys.sort()
|
||||
const keysLen = keys.length
|
||||
for (let i = 0; i < keysLen; i++) {
|
||||
const key = keys[i]
|
||||
stringBuilder.push(key + '="' + attrs[key] + '"')
|
||||
}
|
||||
const nodeName = this.nodeName.toLocaleLowerCase()
|
||||
const attrsString = stringBuilder.length > 0 ? ' ' + stringBuilder.join(' ') : ''
|
||||
return `<${nodeName}${attrsString}>${super.toString()}</${nodeName}>`
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes an attribute from this YXmlElement.
|
||||
*
|
||||
* @param {string} attributeName The attribute name that is to be removed.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
removeAttribute (attributeName) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeMapDelete(transaction, this, attributeName)
|
||||
})
|
||||
} else {
|
||||
/** @type {Map<string,any>} */ (this._prelimAttrs).delete(attributeName)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets or updates an attribute.
|
||||
*
|
||||
* @template {keyof KV & string} KEY
|
||||
*
|
||||
* @param {KEY} attributeName The attribute name that is to be set.
|
||||
* @param {KV[KEY]} attributeValue The attribute value that is to be set.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
setAttribute (attributeName, attributeValue) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeMapSet(transaction, this, attributeName, attributeValue)
|
||||
})
|
||||
} else {
|
||||
/** @type {Map<string, any>} */ (this._prelimAttrs).set(attributeName, attributeValue)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an attribute value that belongs to the attribute name.
|
||||
*
|
||||
* @template {keyof KV & string} KEY
|
||||
*
|
||||
* @param {KEY} attributeName The attribute name that identifies the
|
||||
* queried value.
|
||||
* @return {KV[KEY]|undefined} The queried attribute value.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
getAttribute (attributeName) {
|
||||
return /** @type {any} */ (typeMapGet(this, attributeName))
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether an attribute exists
|
||||
*
|
||||
* @param {string} attributeName The attribute name to check for existence.
|
||||
* @return {boolean} whether the attribute exists.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
hasAttribute (attributeName) {
|
||||
return /** @type {any} */ (typeMapHas(this, attributeName))
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all attribute name/value pairs in a JSON Object.
|
||||
*
|
||||
* @param {Snapshot} [snapshot]
|
||||
* @return {{ [Key in Extract<keyof KV,string>]?: KV[Key]}} A JSON Object that describes the attributes.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
getAttributes (snapshot) {
|
||||
return /** @type {any} */ (snapshot ? typeMapGetAllSnapshot(this, snapshot) : typeMapGetAll(this))
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Dom Element that mirrors this YXmlElement.
|
||||
*
|
||||
* @param {Document} [_document=document] The document object (you must define
|
||||
* this when calling this method in
|
||||
* nodejs)
|
||||
* @param {Object<string, any>} [hooks={}] Optional property to customize how hooks
|
||||
* are presented in the DOM
|
||||
* @param {any} [binding] You should not set this property. This is
|
||||
* used if DomBinding wants to create an
|
||||
* association to the created DOM type.
|
||||
* @return {Node} The {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
toDOM (_document = document, hooks = {}, binding) {
|
||||
const dom = _document.createElement(this.nodeName)
|
||||
const attrs = this.getAttributes()
|
||||
for (const key in attrs) {
|
||||
const value = attrs[key]
|
||||
if (typeof value === 'string') {
|
||||
dom.setAttribute(key, value)
|
||||
}
|
||||
}
|
||||
typeListForEach(this, yxml => {
|
||||
dom.appendChild(yxml.toDOM(_document, hooks, binding))
|
||||
})
|
||||
if (binding !== undefined) {
|
||||
binding._createAssociation(dom, this)
|
||||
}
|
||||
return dom
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform the properties of this type to binary and write it to an
|
||||
* BinaryEncoder.
|
||||
*
|
||||
* This is called when this Item is sent to a remote peer.
|
||||
*
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
||||
*/
|
||||
_write (encoder) {
|
||||
encoder.writeTypeRef(YXmlElementRefID)
|
||||
encoder.writeKey(this.nodeName)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {YXmlElement}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const readYXmlElement = decoder => new YXmlElement(decoder.readKey())
|
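A minimal usage sketch of the YXmlElement attribute API above (setAttribute, getAttribute, toString, clone); the fragment name 'xml' and the element contents are illustrative, assuming the package is imported as Y:

import * as Y from 'yjs'

const ydoc = new Y.Doc()
const root = ydoc.getXmlFragment('xml')
const paragraph = new Y.XmlElement('p')
// attributes and children become readable once the element is part of the Ydoc
root.insert(0, [paragraph])
paragraph.setAttribute('class', 'intro')
paragraph.insert(0, [new Y.XmlText('hello')])

paragraph.getAttribute('class') // 'intro'
paragraph.toString()            // '<p class="intro">hello</p>'

// clone() returns a detached copy that can be inserted elsewhere in the document
root.insert(1, [paragraph.clone()])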
39
src/types/YXmlEvent.js
Normal file
@ -0,0 +1,39 @@
|
||||
import {
|
||||
YEvent,
|
||||
YXmlText, YXmlElement, YXmlFragment, Transaction // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
/**
|
||||
* @extends YEvent<YXmlElement|YXmlText|YXmlFragment>
|
||||
* An Event that describes changes on a YXmlElement or YXmlFragment
|
||||
*/
|
||||
export class YXmlEvent extends YEvent {
|
||||
/**
|
||||
* @param {YXmlElement|YXmlText|YXmlFragment} target The target on which the event is created.
|
||||
* @param {Set<string|null>} subs The set of changed attributes. `null` is included if the
|
||||
* child list changed.
|
||||
* @param {Transaction} transaction The transaction instance with which the
|
||||
* change was created.
|
||||
*/
|
||||
constructor (target, subs, transaction) {
|
||||
super(target, transaction)
|
||||
/**
|
||||
* Whether the children changed.
|
||||
* @type {Boolean}
|
||||
* @private
|
||||
*/
|
||||
this.childListChanged = false
|
||||
/**
|
||||
* Set of all changed attributes.
|
||||
* @type {Set<string>}
|
||||
*/
|
||||
this.attributesChanged = new Set()
|
||||
subs.forEach((sub) => {
|
||||
if (sub === null) {
|
||||
this.childListChanged = true
|
||||
} else {
|
||||
this.attributesChanged.add(sub)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
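A short observer sketch (assuming the usual Y import) showing how the `subs` set described above surfaces as childListChanged and attributesChanged:

import * as Y from 'yjs'

const ydoc = new Y.Doc()
const el = new Y.XmlElement('div')
ydoc.getXmlFragment('xml').insert(0, [el])

el.observe(event => {
  console.log(event.childListChanged)              // true if a child was added or removed
  console.log(Array.from(event.attributesChanged)) // names of the changed attributes
})

el.setAttribute('id', 'a')         // fires with attributesChanged containing 'id'
el.insert(0, [new Y.XmlText('x')]) // fires with childListChanged === true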
449
src/types/YXmlFragment.js
Normal file
@ -0,0 +1,449 @@
|
||||
/**
|
||||
* @module YXml
|
||||
*/
|
||||
|
||||
import {
|
||||
YXmlEvent,
|
||||
YXmlElement,
|
||||
AbstractType,
|
||||
typeListMap,
|
||||
typeListForEach,
|
||||
typeListInsertGenerics,
|
||||
typeListInsertGenericsAfter,
|
||||
typeListDelete,
|
||||
typeListToArray,
|
||||
YXmlFragmentRefID,
|
||||
callTypeObservers,
|
||||
transact,
|
||||
typeListGet,
|
||||
typeListSlice,
|
||||
warnPrematureAccess,
|
||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, ContentType, Transaction, Item, YXmlText, YXmlHook // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as error from 'lib0/error'
|
||||
import * as array from 'lib0/array'
|
||||
|
||||
/**
|
||||
* Define the elements to which a set of CSS queries apply.
|
||||
* {@link https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Selectors|CSS_Selectors}
|
||||
*
|
||||
* @example
|
||||
* query = '.classSelector'
|
||||
* query = 'nodeSelector'
|
||||
* query = '#idSelector'
|
||||
*
|
||||
* @typedef {string} CSS_Selector
|
||||
*/
|
||||
|
||||
/**
|
||||
* Dom filter function.
|
||||
*
|
||||
* @callback domFilter
|
||||
* @param {string} nodeName The nodeName of the element
|
||||
* @param {Map} attributes The map of attributes.
|
||||
* @return {boolean} Whether to include the Dom node in the YXmlElement.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Represents a subset of the nodes of a YXmlElement / YXmlFragment and a
|
||||
* position within them.
|
||||
*
|
||||
* Can be created with {@link YXmlFragment#createTreeWalker}
|
||||
*
|
||||
* @public
|
||||
* @implements {Iterable<YXmlElement|YXmlText|YXmlElement|YXmlHook>}
|
||||
*/
|
||||
export class YXmlTreeWalker {
|
||||
/**
|
||||
* @param {YXmlFragment | YXmlElement} root
|
||||
* @param {function(AbstractType<any>):boolean} [f]
|
||||
*/
|
||||
constructor (root, f = () => true) {
|
||||
this._filter = f
|
||||
this._root = root
|
||||
/**
|
||||
* @type {Item}
|
||||
*/
|
||||
this._currentNode = /** @type {Item} */ (root._start)
|
||||
this._firstCall = true
|
||||
root.doc ?? warnPrematureAccess()
|
||||
}
|
||||
|
||||
[Symbol.iterator] () {
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the next node.
|
||||
*
|
||||
* @return {IteratorResult<YXmlElement|YXmlText|YXmlHook>} The next node.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
next () {
|
||||
/**
|
||||
* @type {Item|null}
|
||||
*/
|
||||
let n = this._currentNode
|
||||
let type = n && n.content && /** @type {any} */ (n.content).type
|
||||
if (n !== null && (!this._firstCall || n.deleted || !this._filter(type))) { // if first call, we check if we can use the first item
|
||||
do {
|
||||
type = /** @type {any} */ (n.content).type
|
||||
if (!n.deleted && (type.constructor === YXmlElement || type.constructor === YXmlFragment) && type._start !== null) {
|
||||
// walk down in the tree
|
||||
n = type._start
|
||||
} else {
|
||||
// walk right or up in the tree
|
||||
while (n !== null) {
|
||||
/**
|
||||
* @type {Item | null}
|
||||
*/
|
||||
const nxt = n.next
|
||||
if (nxt !== null) {
|
||||
n = nxt
|
||||
break
|
||||
} else if (n.parent === this._root) {
|
||||
n = null
|
||||
} else {
|
||||
n = /** @type {AbstractType<any>} */ (n.parent)._item
|
||||
}
|
||||
}
|
||||
}
|
||||
} while (n !== null && (n.deleted || !this._filter(/** @type {ContentType} */ (n.content).type)))
|
||||
}
|
||||
this._firstCall = false
|
||||
if (n === null) {
|
||||
// @ts-ignore
|
||||
return { value: undefined, done: true }
|
||||
}
|
||||
this._currentNode = n
|
||||
return { value: /** @type {any} */ (n.content).type, done: false }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a list of {@link YXmlElement} and {@link YXmlText} types.
|
||||
* A YXmlFragment is similar to a {@link YXmlElement}, but it does not have a
|
||||
* nodeName and it does not have attributes. Though it can be bound to a DOM
|
||||
* element - in this case the attributes and the nodeName are not shared.
|
||||
*
|
||||
* @public
|
||||
* @extends AbstractType<YXmlEvent>
|
||||
*/
|
||||
export class YXmlFragment extends AbstractType {
|
||||
constructor () {
|
||||
super()
|
||||
/**
|
||||
* @type {Array<any>|null}
|
||||
*/
|
||||
this._prelimContent = []
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {YXmlElement|YXmlText|null}
|
||||
*/
|
||||
get firstChild () {
|
||||
const first = this._first
|
||||
return first ? first.content.getContent()[0] : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Integrate this type into the Yjs instance.
|
||||
*
|
||||
* * Save this struct in the os
|
||||
* * This type is sent to other client
|
||||
* * Observer functions are fired
|
||||
*
|
||||
* @param {Doc} y The Yjs instance
|
||||
* @param {Item} item
|
||||
*/
|
||||
_integrate (y, item) {
|
||||
super._integrate(y, item)
|
||||
this.insert(0, /** @type {Array<any>} */ (this._prelimContent))
|
||||
this._prelimContent = null
|
||||
}
|
||||
|
||||
_copy () {
|
||||
return new YXmlFragment()
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a copy of this data type that can be included somewhere else.
|
||||
*
|
||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
||||
*
|
||||
* @return {YXmlFragment}
|
||||
*/
|
||||
clone () {
|
||||
const el = new YXmlFragment()
|
||||
// @ts-ignore
|
||||
el.insert(0, this.toArray().map(item => item instanceof AbstractType ? item.clone() : item))
|
||||
return el
|
||||
}
|
||||
|
||||
get length () {
|
||||
this.doc ?? warnPrematureAccess()
|
||||
return this._prelimContent === null ? this._length : this._prelimContent.length
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a subtree of childNodes.
|
||||
*
|
||||
* @example
|
||||
* const walker = elem.createTreeWalker(dom => dom.nodeName === 'div')
|
||||
* for (const node of walker) {
|
||||
* // `node` is a div node
|
||||
* nop(node)
|
||||
* }
|
||||
*
|
||||
* @param {function(AbstractType<any>):boolean} filter Function that is called on each child element and
|
||||
* returns a Boolean indicating whether the child
|
||||
* is to be included in the subtree.
|
||||
* @return {YXmlTreeWalker} A subtree and a position within it.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
createTreeWalker (filter) {
|
||||
return new YXmlTreeWalker(this, filter)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the first YXmlElement that matches the query.
|
||||
* Similar to DOM's {@link querySelector}.
|
||||
*
|
||||
* Query support:
|
||||
* - tagname
|
||||
* TODO:
|
||||
* - id
|
||||
* - attribute
|
||||
*
|
||||
* @param {CSS_Selector} query The query on the children.
|
||||
* @return {YXmlElement|YXmlText|YXmlHook|null} The first element that matches the query or null.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
querySelector (query) {
|
||||
query = query.toUpperCase()
|
||||
// @ts-ignore
|
||||
const iterator = new YXmlTreeWalker(this, element => element.nodeName && element.nodeName.toUpperCase() === query)
|
||||
const next = iterator.next()
|
||||
if (next.done) {
|
||||
return null
|
||||
} else {
|
||||
return next.value
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all YXmlElements that match the query.
|
||||
* Similar to Dom's {@link querySelectorAll}.
|
||||
*
|
||||
* @todo Does not yet support all queries. Currently only query by tagName.
|
||||
*
|
||||
* @param {CSS_Selector} query The query on the children
|
||||
* @return {Array<YXmlElement|YXmlText|YXmlHook|null>} The elements that match this query.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
querySelectorAll (query) {
|
||||
query = query.toUpperCase()
|
||||
// @ts-ignore
|
||||
return array.from(new YXmlTreeWalker(this, element => element.nodeName && element.nodeName.toUpperCase() === query))
|
||||
}
|
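// Usage sketch for the query helpers above (only tag-name queries are supported
// so far); assumes the package is imported as Y.
const ydoc = new Y.Doc()
const frag = ydoc.getXmlFragment('xml')
frag.insert(0, [new Y.XmlElement('h1'), new Y.XmlElement('p'), new Y.XmlElement('p')])
frag.querySelector('h1')          // the <h1> element
frag.querySelectorAll('p').length // 2
for (const node of frag.createTreeWalker(type => type.nodeName === 'p')) {
  // node is one of the two <p> elements
}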
||||
|
||||
/**
|
||||
* Creates YXmlEvent and calls observers.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
|
||||
*/
|
||||
_callObserver (transaction, parentSubs) {
|
||||
callTypeObservers(this, transaction, new YXmlEvent(this, parentSubs, transaction))
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the string representation of all the children of this YXmlFragment.
|
||||
*
|
||||
* @return {string} The string representation of all children.
|
||||
*/
|
||||
toString () {
|
||||
return typeListMap(this, xml => xml.toString()).join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
toJSON () {
|
||||
return this.toString()
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Dom Element that mirrors this YXmlElement.
|
||||
*
|
||||
* @param {Document} [_document=document] The document object (you must define
|
||||
* this when calling this method in
|
||||
* nodejs)
|
||||
* @param {Object<string, any>} [hooks={}] Optional property to customize how hooks
|
||||
* are presented in the DOM
|
||||
* @param {any} [binding] You should not set this property. This is
|
||||
* used if DomBinding wants to create an
|
||||
* association to the created DOM type.
|
||||
* @return {Node} The {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
toDOM (_document = document, hooks = {}, binding) {
|
||||
const fragment = _document.createDocumentFragment()
|
||||
if (binding !== undefined) {
|
||||
binding._createAssociation(fragment, this)
|
||||
}
|
||||
typeListForEach(this, xmlType => {
|
||||
fragment.insertBefore(xmlType.toDOM(_document, hooks, binding), null)
|
||||
})
|
||||
return fragment
|
||||
}
|
||||
|
||||
/**
|
||||
* Inserts new content at an index.
|
||||
*
|
||||
* @example
|
||||
* // Insert a new Y.XmlText at position 0
|
||||
* xml.insert(0, [new Y.XmlText('text')])
|
||||
*
|
||||
* @param {number} index The index to insert content at
|
||||
* @param {Array<YXmlElement|YXmlText>} content The array of content
|
||||
*/
|
||||
insert (index, content) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeListInsertGenerics(transaction, this, index, content)
|
||||
})
|
||||
} else {
|
||||
// @ts-ignore _prelimContent is defined because this is not yet integrated
|
||||
this._prelimContent.splice(index, 0, ...content)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Inserts new content at an index.
|
||||
*
|
||||
* @example
|
||||
* // Insert a new Y.XmlText at the beginning (ref = null)
|
||||
* xml.insertAfter(null, [new Y.XmlText('text')])
|
||||
*
|
||||
* @param {null|Item|YXmlElement|YXmlText} ref The reference item after which the content is inserted; null inserts at the beginning
|
||||
* @param {Array<YXmlElement|YXmlText>} content The array of content
|
||||
*/
|
||||
insertAfter (ref, content) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
const refItem = (ref && ref instanceof AbstractType) ? ref._item : ref
|
||||
typeListInsertGenericsAfter(transaction, this, refItem, content)
|
||||
})
|
||||
} else {
|
||||
const pc = /** @type {Array<any>} */ (this._prelimContent)
|
||||
const index = ref === null ? 0 : pc.findIndex(el => el === ref) + 1
|
||||
if (index === 0 && ref !== null) {
|
||||
throw error.create('Reference item not found')
|
||||
}
|
||||
pc.splice(index, 0, ...content)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes elements starting from an index.
|
||||
*
|
||||
* @param {number} index Index at which to start deleting elements
|
||||
* @param {number} [length=1] The number of elements to remove. Defaults to 1.
|
||||
*/
|
||||
delete (index, length = 1) {
|
||||
if (this.doc !== null) {
|
||||
transact(this.doc, transaction => {
|
||||
typeListDelete(transaction, this, index, length)
|
||||
})
|
||||
} else {
|
||||
// @ts-ignore _prelimContent is defined because this is not yet integrated
|
||||
this._prelimContent.splice(index, length)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms this YArray to a JavaScript Array.
|
||||
*
|
||||
* @return {Array<YXmlElement|YXmlText|YXmlHook>}
|
||||
*/
|
||||
toArray () {
|
||||
return typeListToArray(this)
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends content to this YArray.
|
||||
*
|
||||
* @param {Array<YXmlElement|YXmlText>} content Array of content to append.
|
||||
*/
|
||||
push (content) {
|
||||
this.insert(this.length, content)
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepends content to this YArray.
|
||||
*
|
||||
* @param {Array<YXmlElement|YXmlText>} content Array of content to prepend.
|
||||
*/
|
||||
unshift (content) {
|
||||
this.insert(0, content)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the i-th element from a YArray.
|
||||
*
|
||||
* @param {number} index The index of the element to return from the YArray
|
||||
* @return {YXmlElement|YXmlText}
|
||||
*/
|
||||
get (index) {
|
||||
return typeListGet(this, index)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a portion of this YXmlFragment into a JavaScript Array selected
|
||||
* from start to end (end not included).
|
||||
*
|
||||
* @param {number} [start]
|
||||
* @param {number} [end]
|
||||
* @return {Array<YXmlElement|YXmlText>}
|
||||
*/
|
||||
slice (start = 0, end = this.length) {
|
||||
return typeListSlice(this, start, end)
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes a provided function once on every child element.
|
||||
*
|
||||
* @param {function(YXmlElement|YXmlText,number, typeof self):void} f A function to execute on every element of this YArray.
|
||||
*/
|
||||
forEach (f) {
|
||||
typeListForEach(this, f)
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform the properties of this type to binary and write it to an
|
||||
* BinaryEncoder.
|
||||
*
|
||||
* This is called when this Item is sent to a remote peer.
|
||||
*
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
||||
*/
|
||||
_write (encoder) {
|
||||
encoder.writeTypeRef(YXmlFragmentRefID)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} _decoder
|
||||
* @return {YXmlFragment}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readYXmlFragment = _decoder => new YXmlFragment()
|
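A small sketch contrasting index-based insert, reference-based insertAfter and delete on a YXmlFragment (names are illustrative, Y is the usual import):

import * as Y from 'yjs'

const ydoc = new Y.Doc()
const frag = ydoc.getXmlFragment('xml')
const a = new Y.XmlElement('a')
const b = new Y.XmlElement('b')

frag.insert(0, [a])                             // index-based:       [<a>]
frag.insertAfter(a, [b])                        // reference-based:   [<a>, <b>]
frag.insertAfter(null, [new Y.XmlElement('c')]) // null ref prepends: [<c>, <a>, <b>]
frag.delete(1, 1)                               // removes <a>:       [<c>, <b>]
frag.toString()                                 // '<c></c><b></b>'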
98
src/types/YXmlHook.js
Normal file
@ -0,0 +1,98 @@
|
||||
import {
|
||||
YMap,
|
||||
YXmlHookRefID,
|
||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
/**
|
||||
* You can manage binding to a custom type with YXmlHook.
|
||||
*
|
||||
* @extends {YMap<any>}
|
||||
*/
|
||||
export class YXmlHook extends YMap {
|
||||
/**
|
||||
* @param {string} hookName nodeName of the Dom Node.
|
||||
*/
|
||||
constructor (hookName) {
|
||||
super()
|
||||
/**
|
||||
* @type {string}
|
||||
*/
|
||||
this.hookName = hookName
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an Item with the same effect as this Item (without position effect)
|
||||
*/
|
||||
_copy () {
|
||||
return new YXmlHook(this.hookName)
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a copy of this data type that can be included somewhere else.
|
||||
*
|
||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
||||
*
|
||||
* @return {YXmlHook}
|
||||
*/
|
||||
clone () {
|
||||
const el = new YXmlHook(this.hookName)
|
||||
this.forEach((value, key) => {
|
||||
el.set(key, value)
|
||||
})
|
||||
return el
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Dom Element that mirrors this YXmlElement.
|
||||
*
|
||||
* @param {Document} [_document=document] The document object (you must define
|
||||
* this when calling this method in
|
||||
* nodejs)
|
||||
* @param {Object.<string, any>} [hooks] Optional property to customize how hooks
|
||||
* are presented in the DOM
|
||||
* @param {any} [binding] You should not set this property. This is
|
||||
* used if DomBinding wants to create an
|
||||
* association to the created DOM type
|
||||
* @return {Element} The {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
toDOM (_document = document, hooks = {}, binding) {
|
||||
const hook = hooks[this.hookName]
|
||||
let dom
|
||||
if (hook !== undefined) {
|
||||
dom = hook.createDom(this)
|
||||
} else {
|
||||
dom = _document.createElement(this.hookName)
|
||||
}
|
||||
dom.setAttribute('data-yjs-hook', this.hookName)
|
||||
if (binding !== undefined) {
|
||||
binding._createAssociation(dom, this)
|
||||
}
|
||||
return dom
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform the properties of this type to binary and write it to an
|
||||
* BinaryEncoder.
|
||||
*
|
||||
* This is called when this Item is sent to a remote peer.
|
||||
*
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
||||
*/
|
||||
_write (encoder) {
|
||||
encoder.writeTypeRef(YXmlHookRefID)
|
||||
encoder.writeKey(this.hookName)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {YXmlHook}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readYXmlHook = decoder =>
|
||||
new YXmlHook(decoder.readKey())
|
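A hedged sketch of the hooks mechanism used by toDOM above; the 'checkbox' hook name and its createDom implementation are illustrative, not part of the library:

import * as Y from 'yjs'

const ydoc = new Y.Doc()
const hook = new Y.XmlHook('checkbox')
hook.set('checked', true)
ydoc.getXmlFragment('xml').insert(0, [hook])

// Without a matching entry in `hooks`, toDOM falls back to createElement(hookName).
// With one, the hook controls how its DOM representation is created:
const dom = hook.toDOM(document, {
  checkbox: {
    createDom: yxmlHook => {
      const input = document.createElement('input')
      input.type = 'checkbox'
      input.checked = yxmlHook.get('checked') === true
      return input
    }
  }
})
// dom is an <input type="checkbox"> carrying data-yjs-hook="checkbox"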
124
src/types/YXmlText.js
Normal file
@ -0,0 +1,124 @@
|
||||
import {
|
||||
YText,
|
||||
YXmlTextRefID,
|
||||
ContentType, YXmlElement, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
/**
|
||||
* Represents text in a Dom Element. In the future this type will also handle
|
||||
* simple formatting information like bold and italic.
|
||||
*/
|
||||
export class YXmlText extends YText {
|
||||
/**
|
||||
* @type {YXmlElement|YXmlText|null}
|
||||
*/
|
||||
get nextSibling () {
|
||||
const n = this._item ? this._item.next : null
|
||||
return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {YXmlElement|YXmlText|null}
|
||||
*/
|
||||
get prevSibling () {
|
||||
const n = this._item ? this._item.prev : null
|
||||
return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
|
||||
}
|
||||
|
||||
_copy () {
|
||||
return new YXmlText()
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes a copy of this data type that can be included somewhere else.
|
||||
*
|
||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
||||
*
|
||||
* @return {YXmlText}
|
||||
*/
|
||||
clone () {
|
||||
const text = new YXmlText()
|
||||
text.applyDelta(this.toDelta())
|
||||
return text
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a Dom Element that mirrors this YXmlText.
|
||||
*
|
||||
* @param {Document} [_document=document] The document object (you must define
|
||||
* this when calling this method in
|
||||
* nodejs)
|
||||
* @param {Object<string, any>} [hooks] Optional property to customize how hooks
|
||||
* are presented in the DOM
|
||||
* @param {any} [binding] You should not set this property. This is
|
||||
* used if DomBinding wants to create an
|
||||
* association to the created DOM type.
|
||||
* @return {Text} The {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
toDOM (_document = document, hooks, binding) {
|
||||
const dom = _document.createTextNode(this.toString())
|
||||
if (binding !== undefined) {
|
||||
binding._createAssociation(dom, this)
|
||||
}
|
||||
return dom
|
||||
}
|
||||
|
||||
toString () {
|
||||
// @ts-ignore
|
||||
return this.toDelta().map(delta => {
|
||||
const nestedNodes = []
|
||||
for (const nodeName in delta.attributes) {
|
||||
const attrs = []
|
||||
for (const key in delta.attributes[nodeName]) {
|
||||
attrs.push({ key, value: delta.attributes[nodeName][key] })
|
||||
}
|
||||
// sort attributes to get a unique order
|
||||
attrs.sort((a, b) => a.key < b.key ? -1 : 1)
|
||||
nestedNodes.push({ nodeName, attrs })
|
||||
}
|
||||
// sort node order to get a unique order
|
||||
nestedNodes.sort((a, b) => a.nodeName < b.nodeName ? -1 : 1)
|
||||
// now convert to dom string
|
||||
let str = ''
|
||||
for (let i = 0; i < nestedNodes.length; i++) {
|
||||
const node = nestedNodes[i]
|
||||
str += `<${node.nodeName}`
|
||||
for (let j = 0; j < node.attrs.length; j++) {
|
||||
const attr = node.attrs[j]
|
||||
str += ` ${attr.key}="${attr.value}"`
|
||||
}
|
||||
str += '>'
|
||||
}
|
||||
str += delta.insert
|
||||
for (let i = nestedNodes.length - 1; i >= 0; i--) {
|
||||
str += `</${nestedNodes[i].nodeName}>`
|
||||
}
|
||||
return str
|
||||
}).join('')
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
toJSON () {
|
||||
return this.toString()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
*/
|
||||
_write (encoder) {
|
||||
encoder.writeTypeRef(YXmlTextRefID)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @return {YXmlText}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readYXmlText = decoder => new YXmlText()
|
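A quick sketch of the delta-to-markup conversion implemented in YXmlText.toString above (assuming the usual Y import):

import * as Y from 'yjs'

const ydoc = new Y.Doc()
const text = new Y.XmlText()
ydoc.getXmlFragment('xml').insert(0, [text])

text.insert(0, 'hello ')
text.insert(6, 'world', { bold: true })

text.toDelta()  // [{ insert: 'hello ' }, { insert: 'world', attributes: { bold: true } }]
text.toString() // 'hello <bold>world</bold>'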
25
src/utils/AbstractConnector.js
Normal file
@ -0,0 +1,25 @@
|
||||
import { ObservableV2 } from 'lib0/observable'
|
||||
|
||||
import {
|
||||
Doc // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
/**
|
||||
* This is an abstract interface that all Connectors should implement to keep them interchangeable.
|
||||
*
|
||||
* @note This interface is experimental and it is not advised to actually inherit this class.
|
||||
* It just serves as typing information.
|
||||
*
|
||||
* @extends {ObservableV2<any>}
|
||||
*/
|
||||
export class AbstractConnector extends ObservableV2 {
|
||||
/**
|
||||
* @param {Doc} ydoc
|
||||
* @param {any} awareness
|
||||
*/
|
||||
constructor (ydoc, awareness) {
|
||||
super()
|
||||
this.doc = ydoc
|
||||
this.awareness = awareness
|
||||
}
|
||||
}
|
349
src/utils/DeleteSet.js
Normal file
@ -0,0 +1,349 @@
|
||||
import {
|
||||
findIndexSS,
|
||||
getState,
|
||||
splitItem,
|
||||
iterateStructs,
|
||||
UpdateEncoderV2,
|
||||
DSDecoderV1, DSEncoderV1, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as array from 'lib0/array'
|
||||
import * as math from 'lib0/math'
|
||||
import * as map from 'lib0/map'
|
||||
import * as encoding from 'lib0/encoding'
|
||||
import * as decoding from 'lib0/decoding'
|
||||
|
||||
export class DeleteItem {
|
||||
/**
|
||||
* @param {number} clock
|
||||
* @param {number} len
|
||||
*/
|
||||
constructor (clock, len) {
|
||||
/**
|
||||
* @type {number}
|
||||
*/
|
||||
this.clock = clock
|
||||
/**
|
||||
* @type {number}
|
||||
*/
|
||||
this.len = len
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* We no longer maintain a DeleteStore. DeleteSet is a temporary object that is created when needed.
|
||||
* - When created in a transaction, it must only be accessed after sorting and merging
|
||||
* - This DeleteSet is sent to other clients
|
||||
* - We do not create a DeleteSet when we send a sync message. The DeleteSet message is created directly from StructStore
|
||||
* - We read a DeleteSet as part of a sync/update message. In this case the DeleteSet is already sorted and merged.
|
||||
*/
|
||||
export class DeleteSet {
|
||||
constructor () {
|
||||
/**
|
||||
* @type {Map<number,Array<DeleteItem>>}
|
||||
*/
|
||||
this.clients = new Map()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate over all structs that the DeleteSet gc's.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {DeleteSet} ds
|
||||
* @param {function(GC|Item):void} f
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const iterateDeletedStructs = (transaction, ds, f) =>
|
||||
ds.clients.forEach((deletes, clientid) => {
|
||||
const structs = /** @type {Array<GC|Item>} */ (transaction.doc.store.clients.get(clientid))
|
||||
for (let i = 0; i < deletes.length; i++) {
|
||||
const del = deletes[i]
|
||||
iterateStructs(transaction, structs, del.clock, del.len, f)
|
||||
}
|
||||
})
|
||||
|
||||
/**
|
||||
* @param {Array<DeleteItem>} dis
|
||||
* @param {number} clock
|
||||
* @return {number|null}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const findIndexDS = (dis, clock) => {
|
||||
let left = 0
|
||||
let right = dis.length - 1
|
||||
while (left <= right) {
|
||||
const midindex = math.floor((left + right) / 2)
|
||||
const mid = dis[midindex]
|
||||
const midclock = mid.clock
|
||||
if (midclock <= clock) {
|
||||
if (clock < midclock + mid.len) {
|
||||
return midindex
|
||||
}
|
||||
left = midindex + 1
|
||||
} else {
|
||||
right = midindex - 1
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DeleteSet} ds
|
||||
* @param {ID} id
|
||||
* @return {boolean}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const isDeleted = (ds, id) => {
|
||||
const dis = ds.clients.get(id.client)
|
||||
return dis !== undefined && findIndexDS(dis, id.clock) !== null
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DeleteSet} ds
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const sortAndMergeDeleteSet = ds => {
|
||||
ds.clients.forEach(dels => {
|
||||
dels.sort((a, b) => a.clock - b.clock)
|
||||
// merge items without filtering or splicing the array
|
||||
// i is the current pointer
|
||||
// j refers to the current insert position for the pointed item
|
||||
// try to merge dels[i] into dels[j-1] or set dels[j]=dels[i]
|
||||
let i, j
|
||||
for (i = 1, j = 1; i < dels.length; i++) {
|
||||
const left = dels[j - 1]
|
||||
const right = dels[i]
|
||||
if (left.clock + left.len >= right.clock) {
|
||||
left.len = math.max(left.len, right.clock + right.len - left.clock)
|
||||
} else {
|
||||
if (j < i) {
|
||||
dels[j] = right
|
||||
}
|
||||
j++
|
||||
}
|
||||
}
|
||||
dels.length = j
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Array<DeleteSet>} dss
|
||||
* @return {DeleteSet} A fresh DeleteSet
|
||||
*/
|
||||
export const mergeDeleteSets = dss => {
|
||||
const merged = new DeleteSet()
|
||||
for (let dssI = 0; dssI < dss.length; dssI++) {
|
||||
dss[dssI].clients.forEach((delsLeft, client) => {
|
||||
if (!merged.clients.has(client)) {
|
||||
// Write all missing keys from current ds and all following.
|
||||
// If merged already contains `client` current ds has already been added.
|
||||
/**
|
||||
* @type {Array<DeleteItem>}
|
||||
*/
|
||||
const dels = delsLeft.slice()
|
||||
for (let i = dssI + 1; i < dss.length; i++) {
|
||||
array.appendTo(dels, dss[i].clients.get(client) || [])
|
||||
}
|
||||
merged.clients.set(client, dels)
|
||||
}
|
||||
})
|
||||
}
|
||||
sortAndMergeDeleteSet(merged)
|
||||
return merged
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DeleteSet} ds
|
||||
* @param {number} client
|
||||
* @param {number} clock
|
||||
* @param {number} length
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const addToDeleteSet = (ds, client, clock, length) => {
|
||||
map.setIfUndefined(ds.clients, client, () => /** @type {Array<DeleteItem>} */ ([])).push(new DeleteItem(clock, length))
|
||||
}
|
||||
|
||||
export const createDeleteSet = () => new DeleteSet()
|
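A small sketch of how overlapping DeleteItems collapse after sorting and merging, using the helpers defined above in this module:

const ds = new DeleteSet()
addToDeleteSet(ds, 1, 0, 3) // client 1, clocks 0..2
addToDeleteSet(ds, 1, 2, 4) // client 1, clocks 2..5 (overlaps the first range)
sortAndMergeDeleteSet(ds)
ds.clients.get(1) // a single DeleteItem { clock: 0, len: 6 }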
||||
|
||||
/**
|
||||
* @param {StructStore} ss
|
||||
* @return {DeleteSet} Merged and sorted DeleteSet
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const createDeleteSetFromStructStore = ss => {
|
||||
const ds = createDeleteSet()
|
||||
ss.clients.forEach((structs, client) => {
|
||||
/**
|
||||
* @type {Array<DeleteItem>}
|
||||
*/
|
||||
const dsitems = []
|
||||
for (let i = 0; i < structs.length; i++) {
|
||||
const struct = structs[i]
|
||||
if (struct.deleted) {
|
||||
const clock = struct.id.clock
|
||||
let len = struct.length
|
||||
if (i + 1 < structs.length) {
|
||||
for (let next = structs[i + 1]; i + 1 < structs.length && next.deleted; next = structs[++i + 1]) {
|
||||
len += next.length
|
||||
}
|
||||
}
|
||||
dsitems.push(new DeleteItem(clock, len))
|
||||
}
|
||||
}
|
||||
if (dsitems.length > 0) {
|
||||
ds.clients.set(client, dsitems)
|
||||
}
|
||||
})
|
||||
return ds
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DSEncoderV1 | DSEncoderV2} encoder
|
||||
* @param {DeleteSet} ds
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const writeDeleteSet = (encoder, ds) => {
|
||||
encoding.writeVarUint(encoder.restEncoder, ds.clients.size)
|
||||
|
||||
// Ensure that the delete set is written in a deterministic order
|
||||
array.from(ds.clients.entries())
|
||||
.sort((a, b) => b[0] - a[0])
|
||||
.forEach(([client, dsitems]) => {
|
||||
encoder.resetDsCurVal()
|
||||
encoding.writeVarUint(encoder.restEncoder, client)
|
||||
const len = dsitems.length
|
||||
encoding.writeVarUint(encoder.restEncoder, len)
|
||||
for (let i = 0; i < len; i++) {
|
||||
const item = dsitems[i]
|
||||
encoder.writeDsClock(item.clock)
|
||||
encoder.writeDsLen(item.len)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DSDecoderV1 | DSDecoderV2} decoder
|
||||
* @return {DeleteSet}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readDeleteSet = decoder => {
|
||||
const ds = new DeleteSet()
|
||||
const numClients = decoding.readVarUint(decoder.restDecoder)
|
||||
for (let i = 0; i < numClients; i++) {
|
||||
decoder.resetDsCurVal()
|
||||
const client = decoding.readVarUint(decoder.restDecoder)
|
||||
const numberOfDeletes = decoding.readVarUint(decoder.restDecoder)
|
||||
if (numberOfDeletes > 0) {
|
||||
const dsField = map.setIfUndefined(ds.clients, client, () => /** @type {Array<DeleteItem>} */ ([]))
|
||||
for (let i = 0; i < numberOfDeletes; i++) {
|
||||
dsField.push(new DeleteItem(decoder.readDsClock(), decoder.readDsLen()))
|
||||
}
|
||||
}
|
||||
}
|
||||
return ds
|
||||
}
|
||||
|
||||
/**
|
||||
* @todo YDecoder also contains references to String and other Decoders. Would make sense to exchange YDecoder.toUint8Array for YDecoder.DsToUint8Array()..
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {DSDecoderV1 | DSDecoderV2} decoder
|
||||
* @param {Transaction} transaction
|
||||
* @param {StructStore} store
|
||||
* @return {Uint8Array|null} Returns a v2 update containing all deletes that couldn't be applied yet; or null if all deletes were applied successfully.
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readAndApplyDeleteSet = (decoder, transaction, store) => {
|
||||
const unappliedDS = new DeleteSet()
|
||||
const numClients = decoding.readVarUint(decoder.restDecoder)
|
||||
for (let i = 0; i < numClients; i++) {
|
||||
decoder.resetDsCurVal()
|
||||
const client = decoding.readVarUint(decoder.restDecoder)
|
||||
const numberOfDeletes = decoding.readVarUint(decoder.restDecoder)
|
||||
const structs = store.clients.get(client) || []
|
||||
const state = getState(store, client)
|
||||
for (let i = 0; i < numberOfDeletes; i++) {
|
||||
const clock = decoder.readDsClock()
|
||||
const clockEnd = clock + decoder.readDsLen()
|
||||
if (clock < state) {
|
||||
if (state < clockEnd) {
|
||||
addToDeleteSet(unappliedDS, client, state, clockEnd - state)
|
||||
}
|
||||
let index = findIndexSS(structs, clock)
|
||||
/**
|
||||
* We can ignore the case of GC and Delete structs, because we are going to skip them
|
||||
* @type {Item}
|
||||
*/
|
||||
// @ts-ignore
|
||||
let struct = structs[index]
|
||||
// split the first item if necessary
|
||||
if (!struct.deleted && struct.id.clock < clock) {
|
||||
structs.splice(index + 1, 0, splitItem(transaction, struct, clock - struct.id.clock))
|
||||
index++ // increment because we now want to use the next struct
|
||||
}
|
||||
while (index < structs.length) {
|
||||
// @ts-ignore
|
||||
struct = structs[index++]
|
||||
if (struct.id.clock < clockEnd) {
|
||||
if (!struct.deleted) {
|
||||
if (clockEnd < struct.id.clock + struct.length) {
|
||||
structs.splice(index, 0, splitItem(transaction, struct, clockEnd - struct.id.clock))
|
||||
}
|
||||
struct.delete(transaction)
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
} else {
|
||||
addToDeleteSet(unappliedDS, client, clock, clockEnd - clock)
|
||||
}
|
||||
}
|
||||
}
|
||||
if (unappliedDS.clients.size > 0) {
|
||||
const ds = new UpdateEncoderV2()
|
||||
encoding.writeVarUint(ds.restEncoder, 0) // encode 0 structs
|
||||
writeDeleteSet(ds, unappliedDS)
|
||||
return ds.toUint8Array()
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DeleteSet} ds1
|
||||
* @param {DeleteSet} ds2
|
||||
*/
|
||||
export const equalDeleteSets = (ds1, ds2) => {
|
||||
if (ds1.clients.size !== ds2.clients.size) return false
|
||||
for (const [client, deleteItems1] of ds1.clients.entries()) {
|
||||
const deleteItems2 = /** @type {Array<import('../internals.js').DeleteItem>} */ (ds2.clients.get(client))
|
||||
if (deleteItems2 === undefined || deleteItems1.length !== deleteItems2.length) return false
|
||||
for (let i = 0; i < deleteItems1.length; i++) {
|
||||
const di1 = deleteItems1[i]
|
||||
const di2 = deleteItems2[i]
|
||||
if (di1.clock !== di2.clock || di1.len !== di2.len) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
347
src/utils/Doc.js
Normal file
@ -0,0 +1,347 @@
|
||||
/**
|
||||
* @module Y
|
||||
*/
|
||||
|
||||
import {
|
||||
StructStore,
|
||||
AbstractType,
|
||||
YArray,
|
||||
YText,
|
||||
YMap,
|
||||
YXmlElement,
|
||||
YXmlFragment,
|
||||
transact,
|
||||
ContentDoc, Item, Transaction, YEvent // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import { ObservableV2 } from 'lib0/observable'
|
||||
import * as random from 'lib0/random'
|
||||
import * as map from 'lib0/map'
|
||||
import * as array from 'lib0/array'
|
||||
import * as promise from 'lib0/promise'
|
||||
|
||||
export const generateNewClientId = random.uint32
|
||||
|
||||
/**
|
||||
* @typedef {Object} DocOpts
|
||||
* @property {boolean} [DocOpts.gc=true] Whether garbage collection is enabled (default: true); set gc=false to disable it
|
||||
* @property {function(Item):boolean} [DocOpts.gcFilter] Will be called before an Item is garbage collected. Return false to keep the Item.
|
||||
* @property {string} [DocOpts.guid] Define a globally unique identifier for this document
|
||||
* @property {string | null} [DocOpts.collectionid] Associate this document with a collection. This only plays a role if your provider has a concept of collection.
|
||||
* @property {any} [DocOpts.meta] Any kind of meta information you want to associate with this document. If this is a subdocument, remote peers will store the meta information as well.
|
||||
* @property {boolean} [DocOpts.autoLoad] If a subdocument, automatically load document. If this is a subdocument, remote peers will load the document as well automatically.
|
||||
* @property {boolean} [DocOpts.shouldLoad] Whether the document should be synced by the provider now. This is toggled to true when you call ydoc.load()
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} DocEvents
|
||||
* @property {function(Doc):void} DocEvents.destroy
|
||||
* @property {function(Doc):void} DocEvents.load
|
||||
* @property {function(boolean, Doc):void} DocEvents.sync
|
||||
* @property {function(Uint8Array, any, Doc, Transaction):void} DocEvents.update
|
||||
* @property {function(Uint8Array, any, Doc, Transaction):void} DocEvents.updateV2
|
||||
* @property {function(Doc):void} DocEvents.beforeAllTransactions
|
||||
* @property {function(Transaction, Doc):void} DocEvents.beforeTransaction
|
||||
* @property {function(Transaction, Doc):void} DocEvents.beforeObserverCalls
|
||||
* @property {function(Transaction, Doc):void} DocEvents.afterTransaction
|
||||
* @property {function(Transaction, Doc):void} DocEvents.afterTransactionCleanup
|
||||
* @property {function(Doc, Array<Transaction>):void} DocEvents.afterAllTransactions
|
||||
* @property {function({ loaded: Set<Doc>, added: Set<Doc>, removed: Set<Doc> }, Doc, Transaction):void} DocEvents.subdocs
|
||||
*/
|
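A hedged sketch of how these options and events are typically wired up (the guid and meta values are illustrative):

import * as Y from 'yjs'

const ydoc = new Y.Doc({ gc: true, guid: 'my-document', meta: { project: 'demo' } })

// 'update' delivers an encoded update (Uint8Array) plus the transaction origin;
// providers typically forward it to peers or persistence.
ydoc.on('update', (update, origin, doc, transaction) => {
  // send `update` somewhere
})

ydoc.on('destroy', () => {
  // release provider resources here
})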
||||
|
||||
/**
|
||||
* A Yjs instance handles the state of shared data.
|
||||
* @extends ObservableV2<DocEvents>
|
||||
*/
|
||||
export class Doc extends ObservableV2 {
|
||||
/**
|
||||
* @param {DocOpts} opts configuration
|
||||
*/
|
||||
constructor ({ guid = random.uuidv4(), collectionid = null, gc = true, gcFilter = () => true, meta = null, autoLoad = false, shouldLoad = true } = {}) {
|
||||
super()
|
||||
this.gc = gc
|
||||
this.gcFilter = gcFilter
|
||||
this.clientID = generateNewClientId()
|
||||
this.guid = guid
|
||||
this.collectionid = collectionid
|
||||
/**
|
||||
* @type {Map<string, AbstractType<YEvent<any>>>}
|
||||
*/
|
||||
this.share = new Map()
|
||||
this.store = new StructStore()
|
||||
/**
|
||||
* @type {Transaction | null}
|
||||
*/
|
||||
this._transaction = null
|
||||
/**
|
||||
* @type {Array<Transaction>}
|
||||
*/
|
||||
this._transactionCleanups = []
|
||||
/**
|
||||
* @type {Set<Doc>}
|
||||
*/
|
||||
this.subdocs = new Set()
|
||||
/**
|
||||
* If this document is a subdocument - a document integrated into another document - then _item is defined.
|
||||
* @type {Item?}
|
||||
*/
|
||||
this._item = null
|
||||
this.shouldLoad = shouldLoad
|
||||
this.autoLoad = autoLoad
|
||||
this.meta = meta
|
||||
/**
|
||||
* This is set to true when the persistence provider loaded the document from the database or when the `sync` event fires.
|
||||
* Note that not all providers implement this feature. Provider authors are encouraged to fire the `load` event when the doc content is loaded from the database.
|
||||
*
|
||||
* @type {boolean}
|
||||
*/
|
||||
this.isLoaded = false
|
||||
/**
|
||||
* This is set to true when the connection provider has successfully synced with a backend.
|
||||
* Note that when using peer-to-peer providers this event may not be very useful.
|
||||
* Also note that not all providers implement this feature. Provider authors are encouraged to fire
|
||||
* the `sync` event when the doc has been synced (with `true` as a parameter) or if connection is
|
||||
* lost (with false as a parameter).
|
||||
*/
|
||||
this.isSynced = false
|
||||
this.isDestroyed = false
|
||||
/**
|
||||
* Promise that resolves once the document has been loaded from a persistence provider.
|
||||
*/
|
||||
this.whenLoaded = promise.create(resolve => {
|
||||
this.on('load', () => {
|
||||
this.isLoaded = true
|
||||
resolve(this)
|
||||
})
|
||||
})
|
||||
const provideSyncedPromise = () => promise.create(resolve => {
|
||||
/**
|
||||
* @param {boolean} isSynced
|
||||
*/
|
||||
const eventHandler = (isSynced) => {
|
||||
if (isSynced === undefined || isSynced === true) {
|
||||
this.off('sync', eventHandler)
|
||||
resolve()
|
||||
}
|
||||
}
|
||||
this.on('sync', eventHandler)
|
||||
})
|
||||
this.on('sync', isSynced => {
|
||||
if (isSynced === false && this.isSynced) {
|
||||
this.whenSynced = provideSyncedPromise()
|
||||
}
|
||||
this.isSynced = isSynced === undefined || isSynced === true
|
||||
if (this.isSynced && !this.isLoaded) {
|
||||
this.emit('load', [this])
|
||||
}
|
||||
})
|
||||
/**
|
||||
* Promise that resolves once the document has been synced with a backend.
|
||||
* This promise is recreated when the connection is lost.
|
||||
* Note the documentation about the `isSynced` property.
|
||||
*/
|
||||
this.whenSynced = provideSyncedPromise()
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify the parent document that you request to load data into this subdocument (if it is a subdocument).
|
||||
*
|
||||
* `load()` might be used in the future to request any provider to load the most current data.
|
||||
*
|
||||
* It is safe to call `load()` multiple times.
|
||||
*/
|
||||
load () {
|
||||
const item = this._item
|
||||
if (item !== null && !this.shouldLoad) {
|
||||
transact(/** @type {any} */ (item.parent).doc, transaction => {
|
||||
transaction.subdocsLoaded.add(this)
|
||||
}, null, true)
|
||||
}
|
||||
this.shouldLoad = true
|
||||
}
|
||||
|
||||
getSubdocs () {
|
||||
return this.subdocs
|
||||
}
|
||||
|
||||
getSubdocGuids () {
|
||||
return new Set(array.from(this.subdocs).map(doc => doc.guid))
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes that happen inside of a transaction are bundled. This means that
|
||||
* the observer fires _after_ the transaction is finished and that all changes
|
||||
* that happened inside of the transaction are sent as one message to the
|
||||
* other peers.
|
||||
*
|
||||
* @template T
|
||||
* @param {function(Transaction):T} f The function that should be executed as a transaction
|
||||
* @param {any} [origin] Origin of who started the transaction. Will be stored on transaction.origin
|
||||
* @return T
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
transact (f, origin = null) {
|
||||
return transact(this, f, origin)
|
||||
}
|
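// Usage sketch (assuming `ydoc` is a Doc instance): both changes below are bundled
// into one transaction, so observers fire once and peers receive a single update;
// the 'my-origin' string is illustrative.
ydoc.transact(() => {
  ydoc.getText('t').insert(0, 'hello')
  ydoc.getMap('m').set('done', true)
}, 'my-origin')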
||||
|
||||
/**
|
||||
* Define a shared data type.
|
||||
*
|
||||
* Multiple calls of `ydoc.get(name, TypeConstructor)` yield the same result
|
||||
* and do not overwrite each other. I.e.
|
||||
* `ydoc.get(name, Y.Array) === ydoc.get(name, Y.Array)`
|
||||
*
|
||||
* After this method is called, the type is also available on `ydoc.share.get(name)`.
|
||||
*
|
||||
* *Best Practices:*
|
||||
* Define all types right after the Y.Doc instance is created and store them in a separate object.
|
||||
* Also use the typed methods `getText(name)`, `getArray(name)`, ..
|
||||
*
|
||||
* @template {typeof AbstractType<any>} Type
|
||||
* @example
|
||||
* const ydoc = new Y.Doc(..)
|
||||
* const appState = {
|
||||
* document: ydoc.getText('document'),
|
||||
* comments: ydoc.getArray('comments')
|
||||
* }
|
||||
*
|
||||
* @param {string} name
|
||||
* @param {Type} TypeConstructor The constructor of the type definition. E.g. Y.Text, Y.Array, Y.Map, ...
|
||||
* @return {InstanceType<Type>} The created type. Constructed with TypeConstructor
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
get (name, TypeConstructor = /** @type {any} */ (AbstractType)) {
|
||||
const type = map.setIfUndefined(this.share, name, () => {
|
||||
// @ts-ignore
|
||||
const t = new TypeConstructor()
|
||||
t._integrate(this, null)
|
||||
return t
|
||||
})
|
||||
const Constr = type.constructor
|
||||
if (TypeConstructor !== AbstractType && Constr !== TypeConstructor) {
|
||||
if (Constr === AbstractType) {
|
||||
// @ts-ignore
|
||||
const t = new TypeConstructor()
|
||||
t._map = type._map
|
||||
type._map.forEach(/** @param {Item?} n */ n => {
|
||||
for (; n !== null; n = n.left) {
|
||||
// @ts-ignore
|
||||
n.parent = t
|
||||
}
|
||||
})
|
||||
t._start = type._start
|
||||
for (let n = t._start; n !== null; n = n.right) {
|
||||
n.parent = t
|
||||
}
|
||||
t._length = type._length
|
||||
this.share.set(name, t)
|
||||
t._integrate(this, null)
|
||||
return /** @type {InstanceType<Type>} */ (t)
|
||||
} else {
|
||||
throw new Error(`Type with the name ${name} has already been defined with a different constructor`)
|
||||
}
|
||||
}
|
||||
return /** @type {InstanceType<Type>} */ (type)
|
||||
}
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {string} [name]
|
||||
* @return {YArray<T>}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
getArray (name = '') {
|
||||
return /** @type {YArray<T>} */ (this.get(name, YArray))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} [name]
|
||||
* @return {YText}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
getText (name = '') {
|
||||
return this.get(name, YText)
|
||||
}
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {string} [name]
|
||||
* @return {YMap<T>}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
getMap (name = '') {
|
||||
return /** @type {YMap<T>} */ (this.get(name, YMap))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} [name]
|
||||
* @return {YXmlElement}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
getXmlElement (name = '') {
|
||||
return /** @type {YXmlElement<{[key:string]:string}>} */ (this.get(name, YXmlElement))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} [name]
|
||||
* @return {YXmlFragment}
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
getXmlFragment (name = '') {
|
||||
return this.get(name, YXmlFragment)
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the entire document into a js object, recursively traversing each yjs type
|
||||
* Doesn't include types that have not been defined (e.g. via ydoc.get(..) or the typed getters like ydoc.getMap(..)).
|
||||
*
|
||||
* @deprecated Do not use this method; call toJSON directly on the shared types instead.
|
||||
*
|
||||
* @return {Object<string, any>}
|
||||
*/
|
||||
toJSON () {
|
||||
/**
|
||||
* @type {Object<string, any>}
|
||||
*/
|
||||
const doc = {}
|
||||
|
||||
this.share.forEach((value, key) => {
|
||||
doc[key] = value.toJSON()
|
||||
})
|
||||
|
||||
return doc
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit `destroy` event and unregister all event handlers.
|
||||
*/
|
||||
destroy () {
|
||||
this.isDestroyed = true
|
||||
array.from(this.subdocs).forEach(subdoc => subdoc.destroy())
|
||||
const item = this._item
|
||||
if (item !== null) {
|
||||
this._item = null
|
||||
const content = /** @type {ContentDoc} */ (item.content)
|
||||
content.doc = new Doc({ guid: this.guid, ...content.opts, shouldLoad: false })
|
||||
content.doc._item = item
|
||||
transact(/** @type {any} */ (item).parent.doc, transaction => {
|
||||
const doc = content.doc
|
||||
if (!item.deleted) {
|
||||
transaction.subdocsAdded.add(doc)
|
||||
}
|
||||
transaction.subdocsRemoved.add(this)
|
||||
}, null, true)
|
||||
}
|
||||
// @ts-ignore
|
||||
this.emit('destroyed', [true]) // DEPRECATED!
|
||||
this.emit('destroy', [this])
|
||||
super.destroy()
|
||||
}
|
||||
}
|
87
src/utils/EventHandler.js
Normal file
@ -0,0 +1,87 @@
|
||||
import * as f from 'lib0/function'
|
||||
|
||||
/**
|
||||
* General event handler implementation.
|
||||
*
|
||||
* @template ARG0, ARG1
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
export class EventHandler {
|
||||
constructor () {
|
||||
/**
|
||||
* @type {Array<function(ARG0, ARG1):void>}
|
||||
*/
|
||||
this.l = []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @template ARG0,ARG1
|
||||
* @returns {EventHandler<ARG0,ARG1>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const createEventHandler = () => new EventHandler()
|
||||
|
||||
/**
|
||||
* Adds an event listener that is called when
|
||||
* {@link EventHandler#callEventListeners} is called.
|
||||
*
|
||||
* @template ARG0,ARG1
|
||||
* @param {EventHandler<ARG0,ARG1>} eventHandler
|
||||
* @param {function(ARG0,ARG1):void} f The event handler.
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const addEventHandlerListener = (eventHandler, f) =>
|
||||
eventHandler.l.push(f)
|
||||
|
||||
/**
|
||||
* Removes an event listener.
|
||||
*
|
||||
* @template ARG0,ARG1
|
||||
* @param {EventHandler<ARG0,ARG1>} eventHandler
|
||||
* @param {function(ARG0,ARG1):void} f The event handler that was added with
|
||||
* {@link EventHandler#addEventListener}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const removeEventHandlerListener = (eventHandler, f) => {
|
||||
const l = eventHandler.l
|
||||
const len = l.length
|
||||
eventHandler.l = l.filter(g => f !== g)
|
||||
if (len === eventHandler.l.length) {
|
||||
console.error('[yjs] Tried to remove event handler that doesn\'t exist.')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes all event listeners.
|
||||
* @template ARG0,ARG1
|
||||
* @param {EventHandler<ARG0,ARG1>} eventHandler
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const removeAllEventHandlerListeners = eventHandler => {
|
||||
eventHandler.l.length = 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Call all event listeners that were added via
|
||||
* {@link EventHandler#addEventListener}.
|
||||
*
|
||||
* @template ARG0,ARG1
|
||||
* @param {EventHandler<ARG0,ARG1>} eventHandler
|
||||
* @param {ARG0} arg0
|
||||
* @param {ARG1} arg1
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const callEventHandlerListeners = (eventHandler, arg0, arg1) =>
|
||||
f.callAll(eventHandler.l, [arg0, arg1])
|
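A tiny sketch using the functions defined above in this module:

const handler = createEventHandler()
const listener = (arg0, arg1) => console.log(arg0, arg1)
addEventHandlerListener(handler, listener)
callEventHandlerListeners(handler, 'hello', 42) // logs: hello 42
removeEventHandlerListener(handler, listener)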
89
src/utils/ID.js
Normal file
@ -0,0 +1,89 @@
|
||||
import { AbstractType } from '../internals.js' // eslint-disable-line
|
||||
|
||||
import * as decoding from 'lib0/decoding'
|
||||
import * as encoding from 'lib0/encoding'
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
export class ID {
|
||||
/**
|
||||
* @param {number} client client id
|
||||
* @param {number} clock unique per client id, continuous number
|
||||
*/
|
||||
constructor (client, clock) {
|
||||
/**
|
||||
* Client id
|
||||
* @type {number}
|
||||
*/
|
||||
this.client = client
|
||||
/**
|
||||
* unique per client id, continuous number
|
||||
* @type {number}
|
||||
*/
|
||||
this.clock = clock
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ID | null} a
|
||||
* @param {ID | null} b
|
||||
* @return {boolean}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const compareIDs = (a, b) => a === b || (a !== null && b !== null && a.client === b.client && a.clock === b.clock)
|
||||
|
||||
/**
|
||||
* @param {number} client
|
||||
* @param {number} clock
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const createID = (client, clock) => new ID(client, clock)
|
||||
|
||||
/**
|
||||
* @param {encoding.Encoder} encoder
|
||||
* @param {ID} id
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const writeID = (encoder, id) => {
|
||||
encoding.writeVarUint(encoder, id.client)
|
||||
encoding.writeVarUint(encoder, id.clock)
|
||||
}
|
||||
|
||||
/**
|
||||
* Read ID.
|
||||
* * If first varUint read is 0xFFFFFF a RootID is returned.
|
||||
* * Otherwise an ID is returned
|
||||
*
|
||||
* @param {decoding.Decoder} decoder
|
||||
* @return {ID}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readID = decoder =>
|
||||
createID(decoding.readVarUint(decoder), decoding.readVarUint(decoder))
|
||||
|
||||
/**
|
||||
* The top types are mapped from y.share.get(keyname) => type.
|
||||
* `type` does not store any information about the `keyname`.
|
||||
* This function finds the correct `keyname` for `type` and throws otherwise.
|
||||
*
|
||||
* @param {AbstractType<any>} type
|
||||
* @return {string}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const findRootTypeKey = type => {
|
||||
// @ts-ignore _y must be defined, otherwise unexpected case
|
||||
for (const [key, value] of type.doc.share.entries()) {
|
||||
if (value === type) {
|
||||
return key
|
||||
}
|
||||
}
|
||||
throw error.unexpectedCase()
|
||||
}
|
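A round-trip sketch for the ID encoding helpers above, using the lib0 encoder and decoder already imported by this module:

const id = createID(42, 7)
const encoder = encoding.createEncoder()
writeID(encoder, id)

const decoder = decoding.createDecoder(encoding.toUint8Array(encoder))
compareIDs(id, readID(decoder)) // true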
141
src/utils/PermanentUserData.js
Normal file
@ -0,0 +1,141 @@
|
||||
import {
|
||||
YArray,
|
||||
YMap,
|
||||
readDeleteSet,
|
||||
writeDeleteSet,
|
||||
createDeleteSet,
|
||||
DSEncoderV1, DSDecoderV1, ID, DeleteSet, YArrayEvent, Transaction, Doc // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as decoding from 'lib0/decoding'
|
||||
|
||||
import { mergeDeleteSets, isDeleted } from './DeleteSet.js'
|
||||
|
||||
export class PermanentUserData {
|
||||
/**
|
||||
* @param {Doc} doc
|
||||
* @param {YMap<any>} [storeType]
|
||||
*/
|
||||
constructor (doc, storeType = doc.getMap('users')) {
|
||||
/**
|
||||
* @type {Map<string,DeleteSet>}
|
||||
*/
|
||||
const dss = new Map()
|
||||
this.yusers = storeType
|
||||
this.doc = doc
|
||||
/**
|
||||
* Maps from clientid to userDescription
|
||||
*
|
||||
* @type {Map<number,string>}
|
||||
*/
|
||||
this.clients = new Map()
|
||||
this.dss = dss
|
||||
/**
|
||||
* @param {YMap<any>} user
|
||||
* @param {string} userDescription
|
||||
*/
|
||||
const initUser = (user, userDescription) => {
|
||||
/**
|
||||
* @type {YArray<Uint8Array>}
|
||||
*/
|
||||
const ds = user.get('ds')
|
||||
const ids = user.get('ids')
|
||||
const addClientId = /** @param {number} clientid */ clientid => this.clients.set(clientid, userDescription)
|
||||
ds.observe(/** @param {YArrayEvent<any>} event */ event => {
|
||||
event.changes.added.forEach(item => {
|
||||
item.content.getContent().forEach(encodedDs => {
|
||||
if (encodedDs instanceof Uint8Array) {
|
||||
this.dss.set(userDescription, mergeDeleteSets([this.dss.get(userDescription) || createDeleteSet(), readDeleteSet(new DSDecoderV1(decoding.createDecoder(encodedDs)))]))
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
this.dss.set(userDescription, mergeDeleteSets(ds.map(encodedDs => readDeleteSet(new DSDecoderV1(decoding.createDecoder(encodedDs))))))
|
||||
ids.observe(/** @param {YArrayEvent<any>} event */ event =>
|
||||
event.changes.added.forEach(item => item.content.getContent().forEach(addClientId))
|
||||
)
|
||||
ids.forEach(addClientId)
|
||||
}
|
||||
// observe users
|
||||
storeType.observe(event => {
|
||||
event.keysChanged.forEach(userDescription =>
|
||||
initUser(storeType.get(userDescription), userDescription)
|
||||
)
|
||||
})
|
||||
// add initial data
|
||||
storeType.forEach(initUser)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Doc} doc
|
||||
* @param {number} clientid
|
||||
* @param {string} userDescription
|
||||
* @param {Object} conf
|
||||
* @param {function(Transaction, DeleteSet):boolean} [conf.filter]
|
||||
*/
|
||||
setUserMapping (doc, clientid, userDescription, { filter = () => true } = {}) {
|
||||
const users = this.yusers
|
||||
let user = users.get(userDescription)
|
||||
if (!user) {
|
||||
user = new YMap()
|
||||
user.set('ids', new YArray())
|
||||
user.set('ds', new YArray())
|
||||
users.set(userDescription, user)
|
||||
}
|
||||
user.get('ids').push([clientid])
|
||||
users.observe(_event => {
|
||||
setTimeout(() => {
|
||||
const userOverwrite = users.get(userDescription)
|
||||
if (userOverwrite !== user) {
|
||||
// user was overwritten, port all data over to the next user object
|
||||
// @todo Experiment with Y.Sets here
|
||||
user = userOverwrite
|
||||
// @todo iterate over old type
|
||||
this.clients.forEach((_userDescription, clientid) => {
|
||||
if (userDescription === _userDescription) {
|
||||
user.get('ids').push([clientid])
|
||||
}
|
||||
})
|
||||
const encoder = new DSEncoderV1()
|
||||
const ds = this.dss.get(userDescription)
|
||||
if (ds) {
|
||||
writeDeleteSet(encoder, ds)
|
||||
user.get('ds').push([encoder.toUint8Array()])
|
||||
}
|
||||
}
|
||||
}, 0)
|
||||
})
|
||||
doc.on('afterTransaction', /** @param {Transaction} transaction */ transaction => {
|
||||
setTimeout(() => {
|
||||
const yds = user.get('ds')
|
||||
const ds = transaction.deleteSet
|
||||
if (transaction.local && ds.clients.size > 0 && filter(transaction, ds)) {
|
||||
const encoder = new DSEncoderV1()
|
||||
writeDeleteSet(encoder, ds)
|
||||
yds.push([encoder.toUint8Array()])
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} clientid
|
||||
* @return {any}
|
||||
*/
|
||||
getUserByClientId (clientid) {
|
||||
return this.clients.get(clientid) || null
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ID} id
|
||||
* @return {string | null}
|
||||
*/
|
||||
getUserByDeletedId (id) {
|
||||
for (const [userDescription, ds] of this.dss.entries()) {
|
||||
if (isDeleted(ds, id)) {
|
||||
return userDescription
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
}
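
// A wiring sketch, assuming a Doc instance (gc is often disabled when this class is used
// together with snapshots) and a hypothetical deleted-item id `deletedId`:
//
//   const ydoc = new Doc({ gc: false })
//   const pud = new PermanentUserData(ydoc)
//   pud.setUserMapping(ydoc, ydoc.clientID, 'alice')
//   // ... after some local deletions have been synced ...
//   pud.getUserByClientId(ydoc.clientID) // => 'alice'
//   pud.getUserByDeletedId(deletedId)    // => 'alice' if `deletedId` falls into alice's delete set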
|
353  src/utils/RelativePosition.js  Normal file
@@ -0,0 +1,353 @@
|
||||
import {
|
||||
writeID,
|
||||
readID,
|
||||
compareIDs,
|
||||
getState,
|
||||
findRootTypeKey,
|
||||
Item,
|
||||
createID,
|
||||
ContentType,
|
||||
followRedone,
|
||||
getItem,
|
||||
StructStore, ID, Doc, AbstractType, // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as encoding from 'lib0/encoding'
|
||||
import * as decoding from 'lib0/decoding'
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
/**
|
||||
* A relative position is based on the Yjs model and is not affected by document changes.
|
||||
* E.g. If you place a relative position before a certain character, it will always point to this character.
|
||||
* If you place a relative position at the end of a type, it will always point to the end of the type.
|
||||
*
|
||||
* A numeric position is often unsuited for user selections, because it does not change when content is inserted
|
||||
* before or after.
|
||||
*
|
||||
* ```Insert(0, 'x')('a|bc') = 'xa|bc'``` Where | is the relative position.
|
||||
*
|
||||
* One of the properties must be defined.
|
||||
*
|
||||
* @example
|
||||
* // Current cursor position is at position 10
|
||||
* const relativePosition = createRelativePositionFromIndex(yText, 10)
|
||||
* // modify yText
|
||||
* yText.insert(0, 'abc')
|
||||
* yText.delete(3, 10)
|
||||
* // Compute the cursor position
|
||||
* const absolutePosition = createAbsolutePositionFromRelativePosition(y, relativePosition)
|
||||
* absolutePosition.type === yText // => true
|
||||
* console.log('cursor location is ' + absolutePosition.index) // => cursor location is 3
|
||||
*
|
||||
*/
|
||||
export class RelativePosition {
|
||||
/**
|
||||
* @param {ID|null} type
|
||||
* @param {string|null} tname
|
||||
* @param {ID|null} item
|
||||
* @param {number} assoc
|
||||
*/
|
||||
constructor (type, tname, item, assoc = 0) {
|
||||
/**
|
||||
* @type {ID|null}
|
||||
*/
|
||||
this.type = type
|
||||
/**
|
||||
* @type {string|null}
|
||||
*/
|
||||
this.tname = tname
|
||||
/**
|
||||
* @type {ID | null}
|
||||
*/
|
||||
this.item = item
|
||||
/**
|
||||
* A relative position is associated with a specific character. By default
* (assoc >= 0), the relative position is associated with the character
* after the intended position,
* i.e. position 1 in 'ab' is associated with the character 'b'.
*
* If assoc < 0, the relative position is associated with the character
* before the intended position.
|
||||
*
|
||||
* @type {number}
|
||||
*/
|
||||
this.assoc = assoc
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {RelativePosition} rpos
|
||||
* @return {any}
|
||||
*/
|
||||
export const relativePositionToJSON = rpos => {
|
||||
const json = {}
|
||||
if (rpos.type) {
|
||||
json.type = rpos.type
|
||||
}
|
||||
if (rpos.tname) {
|
||||
json.tname = rpos.tname
|
||||
}
|
||||
if (rpos.item) {
|
||||
json.item = rpos.item
|
||||
}
|
||||
if (rpos.assoc != null) {
|
||||
json.assoc = rpos.assoc
|
||||
}
|
||||
return json
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} json
|
||||
* @return {RelativePosition}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const createRelativePositionFromJSON = json => new RelativePosition(json.type == null ? null : createID(json.type.client, json.type.clock), json.tname ?? null, json.item == null ? null : createID(json.item.client, json.item.clock), json.assoc == null ? 0 : json.assoc)
|
||||
|
||||
export class AbsolutePosition {
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {number} index
|
||||
* @param {number} [assoc]
|
||||
*/
|
||||
constructor (type, index, assoc = 0) {
|
||||
/**
|
||||
* @type {AbstractType<any>}
|
||||
*/
|
||||
this.type = type
|
||||
/**
|
||||
* @type {number}
|
||||
*/
|
||||
this.index = index
|
||||
this.assoc = assoc
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {number} index
|
||||
* @param {number} [assoc]
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const createAbsolutePosition = (type, index, assoc = 0) => new AbsolutePosition(type, index, assoc)
|
||||
|
||||
/**
|
||||
* @param {AbstractType<any>} type
|
||||
* @param {ID|null} item
|
||||
* @param {number} [assoc]
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const createRelativePosition = (type, item, assoc) => {
|
||||
let typeid = null
|
||||
let tname = null
|
||||
if (type._item === null) {
|
||||
tname = findRootTypeKey(type)
|
||||
} else {
|
||||
typeid = createID(type._item.id.client, type._item.id.clock)
|
||||
}
|
||||
return new RelativePosition(typeid, tname, item, assoc)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a relativePosition based on an absolute position.
|
||||
*
|
||||
* @param {AbstractType<any>} type The base type (e.g. YText or YArray).
|
||||
* @param {number} index The absolute position.
|
||||
* @param {number} [assoc]
|
||||
* @return {RelativePosition}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const createRelativePositionFromTypeIndex = (type, index, assoc = 0) => {
|
||||
let t = type._start
|
||||
if (assoc < 0) {
|
||||
// associated with the left character or the beginning of a type, decrement index if possible.
|
||||
if (index === 0) {
|
||||
return createRelativePosition(type, null, assoc)
|
||||
}
|
||||
index--
|
||||
}
|
||||
while (t !== null) {
|
||||
if (!t.deleted && t.countable) {
|
||||
if (t.length > index) {
|
||||
// case 1: found position somewhere in the linked list
|
||||
return createRelativePosition(type, createID(t.id.client, t.id.clock + index), assoc)
|
||||
}
|
||||
index -= t.length
|
||||
}
|
||||
if (t.right === null && assoc < 0) {
|
||||
// left-associated position, return last available id
|
||||
return createRelativePosition(type, t.lastId, assoc)
|
||||
}
|
||||
t = t.right
|
||||
}
|
||||
return createRelativePosition(type, null, assoc)
|
||||
}
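
// A sketch of the intended behaviour, assuming a Doc `ydoc` with a YText `ytext`
// that currently contains 'abcd':
//
//   const rpos = createRelativePositionFromTypeIndex(ytext, 2) // points at 'c'
//   ytext.insert(0, 'xyz')                                     // edit before the cursor
//   const abs = createAbsolutePositionFromRelativePosition(rpos, ydoc)
//   abs.index // => 5, still immediately before 'c'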
|
||||
|
||||
/**
|
||||
* @param {encoding.Encoder} encoder
|
||||
* @param {RelativePosition} rpos
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const writeRelativePosition = (encoder, rpos) => {
|
||||
const { type, tname, item, assoc } = rpos
|
||||
if (item !== null) {
|
||||
encoding.writeVarUint(encoder, 0)
|
||||
writeID(encoder, item)
|
||||
} else if (tname !== null) {
|
||||
// case 2: found position at the end of the list and type is stored in y.share
|
||||
encoding.writeUint8(encoder, 1)
|
||||
encoding.writeVarString(encoder, tname)
|
||||
} else if (type !== null) {
|
||||
// case 3: found position at the end of the list and type is attached to an item
|
||||
encoding.writeUint8(encoder, 2)
|
||||
writeID(encoder, type)
|
||||
} else {
|
||||
throw error.unexpectedCase()
|
||||
}
|
||||
encoding.writeVarInt(encoder, assoc)
|
||||
return encoder
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {RelativePosition} rpos
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
export const encodeRelativePosition = rpos => {
|
||||
const encoder = encoding.createEncoder()
|
||||
writeRelativePosition(encoder, rpos)
|
||||
return encoding.toUint8Array(encoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {decoding.Decoder} decoder
|
||||
* @return {RelativePosition}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const readRelativePosition = decoder => {
|
||||
let type = null
|
||||
let tname = null
|
||||
let itemID = null
|
||||
switch (decoding.readVarUint(decoder)) {
|
||||
case 0:
|
||||
// case 1: found position somewhere in the linked list
|
||||
itemID = readID(decoder)
|
||||
break
|
||||
case 1:
|
||||
// case 2: found position at the end of the list and type is stored in y.share
|
||||
tname = decoding.readVarString(decoder)
|
||||
break
|
||||
case 2: {
|
||||
// case 3: found position at the end of the list and type is attached to an item
|
||||
type = readID(decoder)
|
||||
}
|
||||
}
|
||||
const assoc = decoding.hasContent(decoder) ? decoding.readVarInt(decoder) : 0
|
||||
return new RelativePosition(type, tname, itemID, assoc)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} uint8Array
|
||||
* @return {RelativePosition}
|
||||
*/
|
||||
export const decodeRelativePosition = uint8Array => readRelativePosition(decoding.createDecoder(uint8Array))
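
// A serialization sketch, assuming `ytext` is a YText instance: relative positions are
// plain values, so they can be persisted or broadcast (e.g. in awareness states).
//
//   const rpos = createRelativePositionFromTypeIndex(ytext, 10)
//   const buf = encodeRelativePosition(rpos)   // Uint8Array
//   const restored = decodeRelativePosition(buf)
//   compareRelativePositions(rpos, restored)   // => true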
|
||||
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
* @param {ID} id
|
||||
*/
|
||||
const getItemWithOffset = (store, id) => {
|
||||
const item = getItem(store, id)
|
||||
const diff = id.clock - item.id.clock
|
||||
return {
|
||||
item, diff
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform a relative position to an absolute position.
|
||||
*
|
||||
* If you want to share the relative position with other users, you should set
|
||||
* `followUndoneDeletions` to false to get consistent results across all clients.
|
||||
*
|
||||
* When calculating the absolute position, we try to follow the "undone deletions". This yields
|
||||
* better results for the user who performed undo. However, only the user who performed the undo
|
||||
* will get the better results; the other users don't know which operations recreated a deleted
|
||||
* range of content. There is more information in this ticket: https://github.com/yjs/yjs/issues/638
|
||||
*
|
||||
* @param {RelativePosition} rpos
|
||||
* @param {Doc} doc
|
||||
* @param {boolean} followUndoneDeletions - whether to follow undone deletions - see https://github.com/yjs/yjs/issues/638
|
||||
* @return {AbsolutePosition|null}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const createAbsolutePositionFromRelativePosition = (rpos, doc, followUndoneDeletions = true) => {
|
||||
const store = doc.store
|
||||
const rightID = rpos.item
|
||||
const typeID = rpos.type
|
||||
const tname = rpos.tname
|
||||
const assoc = rpos.assoc
|
||||
let type = null
|
||||
let index = 0
|
||||
if (rightID !== null) {
|
||||
if (getState(store, rightID.client) <= rightID.clock) {
|
||||
return null
|
||||
}
|
||||
const res = followUndoneDeletions ? followRedone(store, rightID) : getItemWithOffset(store, rightID)
|
||||
const right = res.item
|
||||
if (!(right instanceof Item)) {
|
||||
return null
|
||||
}
|
||||
type = /** @type {AbstractType<any>} */ (right.parent)
|
||||
if (type._item === null || !type._item.deleted) {
|
||||
index = (right.deleted || !right.countable) ? 0 : (res.diff + (assoc >= 0 ? 0 : 1)) // adjust position based on left association if necessary
|
||||
let n = right.left
|
||||
while (n !== null) {
|
||||
if (!n.deleted && n.countable) {
|
||||
index += n.length
|
||||
}
|
||||
n = n.left
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (tname !== null) {
|
||||
type = doc.get(tname)
|
||||
} else if (typeID !== null) {
|
||||
if (getState(store, typeID.client) <= typeID.clock) {
|
||||
// type does not exist yet
|
||||
return null
|
||||
}
|
||||
const { item } = followUndoneDeletions ? followRedone(store, typeID) : { item: getItem(store, typeID) }
|
||||
if (item instanceof Item && item.content instanceof ContentType) {
|
||||
type = item.content.type
|
||||
} else {
|
||||
// struct is garbage collected
|
||||
return null
|
||||
}
|
||||
} else {
|
||||
throw error.unexpectedCase()
|
||||
}
|
||||
if (assoc >= 0) {
|
||||
index = type._length
|
||||
} else {
|
||||
index = 0
|
||||
}
|
||||
}
|
||||
return createAbsolutePosition(type, index, rpos.assoc)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {RelativePosition|null} a
|
||||
* @param {RelativePosition|null} b
|
||||
* @return {boolean}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const compareRelativePositions = (a, b) => a === b || (
|
||||
a !== null && b !== null && a.tname === b.tname && compareIDs(a.item, b.item) && compareIDs(a.type, b.type) && a.assoc === b.assoc
|
||||
)
|
236  src/utils/Snapshot.js  Normal file
@@ -0,0 +1,236 @@
|
||||
import {
|
||||
isDeleted,
|
||||
createDeleteSetFromStructStore,
|
||||
getStateVector,
|
||||
getItemCleanStart,
|
||||
iterateDeletedStructs,
|
||||
writeDeleteSet,
|
||||
writeStateVector,
|
||||
readDeleteSet,
|
||||
readStateVector,
|
||||
createDeleteSet,
|
||||
createID,
|
||||
getState,
|
||||
findIndexSS,
|
||||
UpdateEncoderV2,
|
||||
applyUpdateV2,
|
||||
LazyStructReader,
|
||||
equalDeleteSets,
|
||||
UpdateDecoderV1, UpdateDecoderV2, DSEncoderV1, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item, // eslint-disable-line
|
||||
mergeDeleteSets
|
||||
} from '../internals.js'
|
||||
|
||||
import * as map from 'lib0/map'
|
||||
import * as set from 'lib0/set'
|
||||
import * as decoding from 'lib0/decoding'
|
||||
import * as encoding from 'lib0/encoding'
|
||||
|
||||
export class Snapshot {
|
||||
/**
|
||||
* @param {DeleteSet} ds
|
||||
* @param {Map<number,number>} sv state map
|
||||
*/
|
||||
constructor (ds, sv) {
|
||||
/**
|
||||
* @type {DeleteSet}
|
||||
*/
|
||||
this.ds = ds
|
||||
/**
|
||||
* State Map
|
||||
* @type {Map<number,number>}
|
||||
*/
|
||||
this.sv = sv
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Snapshot} snap1
|
||||
* @param {Snapshot} snap2
|
||||
* @return {boolean}
|
||||
*/
|
||||
export const equalSnapshots = (snap1, snap2) => {
|
||||
const ds1 = snap1.ds.clients
|
||||
const ds2 = snap2.ds.clients
|
||||
const sv1 = snap1.sv
|
||||
const sv2 = snap2.sv
|
||||
if (sv1.size !== sv2.size || ds1.size !== ds2.size) {
|
||||
return false
|
||||
}
|
||||
for (const [key, value] of sv1.entries()) {
|
||||
if (sv2.get(key) !== value) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
for (const [client, dsitems1] of ds1.entries()) {
|
||||
const dsitems2 = ds2.get(client) || []
|
||||
if (dsitems1.length !== dsitems2.length) {
|
||||
return false
|
||||
}
|
||||
for (let i = 0; i < dsitems1.length; i++) {
|
||||
const dsitem1 = dsitems1[i]
|
||||
const dsitem2 = dsitems2[i]
|
||||
if (dsitem1.clock !== dsitem2.clock || dsitem1.len !== dsitem2.len) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Snapshot} snapshot
|
||||
* @param {DSEncoderV1 | DSEncoderV2} [encoder]
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => {
|
||||
writeDeleteSet(encoder, snapshot.ds)
|
||||
writeStateVector(encoder, snapshot.sv)
|
||||
return encoder.toUint8Array()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Snapshot} snapshot
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DSEncoderV1())
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} buf
|
||||
* @param {DSDecoderV1 | DSDecoderV2} [decoder]
|
||||
* @return {Snapshot}
|
||||
*/
|
||||
export const decodeSnapshotV2 = (buf, decoder = new DSDecoderV2(decoding.createDecoder(buf))) => {
|
||||
return new Snapshot(readDeleteSet(decoder), readStateVector(decoder))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} buf
|
||||
* @return {Snapshot}
|
||||
*/
|
||||
export const decodeSnapshot = buf => decodeSnapshotV2(buf, new DSDecoderV1(decoding.createDecoder(buf)))
|
||||
|
||||
/**
|
||||
* @param {DeleteSet} ds
|
||||
* @param {Map<number,number>} sm
|
||||
* @return {Snapshot}
|
||||
*/
|
||||
export const createSnapshot = (ds, sm) => new Snapshot(ds, sm)
|
||||
|
||||
export const emptySnapshot = createSnapshot(createDeleteSet(), new Map())
|
||||
|
||||
/**
|
||||
* @param {Doc} doc
|
||||
* @return {Snapshot}
|
||||
*/
|
||||
export const snapshot = doc => createSnapshot(createDeleteSetFromStructStore(doc.store), getStateVector(doc.store))
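
// A sketch of taking and serializing a snapshot, assuming gc stays disabled on the doc
// so the snapshot can later be used to restore or diff content:
//
//   const ydoc = new Doc({ gc: false })
//   ydoc.getText('t').insert(0, 'hello')
//   const snap = snapshot(ydoc)
//   const buf = encodeSnapshotV2(snap)
//   equalSnapshots(snap, decodeSnapshotV2(buf)) // => true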
|
||||
|
||||
/**
|
||||
* @param {Item} item
|
||||
* @param {Snapshot|undefined} snapshot
|
||||
*
|
||||
* @protected
|
||||
* @function
|
||||
*/
|
||||
export const isVisible = (item, snapshot) => snapshot === undefined
|
||||
? !item.deleted
|
||||
: snapshot.sv.has(item.id.client) && (snapshot.sv.get(item.id.client) || 0) > item.id.clock && !isDeleted(snapshot.ds, item.id)
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Snapshot} snapshot
|
||||
*/
|
||||
export const splitSnapshotAffectedStructs = (transaction, snapshot) => {
|
||||
const meta = map.setIfUndefined(transaction.meta, splitSnapshotAffectedStructs, set.create)
|
||||
const store = transaction.doc.store
|
||||
// check if we already split for this snapshot
|
||||
if (!meta.has(snapshot)) {
|
||||
snapshot.sv.forEach((clock, client) => {
|
||||
if (clock < getState(store, client)) {
|
||||
getItemCleanStart(transaction, createID(client, clock))
|
||||
}
|
||||
})
|
||||
iterateDeletedStructs(transaction, snapshot.ds, _item => {})
|
||||
meta.add(snapshot)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @example
|
||||
* const ydoc = new Y.Doc({ gc: false })
|
||||
* ydoc.getText().insert(0, 'world!')
|
||||
* const snapshot = Y.snapshot(ydoc)
|
||||
* ydoc.getText().insert(0, 'hello ')
|
||||
* const restored = Y.createDocFromSnapshot(ydoc, snapshot)
|
||||
* assert(restored.getText().toString() === 'world!')
|
||||
*
|
||||
* @param {Doc} originDoc
|
||||
* @param {Snapshot} snapshot
|
||||
* @param {Doc} [newDoc] Optionally, you may define the Yjs document that receives the data from originDoc
|
||||
* @return {Doc}
|
||||
*/
|
||||
export const createDocFromSnapshot = (originDoc, snapshot, newDoc = new Doc()) => {
|
||||
if (originDoc.gc) {
|
||||
// we should not try to restore a GC-ed document, because some of the restored items might have their content deleted
|
||||
throw new Error('Garbage-collection must be disabled in `originDoc`!')
|
||||
}
|
||||
const { sv, ds } = snapshot
|
||||
|
||||
const encoder = new UpdateEncoderV2()
|
||||
originDoc.transact(transaction => {
|
||||
let size = 0
|
||||
sv.forEach(clock => {
|
||||
if (clock > 0) {
|
||||
size++
|
||||
}
|
||||
})
|
||||
encoding.writeVarUint(encoder.restEncoder, size)
|
||||
// splitting the structs before writing them to the encoder
|
||||
for (const [client, clock] of sv) {
|
||||
if (clock === 0) {
|
||||
continue
|
||||
}
|
||||
if (clock < getState(originDoc.store, client)) {
|
||||
getItemCleanStart(transaction, createID(client, clock))
|
||||
}
|
||||
const structs = originDoc.store.clients.get(client) || []
|
||||
const lastStructIndex = findIndexSS(structs, clock - 1)
|
||||
// write # encoded structs
|
||||
encoding.writeVarUint(encoder.restEncoder, lastStructIndex + 1)
|
||||
encoder.writeClient(client)
|
||||
// first clock written is 0
|
||||
encoding.writeVarUint(encoder.restEncoder, 0)
|
||||
for (let i = 0; i <= lastStructIndex; i++) {
|
||||
structs[i].write(encoder, 0)
|
||||
}
|
||||
}
|
||||
writeDeleteSet(encoder, ds)
|
||||
})
|
||||
|
||||
applyUpdateV2(newDoc, encoder.toUint8Array(), 'snapshot')
|
||||
return newDoc
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Snapshot} snapshot
|
||||
* @param {Uint8Array} update
|
||||
* @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder]
|
||||
*/
|
||||
export const snapshotContainsUpdateV2 = (snapshot, update, YDecoder = UpdateDecoderV2) => {
|
||||
const structs = []
|
||||
const updateDecoder = new YDecoder(decoding.createDecoder(update))
|
||||
const lazyDecoder = new LazyStructReader(updateDecoder, false)
|
||||
for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
|
||||
structs.push(curr)
|
||||
if ((snapshot.sv.get(curr.id.client) || 0) < curr.id.clock + curr.length) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
const mergedDS = mergeDeleteSets([snapshot.ds, readDeleteSet(updateDecoder)])
|
||||
return equalDeleteSets(snapshot.ds, mergedDS)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Snapshot} snapshot
|
||||
* @param {Uint8Array} update
|
||||
*/
|
||||
export const snapshotContainsUpdate = (snapshot, update) => snapshotContainsUpdateV2(snapshot, update, UpdateDecoderV1)
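
// A sketch, assuming `update` is a hypothetical V1-encoded update (e.g. taken from the
// doc's 'update' event): the check passes only if every struct and every deletion in the
// update is already covered by the snapshot.
//
//   const snap = snapshot(ydoc)
//   snapshotContainsUpdate(snap, update) // => true | false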
|
261  src/utils/StructStore.js  Normal file
@@ -0,0 +1,261 @@
|
||||
import {
|
||||
GC,
|
||||
splitItem,
|
||||
Transaction, ID, Item, DSDecoderV2 // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as math from 'lib0/math'
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
export class StructStore {
|
||||
constructor () {
|
||||
/**
|
||||
* @type {Map<number,Array<GC|Item>>}
|
||||
*/
|
||||
this.clients = new Map()
|
||||
/**
|
||||
* @type {null | { missing: Map<number, number>, update: Uint8Array }}
|
||||
*/
|
||||
this.pendingStructs = null
|
||||
/**
|
||||
* @type {null | Uint8Array}
|
||||
*/
|
||||
this.pendingDs = null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the states as a Map<client,clock>.
|
||||
* Note that clock refers to the next expected clock id.
|
||||
*
|
||||
* @param {StructStore} store
|
||||
* @return {Map<number,number>}
|
||||
*
|
||||
* @public
|
||||
* @function
|
||||
*/
|
||||
export const getStateVector = store => {
|
||||
const sm = new Map()
|
||||
store.clients.forEach((structs, client) => {
|
||||
const struct = structs[structs.length - 1]
|
||||
sm.set(client, struct.id.clock + struct.length)
|
||||
})
|
||||
return sm
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
* @param {number} client
|
||||
* @return {number}
|
||||
*
|
||||
* @public
|
||||
* @function
|
||||
*/
|
||||
export const getState = (store, client) => {
|
||||
const structs = store.clients.get(client)
|
||||
if (structs === undefined) {
|
||||
return 0
|
||||
}
|
||||
const lastStruct = structs[structs.length - 1]
|
||||
return lastStruct.id.clock + lastStruct.length
|
||||
}
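
// A reading sketch, assuming `ydoc` is a Doc that has produced some local structs:
//
//   getStateVector(ydoc.store)           // Map { clientID => next expected clock }
//   getState(ydoc.store, ydoc.clientID)  // next expected clock for this client (0 if unknown)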
|
||||
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const integrityCheck = store => {
|
||||
store.clients.forEach(structs => {
|
||||
for (let i = 1; i < structs.length; i++) {
|
||||
const l = structs[i - 1]
|
||||
const r = structs[i]
|
||||
if (l.id.clock + l.length !== r.id.clock) {
|
||||
throw new Error('StructStore failed integrity check')
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {StructStore} store
|
||||
* @param {GC|Item} struct
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const addStruct = (store, struct) => {
|
||||
let structs = store.clients.get(struct.id.client)
|
||||
if (structs === undefined) {
|
||||
structs = []
|
||||
store.clients.set(struct.id.client, structs)
|
||||
} else {
|
||||
const lastStruct = structs[structs.length - 1]
|
||||
if (lastStruct.id.clock + lastStruct.length !== struct.id.clock) {
|
||||
throw error.unexpectedCase()
|
||||
}
|
||||
}
|
||||
structs.push(struct)
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a binary search on a sorted array
|
||||
* @param {Array<Item|GC>} structs
|
||||
* @param {number} clock
|
||||
* @return {number}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const findIndexSS = (structs, clock) => {
|
||||
let left = 0
|
||||
let right = structs.length - 1
|
||||
let mid = structs[right]
|
||||
let midclock = mid.id.clock
|
||||
if (midclock === clock) {
|
||||
return right
|
||||
}
|
||||
// @todo does it even make sense to pivot the search?
|
||||
// If a good split misses, it might actually increase the time to find the correct item.
|
||||
// Currently, the only advantage is that search with pivoting might find the item on the first try.
|
||||
let midindex = math.floor((clock / (midclock + mid.length - 1)) * right) // pivoting the search
|
||||
while (left <= right) {
|
||||
mid = structs[midindex]
|
||||
midclock = mid.id.clock
|
||||
if (midclock <= clock) {
|
||||
if (clock < midclock + mid.length) {
|
||||
return midindex
|
||||
}
|
||||
left = midindex + 1
|
||||
} else {
|
||||
right = midindex - 1
|
||||
}
|
||||
midindex = math.floor((left + right) / 2)
|
||||
}
|
||||
// Always check state before looking for a struct in StructStore
|
||||
// Therefore the case of not finding a struct is unexpected
|
||||
throw error.unexpectedCase()
|
||||
}
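
// A worked illustration (not runnable as-is): with one client's structs covering the
// clock ranges [0,3), [3,5) and [5,9), the lookup resolves a clock to the containing index.
//
//   findIndexSS(structs, 0) // => 0
//   findIndexSS(structs, 4) // => 1
//   findIndexSS(structs, 8) // => 2
//   findIndexSS(structs, 9) // throws error.unexpectedCase(), clock 9 is not stored yet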
|
||||
|
||||
/**
|
||||
* Expects that `id` is actually in the store. Otherwise, this function throws or loops forever.
|
||||
*
|
||||
* @param {StructStore} store
|
||||
* @param {ID} id
|
||||
* @return {GC|Item}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const find = (store, id) => {
|
||||
/**
|
||||
* @type {Array<GC|Item>}
|
||||
*/
|
||||
// @ts-ignore
|
||||
const structs = store.clients.get(id.client)
|
||||
return structs[findIndexSS(structs, id.clock)]
|
||||
}
|
||||
|
||||
/**
|
||||
* Expects that `id` is actually in the store. Otherwise, this function throws or loops forever.
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const getItem = /** @type {function(StructStore,ID):Item} */ (find)
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
* @param {Array<Item|GC>} structs
|
||||
* @param {number} clock
|
||||
*/
|
||||
export const findIndexCleanStart = (transaction, structs, clock) => {
|
||||
const index = findIndexSS(structs, clock)
|
||||
const struct = structs[index]
|
||||
if (struct.id.clock < clock && struct instanceof Item) {
|
||||
structs.splice(index + 1, 0, splitItem(transaction, struct, clock - struct.id.clock))
|
||||
return index + 1
|
||||
}
|
||||
return index
|
||||
}
|
||||
|
||||
/**
|
||||
* Expects that `id` is actually in the store. Otherwise, this function throws or loops forever.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {ID} id
|
||||
* @return {Item}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const getItemCleanStart = (transaction, id) => {
|
||||
const structs = /** @type {Array<Item>} */ (transaction.doc.store.clients.get(id.client))
|
||||
return structs[findIndexCleanStart(transaction, structs, id.clock)]
|
||||
}
|
||||
|
||||
/**
|
||||
* Expects that `id` is actually in the store. Otherwise, this function throws or loops forever.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {StructStore} store
|
||||
* @param {ID} id
|
||||
* @return {Item}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const getItemCleanEnd = (transaction, store, id) => {
|
||||
/**
|
||||
* @type {Array<Item>}
|
||||
*/
|
||||
// @ts-ignore
|
||||
const structs = store.clients.get(id.client)
|
||||
const index = findIndexSS(structs, id.clock)
|
||||
const struct = structs[index]
|
||||
if (id.clock !== struct.id.clock + struct.length - 1 && struct.constructor !== GC) {
|
||||
structs.splice(index + 1, 0, splitItem(transaction, struct, id.clock - struct.id.clock + 1))
|
||||
}
|
||||
return struct
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace `struct` with `newStruct` in the store
|
||||
* @param {StructStore} store
|
||||
* @param {GC|Item} struct
|
||||
* @param {GC|Item} newStruct
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const replaceStruct = (store, struct, newStruct) => {
|
||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(struct.id.client))
|
||||
structs[findIndexSS(structs, struct.id.clock)] = newStruct
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate over a range of structs
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {Array<Item|GC>} structs
|
||||
* @param {number} clockStart Inclusive start
|
||||
* @param {number} len
|
||||
* @param {function(GC|Item):void} f
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const iterateStructs = (transaction, structs, clockStart, len, f) => {
|
||||
if (len === 0) {
|
||||
return
|
||||
}
|
||||
const clockEnd = clockStart + len
|
||||
let index = findIndexCleanStart(transaction, structs, clockStart)
|
||||
let struct
|
||||
do {
|
||||
struct = structs[index++]
|
||||
if (clockEnd < struct.id.clock + struct.length) {
|
||||
findIndexCleanStart(transaction, structs, clockEnd)
|
||||
}
|
||||
f(struct)
|
||||
} while (index < structs.length && structs[index].id.clock < clockEnd)
|
||||
}
|
444  src/utils/Transaction.js  Normal file
@@ -0,0 +1,444 @@
|
||||
import {
|
||||
getState,
|
||||
writeStructsFromTransaction,
|
||||
writeDeleteSet,
|
||||
DeleteSet,
|
||||
sortAndMergeDeleteSet,
|
||||
getStateVector,
|
||||
findIndexSS,
|
||||
callEventHandlerListeners,
|
||||
Item,
|
||||
generateNewClientId,
|
||||
createID,
|
||||
cleanupYTextAfterTransaction,
|
||||
UpdateEncoderV1, UpdateEncoderV2, GC, StructStore, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as map from 'lib0/map'
|
||||
import * as math from 'lib0/math'
|
||||
import * as set from 'lib0/set'
|
||||
import * as logging from 'lib0/logging'
|
||||
import { callAll } from 'lib0/function'
|
||||
|
||||
/**
|
||||
* A transaction is created for every change on the Yjs model. It is possible
|
||||
* to bundle changes on the Yjs model in a single transaction to
|
||||
* minimize the number of messages sent and the number of observer calls.
|
||||
* If possible the user of this library should bundle as many changes as
|
||||
* possible. Here is an example to illustrate the advantages of bundling:
|
||||
*
|
||||
* @example
|
||||
* const ydoc = new Y.Doc()
|
||||
* const map = ydoc.getMap('map')
|
||||
* // Log content when change is triggered
|
||||
* map.observe(() => {
|
||||
* console.log('change triggered')
|
||||
* })
|
||||
* // Each change on the map type triggers a log message:
|
||||
* map.set('a', 0) // => "change triggered"
|
||||
* map.set('b', 0) // => "change triggered"
|
||||
* // When put in a transaction, it will trigger the log after the transaction:
|
||||
* ydoc.transact(() => {
|
||||
* map.set('a', 1)
|
||||
* map.set('b', 1)
|
||||
* }) // => "change triggered"
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
export class Transaction {
|
||||
/**
|
||||
* @param {Doc} doc
|
||||
* @param {any} origin
|
||||
* @param {boolean} local
|
||||
*/
|
||||
constructor (doc, origin, local) {
|
||||
/**
|
||||
* The Yjs instance.
|
||||
* @type {Doc}
|
||||
*/
|
||||
this.doc = doc
|
||||
/**
|
||||
* Describes the set of deleted items by ids
|
||||
* @type {DeleteSet}
|
||||
*/
|
||||
this.deleteSet = new DeleteSet()
|
||||
/**
|
||||
* Holds the state before the transaction started.
|
||||
* @type {Map<Number,Number>}
|
||||
*/
|
||||
this.beforeState = getStateVector(doc.store)
|
||||
/**
|
||||
* Holds the state after the transaction.
|
||||
* @type {Map<Number,Number>}
|
||||
*/
|
||||
this.afterState = new Map()
|
||||
/**
|
||||
* All types that were directly modified (property added or child
|
||||
* inserted/deleted). New types are not included in this Set.
|
||||
* Maps from type to parentSubs (`item.parentSub = null` for YArray)
|
||||
* @type {Map<AbstractType<YEvent<any>>,Set<String|null>>}
|
||||
*/
|
||||
this.changed = new Map()
|
||||
/**
|
||||
* Stores the events for the types that observe also child elements.
|
||||
* It is mainly used by `observeDeep`.
|
||||
* @type {Map<AbstractType<YEvent<any>>,Array<YEvent<any>>>}
|
||||
*/
|
||||
this.changedParentTypes = new Map()
|
||||
/**
|
||||
* @type {Array<AbstractStruct>}
|
||||
*/
|
||||
this._mergeStructs = []
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
this.origin = origin
|
||||
/**
|
||||
* Stores meta information on the transaction
|
||||
* @type {Map<any,any>}
|
||||
*/
|
||||
this.meta = new Map()
|
||||
/**
|
||||
* Whether this change originates from this doc.
|
||||
* @type {boolean}
|
||||
*/
|
||||
this.local = local
|
||||
/**
|
||||
* @type {Set<Doc>}
|
||||
*/
|
||||
this.subdocsAdded = new Set()
|
||||
/**
|
||||
* @type {Set<Doc>}
|
||||
*/
|
||||
this.subdocsRemoved = new Set()
|
||||
/**
|
||||
* @type {Set<Doc>}
|
||||
*/
|
||||
this.subdocsLoaded = new Set()
|
||||
/**
|
||||
* @type {boolean}
|
||||
*/
|
||||
this._needFormattingCleanup = false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {Transaction} transaction
|
||||
* @return {boolean} Whether data was written.
|
||||
*/
|
||||
export const writeUpdateMessageFromTransaction = (encoder, transaction) => {
|
||||
if (transaction.deleteSet.clients.size === 0 && !map.any(transaction.afterState, (clock, client) => transaction.beforeState.get(client) !== clock)) {
|
||||
return false
|
||||
}
|
||||
sortAndMergeDeleteSet(transaction.deleteSet)
|
||||
writeStructsFromTransaction(encoder, transaction)
|
||||
writeDeleteSet(encoder, transaction.deleteSet)
|
||||
return true
|
||||
}
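
// This is the encoding path behind the doc-level 'update' / 'updateV2' events. A consumer
// sketch, assuming a Doc `ydoc` and the public applyUpdate helper on the receiving side:
//
//   ydoc.on('update', (update, origin, doc, transaction) => {
//     // `update` contains this transaction's structs + delete set;
//     // applyUpdate(remoteDoc, update) replays it on another doc
//   })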
|
||||
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const nextID = transaction => {
|
||||
const y = transaction.doc
|
||||
return createID(y.clientID, getState(y.store, y.clientID))
|
||||
}
|
||||
|
||||
/**
|
||||
* If `type.parent` was added in the current transaction, `type` technically
* did not change; it was just added, and we should not fire events for `type`.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {AbstractType<YEvent<any>>} type
|
||||
* @param {string|null} parentSub
|
||||
*/
|
||||
export const addChangedTypeToTransaction = (transaction, type, parentSub) => {
|
||||
const item = type._item
|
||||
if (item === null || (item.id.clock < (transaction.beforeState.get(item.id.client) || 0) && !item.deleted)) {
|
||||
map.setIfUndefined(transaction.changed, type, set.create).add(parentSub)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Array<AbstractStruct>} structs
|
||||
* @param {number} pos
|
||||
* @return {number} # of merged structs
|
||||
*/
|
||||
const tryToMergeWithLefts = (structs, pos) => {
|
||||
let right = structs[pos]
|
||||
let left = structs[pos - 1]
|
||||
let i = pos
|
||||
for (; i > 0; right = left, left = structs[--i - 1]) {
|
||||
if (left.deleted === right.deleted && left.constructor === right.constructor) {
|
||||
if (left.mergeWith(right)) {
|
||||
if (right instanceof Item && right.parentSub !== null && /** @type {AbstractType<any>} */ (right.parent)._map.get(right.parentSub) === right) {
|
||||
/** @type {AbstractType<any>} */ (right.parent)._map.set(right.parentSub, /** @type {Item} */ (left))
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
const merged = pos - i
|
||||
if (merged) {
|
||||
// remove all merged structs from the array
|
||||
structs.splice(pos + 1 - merged, merged)
|
||||
}
|
||||
return merged
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DeleteSet} ds
|
||||
* @param {StructStore} store
|
||||
* @param {function(Item):boolean} gcFilter
|
||||
*/
|
||||
const tryGcDeleteSet = (ds, store, gcFilter) => {
|
||||
for (const [client, deleteItems] of ds.clients.entries()) {
|
||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||
for (let di = deleteItems.length - 1; di >= 0; di--) {
|
||||
const deleteItem = deleteItems[di]
|
||||
const endDeleteItemClock = deleteItem.clock + deleteItem.len
|
||||
for (
|
||||
let si = findIndexSS(structs, deleteItem.clock), struct = structs[si];
|
||||
si < structs.length && struct.id.clock < endDeleteItemClock;
|
||||
struct = structs[++si]
|
||||
) {
|
||||
const struct = structs[si]
|
||||
if (deleteItem.clock + deleteItem.len <= struct.id.clock) {
|
||||
break
|
||||
}
|
||||
if (struct instanceof Item && struct.deleted && !struct.keep && gcFilter(struct)) {
|
||||
struct.gc(store, false)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DeleteSet} ds
|
||||
* @param {StructStore} store
|
||||
*/
|
||||
const tryMergeDeleteSet = (ds, store) => {
|
||||
// try to merge deleted / gc'd items
|
||||
// merge from right to left for better efficiency and so we don't miss any merge targets
|
||||
ds.clients.forEach((deleteItems, client) => {
|
||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||
for (let di = deleteItems.length - 1; di >= 0; di--) {
|
||||
const deleteItem = deleteItems[di]
|
||||
// start with merging the item next to the last deleted item
|
||||
const mostRightIndexToCheck = math.min(structs.length - 1, 1 + findIndexSS(structs, deleteItem.clock + deleteItem.len - 1))
|
||||
for (
|
||||
let si = mostRightIndexToCheck, struct = structs[si];
|
||||
si > 0 && struct.id.clock >= deleteItem.clock;
|
||||
struct = structs[si]
|
||||
) {
|
||||
si -= 1 + tryToMergeWithLefts(structs, si)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DeleteSet} ds
|
||||
* @param {StructStore} store
|
||||
* @param {function(Item):boolean} gcFilter
|
||||
*/
|
||||
export const tryGc = (ds, store, gcFilter) => {
|
||||
tryGcDeleteSet(ds, store, gcFilter)
|
||||
tryMergeDeleteSet(ds, store)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Array<Transaction>} transactionCleanups
|
||||
* @param {number} i
|
||||
*/
|
||||
const cleanupTransactions = (transactionCleanups, i) => {
|
||||
if (i < transactionCleanups.length) {
|
||||
const transaction = transactionCleanups[i]
|
||||
const doc = transaction.doc
|
||||
const store = doc.store
|
||||
const ds = transaction.deleteSet
|
||||
const mergeStructs = transaction._mergeStructs
|
||||
try {
|
||||
sortAndMergeDeleteSet(ds)
|
||||
transaction.afterState = getStateVector(transaction.doc.store)
|
||||
doc.emit('beforeObserverCalls', [transaction, doc])
|
||||
/**
|
||||
* An array of event callbacks.
|
||||
*
|
||||
* Each callback is called even if the other ones throw errors.
|
||||
*
|
||||
* @type {Array<function():void>}
|
||||
*/
|
||||
const fs = []
|
||||
// observe events on changed types
|
||||
transaction.changed.forEach((subs, itemtype) =>
|
||||
fs.push(() => {
|
||||
if (itemtype._item === null || !itemtype._item.deleted) {
|
||||
itemtype._callObserver(transaction, subs)
|
||||
}
|
||||
})
|
||||
)
|
||||
fs.push(() => {
|
||||
// deep observe events
|
||||
transaction.changedParentTypes.forEach((events, type) => {
|
||||
// We need to think about the possibility that the user transforms the
|
||||
// Y.Doc in the event.
|
||||
if (type._dEH.l.length > 0 && (type._item === null || !type._item.deleted)) {
|
||||
events = events
|
||||
.filter(event =>
|
||||
event.target._item === null || !event.target._item.deleted
|
||||
)
|
||||
events
|
||||
.forEach(event => {
|
||||
event.currentTarget = type
|
||||
// path is relative to the current target
|
||||
event._path = null
|
||||
})
|
||||
// sort events by path length so that top-level events are fired first.
|
||||
events
|
||||
.sort((event1, event2) => event1.path.length - event2.path.length)
|
||||
// We don't need to check for events.length
|
||||
// because we know it has at least one element
|
||||
callEventHandlerListeners(type._dEH, events, transaction)
|
||||
}
|
||||
})
|
||||
})
|
||||
fs.push(() => doc.emit('afterTransaction', [transaction, doc]))
|
||||
callAll(fs, [])
|
||||
if (transaction._needFormattingCleanup) {
|
||||
cleanupYTextAfterTransaction(transaction)
|
||||
}
|
||||
} finally {
|
||||
// Replace deleted items with ItemDeleted / GC.
|
||||
// This is where content is actually removed from the Yjs Doc.
|
||||
if (doc.gc) {
|
||||
tryGcDeleteSet(ds, store, doc.gcFilter)
|
||||
}
|
||||
tryMergeDeleteSet(ds, store)
|
||||
|
||||
// on all affected store.clients props, try to merge
|
||||
transaction.afterState.forEach((clock, client) => {
|
||||
const beforeClock = transaction.beforeState.get(client) || 0
|
||||
if (beforeClock !== clock) {
|
||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||
// we iterate from right to left so we can safely remove entries
|
||||
const firstChangePos = math.max(findIndexSS(structs, beforeClock), 1)
|
||||
for (let i = structs.length - 1; i >= firstChangePos;) {
|
||||
i -= 1 + tryToMergeWithLefts(structs, i)
|
||||
}
|
||||
}
|
||||
})
|
||||
// try to merge mergeStructs
|
||||
// @todo: it makes more sense to transform mergeStructs to a DS, sort it, and merge from right to left
|
||||
// but at the moment DS does not handle duplicates
|
||||
for (let i = mergeStructs.length - 1; i >= 0; i--) {
|
||||
const { client, clock } = mergeStructs[i].id
|
||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||
const replacedStructPos = findIndexSS(structs, clock)
|
||||
if (replacedStructPos + 1 < structs.length) {
|
||||
if (tryToMergeWithLefts(structs, replacedStructPos + 1) > 1) {
|
||||
continue // no need to perform next check, both are already merged
|
||||
}
|
||||
}
|
||||
if (replacedStructPos > 0) {
|
||||
tryToMergeWithLefts(structs, replacedStructPos)
|
||||
}
|
||||
}
|
||||
if (!transaction.local && transaction.afterState.get(doc.clientID) !== transaction.beforeState.get(doc.clientID)) {
|
||||
logging.print(logging.ORANGE, logging.BOLD, '[yjs] ', logging.UNBOLD, logging.RED, 'Changed the client-id because another client seems to be using it.')
|
||||
doc.clientID = generateNewClientId()
|
||||
}
|
||||
// @todo Merge all the transactions into one and send the data as a single update message
|
||||
doc.emit('afterTransactionCleanup', [transaction, doc])
|
||||
if (doc._observers.has('update')) {
|
||||
const encoder = new UpdateEncoderV1()
|
||||
const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
|
||||
if (hasContent) {
|
||||
doc.emit('update', [encoder.toUint8Array(), transaction.origin, doc, transaction])
|
||||
}
|
||||
}
|
||||
if (doc._observers.has('updateV2')) {
|
||||
const encoder = new UpdateEncoderV2()
|
||||
const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
|
||||
if (hasContent) {
|
||||
doc.emit('updateV2', [encoder.toUint8Array(), transaction.origin, doc, transaction])
|
||||
}
|
||||
}
|
||||
const { subdocsAdded, subdocsLoaded, subdocsRemoved } = transaction
|
||||
if (subdocsAdded.size > 0 || subdocsRemoved.size > 0 || subdocsLoaded.size > 0) {
|
||||
subdocsAdded.forEach(subdoc => {
|
||||
subdoc.clientID = doc.clientID
|
||||
if (subdoc.collectionid == null) {
|
||||
subdoc.collectionid = doc.collectionid
|
||||
}
|
||||
doc.subdocs.add(subdoc)
|
||||
})
|
||||
subdocsRemoved.forEach(subdoc => doc.subdocs.delete(subdoc))
|
||||
doc.emit('subdocs', [{ loaded: subdocsLoaded, added: subdocsAdded, removed: subdocsRemoved }, doc, transaction])
|
||||
subdocsRemoved.forEach(subdoc => subdoc.destroy())
|
||||
}
|
||||
|
||||
if (transactionCleanups.length <= i + 1) {
|
||||
doc._transactionCleanups = []
|
||||
doc.emit('afterAllTransactions', [doc, transactionCleanups])
|
||||
} else {
|
||||
cleanupTransactions(transactionCleanups, i + 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Implements the functionality of `y.transact(()=>{..})`
|
||||
*
|
||||
* @template T
|
||||
* @param {Doc} doc
|
||||
* @param {function(Transaction):T} f
|
||||
* @param {any} [origin=null]
* @param {boolean} [local=true]
|
||||
* @return {T}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const transact = (doc, f, origin = null, local = true) => {
|
||||
const transactionCleanups = doc._transactionCleanups
|
||||
let initialCall = false
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let result = null
|
||||
if (doc._transaction === null) {
|
||||
initialCall = true
|
||||
doc._transaction = new Transaction(doc, origin, local)
|
||||
transactionCleanups.push(doc._transaction)
|
||||
if (transactionCleanups.length === 1) {
|
||||
doc.emit('beforeAllTransactions', [doc])
|
||||
}
|
||||
doc.emit('beforeTransaction', [doc._transaction, doc])
|
||||
}
|
||||
try {
|
||||
result = f(doc._transaction)
|
||||
} finally {
|
||||
if (initialCall) {
|
||||
const finishCleanup = doc._transaction === transactionCleanups[0]
|
||||
doc._transaction = null
|
||||
if (finishCleanup) {
|
||||
// The first transaction ended, now process observer calls.
|
||||
// Observer calls may create new transactions for which we need to call the observers and do cleanup.
|
||||
// We don't want to nest these calls, so we execute these calls one after
|
||||
// another.
|
||||
// Also we need to ensure that all cleanups are called, even if the
|
||||
// observers throw errors.
|
||||
// This file is full of hacky try {} finally {} blocks to ensure that an
|
||||
// event can throw errors and also that the cleanup is called.
|
||||
cleanupTransactions(transactionCleanups, 0)
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
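
// A bundling sketch, assuming `ydoc` is a Doc and `ymap` one of its root maps:
//
//   transact(ydoc, () => {
//     ymap.set('a', 1)
//     ymap.set('b', 2)
//   }, 'import')
//   // observers fire once after the callback; transaction.origin === 'import'
//   // the public equivalent is ydoc.transact(fn, 'import')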
|
400  src/utils/UndoManager.js  Normal file
@@ -0,0 +1,400 @@
|
||||
import {
|
||||
mergeDeleteSets,
|
||||
iterateDeletedStructs,
|
||||
keepItem,
|
||||
transact,
|
||||
createID,
|
||||
redoItem,
|
||||
isParentOf,
|
||||
followRedone,
|
||||
getItemCleanStart,
|
||||
isDeleted,
|
||||
addToDeleteSet,
|
||||
YEvent, Transaction, Doc, Item, GC, DeleteSet, AbstractType // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as time from 'lib0/time'
|
||||
import * as array from 'lib0/array'
|
||||
import * as logging from 'lib0/logging'
|
||||
import { ObservableV2 } from 'lib0/observable'
|
||||
|
||||
export class StackItem {
|
||||
/**
|
||||
* @param {DeleteSet} deletions
|
||||
* @param {DeleteSet} insertions
|
||||
*/
|
||||
constructor (deletions, insertions) {
|
||||
this.insertions = insertions
|
||||
this.deletions = deletions
|
||||
/**
|
||||
* Use this to save and restore metadata like selection range
|
||||
*/
|
||||
this.meta = new Map()
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @param {Transaction} tr
|
||||
* @param {UndoManager} um
|
||||
* @param {StackItem} stackItem
|
||||
*/
|
||||
const clearUndoManagerStackItem = (tr, um, stackItem) => {
|
||||
iterateDeletedStructs(tr, stackItem.deletions, item => {
|
||||
if (item instanceof Item && um.scope.some(type => type === tr.doc || isParentOf(/** @type {AbstractType<any>} */ (type), item))) {
|
||||
keepItem(item, false)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UndoManager} undoManager
|
||||
* @param {Array<StackItem>} stack
|
||||
* @param {'undo'|'redo'} eventType
|
||||
* @return {StackItem?}
|
||||
*/
|
||||
const popStackItem = (undoManager, stack, eventType) => {
|
||||
/**
|
||||
* Keep a reference to the transaction so we can fire the event with the changedParentTypes
|
||||
* @type {any}
|
||||
*/
|
||||
let _tr = null
|
||||
const doc = undoManager.doc
|
||||
const scope = undoManager.scope
|
||||
transact(doc, transaction => {
|
||||
while (stack.length > 0 && undoManager.currStackItem === null) {
|
||||
const store = doc.store
|
||||
const stackItem = /** @type {StackItem} */ (stack.pop())
|
||||
/**
|
||||
* @type {Set<Item>}
|
||||
*/
|
||||
const itemsToRedo = new Set()
|
||||
/**
|
||||
* @type {Array<Item>}
|
||||
*/
|
||||
const itemsToDelete = []
|
||||
let performedChange = false
|
||||
iterateDeletedStructs(transaction, stackItem.insertions, struct => {
|
||||
if (struct instanceof Item) {
|
||||
if (struct.redone !== null) {
|
||||
let { item, diff } = followRedone(store, struct.id)
|
||||
if (diff > 0) {
|
||||
item = getItemCleanStart(transaction, createID(item.id.client, item.id.clock + diff))
|
||||
}
|
||||
struct = item
|
||||
}
|
||||
if (!struct.deleted && scope.some(type => type === transaction.doc || isParentOf(/** @type {AbstractType<any>} */ (type), /** @type {Item} */ (struct)))) {
|
||||
itemsToDelete.push(struct)
|
||||
}
|
||||
}
|
||||
})
|
||||
iterateDeletedStructs(transaction, stackItem.deletions, struct => {
|
||||
if (
|
||||
struct instanceof Item &&
|
||||
scope.some(type => type === transaction.doc || isParentOf(/** @type {AbstractType<any>} */ (type), struct)) &&
|
||||
// Never redo structs in stackItem.insertions because they were created and deleted in the same capture interval.
|
||||
!isDeleted(stackItem.insertions, struct.id)
|
||||
) {
|
||||
itemsToRedo.add(struct)
|
||||
}
|
||||
})
|
||||
itemsToRedo.forEach(struct => {
|
||||
performedChange = redoItem(transaction, struct, itemsToRedo, stackItem.insertions, undoManager.ignoreRemoteMapChanges, undoManager) !== null || performedChange
|
||||
})
|
||||
// We want to delete in reverse order so that children are deleted before
|
||||
// parents, so we have more information available when items are filtered.
|
||||
for (let i = itemsToDelete.length - 1; i >= 0; i--) {
|
||||
const item = itemsToDelete[i]
|
||||
if (undoManager.deleteFilter(item)) {
|
||||
item.delete(transaction)
|
||||
performedChange = true
|
||||
}
|
||||
}
|
||||
undoManager.currStackItem = performedChange ? stackItem : null
|
||||
}
|
||||
transaction.changed.forEach((subProps, type) => {
|
||||
// destroy search marker if necessary
|
||||
if (subProps.has(null) && type._searchMarker) {
|
||||
type._searchMarker.length = 0
|
||||
}
|
||||
})
|
||||
_tr = transaction
|
||||
}, undoManager)
|
||||
const res = undoManager.currStackItem
|
||||
if (res != null) {
|
||||
const changedParentTypes = _tr.changedParentTypes
|
||||
undoManager.emit('stack-item-popped', [{ stackItem: res, type: eventType, changedParentTypes, origin: undoManager }, undoManager])
|
||||
undoManager.currStackItem = null
|
||||
}
|
||||
return res
|
||||
}
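
// The user-facing flow that popStackItem implements, assuming a YText `ytext`:
//
//   const undoManager = new UndoManager(ytext)
//   ytext.insert(0, 'abc')
//   undoManager.undo() // pops from undoStack and deletes the tracked insertions
//   undoManager.redo() // pops from redoStack and restores them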
|
||||
|
||||
/**
|
||||
* @typedef {Object} UndoManagerOptions
|
||||
* @property {number} [UndoManagerOptions.captureTimeout=500]
|
||||
* @property {function(Transaction):boolean} [UndoManagerOptions.captureTransaction] Do not capture changes of a Transaction if this function returns false.
|
||||
* @property {function(Item):boolean} [UndoManagerOptions.deleteFilter=()=>true] Sometimes
|
||||
* it is necessary to filter what an Undo/Redo operation can delete. If this
|
||||
* filter returns false, the type/item won't be deleted even if it is in the
|
||||
* undo/redo scope.
|
||||
* @property {Set<any>} [UndoManagerOptions.trackedOrigins=new Set([null])]
|
||||
* @property {boolean} [ignoreRemoteMapChanges] Experimental. By default, the UndoManager will never overwrite remote changes. Enable this property to enable overwriting remote changes on key-value changes (Y.Map, properties on Y.Xml, etc..).
|
||||
* @property {Doc} [doc] The document that this UndoManager operates on. Only needed if typeScope is empty.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} StackItemEvent
|
||||
* @property {StackItem} StackItemEvent.stackItem
|
||||
* @property {any} StackItemEvent.origin
|
||||
* @property {'undo'|'redo'} StackItemEvent.type
|
||||
* @property {Map<AbstractType<YEvent<any>>,Array<YEvent<any>>>} StackItemEvent.changedParentTypes
|
||||
*/
|
||||
|
||||
/**
|
||||
* Fires 'stack-item-added' event when a stack item was added to either the undo- or
|
||||
* the redo-stack. You may store additional stack information via the
|
||||
* metadata property on `event.stackItem.meta` (it is a `Map` of metadata properties).
|
||||
* Fires 'stack-item-popped' event when a stack item was popped from either the
|
||||
* undo- or the redo-stack. You may restore the saved stack information from `event.stackItem.meta`.
|
||||
*
|
||||
* @extends {ObservableV2<{'stack-item-added':function(StackItemEvent, UndoManager):void, 'stack-item-popped': function(StackItemEvent, UndoManager):void, 'stack-cleared': function({ undoStackCleared: boolean, redoStackCleared: boolean }):void, 'stack-item-updated': function(StackItemEvent, UndoManager):void }>}
|
||||
*/
|
||||
export class UndoManager extends ObservableV2 {
|
||||
/**
|
||||
* @param {Doc|AbstractType<any>|Array<AbstractType<any>>} typeScope Limits the scope of the UndoManager. If this is set to a ydoc instance, all changes on that ydoc will be undone. If set to a specific type, only changes on that type or its children will be undone. Also accepts an array of types.
|
||||
* @param {UndoManagerOptions} options
|
||||
*/
|
||||
constructor (typeScope, {
|
||||
captureTimeout = 500,
|
||||
captureTransaction = _tr => true,
|
||||
deleteFilter = () => true,
|
||||
trackedOrigins = new Set([null]),
|
||||
ignoreRemoteMapChanges = false,
|
||||
doc = /** @type {Doc} */ (array.isArray(typeScope) ? typeScope[0].doc : typeScope instanceof Doc ? typeScope : typeScope.doc)
|
||||
} = {}) {
|
||||
super()
|
||||
/**
|
||||
* @type {Array<AbstractType<any> | Doc>}
|
||||
*/
|
||||
this.scope = []
|
||||
this.doc = doc
|
||||
this.addToScope(typeScope)
|
||||
this.deleteFilter = deleteFilter
|
||||
trackedOrigins.add(this)
|
||||
this.trackedOrigins = trackedOrigins
|
||||
this.captureTransaction = captureTransaction
|
||||
/**
|
||||
* @type {Array<StackItem>}
|
||||
*/
|
||||
this.undoStack = []
|
||||
/**
|
||||
* @type {Array<StackItem>}
|
||||
*/
|
||||
this.redoStack = []
|
||||
/**
|
||||
* Whether the client is currently undoing (calling UndoManager.undo)
|
||||
*
|
||||
* @type {boolean}
|
||||
*/
|
||||
this.undoing = false
|
||||
this.redoing = false
|
||||
/**
|
||||
* The currently popped stack item if UndoManager.undoing or UndoManager.redoing
|
||||
*
|
||||
* @type {StackItem|null}
|
||||
*/
|
||||
this.currStackItem = null
|
||||
this.lastChange = 0
|
||||
this.ignoreRemoteMapChanges = ignoreRemoteMapChanges
|
||||
this.captureTimeout = captureTimeout
|
||||
/**
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
this.afterTransactionHandler = transaction => {
|
||||
// Only track certain transactions
|
||||
if (
|
||||
!this.captureTransaction(transaction) ||
|
||||
!this.scope.some(type => transaction.changedParentTypes.has(/** @type {AbstractType<any>} */ (type)) || type === this.doc) ||
|
||||
(!this.trackedOrigins.has(transaction.origin) && (!transaction.origin || !this.trackedOrigins.has(transaction.origin.constructor)))
|
||||
) {
|
||||
return
|
||||
}
|
||||
const undoing = this.undoing
|
||||
const redoing = this.redoing
|
||||
const stack = undoing ? this.redoStack : this.undoStack
|
||||
if (undoing) {
|
||||
this.stopCapturing() // next undo should not be appended to last stack item
|
||||
} else if (!redoing) {
|
||||
// neither undoing nor redoing: delete redoStack
|
||||
this.clear(false, true)
|
||||
}
|
||||
const insertions = new DeleteSet()
|
||||
transaction.afterState.forEach((endClock, client) => {
|
||||
const startClock = transaction.beforeState.get(client) || 0
|
||||
const len = endClock - startClock
|
||||
if (len > 0) {
|
||||
addToDeleteSet(insertions, client, startClock, len)
|
||||
}
|
||||
})
|
||||
const now = time.getUnixTime()
|
||||
let didAdd = false
|
||||
if (this.lastChange > 0 && now - this.lastChange < this.captureTimeout && stack.length > 0 && !undoing && !redoing) {
|
||||
// append change to last stack op
|
||||
const lastOp = stack[stack.length - 1]
|
||||
lastOp.deletions = mergeDeleteSets([lastOp.deletions, transaction.deleteSet])
|
||||
lastOp.insertions = mergeDeleteSets([lastOp.insertions, insertions])
|
||||
} else {
|
||||
// create a new stack op
|
||||
stack.push(new StackItem(transaction.deleteSet, insertions))
|
||||
didAdd = true
|
||||
}
|
||||
if (!undoing && !redoing) {
|
||||
this.lastChange = now
|
||||
}
|
||||
// make sure that deleted structs are not gc'd
|
||||
iterateDeletedStructs(transaction, transaction.deleteSet, /** @param {Item|GC} item */ item => {
|
||||
if (item instanceof Item && this.scope.some(type => type === transaction.doc || isParentOf(/** @type {AbstractType<any>} */ (type), item))) {
|
||||
keepItem(item, true)
|
||||
}
|
||||
})
|
||||
/**
|
||||
* @type {[StackItemEvent, UndoManager]}
|
||||
*/
|
||||
const changeEvent = [{ stackItem: stack[stack.length - 1], origin: transaction.origin, type: undoing ? 'redo' : 'undo', changedParentTypes: transaction.changedParentTypes }, this]
|
||||
if (didAdd) {
|
||||
this.emit('stack-item-added', changeEvent)
|
||||
} else {
|
||||
this.emit('stack-item-updated', changeEvent)
|
||||
}
|
||||
}
|
||||
this.doc.on('afterTransaction', this.afterTransactionHandler)
|
||||
this.doc.on('destroy', () => {
|
||||
this.destroy()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Extend the scope.
|
||||
*
|
||||
* @param {Array<AbstractType<any> | Doc> | AbstractType<any> | Doc} ytypes
|
||||
*/
|
||||
addToScope (ytypes) {
|
||||
const tmpSet = new Set(this.scope)
|
||||
ytypes = array.isArray(ytypes) ? ytypes : [ytypes]
|
||||
ytypes.forEach(ytype => {
|
||||
if (!tmpSet.has(ytype)) {
|
||||
tmpSet.add(ytype)
|
||||
if (ytype instanceof AbstractType ? ytype.doc !== this.doc : ytype !== this.doc) logging.warn('[yjs#509] Not same Y.Doc') // use MultiDocUndoManager instead. also see https://github.com/yjs/yjs/issues/509
|
||||
this.scope.push(ytype)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} origin
|
||||
*/
|
||||
addTrackedOrigin (origin) {
|
||||
this.trackedOrigins.add(origin)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} origin
|
||||
*/
|
||||
removeTrackedOrigin (origin) {
|
||||
this.trackedOrigins.delete(origin)
|
||||
}
|
||||
|
||||
clear (clearUndoStack = true, clearRedoStack = true) {
|
||||
if ((clearUndoStack && this.canUndo()) || (clearRedoStack && this.canRedo())) {
|
||||
this.doc.transact(tr => {
|
||||
if (clearUndoStack) {
|
||||
this.undoStack.forEach(item => clearUndoManagerStackItem(tr, this, item))
|
||||
this.undoStack = []
|
||||
}
|
||||
if (clearRedoStack) {
|
||||
this.redoStack.forEach(item => clearUndoManagerStackItem(tr, this, item))
|
||||
this.redoStack = []
|
||||
}
|
||||
this.emit('stack-cleared', [{ undoStackCleared: clearUndoStack, redoStackCleared: clearRedoStack }])
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* UndoManager merges Undo-StackItems if they are created within a time-gap
|
||||
* smaller than `options.captureTimeout`. Call `um.stopCapturing()` so that the next
|
||||
* StackItem won't be merged.
|
||||
*
|
||||
*
|
||||
* @example
|
||||
* // without stopCapturing
|
||||
* ytext.insert(0, 'a')
|
||||
* ytext.insert(1, 'b')
|
||||
* um.undo()
|
||||
* ytext.toString() // => '' (note that 'ab' was removed)
|
||||
* // with stopCapturing
|
||||
* ytext.insert(0, 'a')
|
||||
* um.stopCapturing()
|
||||
* ytext.insert(0, 'b')
|
||||
* um.undo()
|
||||
* ytext.toString() // => 'a' (note that only 'b' was removed)
|
||||
*
|
||||
*/
|
||||
stopCapturing () {
|
||||
this.lastChange = 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Undo last changes on type.
|
||||
*
|
||||
* @return {StackItem?} Returns StackItem if a change was applied
|
||||
*/
|
||||
undo () {
|
||||
this.undoing = true
|
||||
let res
|
||||
try {
|
||||
res = popStackItem(this, this.undoStack, 'undo')
|
||||
} finally {
|
||||
this.undoing = false
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
/**
|
||||
* Redo last undo operation.
|
||||
*
|
||||
* @return {StackItem?} Returns StackItem if a change was applied
|
||||
*/
|
||||
redo () {
|
||||
this.redoing = true
|
||||
let res
|
||||
try {
|
||||
res = popStackItem(this, this.redoStack, 'redo')
|
||||
} finally {
|
||||
this.redoing = false
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
/**
|
||||
* Are undo steps available?
|
||||
*
|
||||
* @return {boolean} `true` if undo is possible
|
||||
*/
|
||||
canUndo () {
|
||||
return this.undoStack.length > 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Are redo steps available?
|
||||
*
|
||||
* @return {boolean} `true` if redo is possible
|
||||
*/
|
||||
canRedo () {
|
||||
return this.redoStack.length > 0
|
||||
}
|
||||
|
||||
destroy () {
|
||||
this.trackedOrigins.delete(this)
|
||||
this.doc.off('afterTransaction', this.afterTransactionHandler)
|
||||
super.destroy()
|
||||
}
|
||||
}
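
// Illustrative usage sketch (not part of the library). The document/type names,
// the origin string, and the timeout value below are assumptions chosen for
// demonstration only. It shows how `trackedOrigins`, `captureTimeout`, and
// `stopCapturing` interact.
const _exampleUndoManagerUsage = () => {
  const doc = new Doc()
  const ytext = doc.getText('text')
  const undoManager = new UndoManager(ytext, {
    captureTimeout: 500, // merge edits that happen within 500ms into one stack item
    trackedOrigins: new Set(['local-ui']) // only undo changes made with this origin
  })
  doc.transact(() => ytext.insert(0, 'a'), 'local-ui')
  undoManager.stopCapturing() // the next change gets its own stack item
  doc.transact(() => ytext.insert(1, 'b'), 'local-ui')
  undoManager.undo() // removes only 'b'
  return ytext.toString() // => 'a'
}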
|
281
src/utils/UpdateDecoder.js
Normal file
@ -0,0 +1,281 @@
|
||||
import * as buffer from 'lib0/buffer'
|
||||
import * as decoding from 'lib0/decoding'
|
||||
import {
|
||||
ID, createID
|
||||
} from '../internals.js'
|
||||
|
||||
export class DSDecoderV1 {
|
||||
/**
|
||||
* @param {decoding.Decoder} decoder
|
||||
*/
|
||||
constructor (decoder) {
|
||||
this.restDecoder = decoder
|
||||
}
|
||||
|
||||
resetDsCurVal () {
|
||||
// nop
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
readDsClock () {
|
||||
return decoding.readVarUint(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
readDsLen () {
|
||||
return decoding.readVarUint(this.restDecoder)
|
||||
}
|
||||
}
|
||||
|
||||
export class UpdateDecoderV1 extends DSDecoderV1 {
|
||||
/**
|
||||
* @return {ID}
|
||||
*/
|
||||
readLeftID () {
|
||||
return createID(decoding.readVarUint(this.restDecoder), decoding.readVarUint(this.restDecoder))
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ID}
|
||||
*/
|
||||
readRightID () {
|
||||
return createID(decoding.readVarUint(this.restDecoder), decoding.readVarUint(this.restDecoder))
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the next client id.
|
||||
* Use this in favor of readID whenever possible to reduce the number of objects created.
|
||||
*/
|
||||
readClient () {
|
||||
return decoding.readVarUint(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number} info An unsigned 8-bit integer
|
||||
*/
|
||||
readInfo () {
|
||||
return decoding.readUint8(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
readString () {
|
||||
return decoding.readVarString(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean} isKey
|
||||
*/
|
||||
readParentInfo () {
|
||||
return decoding.readVarUint(this.restDecoder) === 1
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number} info An unsigned 8-bit integer
|
||||
*/
|
||||
readTypeRef () {
|
||||
return decoding.readVarUint(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the len of a struct - well suited for Opt RLE encoder.
|
||||
*
|
||||
* @return {number} len
|
||||
*/
|
||||
readLen () {
|
||||
return decoding.readVarUint(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {any}
|
||||
*/
|
||||
readAny () {
|
||||
return decoding.readAny(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
readBuf () {
|
||||
return buffer.copyUint8Array(decoding.readVarUint8Array(this.restDecoder))
|
||||
}
|
||||
|
||||
/**
|
||||
* Legacy implementation uses JSON parse. We use any-decoding in v2.
|
||||
*
|
||||
* @return {any}
|
||||
*/
|
||||
readJSON () {
|
||||
return JSON.parse(decoding.readVarString(this.restDecoder))
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
readKey () {
|
||||
return decoding.readVarString(this.restDecoder)
|
||||
}
|
||||
}
|
||||
|
||||
export class DSDecoderV2 {
|
||||
/**
|
||||
* @param {decoding.Decoder} decoder
|
||||
*/
|
||||
constructor (decoder) {
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
this.dsCurrVal = 0
|
||||
this.restDecoder = decoder
|
||||
}
|
||||
|
||||
resetDsCurVal () {
|
||||
this.dsCurrVal = 0
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
readDsClock () {
|
||||
this.dsCurrVal += decoding.readVarUint(this.restDecoder)
|
||||
return this.dsCurrVal
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number}
|
||||
*/
|
||||
readDsLen () {
|
||||
const diff = decoding.readVarUint(this.restDecoder) + 1
|
||||
this.dsCurrVal += diff
|
||||
return diff
|
||||
}
|
||||
}
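
// Illustrative sketch (not part of the library): how DSDecoderV2 reconstructs absolute
// clock values from the delta-encoded var-uints written by DSEncoderV2. The byte values
// below are assumptions chosen so that each var-uint fits into a single byte.
const _exampleDsDeltaDecoding = () => {
  // bytes a DSEncoderV2 would produce for: clock 10, len 5, clock 20
  const dec = new DSDecoderV2(decoding.createDecoder(new Uint8Array([10, 4, 5])))
  dec.readDsClock() // 0 + 10 => 10
  dec.readDsLen() // 4 + 1 => 5 (dsCurrVal advances to 15)
  return dec.readDsClock() // 15 + 5 => 20
}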
|
||||
|
||||
export class UpdateDecoderV2 extends DSDecoderV2 {
|
||||
/**
|
||||
* @param {decoding.Decoder} decoder
|
||||
*/
|
||||
constructor (decoder) {
|
||||
super(decoder)
|
||||
/**
|
||||
* List of cached keys. If the keys[id] does not exist, we read a new key
|
||||
* from stringEncoder and push it to keys.
|
||||
*
|
||||
* @type {Array<string>}
|
||||
*/
|
||||
this.keys = []
|
||||
decoding.readVarUint(decoder) // read feature flag - currently unused
|
||||
this.keyClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||
this.clientDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||
this.leftClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||
this.rightClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||
this.infoDecoder = new decoding.RleDecoder(decoding.readVarUint8Array(decoder), decoding.readUint8)
|
||||
this.stringDecoder = new decoding.StringDecoder(decoding.readVarUint8Array(decoder))
|
||||
this.parentInfoDecoder = new decoding.RleDecoder(decoding.readVarUint8Array(decoder), decoding.readUint8)
|
||||
this.typeRefDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||
this.lenDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder))
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ID}
|
||||
*/
|
||||
readLeftID () {
|
||||
return new ID(this.clientDecoder.read(), this.leftClockDecoder.read())
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {ID}
|
||||
*/
|
||||
readRightID () {
|
||||
return new ID(this.clientDecoder.read(), this.rightClockDecoder.read())
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the next client id.
|
||||
* Use this in favor of readID whenever possible to reduce the number of objects created.
|
||||
*/
|
||||
readClient () {
|
||||
return this.clientDecoder.read()
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number} info An unsigned 8-bit integer
|
||||
*/
|
||||
readInfo () {
|
||||
return /** @type {number} */ (this.infoDecoder.read())
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
readString () {
|
||||
return this.stringDecoder.read()
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean}
|
||||
*/
|
||||
readParentInfo () {
|
||||
return this.parentInfoDecoder.read() === 1
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {number} An unsigned 8-bit integer
|
||||
*/
|
||||
readTypeRef () {
|
||||
return this.typeRefDecoder.read()
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the len of a struct - well suited for Opt RLE encoder.
|
||||
*
|
||||
* @return {number}
|
||||
*/
|
||||
readLen () {
|
||||
return this.lenDecoder.read()
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {any}
|
||||
*/
|
||||
readAny () {
|
||||
return decoding.readAny(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
readBuf () {
|
||||
return decoding.readVarUint8Array(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* This is mainly here for legacy purposes.
|
||||
*
|
||||
* Initially we encoded objects using JSON. Now we use the much faster lib0/any-encoder. This method mainly exists for legacy compatibility with the v1 format.
|
||||
*
|
||||
* @return {any}
|
||||
*/
|
||||
readJSON () {
|
||||
return decoding.readAny(this.restDecoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
readKey () {
|
||||
const keyClock = this.keyClockDecoder.read()
|
||||
if (keyClock < this.keys.length) {
|
||||
return this.keys[keyClock]
|
||||
} else {
|
||||
const key = this.stringDecoder.read()
|
||||
this.keys.push(key)
|
||||
return key
|
||||
}
|
||||
}
|
||||
}
|
320
src/utils/UpdateEncoder.js
Normal file
@ -0,0 +1,320 @@
|
||||
import * as error from 'lib0/error'
|
||||
import * as encoding from 'lib0/encoding'
|
||||
|
||||
import {
|
||||
ID // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
export class DSEncoderV1 {
|
||||
constructor () {
|
||||
this.restEncoder = encoding.createEncoder()
|
||||
}
|
||||
|
||||
toUint8Array () {
|
||||
return encoding.toUint8Array(this.restEncoder)
|
||||
}
|
||||
|
||||
resetDsCurVal () {
|
||||
// nop
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} clock
|
||||
*/
|
||||
writeDsClock (clock) {
|
||||
encoding.writeVarUint(this.restEncoder, clock)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} len
|
||||
*/
|
||||
writeDsLen (len) {
|
||||
encoding.writeVarUint(this.restEncoder, len)
|
||||
}
|
||||
}
|
||||
|
||||
export class UpdateEncoderV1 extends DSEncoderV1 {
|
||||
/**
|
||||
* @param {ID} id
|
||||
*/
|
||||
writeLeftID (id) {
|
||||
encoding.writeVarUint(this.restEncoder, id.client)
|
||||
encoding.writeVarUint(this.restEncoder, id.clock)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ID} id
|
||||
*/
|
||||
writeRightID (id) {
|
||||
encoding.writeVarUint(this.restEncoder, id.client)
|
||||
encoding.writeVarUint(this.restEncoder, id.clock)
|
||||
}
|
||||
|
||||
/**
|
||||
* Use writeClient and writeClock instead of writeID if possible.
|
||||
* @param {number} client
|
||||
*/
|
||||
writeClient (client) {
|
||||
encoding.writeVarUint(this.restEncoder, client)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} info An unsigned 8-bit integer
|
||||
*/
|
||||
writeInfo (info) {
|
||||
encoding.writeUint8(this.restEncoder, info)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} s
|
||||
*/
|
||||
writeString (s) {
|
||||
encoding.writeVarString(this.restEncoder, s)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {boolean} isYKey
|
||||
*/
|
||||
writeParentInfo (isYKey) {
|
||||
encoding.writeVarUint(this.restEncoder, isYKey ? 1 : 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} info An unsigned 8-bit integer
|
||||
*/
|
||||
writeTypeRef (info) {
|
||||
encoding.writeVarUint(this.restEncoder, info)
|
||||
}
|
||||
|
||||
/**
|
||||
* Write len of a struct - well suited for Opt RLE encoder.
|
||||
*
|
||||
* @param {number} len
|
||||
*/
|
||||
writeLen (len) {
|
||||
encoding.writeVarUint(this.restEncoder, len)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} any
|
||||
*/
|
||||
writeAny (any) {
|
||||
encoding.writeAny(this.restEncoder, any)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} buf
|
||||
*/
|
||||
writeBuf (buf) {
|
||||
encoding.writeVarUint8Array(this.restEncoder, buf)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} embed
|
||||
*/
|
||||
writeJSON (embed) {
|
||||
encoding.writeVarString(this.restEncoder, JSON.stringify(embed))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} key
|
||||
*/
|
||||
writeKey (key) {
|
||||
encoding.writeVarString(this.restEncoder, key)
|
||||
}
|
||||
}
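
// Illustrative sketch (not part of the library): UpdateEncoderV1 writes each field
// directly as var-uints / var-strings into one buffer, in contrast to V2, which groups
// fields into per-column run-length encoders. The values below are arbitrary examples.
const _exampleV1Encoding = () => {
  const enc = new UpdateEncoderV1()
  enc.writeClient(1) // var-uint
  enc.writeString('hello') // var-string
  enc.writeLen(5) // var-uint
  return enc.toUint8Array()
}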
|
||||
|
||||
export class DSEncoderV2 {
|
||||
constructor () {
|
||||
this.restEncoder = encoding.createEncoder() // encodes all the rest / non-optimized
|
||||
this.dsCurrVal = 0
|
||||
}
|
||||
|
||||
toUint8Array () {
|
||||
return encoding.toUint8Array(this.restEncoder)
|
||||
}
|
||||
|
||||
resetDsCurVal () {
|
||||
this.dsCurrVal = 0
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} clock
|
||||
*/
|
||||
writeDsClock (clock) {
|
||||
const diff = clock - this.dsCurrVal
|
||||
this.dsCurrVal = clock
|
||||
encoding.writeVarUint(this.restEncoder, diff)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} len
|
||||
*/
|
||||
writeDsLen (len) {
|
||||
if (len === 0) {
|
||||
error.unexpectedCase()
|
||||
}
|
||||
encoding.writeVarUint(this.restEncoder, len - 1)
|
||||
this.dsCurrVal += len
|
||||
}
|
||||
}
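
// Illustrative sketch (not part of the library): DSEncoderV2 delta-encodes monotonically
// increasing clocks, so consecutive values produce small var-uints. The numbers are
// assumptions for illustration.
const _exampleDsDeltaEncoding = () => {
  const enc = new DSEncoderV2()
  enc.writeDsClock(10) // writes 10 - 0 = 10
  enc.writeDsLen(5) // writes 5 - 1 = 4, dsCurrVal advances to 15
  enc.writeDsClock(20) // writes 20 - 15 = 5
  return enc.toUint8Array() // three single-byte var-uints: 10, 4, 5
}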
|
||||
|
||||
export class UpdateEncoderV2 extends DSEncoderV2 {
|
||||
constructor () {
|
||||
super()
|
||||
/**
|
||||
* @type {Map<string,number>}
|
||||
*/
|
||||
this.keyMap = new Map()
|
||||
/**
|
||||
* Refers to the next unique key-identifier to be used.
|
||||
* See writeKey method for more information.
|
||||
*
|
||||
* @type {number}
|
||||
*/
|
||||
this.keyClock = 0
|
||||
this.keyClockEncoder = new encoding.IntDiffOptRleEncoder()
|
||||
this.clientEncoder = new encoding.UintOptRleEncoder()
|
||||
this.leftClockEncoder = new encoding.IntDiffOptRleEncoder()
|
||||
this.rightClockEncoder = new encoding.IntDiffOptRleEncoder()
|
||||
this.infoEncoder = new encoding.RleEncoder(encoding.writeUint8)
|
||||
this.stringEncoder = new encoding.StringEncoder()
|
||||
this.parentInfoEncoder = new encoding.RleEncoder(encoding.writeUint8)
|
||||
this.typeRefEncoder = new encoding.UintOptRleEncoder()
|
||||
this.lenEncoder = new encoding.UintOptRleEncoder()
|
||||
}
|
||||
|
||||
toUint8Array () {
|
||||
const encoder = encoding.createEncoder()
|
||||
encoding.writeVarUint(encoder, 0) // this is a feature flag that we might use in the future
|
||||
encoding.writeVarUint8Array(encoder, this.keyClockEncoder.toUint8Array())
|
||||
encoding.writeVarUint8Array(encoder, this.clientEncoder.toUint8Array())
|
||||
encoding.writeVarUint8Array(encoder, this.leftClockEncoder.toUint8Array())
|
||||
encoding.writeVarUint8Array(encoder, this.rightClockEncoder.toUint8Array())
|
||||
encoding.writeVarUint8Array(encoder, encoding.toUint8Array(this.infoEncoder))
|
||||
encoding.writeVarUint8Array(encoder, this.stringEncoder.toUint8Array())
|
||||
encoding.writeVarUint8Array(encoder, encoding.toUint8Array(this.parentInfoEncoder))
|
||||
encoding.writeVarUint8Array(encoder, this.typeRefEncoder.toUint8Array())
|
||||
encoding.writeVarUint8Array(encoder, this.lenEncoder.toUint8Array())
|
||||
// @note The rest encoder is appended! (note the missing var)
|
||||
encoding.writeUint8Array(encoder, encoding.toUint8Array(this.restEncoder))
|
||||
return encoding.toUint8Array(encoder)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ID} id
|
||||
*/
|
||||
writeLeftID (id) {
|
||||
this.clientEncoder.write(id.client)
|
||||
this.leftClockEncoder.write(id.clock)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ID} id
|
||||
*/
|
||||
writeRightID (id) {
|
||||
this.clientEncoder.write(id.client)
|
||||
this.rightClockEncoder.write(id.clock)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} client
|
||||
*/
|
||||
writeClient (client) {
|
||||
this.clientEncoder.write(client)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} info An unsigned 8-bit integer
|
||||
*/
|
||||
writeInfo (info) {
|
||||
this.infoEncoder.write(info)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} s
|
||||
*/
|
||||
writeString (s) {
|
||||
this.stringEncoder.write(s)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {boolean} isYKey
|
||||
*/
|
||||
writeParentInfo (isYKey) {
|
||||
this.parentInfoEncoder.write(isYKey ? 1 : 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} info An unsigned 8-bit integer
|
||||
*/
|
||||
writeTypeRef (info) {
|
||||
this.typeRefEncoder.write(info)
|
||||
}
|
||||
|
||||
/**
|
||||
* Write len of a struct - well suited for Opt RLE encoder.
|
||||
*
|
||||
* @param {number} len
|
||||
*/
|
||||
writeLen (len) {
|
||||
this.lenEncoder.write(len)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {any} any
|
||||
*/
|
||||
writeAny (any) {
|
||||
encoding.writeAny(this.restEncoder, any)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} buf
|
||||
*/
|
||||
writeBuf (buf) {
|
||||
encoding.writeVarUint8Array(this.restEncoder, buf)
|
||||
}
|
||||
|
||||
/**
|
||||
* This is mainly here for legacy purposes.
|
||||
*
|
||||
* Initially we encoded objects using JSON. Now we use the much faster lib0/any-encoder. This method mainly exists for legacy compatibility with the v1 format.
|
||||
*
|
||||
* @param {any} embed
|
||||
*/
|
||||
writeJSON (embed) {
|
||||
encoding.writeAny(this.restEncoder, embed)
|
||||
}
|
||||
|
||||
/**
|
||||
* Property keys are often reused. For example, in y-prosemirror the key `bold` might
|
||||
* occur very often. For a 3d application, the key `position` might occur very often.
|
||||
*
|
||||
* We cache these keys in a Map and refer to them via a unique number.
|
||||
*
|
||||
* @param {string} key
|
||||
*/
|
||||
writeKey (key) {
|
||||
const clock = this.keyMap.get(key)
|
||||
if (clock === undefined) {
|
||||
/**
|
||||
* @todo uncomment to introduce this feature finally
|
||||
*
|
||||
* Background. The ContentFormat object was always encoded using writeKey, but the decoder used to use readString.
|
||||
* Furthermore, I forgot to set the keyclock. So everything was working fine.
|
||||
*
|
||||
* However, this feature here is basically useless as it is not being used (it actually only consumes extra memory).
|
||||
*
|
||||
* I don't know yet how to reintroduce this feature..
|
||||
*
|
||||
* Older clients won't be able to read updates when we reintroduce this feature. So this should probably be done using a flag.
|
||||
*
|
||||
*/
|
||||
// this.keyMap.set(key, this.keyClock)
|
||||
this.keyClockEncoder.write(this.keyClock++)
|
||||
this.stringEncoder.write(key)
|
||||
} else {
|
||||
this.keyClockEncoder.write(clock)
|
||||
}
|
||||
}
|
||||
}
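
// Illustrative sketch (not part of the library): with the key cache currently disabled
// (see the @todo in writeKey), every call assigns a fresh keyClock and re-writes the
// key string. The key name below is an arbitrary example.
const _exampleWriteKey = () => {
  const enc = new UpdateEncoderV2()
  enc.writeKey('bold') // keyClock 0, 'bold' written to stringEncoder
  enc.writeKey('bold') // keyClock 1, 'bold' written again because keyMap is never filled
  return enc.toUint8Array()
}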
|
277
src/utils/YEvent.js
Normal file
@ -0,0 +1,277 @@
|
||||
import {
|
||||
isDeleted,
|
||||
Item, AbstractType, Transaction, AbstractStruct // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as set from 'lib0/set'
|
||||
import * as array from 'lib0/array'
|
||||
import * as error from 'lib0/error'
|
||||
|
||||
const errorComputeChanges = 'You must not compute changes after the event-handler fired.'
|
||||
|
||||
/**
|
||||
* @template {AbstractType<any>} T
|
||||
* YEvent describes the changes on a YType.
|
||||
*/
|
||||
export class YEvent {
|
||||
/**
|
||||
* @param {T} target The changed type.
|
||||
* @param {Transaction} transaction
|
||||
*/
|
||||
constructor (target, transaction) {
|
||||
/**
|
||||
* The type on which this event was created on.
|
||||
* @type {T}
|
||||
*/
|
||||
this.target = target
|
||||
/**
|
||||
* The current target on which the observe callback is called.
|
||||
* @type {AbstractType<any>}
|
||||
*/
|
||||
this.currentTarget = target
|
||||
/**
|
||||
* The transaction that triggered this event.
|
||||
* @type {Transaction}
|
||||
*/
|
||||
this.transaction = transaction
|
||||
/**
|
||||
* @type {Object|null}
|
||||
*/
|
||||
this._changes = null
|
||||
/**
|
||||
* @type {null | Map<string, { action: 'add' | 'update' | 'delete', oldValue: any, newValue: any }>}
|
||||
*/
|
||||
this._keys = null
|
||||
/**
|
||||
* @type {null | Array<{ insert?: string | Array<any> | object | AbstractType<any>, retain?: number, delete?: number, attributes?: Object<string, any> }>}
|
||||
*/
|
||||
this._delta = null
|
||||
/**
|
||||
* @type {Array<string|number>|null}
|
||||
*/
|
||||
this._path = null
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the path from `y` to the changed type.
|
||||
*
|
||||
* @todo v14 should standardize on path: Array<{parent, index}> because that is easier to work with.
|
||||
*
|
||||
* The following property holds:
|
||||
* @example
|
||||
* let type = y
|
||||
* event.path.forEach(dir => {
|
||||
* type = type.get(dir)
|
||||
* })
|
||||
* type === event.target // => true
|
||||
*/
|
||||
get path () {
|
||||
return this._path || (this._path = getPathTo(this.currentTarget, this.target))
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a struct is deleted by this event.
|
||||
*
|
||||
* In contrast to change.deleted, this method also returns true if the struct was added and then deleted.
|
||||
*
|
||||
* @param {AbstractStruct} struct
|
||||
* @return {boolean}
|
||||
*/
|
||||
deletes (struct) {
|
||||
return isDeleted(this.transaction.deleteSet, struct.id)
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {Map<string, { action: 'add' | 'update' | 'delete', oldValue: any, newValue: any }>}
|
||||
*/
|
||||
get keys () {
|
||||
if (this._keys === null) {
|
||||
if (this.transaction.doc._transactionCleanups.length === 0) {
|
||||
throw error.create(errorComputeChanges)
|
||||
}
|
||||
const keys = new Map()
|
||||
const target = this.target
|
||||
const changed = /** @type Set<string|null> */ (this.transaction.changed.get(target))
|
||||
changed.forEach(key => {
|
||||
if (key !== null) {
|
||||
const item = /** @type {Item} */ (target._map.get(key))
|
||||
/**
|
||||
* @type {'delete' | 'add' | 'update'}
|
||||
*/
|
||||
let action
|
||||
let oldValue
|
||||
if (this.adds(item)) {
|
||||
let prev = item.left
|
||||
while (prev !== null && this.adds(prev)) {
|
||||
prev = prev.left
|
||||
}
|
||||
if (this.deletes(item)) {
|
||||
if (prev !== null && this.deletes(prev)) {
|
||||
action = 'delete'
|
||||
oldValue = array.last(prev.content.getContent())
|
||||
} else {
|
||||
return
|
||||
}
|
||||
} else {
|
||||
if (prev !== null && this.deletes(prev)) {
|
||||
action = 'update'
|
||||
oldValue = array.last(prev.content.getContent())
|
||||
} else {
|
||||
action = 'add'
|
||||
oldValue = undefined
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (this.deletes(item)) {
|
||||
action = 'delete'
|
||||
oldValue = array.last(/** @type {Item} */ item.content.getContent())
|
||||
} else {
|
||||
return // nop
|
||||
}
|
||||
}
|
||||
keys.set(key, { action, oldValue })
|
||||
}
|
||||
})
|
||||
this._keys = keys
|
||||
}
|
||||
return this._keys
|
||||
}
|
||||
|
||||
/**
|
||||
* This is a computed property. Note that this can only be safely computed during the
|
||||
* event call. Computing this property after other changes happened might result in
|
||||
* unexpected behavior (incorrect computation of deltas). A safe way to collect changes
|
||||
* is to store the `changes` or the `delta` object. Avoid storing the `transaction` object.
|
||||
*
|
||||
* @type {Array<{insert?: string | Array<any> | object | AbstractType<any>, retain?: number, delete?: number, attributes?: Object<string, any>}>}
|
||||
*/
|
||||
get delta () {
|
||||
return this.changes.delta
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a struct is added by this event.
|
||||
*
|
||||
* In contrast to change.added, this method also returns true if the struct was added and then deleted.
|
||||
*
|
||||
* @param {AbstractStruct} struct
|
||||
* @return {boolean}
|
||||
*/
|
||||
adds (struct) {
|
||||
return struct.id.clock >= (this.transaction.beforeState.get(struct.id.client) || 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* This is a computed property. Note that this can only be safely computed during the
|
||||
* event call. Computing this property after other changes happened might result in
|
||||
* unexpected behavior (incorrect computation of deltas). A safe way to collect changes
|
||||
* is to store the `changes` or the `delta` object. Avoid storing the `transaction` object.
|
||||
*
|
||||
* @type {{added:Set<Item>,deleted:Set<Item>,keys:Map<string,{action:'add'|'update'|'delete',oldValue:any}>,delta:Array<{insert?:Array<any>|string, delete?:number, retain?:number}>}}
|
||||
*/
|
||||
get changes () {
|
||||
let changes = this._changes
|
||||
if (changes === null) {
|
||||
if (this.transaction.doc._transactionCleanups.length === 0) {
|
||||
throw error.create(errorComputeChanges)
|
||||
}
|
||||
const target = this.target
|
||||
const added = set.create()
|
||||
const deleted = set.create()
|
||||
/**
|
||||
* @type {Array<{insert:Array<any>}|{delete:number}|{retain:number}>}
|
||||
*/
|
||||
const delta = []
|
||||
changes = {
|
||||
added,
|
||||
deleted,
|
||||
delta,
|
||||
keys: this.keys
|
||||
}
|
||||
const changed = /** @type Set<string|null> */ (this.transaction.changed.get(target))
|
||||
if (changed.has(null)) {
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let lastOp = null
|
||||
const packOp = () => {
|
||||
if (lastOp) {
|
||||
delta.push(lastOp)
|
||||
}
|
||||
}
|
||||
for (let item = target._start; item !== null; item = item.right) {
|
||||
if (item.deleted) {
|
||||
if (this.deletes(item) && !this.adds(item)) {
|
||||
if (lastOp === null || lastOp.delete === undefined) {
|
||||
packOp()
|
||||
lastOp = { delete: 0 }
|
||||
}
|
||||
lastOp.delete += item.length
|
||||
deleted.add(item)
|
||||
} // else nop
|
||||
} else {
|
||||
if (this.adds(item)) {
|
||||
if (lastOp === null || lastOp.insert === undefined) {
|
||||
packOp()
|
||||
lastOp = { insert: [] }
|
||||
}
|
||||
lastOp.insert = lastOp.insert.concat(item.content.getContent())
|
||||
added.add(item)
|
||||
} else {
|
||||
if (lastOp === null || lastOp.retain === undefined) {
|
||||
packOp()
|
||||
lastOp = { retain: 0 }
|
||||
}
|
||||
lastOp.retain += item.length
|
||||
}
|
||||
}
|
||||
}
|
||||
if (lastOp !== null && lastOp.retain === undefined) {
|
||||
packOp()
|
||||
}
|
||||
}
|
||||
this._changes = changes
|
||||
}
|
||||
return /** @type {any} */ (changes)
|
||||
}
|
||||
}
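
// Illustrative usage sketch (not part of the library): `changes`/`delta` must be read
// inside the observer callback; store the computed objects, never the event or the
// transaction. `ytype` is assumed to be any shared type obtained elsewhere
// (e.g. doc.getArray('items')).
const _exampleObserveChanges = ytype => {
  const collectedDeltas = []
  ytype.observe(event => {
    // safe: computed while the transaction cleanup is still in progress
    collectedDeltas.push(event.changes.delta)
  })
  return collectedDeltas
}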
|
||||
|
||||
/**
|
||||
* Compute the path from this type to the specified target.
|
||||
*
|
||||
* @example
|
||||
* // `child` should be accessible via `type.get(path[0]).get(path[1])..`
|
||||
* const path = type.getPathTo(child)
|
||||
* // assuming `type instanceof YArray`
|
||||
* console.log(path) // might look like => [2, 'key1']
|
||||
* child === type.get(path[0]).get(path[1])
|
||||
*
|
||||
* @param {AbstractType<any>} parent
|
||||
* @param {AbstractType<any>} child target
|
||||
* @return {Array<string|number>} Path to the target
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
const getPathTo = (parent, child) => {
|
||||
const path = []
|
||||
while (child._item !== null && child !== parent) {
|
||||
if (child._item.parentSub !== null) {
|
||||
// parent is map-ish
|
||||
path.unshift(child._item.parentSub)
|
||||
} else {
|
||||
// parent is array-ish
|
||||
let i = 0
|
||||
let c = /** @type {AbstractType<any>} */ (child._item.parent)._start
|
||||
while (c !== child._item && c !== null) {
|
||||
if (!c.deleted && c.countable) {
|
||||
i += c.length
|
||||
}
|
||||
c = c.right
|
||||
}
|
||||
path.unshift(i)
|
||||
}
|
||||
child = /** @type {AbstractType<any>} */ (child._item.parent)
|
||||
}
|
||||
return path
|
||||
}
|
644
src/utils/encoding.js
Normal file
@ -0,0 +1,644 @@
|
||||
/**
|
||||
* @module encoding
|
||||
*/
|
||||
/*
|
||||
* We use the first five bits in the info flag for determining the type of the struct.
|
||||
*
|
||||
* 0: GC
|
||||
* 1: Item with Deleted content
|
||||
* 2: Item with JSON content
|
||||
* 3: Item with Binary content
|
||||
* 4: Item with String content
|
||||
* 5: Item with Embed content (for richtext content)
|
||||
* 6: Item with Format content (a formatting marker for richtext content)
|
||||
* 7: Item with Type
|
||||
*/
|
||||
|
||||
import {
|
||||
findIndexSS,
|
||||
getState,
|
||||
createID,
|
||||
getStateVector,
|
||||
readAndApplyDeleteSet,
|
||||
writeDeleteSet,
|
||||
createDeleteSetFromStructStore,
|
||||
transact,
|
||||
readItemContent,
|
||||
UpdateDecoderV1,
|
||||
UpdateDecoderV2,
|
||||
UpdateEncoderV1,
|
||||
UpdateEncoderV2,
|
||||
DSEncoderV2,
|
||||
DSDecoderV1,
|
||||
DSEncoderV1,
|
||||
mergeUpdates,
|
||||
mergeUpdatesV2,
|
||||
Skip,
|
||||
diffUpdateV2,
|
||||
convertUpdateFormatV2ToV1,
|
||||
DSDecoderV2, Doc, Transaction, GC, Item, StructStore // eslint-disable-line
|
||||
} from '../internals.js'
|
||||
|
||||
import * as encoding from 'lib0/encoding'
|
||||
import * as decoding from 'lib0/decoding'
|
||||
import * as binary from 'lib0/binary'
|
||||
import * as map from 'lib0/map'
|
||||
import * as math from 'lib0/math'
|
||||
import * as array from 'lib0/array'
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {Array<GC|Item>} structs All structs by `client`
|
||||
* @param {number} client
|
||||
* @param {number} clock write structs starting with `ID(client,clock)`
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
const writeStructs = (encoder, structs, client, clock) => {
|
||||
// write first id
|
||||
clock = math.max(clock, structs[0].id.clock) // make sure the first id exists
|
||||
const startNewStructs = findIndexSS(structs, clock)
|
||||
// write # encoded structs
|
||||
encoding.writeVarUint(encoder.restEncoder, structs.length - startNewStructs)
|
||||
encoder.writeClient(client)
|
||||
encoding.writeVarUint(encoder.restEncoder, clock)
|
||||
const firstStruct = structs[startNewStructs]
|
||||
// write first struct with an offset
|
||||
firstStruct.write(encoder, clock - firstStruct.id.clock)
|
||||
for (let i = startNewStructs + 1; i < structs.length; i++) {
|
||||
structs[i].write(encoder, 0)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {StructStore} store
|
||||
* @param {Map<number,number>} _sm
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const writeClientsStructs = (encoder, store, _sm) => {
|
||||
// we filter all valid _sm entries into sm
|
||||
const sm = new Map()
|
||||
_sm.forEach((clock, client) => {
|
||||
// only write if new structs are available
|
||||
if (getState(store, client) > clock) {
|
||||
sm.set(client, clock)
|
||||
}
|
||||
})
|
||||
getStateVector(store).forEach((_clock, client) => {
|
||||
if (!_sm.has(client)) {
|
||||
sm.set(client, 0)
|
||||
}
|
||||
})
|
||||
// write # states that were updated
|
||||
encoding.writeVarUint(encoder.restEncoder, sm.size)
|
||||
// Write items with higher client ids first
|
||||
// This heavily improves the conflict algorithm.
|
||||
array.from(sm.entries()).sort((a, b) => b[0] - a[0]).forEach(([client, clock]) => {
|
||||
writeStructs(encoder, /** @type {Array<GC|Item>} */ (store.clients.get(client)), client, clock)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from.
|
||||
* @param {Doc} doc
|
||||
* @return {Map<number, { i: number, refs: Array<Item | GC> }>}
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const readClientsStructRefs = (decoder, doc) => {
|
||||
/**
|
||||
* @type {Map<number, { i: number, refs: Array<Item | GC> }>}
|
||||
*/
|
||||
const clientRefs = map.create()
|
||||
const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder)
|
||||
for (let i = 0; i < numOfStateUpdates; i++) {
|
||||
const numberOfStructs = decoding.readVarUint(decoder.restDecoder)
|
||||
/**
|
||||
* @type {Array<GC|Item>}
|
||||
*/
|
||||
const refs = new Array(numberOfStructs)
|
||||
const client = decoder.readClient()
|
||||
let clock = decoding.readVarUint(decoder.restDecoder)
|
||||
// const start = performance.now()
|
||||
clientRefs.set(client, { i: 0, refs })
|
||||
for (let i = 0; i < numberOfStructs; i++) {
|
||||
const info = decoder.readInfo()
|
||||
switch (binary.BITS5 & info) {
|
||||
case 0: { // GC
|
||||
const len = decoder.readLen()
|
||||
refs[i] = new GC(createID(client, clock), len)
|
||||
clock += len
|
||||
break
|
||||
}
|
||||
case 10: { // Skip Struct (nothing to apply)
|
||||
// @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing.
|
||||
const len = decoding.readVarUint(decoder.restDecoder)
|
||||
refs[i] = new Skip(createID(client, clock), len)
|
||||
clock += len
|
||||
break
|
||||
}
|
||||
default: { // Item with content
|
||||
/**
|
||||
* The optimized implementation doesn't use any variables because inlining variables is faster.
|
||||
* Below a non-optimized version is shown that implements the basic algorithm with
|
||||
* a few comments
|
||||
*/
|
||||
const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
|
||||
// If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
|
||||
// and we read the next string as parentYKey.
|
||||
// It indicates how we store/retrieve parent from `y.share`
|
||||
// @type {string|null}
|
||||
const struct = new Item(
|
||||
createID(client, clock),
|
||||
null, // left
|
||||
(info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin
|
||||
null, // right
|
||||
(info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin
|
||||
cantCopyParentInfo ? (decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent
|
||||
cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
|
||||
readItemContent(decoder, info) // item content
|
||||
)
|
||||
/* A non-optimized implementation of the above algorithm:
|
||||
|
||||
// The item that was originally to the left of this item.
|
||||
const origin = (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null
|
||||
// The item that was originally to the right of this item.
|
||||
const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null
|
||||
const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
|
||||
const hasParentYKey = cantCopyParentInfo ? decoder.readParentInfo() : false
|
||||
// If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
|
||||
// and we read the next string as parentYKey.
|
||||
// It indicates how we store/retrieve parent from `y.share`
|
||||
// @type {string|null}
|
||||
const parentYKey = cantCopyParentInfo && hasParentYKey ? decoder.readString() : null
|
||||
|
||||
const struct = new Item(
|
||||
createID(client, clock),
|
||||
null, // left
|
||||
origin, // origin
|
||||
null, // right
|
||||
rightOrigin, // right origin
|
||||
cantCopyParentInfo && !hasParentYKey ? decoder.readLeftID() : (parentYKey !== null ? doc.get(parentYKey) : null), // parent
|
||||
cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
|
||||
readItemContent(decoder, info) // item content
|
||||
)
|
||||
*/
|
||||
refs[i] = struct
|
||||
clock += struct.length
|
||||
}
|
||||
}
|
||||
}
|
||||
// console.log('time to read: ', performance.now() - start) // @todo remove
|
||||
}
|
||||
return clientRefs
|
||||
}
|
||||
|
||||
/**
|
||||
* Resume computing structs generated by struct readers.
|
||||
*
|
||||
* While there is something to do, we integrate structs in this order
|
||||
* 1. top element on stack, if stack is not empty
|
||||
* 2. next element from current struct reader (if empty, use next struct reader)
|
||||
*
|
||||
* If struct causally depends on another struct (ref.missing), we put next reader of
|
||||
* `ref.id.client` on top of stack.
|
||||
*
|
||||
* At some point we find a struct that has no causal dependencies,
|
||||
* then we start emptying the stack.
|
||||
*
|
||||
* It is not possible to have cycles: i.e. struct1 (from client1) depends on struct2 (from client2)
|
||||
* depends on struct3 (from client1). Therefore the max stack size is equal to `structReaders.length`.
|
||||
*
|
||||
* This method is implemented in a way so that we can resume computation if this update
|
||||
* causally depends on another update.
|
||||
*
|
||||
* @param {Transaction} transaction
|
||||
* @param {StructStore} store
|
||||
* @param {Map<number, { i: number, refs: (GC | Item)[] }>} clientsStructRefs
|
||||
* @return { null | { update: Uint8Array, missing: Map<number,number> } }
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
const integrateStructs = (transaction, store, clientsStructRefs) => {
|
||||
/**
|
||||
* @type {Array<Item | GC>}
|
||||
*/
|
||||
const stack = []
|
||||
// sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user.
|
||||
let clientsStructRefsIds = array.from(clientsStructRefs.keys()).sort((a, b) => a - b)
|
||||
if (clientsStructRefsIds.length === 0) {
|
||||
return null
|
||||
}
|
||||
const getNextStructTarget = () => {
|
||||
if (clientsStructRefsIds.length === 0) {
|
||||
return null
|
||||
}
|
||||
let nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
|
||||
while (nextStructsTarget.refs.length === nextStructsTarget.i) {
|
||||
clientsStructRefsIds.pop()
|
||||
if (clientsStructRefsIds.length > 0) {
|
||||
nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
}
|
||||
return nextStructsTarget
|
||||
}
|
||||
let curStructsTarget = getNextStructTarget()
|
||||
if (curStructsTarget === null) {
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {StructStore}
|
||||
*/
|
||||
const restStructs = new StructStore()
|
||||
const missingSV = new Map()
|
||||
/**
|
||||
* @param {number} client
|
||||
* @param {number} clock
|
||||
*/
|
||||
const updateMissingSv = (client, clock) => {
|
||||
const mclock = missingSV.get(client)
|
||||
if (mclock == null || mclock > clock) {
|
||||
missingSV.set(client, clock)
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @type {GC|Item}
|
||||
*/
|
||||
let stackHead = /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++]
|
||||
// caching the state because it is used very often
|
||||
const state = new Map()
|
||||
|
||||
const addStackToRestSS = () => {
|
||||
for (const item of stack) {
|
||||
const client = item.id.client
|
||||
const inapplicableItems = clientsStructRefs.get(client)
|
||||
if (inapplicableItems) {
|
||||
// decrement because we weren't able to apply previous operation
|
||||
inapplicableItems.i--
|
||||
restStructs.clients.set(client, inapplicableItems.refs.slice(inapplicableItems.i))
|
||||
clientsStructRefs.delete(client)
|
||||
inapplicableItems.i = 0
|
||||
inapplicableItems.refs = []
|
||||
} else {
|
||||
// item was the last item on clientsStructRefs and the field was already cleared. Add item to restStructs and continue
|
||||
restStructs.clients.set(client, [item])
|
||||
}
|
||||
// remove client from clientsStructRefsIds to prevent users from applying the same update again
|
||||
clientsStructRefsIds = clientsStructRefsIds.filter(c => c !== client)
|
||||
}
|
||||
stack.length = 0
|
||||
}
|
||||
|
||||
// iterate over all struct readers until we are done
|
||||
while (true) {
|
||||
if (stackHead.constructor !== Skip) {
|
||||
const localClock = map.setIfUndefined(state, stackHead.id.client, () => getState(store, stackHead.id.client))
|
||||
const offset = localClock - stackHead.id.clock
|
||||
if (offset < 0) {
|
||||
// update from the same client is missing
|
||||
stack.push(stackHead)
|
||||
updateMissingSv(stackHead.id.client, stackHead.id.clock - 1)
|
||||
// hit a dead end, add all items from stack to restSS
|
||||
addStackToRestSS()
|
||||
} else {
|
||||
const missing = stackHead.getMissing(transaction, store)
|
||||
if (missing !== null) {
|
||||
stack.push(stackHead)
|
||||
// get the struct reader that has the missing struct
|
||||
/**
|
||||
* @type {{ refs: Array<GC|Item>, i: number }}
|
||||
*/
|
||||
const structRefs = clientsStructRefs.get(/** @type {number} */ (missing)) || { refs: [], i: 0 }
|
||||
if (structRefs.refs.length === structRefs.i) {
|
||||
// This update message causally depends on another update message that doesn't exist yet
|
||||
updateMissingSv(/** @type {number} */ (missing), getState(store, missing))
|
||||
addStackToRestSS()
|
||||
} else {
|
||||
stackHead = structRefs.refs[structRefs.i++]
|
||||
continue
|
||||
}
|
||||
} else if (offset === 0 || offset < stackHead.length) {
|
||||
// all fine, apply the stackhead
|
||||
stackHead.integrate(transaction, offset)
|
||||
state.set(stackHead.id.client, stackHead.id.clock + stackHead.length)
|
||||
}
|
||||
}
|
||||
}
|
||||
// iterate to next stackHead
|
||||
if (stack.length > 0) {
|
||||
stackHead = /** @type {GC|Item} */ (stack.pop())
|
||||
} else if (curStructsTarget !== null && curStructsTarget.i < curStructsTarget.refs.length) {
|
||||
stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++])
|
||||
} else {
|
||||
curStructsTarget = getNextStructTarget()
|
||||
if (curStructsTarget === null) {
|
||||
// we are done!
|
||||
break
|
||||
} else {
|
||||
stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++])
|
||||
}
|
||||
}
|
||||
}
|
||||
if (restStructs.clients.size > 0) {
|
||||
const encoder = new UpdateEncoderV2()
|
||||
writeClientsStructs(encoder, restStructs, new Map())
|
||||
// write empty deleteset
|
||||
// writeDeleteSet(encoder, new DeleteSet())
|
||||
encoding.writeVarUint(encoder.restEncoder, 0) // => no need for an extra function call, just write 0 deletes
|
||||
return { missing: missingSV, update: encoder.toUint8Array() }
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {Transaction} transaction
|
||||
*
|
||||
* @private
|
||||
* @function
|
||||
*/
|
||||
export const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState)
|
||||
|
||||
/**
|
||||
* Read and apply a document update.
|
||||
*
|
||||
* This function has the same effect as `applyUpdate` but accepts a decoder.
|
||||
*
|
||||
* @param {decoding.Decoder} decoder
|
||||
* @param {Doc} ydoc
|
||||
* @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} [structDecoder]
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = new UpdateDecoderV2(decoder)) =>
|
||||
transact(ydoc, transaction => {
|
||||
// force that transaction.local is set to non-local
|
||||
transaction.local = false
|
||||
let retry = false
|
||||
const doc = transaction.doc
|
||||
const store = doc.store
|
||||
// let start = performance.now()
|
||||
const ss = readClientsStructRefs(structDecoder, doc)
|
||||
// console.log('time to read structs: ', performance.now() - start) // @todo remove
|
||||
// start = performance.now()
|
||||
// console.log('time to merge: ', performance.now() - start) // @todo remove
|
||||
// start = performance.now()
|
||||
const restStructs = integrateStructs(transaction, store, ss)
|
||||
const pending = store.pendingStructs
|
||||
if (pending) {
|
||||
// check if we can apply something
|
||||
for (const [client, clock] of pending.missing) {
|
||||
if (clock < getState(store, client)) {
|
||||
retry = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if (restStructs) {
|
||||
// merge restStructs into store.pending
|
||||
for (const [client, clock] of restStructs.missing) {
|
||||
const mclock = pending.missing.get(client)
|
||||
if (mclock == null || mclock > clock) {
|
||||
pending.missing.set(client, clock)
|
||||
}
|
||||
}
|
||||
pending.update = mergeUpdatesV2([pending.update, restStructs.update])
|
||||
}
|
||||
} else {
|
||||
store.pendingStructs = restStructs
|
||||
}
|
||||
// console.log('time to integrate: ', performance.now() - start) // @todo remove
|
||||
// start = performance.now()
|
||||
const dsRest = readAndApplyDeleteSet(structDecoder, transaction, store)
|
||||
if (store.pendingDs) {
|
||||
// @todo we could make a lower-bound state-vector check as we do above
|
||||
const pendingDSUpdate = new UpdateDecoderV2(decoding.createDecoder(store.pendingDs))
|
||||
decoding.readVarUint(pendingDSUpdate.restDecoder) // read 0 structs, because we only encode deletes in pendingdsupdate
|
||||
const dsRest2 = readAndApplyDeleteSet(pendingDSUpdate, transaction, store)
|
||||
if (dsRest && dsRest2) {
|
||||
// case 1: ds1 != null && ds2 != null
|
||||
store.pendingDs = mergeUpdatesV2([dsRest, dsRest2])
|
||||
} else {
|
||||
// case 2: ds1 != null
|
||||
// case 3: ds2 != null
|
||||
// case 4: ds1 == null && ds2 == null
|
||||
store.pendingDs = dsRest || dsRest2
|
||||
}
|
||||
} else {
|
||||
// Either dsRest == null && pendingDs == null OR dsRest != null
|
||||
store.pendingDs = dsRest
|
||||
}
|
||||
// console.log('time to cleanup: ', performance.now() - start) // @todo remove
|
||||
// start = performance.now()
|
||||
|
||||
// console.log('time to resume delete readers: ', performance.now() - start) // @todo remove
|
||||
// start = performance.now()
|
||||
if (retry) {
|
||||
const update = /** @type {{update: Uint8Array}} */ (store.pendingStructs).update
|
||||
store.pendingStructs = null
|
||||
applyUpdateV2(transaction.doc, update)
|
||||
}
|
||||
}, transactionOrigin, false)
|
||||
|
||||
/**
|
||||
* Read and apply a document update.
|
||||
*
|
||||
* This function has the same effect as `applyUpdate` but accepts a decoder.
|
||||
*
|
||||
* @param {decoding.Decoder} decoder
|
||||
* @param {Doc} ydoc
|
||||
* @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const readUpdate = (decoder, ydoc, transactionOrigin) => readUpdateV2(decoder, ydoc, transactionOrigin, new UpdateDecoderV1(decoder))
|
||||
|
||||
/**
|
||||
* Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
|
||||
*
|
||||
* This function has the same effect as `readUpdate` but accepts an Uint8Array instead of a Decoder.
|
||||
*
|
||||
* @param {Doc} ydoc
|
||||
* @param {Uint8Array} update
|
||||
* @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
|
||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const applyUpdateV2 = (ydoc, update, transactionOrigin, YDecoder = UpdateDecoderV2) => {
|
||||
const decoder = decoding.createDecoder(update)
|
||||
readUpdateV2(decoder, ydoc, transactionOrigin, new YDecoder(decoder))
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
|
||||
*
|
||||
* This function has the same effect as `readUpdate` but accepts an Uint8Array instead of a Decoder.
|
||||
*
|
||||
* @param {Doc} ydoc
|
||||
* @param {Uint8Array} update
|
||||
* @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, UpdateDecoderV1)
|
||||
|
||||
/**
|
||||
* Write the whole document as a single update message. If you specify the state of the remote client (`targetStateVector`) it will
|
||||
* only write the operations that are missing.
|
||||
*
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
* @param {Doc} doc
|
||||
* @param {Map<number,number>} [targetStateVector] The state of the target that receives the update. Leave empty to write all known structs
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map()) => {
|
||||
writeClientsStructs(encoder, doc.store, targetStateVector)
|
||||
writeDeleteSet(encoder, createDeleteSetFromStructStore(doc.store))
|
||||
}
|
||||
|
||||
/**
|
||||
* Write the whole document as a single update message that can be applied on the remote document. If you specify the state of the remote client (`targetState`) it will
|
||||
* only write the operations that are missing.
|
||||
*
|
||||
* Use `writeStateAsUpdate` instead if you are working with lib0/encoding.js#Encoder
|
||||
*
|
||||
* @param {Doc} doc
|
||||
* @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} [encoder]
|
||||
* @return {Uint8Array}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector = new Uint8Array([0]), encoder = new UpdateEncoderV2()) => {
|
||||
const targetStateVector = decodeStateVector(encodedTargetStateVector)
|
||||
writeStateAsUpdate(encoder, doc, targetStateVector)
|
||||
const updates = [encoder.toUint8Array()]
|
||||
// also add the pending updates (if there are any)
|
||||
if (doc.store.pendingDs) {
|
||||
updates.push(doc.store.pendingDs)
|
||||
}
|
||||
if (doc.store.pendingStructs) {
|
||||
updates.push(diffUpdateV2(doc.store.pendingStructs.update, encodedTargetStateVector))
|
||||
}
|
||||
if (updates.length > 1) {
|
||||
if (encoder.constructor === UpdateEncoderV1) {
|
||||
return mergeUpdates(updates.map((update, i) => i === 0 ? update : convertUpdateFormatV2ToV1(update)))
|
||||
} else if (encoder.constructor === UpdateEncoderV2) {
|
||||
return mergeUpdatesV2(updates)
|
||||
}
|
||||
}
|
||||
return updates[0]
|
||||
}
|
||||
|
||||
/**
|
||||
* Write all the document as a single update message that can be applied on the remote document. If you specify the state of the remote client (`targetState`) it will
|
||||
* only write the operations that are missing.
|
||||
*
|
||||
* Use `writeStateAsUpdate` instead if you are working with lib0/encoding.js#Encoder
|
||||
*
|
||||
* @param {Doc} doc
|
||||
* @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
|
||||
* @return {Uint8Array}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStateAsUpdateV2(doc, encodedTargetStateVector, new UpdateEncoderV1())
|
||||
|
||||
/**
|
||||
* Read state vector from Decoder and return as Map
|
||||
*
|
||||
* @param {DSDecoderV1 | DSDecoderV2} decoder
|
||||
* @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const readStateVector = decoder => {
|
||||
const ss = new Map()
|
||||
const ssLength = decoding.readVarUint(decoder.restDecoder)
|
||||
for (let i = 0; i < ssLength; i++) {
|
||||
const client = decoding.readVarUint(decoder.restDecoder)
|
||||
const clock = decoding.readVarUint(decoder.restDecoder)
|
||||
ss.set(client, clock)
|
||||
}
|
||||
return ss
|
||||
}
|
||||
|
||||
/**
|
||||
* Read decodedState and return State as Map.
|
||||
*
|
||||
* @param {Uint8Array} decodedState
|
||||
* @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
// export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState)))
|
||||
|
||||
/**
|
||||
* Read decodedState and return State as Map.
|
||||
*
|
||||
* @param {Uint8Array} decodedState
|
||||
* @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const decodeStateVector = decodedState => readStateVector(new DSDecoderV1(decoding.createDecoder(decodedState)))
|
||||
|
||||
/**
|
||||
* @param {DSEncoderV1 | DSEncoderV2} encoder
|
||||
* @param {Map<number,number>} sv
|
||||
* @function
|
||||
*/
|
||||
export const writeStateVector = (encoder, sv) => {
|
||||
encoding.writeVarUint(encoder.restEncoder, sv.size)
|
||||
array.from(sv.entries()).sort((a, b) => b[0] - a[0]).forEach(([client, clock]) => {
|
||||
encoding.writeVarUint(encoder.restEncoder, client) // @todo use a special client decoder that is based on mapping
|
||||
encoding.writeVarUint(encoder.restEncoder, clock)
|
||||
})
|
||||
return encoder
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {DSEncoderV1 | DSEncoderV2} encoder
|
||||
* @param {Doc} doc
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const writeDocumentStateVector = (encoder, doc) => writeStateVector(encoder, getStateVector(doc.store))
|
||||
|
||||
/**
|
||||
* Encode State as Uint8Array.
|
||||
*
|
||||
* @param {Doc|Map<number,number>} doc
|
||||
* @param {DSEncoderV1 | DSEncoderV2} [encoder]
|
||||
* @return {Uint8Array}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const encodeStateVectorV2 = (doc, encoder = new DSEncoderV2()) => {
|
||||
if (doc instanceof Map) {
|
||||
writeStateVector(encoder, doc)
|
||||
} else {
|
||||
writeDocumentStateVector(encoder, doc)
|
||||
}
|
||||
return encoder.toUint8Array()
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode State as Uint8Array.
|
||||
*
|
||||
* @param {Doc|Map<number,number>} doc
|
||||
* @return {Uint8Array}
|
||||
*
|
||||
* @function
|
||||
*/
|
||||
export const encodeStateVector = doc => encodeStateVectorV2(doc, new DSEncoderV1())
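
// Illustrative sync sketch (not part of the library): exchanging a state vector so that
// only the missing operations are transferred. The type name 't' and the inserted text
// are assumptions chosen for demonstration only.
const _exampleStateVectorSync = () => {
  const docA = new Doc()
  const docB = new Doc()
  docA.getText('t').insert(0, 'hello')
  const stateVectorB = encodeStateVector(docB) // what docB already has (nothing yet)
  const diff = encodeStateAsUpdate(docA, stateVectorB) // only what docB is missing
  applyUpdate(docB, diff)
  return docB.getText('t').toString() // => 'hello'
}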
|
21
src/utils/isParentOf.js
Normal file
@ -0,0 +1,21 @@
import { AbstractType, Item } from '../internals.js' // eslint-disable-line

/**
 * Check if `parent` is a parent of `child`.
 *
 * @param {AbstractType<any>} parent
 * @param {Item|null} child
 * @return {Boolean} Whether `parent` is a parent of `child`.
 *
 * @private
 * @function
 */
export const isParentOf = (parent, child) => {
  while (child !== null) {
    if (child.parent === parent) {
      return true
    }
    child = /** @type {AbstractType<any>} */ (child.parent)._item
  }
  return false
}
21
src/utils/logging.js
Normal file
21
src/utils/logging.js
Normal file
@ -0,0 +1,21 @@
import {
  AbstractType // eslint-disable-line
} from '../internals.js'

/**
 * Convenient helper to log type information.
 *
 * Do not use in production systems as the output can be immense!
 *
 * @param {AbstractType<any>} type
 */
export const logType = type => {
  const res = []
  let n = type._start
  while (n) {
    res.push(n)
    n = n.right
  }
  console.log('Children: ', res)
  console.log('Children content: ', res.filter(m => !m.deleted).map(m => m.content))
}
722
src/utils/updates.js
Normal file
722
src/utils/updates.js
Normal file
@ -0,0 +1,722 @@
|
||||
import * as binary from 'lib0/binary'
|
||||
import * as decoding from 'lib0/decoding'
|
||||
import * as encoding from 'lib0/encoding'
|
||||
import * as error from 'lib0/error'
|
||||
import * as f from 'lib0/function'
|
||||
import * as logging from 'lib0/logging'
|
||||
import * as map from 'lib0/map'
|
||||
import * as math from 'lib0/math'
|
||||
import * as string from 'lib0/string'
|
||||
|
||||
import {
|
||||
ContentAny,
|
||||
ContentBinary,
|
||||
ContentDeleted,
|
||||
ContentDoc,
|
||||
ContentEmbed,
|
||||
ContentFormat,
|
||||
ContentJSON,
|
||||
ContentString,
|
||||
ContentType,
|
||||
createID,
|
||||
decodeStateVector,
|
||||
DSEncoderV1,
|
||||
DSEncoderV2,
|
||||
GC,
|
||||
Item,
|
||||
mergeDeleteSets,
|
||||
readDeleteSet,
|
||||
readItemContent,
|
||||
Skip,
|
||||
UpdateDecoderV1,
|
||||
UpdateDecoderV2,
|
||||
UpdateEncoderV1,
|
||||
UpdateEncoderV2,
|
||||
writeDeleteSet,
|
||||
YXmlElement,
|
||||
YXmlHook
|
||||
} from '../internals.js'
|
||||
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
*/
|
||||
function * lazyStructReaderGenerator (decoder) {
|
||||
const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder)
|
||||
for (let i = 0; i < numOfStateUpdates; i++) {
|
||||
const numberOfStructs = decoding.readVarUint(decoder.restDecoder)
|
||||
const client = decoder.readClient()
|
||||
let clock = decoding.readVarUint(decoder.restDecoder)
|
||||
for (let i = 0; i < numberOfStructs; i++) {
|
||||
const info = decoder.readInfo()
|
||||
// @todo use switch instead of ifs
|
||||
if (info === 10) {
|
||||
const len = decoding.readVarUint(decoder.restDecoder)
|
||||
yield new Skip(createID(client, clock), len)
|
||||
clock += len
|
||||
} else if ((binary.BITS5 & info) !== 0) {
|
||||
const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
|
||||
// If parent = null and neither left nor right are defined, then we know that `parent` is a child of `y`
|
||||
// and we read the next string as parentYKey.
|
||||
// It indicates how we store/retrieve parent from `y.share`
|
||||
// @type {string|null}
|
||||
const struct = new Item(
|
||||
createID(client, clock),
|
||||
null, // left
|
||||
(info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin
|
||||
null, // right
|
||||
(info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin
|
||||
// @ts-ignore Force writing a string here.
|
||||
cantCopyParentInfo ? (decoder.readParentInfo() ? decoder.readString() : decoder.readLeftID()) : null, // parent
|
||||
cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
|
||||
readItemContent(decoder, info) // item content
|
||||
)
|
||||
yield struct
|
||||
clock += struct.length
|
||||
} else {
|
||||
const len = decoder.readLen()
|
||||
yield new GC(createID(client, clock), len)
|
||||
clock += len
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class LazyStructReader {
|
||||
/**
|
||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
||||
* @param {boolean} filterSkips
|
||||
*/
|
||||
constructor (decoder, filterSkips) {
|
||||
this.gen = lazyStructReaderGenerator(decoder)
|
||||
/**
|
||||
* @type {null | Item | Skip | GC}
|
||||
*/
|
||||
this.curr = null
|
||||
this.done = false
|
||||
this.filterSkips = filterSkips
|
||||
this.next()
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {Item | GC | Skip |null}
|
||||
*/
|
||||
next () {
|
||||
// ignore "Skip" structs
|
||||
do {
|
||||
this.curr = this.gen.next().value || null
|
||||
} while (this.filterSkips && this.curr !== null && this.curr.constructor === Skip)
|
||||
return this.curr
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
*
|
||||
*/
|
||||
export const logUpdate = update => logUpdateV2(update, UpdateDecoderV1)
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder]
|
||||
*
|
||||
*/
|
||||
export const logUpdateV2 = (update, YDecoder = UpdateDecoderV2) => {
|
||||
const structs = []
|
||||
const updateDecoder = new YDecoder(decoding.createDecoder(update))
|
||||
const lazyDecoder = new LazyStructReader(updateDecoder, false)
|
||||
for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
|
||||
structs.push(curr)
|
||||
}
|
||||
logging.print('Structs: ', structs)
|
||||
const ds = readDeleteSet(updateDecoder)
|
||||
logging.print('DeleteSet: ', ds)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
*
|
||||
*/
|
||||
export const decodeUpdate = (update) => decodeUpdateV2(update, UpdateDecoderV1)
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder]
|
||||
*
|
||||
*/
|
||||
export const decodeUpdateV2 = (update, YDecoder = UpdateDecoderV2) => {
|
||||
const structs = []
|
||||
const updateDecoder = new YDecoder(decoding.createDecoder(update))
|
||||
const lazyDecoder = new LazyStructReader(updateDecoder, false)
|
||||
for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
|
||||
structs.push(curr)
|
||||
}
|
||||
return {
|
||||
structs,
|
||||
ds: readDeleteSet(updateDecoder)
|
||||
}
|
||||
}
|
||||
|
||||
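A short usage sketch for the debug helpers above on a concrete update. It assumes `logUpdate` is re-exported from the package entry point, as the test helper further below does via `Y.logUpdate`; names are illustrative.

import * as Y from 'yjs'

const ydoc = new Y.Doc()
ydoc.getArray('list').insert(0, ['a', 'b'])
const update = Y.encodeStateAsUpdate(ydoc)

// Pretty-print the structs and the delete set contained in the update.
// Like logType, this is a debugging aid; the output can be large.
Y.logUpdate(update)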
export class LazyStructWriter {
|
||||
/**
|
||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
||||
*/
|
||||
constructor (encoder) {
|
||||
this.currClient = 0
|
||||
this.startClock = 0
|
||||
this.written = 0
|
||||
this.encoder = encoder
|
||||
/**
|
||||
* We want to write operations lazily, but we also need to know beforehand how many operations we want to write for each client.
|
||||
*
|
||||
* This kind of meta-information (#clients, #structs-per-client-written) is written to the restEncoder.
|
||||
*
|
||||
* We fragment the restEncoder and store a slice of it per-client until we know how many clients there are.
|
||||
* When we flush (toUint8Array) we write the restEncoder using the fragments and the meta-information.
|
||||
*
|
||||
* @type {Array<{ written: number, restEncoder: Uint8Array }>}
|
||||
*/
|
||||
this.clientStructs = []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Array<Uint8Array>} updates
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV1)
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {typeof DSEncoderV1 | typeof DSEncoderV2} YEncoder
|
||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YDecoder = UpdateDecoderV2) => {
|
||||
const encoder = new YEncoder()
|
||||
const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), false)
|
||||
let curr = updateDecoder.curr
|
||||
if (curr !== null) {
|
||||
let size = 0
|
||||
let currClient = curr.id.client
|
||||
let stopCounting = curr.id.clock !== 0 // must start at 0
|
||||
let currClock = stopCounting ? 0 : curr.id.clock + curr.length
|
||||
for (; curr !== null; curr = updateDecoder.next()) {
|
||||
if (currClient !== curr.id.client) {
|
||||
if (currClock !== 0) {
|
||||
size++
|
||||
// We found a new client
|
||||
// write what we have to the encoder
|
||||
encoding.writeVarUint(encoder.restEncoder, currClient)
|
||||
encoding.writeVarUint(encoder.restEncoder, currClock)
|
||||
}
|
||||
currClient = curr.id.client
|
||||
currClock = 0
|
||||
stopCounting = curr.id.clock !== 0
|
||||
}
|
||||
// we ignore skips
|
||||
if (curr.constructor === Skip) {
|
||||
stopCounting = true
|
||||
}
|
||||
if (!stopCounting) {
|
||||
currClock = curr.id.clock + curr.length
|
||||
}
|
||||
}
|
||||
// write what we have
|
||||
if (currClock !== 0) {
|
||||
size++
|
||||
encoding.writeVarUint(encoder.restEncoder, currClient)
|
||||
encoding.writeVarUint(encoder.restEncoder, currClock)
|
||||
}
|
||||
// prepend the size of the state vector
|
||||
const enc = encoding.createEncoder()
|
||||
encoding.writeVarUint(enc, size)
|
||||
encoding.writeBinaryEncoder(enc, encoder.restEncoder)
|
||||
encoder.restEncoder = enc
|
||||
return encoder.toUint8Array()
|
||||
} else {
|
||||
encoding.writeVarUint(encoder.restEncoder, 0)
|
||||
return encoder.toUint8Array()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
export const encodeStateVectorFromUpdate = update => encodeStateVectorFromUpdateV2(update, DSEncoderV1, UpdateDecoderV1)
|
||||
|
||||
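A sketch of `encodeStateVectorFromUpdate`: it reads the state vector straight out of an encoded update without instantiating a `Y.Doc`, which is handy when a server only stores updates. The `Y.*` exports used here are the same ones the tests below rely on; the document content is illustrative.

import * as Y from 'yjs'

const ydoc = new Y.Doc()
ydoc.getText('t').insert(0, 'abc')
const update = Y.encodeStateAsUpdate(ydoc)

// Derive the state vector covered by this update without applying it
const sv = Y.encodeStateVectorFromUpdate(update)
Y.decodeStateVector(sv) // Map { <clientID> => 3 }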
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder
|
||||
* @return {{ from: Map<number,number>, to: Map<number,number> }}
|
||||
*/
|
||||
export const parseUpdateMetaV2 = (update, YDecoder = UpdateDecoderV2) => {
|
||||
/**
|
||||
* @type {Map<number, number>}
|
||||
*/
|
||||
const from = new Map()
|
||||
/**
|
||||
* @type {Map<number, number>}
|
||||
*/
|
||||
const to = new Map()
|
||||
const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), false)
|
||||
let curr = updateDecoder.curr
|
||||
if (curr !== null) {
|
||||
let currClient = curr.id.client
|
||||
let currClock = curr.id.clock
|
||||
// write the beginning to `from`
|
||||
from.set(currClient, currClock)
|
||||
for (; curr !== null; curr = updateDecoder.next()) {
|
||||
if (currClient !== curr.id.client) {
|
||||
// We found a new client
|
||||
// write the end to `to`
|
||||
to.set(currClient, currClock)
|
||||
// write the beginning to `from`
|
||||
from.set(curr.id.client, curr.id.clock)
|
||||
// update currClient
|
||||
currClient = curr.id.client
|
||||
}
|
||||
currClock = curr.id.clock + curr.length
|
||||
}
|
||||
// write the end to `to`
|
||||
to.set(currClient, currClock)
|
||||
}
|
||||
return { from, to }
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @return {{ from: Map<number,number>, to: Map<number,number> }}
|
||||
*/
|
||||
export const parseUpdateMeta = update => parseUpdateMetaV2(update, UpdateDecoderV1)
|
||||
|
||||
/**
|
||||
* This method is intended to slice any kind of struct and retrieve the right part.
|
||||
* It does not handle side-effects, so it should only be used by the lazy-encoder.
|
||||
*
|
||||
* @param {Item | GC | Skip} left
|
||||
* @param {number} diff
|
||||
* @return {Item | GC}
|
||||
*/
|
||||
const sliceStruct = (left, diff) => {
|
||||
if (left.constructor === GC) {
|
||||
const { client, clock } = left.id
|
||||
return new GC(createID(client, clock + diff), left.length - diff)
|
||||
} else if (left.constructor === Skip) {
|
||||
const { client, clock } = left.id
|
||||
return new Skip(createID(client, clock + diff), left.length - diff)
|
||||
} else {
|
||||
const leftItem = /** @type {Item} */ (left)
|
||||
const { client, clock } = leftItem.id
|
||||
return new Item(
|
||||
createID(client, clock + diff),
|
||||
null,
|
||||
createID(client, clock + diff - 1),
|
||||
null,
|
||||
leftItem.rightOrigin,
|
||||
leftItem.parent,
|
||||
leftItem.parentSub,
|
||||
leftItem.content.splice(diff)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* This function works similarly to `readUpdateV2`.
|
||||
*
|
||||
* @param {Array<Uint8Array>} updates
|
||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
|
||||
* @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
|
||||
* @return {Uint8Array}
|
||||
*/
|
||||
export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
|
||||
if (updates.length === 1) {
|
||||
return updates[0]
|
||||
}
|
||||
const updateDecoders = updates.map(update => new YDecoder(decoding.createDecoder(update)))
|
||||
let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder, true))
|
||||
|
||||
/**
|
||||
* @todo we don't need offset because we always slice before
|
||||
* @type {null | { struct: Item | GC | Skip, offset: number }}
|
||||
*/
|
||||
let currWrite = null
|
||||
|
||||
const updateEncoder = new YEncoder()
|
||||
// write structs lazily
|
||||
const lazyStructEncoder = new LazyStructWriter(updateEncoder)
|
||||
|
||||
// Note: We need to ensure that all lazyStructDecoders are fully consumed
|
||||
// Note: Should merge document updates whenever possible - even from different updates
|
||||
// Note: Should handle that some operations cannot be applied yet ()
|
||||
|
||||
while (true) {
|
||||
// Write higher clients first ⇒ sort by clientID & clock and remove decoders without content
|
||||
lazyStructDecoders = lazyStructDecoders.filter(dec => dec.curr !== null)
|
||||
lazyStructDecoders.sort(
|
||||
/** @type {function(any,any):number} */ (dec1, dec2) => {
|
||||
if (dec1.curr.id.client === dec2.curr.id.client) {
|
||||
const clockDiff = dec1.curr.id.clock - dec2.curr.id.clock
|
||||
if (clockDiff === 0) {
|
||||
// @todo remove references to skip since the structDecoders must filter Skips.
|
||||
return dec1.curr.constructor === dec2.curr.constructor
|
||||
? 0
|
||||
: dec1.curr.constructor === Skip ? 1 : -1 // we are filtering skips anyway.
|
||||
} else {
|
||||
return clockDiff
|
||||
}
|
||||
} else {
|
||||
return dec2.curr.id.client - dec1.curr.id.client
|
||||
}
|
||||
}
|
||||
)
|
||||
if (lazyStructDecoders.length === 0) {
|
||||
break
|
||||
}
|
||||
const currDecoder = lazyStructDecoders[0]
|
||||
// write from currDecoder until the next operation is from another client or if filler-struct
|
||||
// then we need to reorder the decoders and find the next operation to write
|
||||
const firstClient = /** @type {Item | GC} */ (currDecoder.curr).id.client
|
||||
|
||||
if (currWrite !== null) {
|
||||
let curr = /** @type {Item | GC | null} */ (currDecoder.curr)
|
||||
let iterated = false
|
||||
|
||||
// iterate until we find something that we haven't written already
|
||||
// remember: first the high client-ids are written
|
||||
while (curr !== null && curr.id.clock + curr.length <= currWrite.struct.id.clock + currWrite.struct.length && curr.id.client >= currWrite.struct.id.client) {
|
||||
curr = currDecoder.next()
|
||||
iterated = true
|
||||
}
|
||||
if (
|
||||
curr === null || // current decoder is empty
|
||||
curr.id.client !== firstClient || // check whether there is another decoder that has updates from `firstClient`
|
||||
(iterated && curr.id.clock > currWrite.struct.id.clock + currWrite.struct.length) // the above while loop was used and we are potentially missing updates
|
||||
) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (firstClient !== currWrite.struct.id.client) {
|
||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
||||
currWrite = { struct: curr, offset: 0 }
|
||||
currDecoder.next()
|
||||
} else {
|
||||
if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) {
|
||||
// @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock)
|
||||
if (currWrite.struct.constructor === Skip) {
|
||||
// extend existing skip
|
||||
currWrite.struct.length = curr.id.clock + curr.length - currWrite.struct.id.clock
|
||||
} else {
|
||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
||||
const diff = curr.id.clock - currWrite.struct.id.clock - currWrite.struct.length
|
||||
/**
|
||||
* @type {Skip}
|
||||
*/
|
||||
const struct = new Skip(createID(firstClient, currWrite.struct.id.clock + currWrite.struct.length), diff)
|
||||
currWrite = { struct, offset: 0 }
|
||||
}
|
||||
} else { // if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) {
|
||||
const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock
|
||||
if (diff > 0) {
|
||||
if (currWrite.struct.constructor === Skip) {
|
||||
// prefer to slice Skip because the other struct might contain more information
|
||||
currWrite.struct.length -= diff
|
||||
} else {
|
||||
curr = sliceStruct(curr, diff)
|
||||
}
|
||||
}
|
||||
if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) {
|
||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
||||
currWrite = { struct: curr, offset: 0 }
|
||||
currDecoder.next()
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
currWrite = { struct: /** @type {Item | GC} */ (currDecoder.curr), offset: 0 }
|
||||
currDecoder.next()
|
||||
}
|
||||
for (
|
||||
let next = currDecoder.curr;
|
||||
next !== null && next.id.client === firstClient && next.id.clock === currWrite.struct.id.clock + currWrite.struct.length && next.constructor !== Skip;
|
||||
next = currDecoder.next()
|
||||
) {
|
||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
||||
currWrite = { struct: next, offset: 0 }
|
||||
}
|
||||
}
|
||||
if (currWrite !== null) {
|
||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
||||
currWrite = null
|
||||
}
|
||||
finishLazyStructWriting(lazyStructEncoder)
|
||||
|
||||
const dss = updateDecoders.map(decoder => readDeleteSet(decoder))
|
||||
const ds = mergeDeleteSets(dss)
|
||||
writeDeleteSet(updateEncoder, ds)
|
||||
return updateEncoder.toUint8Array()
|
||||
}
|
||||
|
||||
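A small sketch of `mergeUpdates` combining several incremental updates into one, without ever loading them into a `Y.Doc`. It assumes the public `Y.*` exports referenced by the tests below; names are illustrative.

import * as Y from 'yjs'

const ydoc = new Y.Doc()
/** @type {Array<Uint8Array>} */
const updates = []
ydoc.on('update', update => { updates.push(update) })
ydoc.getText('t').insert(0, 'a')
ydoc.getText('t').insert(1, 'b')

// Merge the two incremental updates on the encoding level
const merged = Y.mergeUpdates(updates)
const fresh = new Y.Doc()
Y.applyUpdate(fresh, merged)
// fresh.getText('t').toString() === 'ab'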
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {Uint8Array} sv
|
||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
|
||||
* @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
|
||||
*/
|
||||
export const diffUpdateV2 = (update, sv, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
|
||||
const state = decodeStateVector(sv)
|
||||
const encoder = new YEncoder()
|
||||
const lazyStructWriter = new LazyStructWriter(encoder)
|
||||
const decoder = new YDecoder(decoding.createDecoder(update))
|
||||
const reader = new LazyStructReader(decoder, false)
|
||||
while (reader.curr) {
|
||||
const curr = reader.curr
|
||||
const currClient = curr.id.client
|
||||
const svClock = state.get(currClient) || 0
|
||||
if (reader.curr.constructor === Skip) {
|
||||
// the first written struct shouldn't be a skip
|
||||
reader.next()
|
||||
continue
|
||||
}
|
||||
if (curr.id.clock + curr.length > svClock) {
|
||||
writeStructToLazyStructWriter(lazyStructWriter, curr, math.max(svClock - curr.id.clock, 0))
|
||||
reader.next()
|
||||
while (reader.curr && reader.curr.id.client === currClient) {
|
||||
writeStructToLazyStructWriter(lazyStructWriter, reader.curr, 0)
|
||||
reader.next()
|
||||
}
|
||||
} else {
|
||||
// read until something new comes up
|
||||
while (reader.curr && reader.curr.id.client === currClient && reader.curr.id.clock + reader.curr.length <= svClock) {
|
||||
reader.next()
|
||||
}
|
||||
}
|
||||
}
|
||||
finishLazyStructWriting(lazyStructWriter)
|
||||
// write ds
|
||||
const ds = readDeleteSet(decoder)
|
||||
writeDeleteSet(encoder, ds)
|
||||
return encoder.toUint8Array()
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {Uint8Array} sv
|
||||
*/
|
||||
export const diffUpdate = (update, sv) => diffUpdateV2(update, sv, UpdateDecoderV1, UpdateEncoderV1)
|
||||
|
||||
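A sketch of `diffUpdate` computing only the portion of an update that a peer with a known state vector is missing, again without instantiating a `Y.Doc` on the sending side. Names and content are illustrative.

import * as Y from 'yjs'

const ydoc = new Y.Doc()
ydoc.getText('t').insert(0, 'he')
const remote = new Y.Doc()
Y.applyUpdate(remote, Y.encodeStateAsUpdate(ydoc))
const remoteSv = Y.encodeStateVector(remote)

ydoc.getText('t').insert(2, 'llo')
const fullUpdate = Y.encodeStateAsUpdate(ydoc)

// Strip everything the remote already has from the encoded update
const diff = Y.diffUpdate(fullUpdate, remoteSv)
Y.applyUpdate(remote, diff)
// remote.getText('t').toString() === 'hello'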
/**
|
||||
* @param {LazyStructWriter} lazyWriter
|
||||
*/
|
||||
const flushLazyStructWriter = lazyWriter => {
|
||||
if (lazyWriter.written > 0) {
|
||||
lazyWriter.clientStructs.push({ written: lazyWriter.written, restEncoder: encoding.toUint8Array(lazyWriter.encoder.restEncoder) })
|
||||
lazyWriter.encoder.restEncoder = encoding.createEncoder()
|
||||
lazyWriter.written = 0
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {LazyStructWriter} lazyWriter
|
||||
* @param {Item | GC} struct
|
||||
* @param {number} offset
|
||||
*/
|
||||
const writeStructToLazyStructWriter = (lazyWriter, struct, offset) => {
|
||||
// flush curr if we start another client
|
||||
if (lazyWriter.written > 0 && lazyWriter.currClient !== struct.id.client) {
|
||||
flushLazyStructWriter(lazyWriter)
|
||||
}
|
||||
if (lazyWriter.written === 0) {
|
||||
lazyWriter.currClient = struct.id.client
|
||||
// write next client
|
||||
lazyWriter.encoder.writeClient(struct.id.client)
|
||||
// write startClock
|
||||
encoding.writeVarUint(lazyWriter.encoder.restEncoder, struct.id.clock + offset)
|
||||
}
|
||||
struct.write(lazyWriter.encoder, offset)
|
||||
lazyWriter.written++
|
||||
}
|
||||
/**
|
||||
* Call this function when we collected all parts and want to
|
||||
* put all the parts together. After calling this method,
|
||||
* you can continue using the UpdateEncoder.
|
||||
*
|
||||
* @param {LazyStructWriter} lazyWriter
|
||||
*/
|
||||
const finishLazyStructWriting = (lazyWriter) => {
|
||||
flushLazyStructWriter(lazyWriter)
|
||||
|
||||
// this is a fresh encoder because we called flushCurr
|
||||
const restEncoder = lazyWriter.encoder.restEncoder
|
||||
|
||||
/**
|
||||
* Now we put all the fragments together.
|
||||
* This works similarly to `writeClientsStructs`
|
||||
*/
|
||||
|
||||
// write # states that were updated - i.e. the clients
|
||||
encoding.writeVarUint(restEncoder, lazyWriter.clientStructs.length)
|
||||
|
||||
for (let i = 0; i < lazyWriter.clientStructs.length; i++) {
|
||||
const partStructs = lazyWriter.clientStructs[i]
|
||||
/**
|
||||
* Works similarly to `writeStructs`
|
||||
*/
|
||||
// write # encoded structs
|
||||
encoding.writeVarUint(restEncoder, partStructs.written)
|
||||
// write the rest of the fragment
|
||||
encoding.writeUint8Array(restEncoder, partStructs.restEncoder)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {function(Item|GC|Skip):Item|GC|Skip} blockTransformer
|
||||
* @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} YDecoder
|
||||
* @param {typeof UpdateEncoderV2 | typeof UpdateEncoderV1 } YEncoder
|
||||
*/
|
||||
export const convertUpdateFormat = (update, blockTransformer, YDecoder, YEncoder) => {
|
||||
const updateDecoder = new YDecoder(decoding.createDecoder(update))
|
||||
const lazyDecoder = new LazyStructReader(updateDecoder, false)
|
||||
const updateEncoder = new YEncoder()
|
||||
const lazyWriter = new LazyStructWriter(updateEncoder)
|
||||
for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
|
||||
writeStructToLazyStructWriter(lazyWriter, blockTransformer(curr), 0)
|
||||
}
|
||||
finishLazyStructWriting(lazyWriter)
|
||||
const ds = readDeleteSet(updateDecoder)
|
||||
writeDeleteSet(updateEncoder, ds)
|
||||
return updateEncoder.toUint8Array()
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {Object} ObfuscatorOptions
|
||||
* @property {boolean} [ObfuscatorOptions.formatting=true]
|
||||
* @property {boolean} [ObfuscatorOptions.subdocs=true]
|
||||
* @property {boolean} [ObfuscatorOptions.yxml=true] Whether to obfuscate nodeName / hookName
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {ObfuscatorOptions} obfuscator
|
||||
*/
|
||||
const createObfuscator = ({ formatting = true, subdocs = true, yxml = true } = {}) => {
|
||||
let i = 0
|
||||
const mapKeyCache = map.create()
|
||||
const nodeNameCache = map.create()
|
||||
const formattingKeyCache = map.create()
|
||||
const formattingValueCache = map.create()
|
||||
formattingValueCache.set(null, null) // end of a formatting range should always be the end of a formatting range
|
||||
/**
|
||||
* @param {Item|GC|Skip} block
|
||||
* @return {Item|GC|Skip}
|
||||
*/
|
||||
return block => {
|
||||
switch (block.constructor) {
|
||||
case GC:
|
||||
case Skip:
|
||||
return block
|
||||
case Item: {
|
||||
const item = /** @type {Item} */ (block)
|
||||
const content = item.content
|
||||
switch (content.constructor) {
|
||||
case ContentDeleted:
|
||||
break
|
||||
case ContentType: {
|
||||
if (yxml) {
|
||||
const type = /** @type {ContentType} */ (content).type
|
||||
if (type instanceof YXmlElement) {
|
||||
type.nodeName = map.setIfUndefined(nodeNameCache, type.nodeName, () => 'node-' + i)
|
||||
}
|
||||
if (type instanceof YXmlHook) {
|
||||
type.hookName = map.setIfUndefined(nodeNameCache, type.hookName, () => 'hook-' + i)
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
case ContentAny: {
|
||||
const c = /** @type {ContentAny} */ (content)
|
||||
c.arr = c.arr.map(() => i)
|
||||
break
|
||||
}
|
||||
case ContentBinary: {
|
||||
const c = /** @type {ContentBinary} */ (content)
|
||||
c.content = new Uint8Array([i])
|
||||
break
|
||||
}
|
||||
case ContentDoc: {
|
||||
const c = /** @type {ContentDoc} */ (content)
|
||||
if (subdocs) {
|
||||
c.opts = {}
|
||||
c.doc.guid = i + ''
|
||||
}
|
||||
break
|
||||
}
|
||||
case ContentEmbed: {
|
||||
const c = /** @type {ContentEmbed} */ (content)
|
||||
c.embed = {}
|
||||
break
|
||||
}
|
||||
case ContentFormat: {
|
||||
const c = /** @type {ContentFormat} */ (content)
|
||||
if (formatting) {
|
||||
c.key = map.setIfUndefined(formattingKeyCache, c.key, () => i + '')
|
||||
c.value = map.setIfUndefined(formattingValueCache, c.value, () => ({ i }))
|
||||
}
|
||||
break
|
||||
}
|
||||
case ContentJSON: {
|
||||
const c = /** @type {ContentJSON} */ (content)
|
||||
c.arr = c.arr.map(() => i)
|
||||
break
|
||||
}
|
||||
case ContentString: {
|
||||
const c = /** @type {ContentString} */ (content)
|
||||
c.str = string.repeat((i % 10) + '', c.str.length)
|
||||
break
|
||||
}
|
||||
default:
|
||||
// unknown content type
|
||||
error.unexpectedCase()
|
||||
}
|
||||
if (item.parentSub) {
|
||||
item.parentSub = map.setIfUndefined(mapKeyCache, item.parentSub, () => i + '')
|
||||
}
|
||||
i++
|
||||
return block
|
||||
}
|
||||
default:
|
||||
// unknown block-type
|
||||
error.unexpectedCase()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function obfuscates the content of a Yjs update. This is useful to share
|
||||
* buggy Yjs documents while significantly limiting the possibility that a
|
||||
* developer can spy on the user. Note that it might still be possible to deduce
|
||||
* some information by analyzing the "structure" of the document or by analyzing
|
||||
* the typing behavior using the CRDT-related metadata that is still kept fully
|
||||
* intact.
|
||||
*
|
||||
* @param {Uint8Array} update
|
||||
* @param {ObfuscatorOptions} [opts]
|
||||
*/
|
||||
export const obfuscateUpdate = (update, opts) => convertUpdateFormat(update, createObfuscator(opts), UpdateDecoderV1, UpdateEncoderV1)
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {ObfuscatorOptions} [opts]
|
||||
*/
|
||||
export const obfuscateUpdateV2 = (update, opts) => convertUpdateFormat(update, createObfuscator(opts), UpdateDecoderV2, UpdateEncoderV2)
|
||||
|
||||
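A hedged sketch of obfuscating an update before attaching it to a bug report; it assumes `obfuscateUpdate` is re-exported from the package entry point, and the document content is illustrative.

import * as Y from 'yjs'

const ydoc = new Y.Doc()
ydoc.getText('t').insert(0, 'user secret')
const update = Y.encodeStateAsUpdate(ydoc)

// Replace user content with counters while keeping the CRDT metadata intact
const anonymized = Y.obfuscateUpdate(update)
const copy = new Y.Doc()
Y.applyUpdate(copy, anonymized)
// copy.getText('t').toString() has the same length but no longer contains 'user secret'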
/**
|
||||
* @param {Uint8Array} update
|
||||
*/
|
||||
export const convertUpdateFormatV1ToV2 = update => convertUpdateFormat(update, f.id, UpdateDecoderV1, UpdateEncoderV2)
|
||||
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
*/
|
||||
export const convertUpdateFormatV2ToV1 = update => convertUpdateFormat(update, f.id, UpdateDecoderV2, UpdateEncoderV1)
|
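The two converters above round-trip between the v1 and v2 update encodings. A minimal sketch, assuming they are re-exported as `Y.convertUpdateFormatV1ToV2` / `Y.convertUpdateFormatV2ToV1`:

import * as Y from 'yjs'

const ydoc = new Y.Doc()
ydoc.getText('t').insert(0, 'hi')
const updateV1 = Y.encodeStateAsUpdate(ydoc)

// Convert when a peer or storage layer only speaks the other format
const updateV2 = Y.convertUpdateFormatV1ToV2(updateV1)
const backToV1 = Y.convertUpdateFormatV2ToV1(updateV2)
Y.applyUpdate(new Y.Doc(), backToV1) // still a valid v1 update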
9
test.html
Normal file
9
test.html
Normal file
@ -0,0 +1,9 @@
<!DOCTYPE html>
<html>
  <head>
    <title>Testing Yjs</title>
  </head>
  <body>
    <script type="module" src="./dist/tests.js"></script>
  </body>
</html>
45
tests/compatibility.tests.js
Normal file
45
tests/compatibility.tests.js
Normal file
File diff suppressed because one or more lines are too long
329
tests/doc.tests.js
Normal file
329
tests/doc.tests.js
Normal file
@ -0,0 +1,329 @@
|
||||
import * as Y from '../src/index.js'
|
||||
import * as t from 'lib0/testing'
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testAfterTransactionRecursion = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const yxml = ydoc.getXmlFragment('')
|
||||
ydoc.on('afterTransaction', tr => {
|
||||
if (tr.origin === 'test') {
|
||||
yxml.toJSON()
|
||||
}
|
||||
})
|
||||
ydoc.transact(_tr => {
|
||||
for (let i = 0; i < 15000; i++) {
|
||||
yxml.push([new Y.XmlText('a')])
|
||||
}
|
||||
}, 'test')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testOriginInTransaction = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const ytext = doc.getText()
|
||||
/**
|
||||
* @type {Array<string>}
|
||||
*/
|
||||
const origins = []
|
||||
doc.on('afterTransaction', (tr) => {
|
||||
origins.push(tr.origin)
|
||||
if (origins.length <= 1) {
|
||||
ytext.toDelta(Y.snapshot(doc)) // adding a snapshot forces toDelta to create a cleanup transaction
|
||||
doc.transact(() => {
|
||||
ytext.insert(0, 'a')
|
||||
}, 'nested')
|
||||
}
|
||||
})
|
||||
doc.transact(() => {
|
||||
ytext.insert(0, '0')
|
||||
}, 'first')
|
||||
t.compareArrays(origins, ['first', 'cleanup', 'nested'])
|
||||
}
|
||||
|
||||
/**
|
||||
* Client id should be changed when an instance receives updates from another client using the same client id.
|
||||
*
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testClientIdDuplicateChange = _tc => {
|
||||
const doc1 = new Y.Doc()
|
||||
doc1.clientID = 0
|
||||
const doc2 = new Y.Doc()
|
||||
doc2.clientID = 0
|
||||
t.assert(doc2.clientID === doc1.clientID)
|
||||
doc1.getArray('a').insert(0, [1, 2])
|
||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc1))
|
||||
t.assert(doc2.clientID !== doc1.clientID)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testGetTypeEmptyId = _tc => {
|
||||
const doc1 = new Y.Doc()
|
||||
doc1.getText('').insert(0, 'h')
|
||||
doc1.getText().insert(1, 'i')
|
||||
const doc2 = new Y.Doc()
|
||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc1))
|
||||
t.assert(doc2.getText().toString() === 'hi')
|
||||
t.assert(doc2.getText('').toString() === 'hi')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testToJSON = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
t.compare(doc.toJSON(), {}, 'doc.toJSON yields empty object')
|
||||
|
||||
const arr = doc.getArray('array')
|
||||
arr.push(['test1'])
|
||||
|
||||
const map = doc.getMap('map')
|
||||
map.set('k1', 'v1')
|
||||
const map2 = new Y.Map()
|
||||
map.set('k2', map2)
|
||||
map2.set('m2k1', 'm2v1')
|
||||
|
||||
t.compare(doc.toJSON(), {
|
||||
array: ['test1'],
|
||||
map: {
|
||||
k1: 'v1',
|
||||
k2: {
|
||||
m2k1: 'm2v1'
|
||||
}
|
||||
}
|
||||
}, 'doc.toJSON has array and recursive map')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testSubdoc = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
doc.load() // doesn't do anything
|
||||
{
|
||||
/**
|
||||
* @type {Array<any>|null}
|
||||
*/
|
||||
let event = /** @type {any} */ (null)
|
||||
doc.on('subdocs', subdocs => {
|
||||
event = [Array.from(subdocs.added).map(x => x.guid), Array.from(subdocs.removed).map(x => x.guid), Array.from(subdocs.loaded).map(x => x.guid)]
|
||||
})
|
||||
const subdocs = doc.getMap('mysubdocs')
|
||||
const docA = new Y.Doc({ guid: 'a' })
|
||||
docA.load()
|
||||
subdocs.set('a', docA)
|
||||
t.compare(event, [['a'], [], ['a']])
|
||||
|
||||
event = null
|
||||
subdocs.get('a').load()
|
||||
t.assert(event === null)
|
||||
|
||||
event = null
|
||||
subdocs.get('a').destroy()
|
||||
t.compare(event, [['a'], ['a'], []])
|
||||
subdocs.get('a').load()
|
||||
t.compare(event, [[], [], ['a']])
|
||||
|
||||
subdocs.set('b', new Y.Doc({ guid: 'a', shouldLoad: false }))
|
||||
t.compare(event, [['a'], [], []])
|
||||
subdocs.get('b').load()
|
||||
t.compare(event, [[], [], ['a']])
|
||||
|
||||
const docC = new Y.Doc({ guid: 'c' })
|
||||
docC.load()
|
||||
subdocs.set('c', docC)
|
||||
t.compare(event, [['c'], [], ['c']])
|
||||
|
||||
t.compare(Array.from(doc.getSubdocGuids()), ['a', 'c'])
|
||||
}
|
||||
|
||||
const doc2 = new Y.Doc()
|
||||
{
|
||||
t.compare(Array.from(doc2.getSubdocs()), [])
|
||||
/**
|
||||
* @type {Array<any>|null}
|
||||
*/
|
||||
let event = /** @type {any} */ (null)
|
||||
doc2.on('subdocs', subdocs => {
|
||||
event = [Array.from(subdocs.added).map(d => d.guid), Array.from(subdocs.removed).map(d => d.guid), Array.from(subdocs.loaded).map(d => d.guid)]
|
||||
})
|
||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc))
|
||||
t.compare(event, [['a', 'a', 'c'], [], []])
|
||||
|
||||
doc2.getMap('mysubdocs').get('a').load()
|
||||
t.compare(event, [[], [], ['a']])
|
||||
|
||||
t.compare(Array.from(doc2.getSubdocGuids()), ['a', 'c'])
|
||||
|
||||
doc2.getMap('mysubdocs').delete('a')
|
||||
t.compare(event, [[], ['a'], []])
|
||||
t.compare(Array.from(doc2.getSubdocGuids()), ['a', 'c'])
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testSubdocLoadEdgeCases = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const yarray = ydoc.getArray()
|
||||
const subdoc1 = new Y.Doc()
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let lastEvent = null
|
||||
ydoc.on('subdocs', event => {
|
||||
lastEvent = event
|
||||
})
|
||||
yarray.insert(0, [subdoc1])
|
||||
t.assert(subdoc1.shouldLoad)
|
||||
t.assert(subdoc1.autoLoad === false)
|
||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc1))
|
||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc1))
|
||||
// destroy and check whether lastEvent adds it again to added (it shouldn't)
|
||||
subdoc1.destroy()
|
||||
const subdoc2 = yarray.get(0)
|
||||
t.assert(subdoc1 !== subdoc2)
|
||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc2))
|
||||
t.assert(lastEvent !== null && !lastEvent.loaded.has(subdoc2))
|
||||
// load
|
||||
subdoc2.load()
|
||||
t.assert(lastEvent !== null && !lastEvent.added.has(subdoc2))
|
||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc2))
|
||||
// apply from remote
|
||||
const ydoc2 = new Y.Doc()
|
||||
ydoc2.on('subdocs', event => {
|
||||
lastEvent = event
|
||||
})
|
||||
Y.applyUpdate(ydoc2, Y.encodeStateAsUpdate(ydoc))
|
||||
const subdoc3 = ydoc2.getArray().get(0)
|
||||
t.assert(subdoc3.shouldLoad === false)
|
||||
t.assert(subdoc3.autoLoad === false)
|
||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc3))
|
||||
t.assert(lastEvent !== null && !lastEvent.loaded.has(subdoc3))
|
||||
// load
|
||||
subdoc3.load()
|
||||
t.assert(subdoc3.shouldLoad)
|
||||
t.assert(lastEvent !== null && !lastEvent.added.has(subdoc3))
|
||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc3))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testSubdocLoadEdgeCasesAutoload = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const yarray = ydoc.getArray()
|
||||
const subdoc1 = new Y.Doc({ autoLoad: true })
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let lastEvent = null
|
||||
ydoc.on('subdocs', event => {
|
||||
lastEvent = event
|
||||
})
|
||||
yarray.insert(0, [subdoc1])
|
||||
t.assert(subdoc1.shouldLoad)
|
||||
t.assert(subdoc1.autoLoad)
|
||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc1))
|
||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc1))
|
||||
// destroy and check whether lastEvent adds it again to added (it shouldn't)
|
||||
subdoc1.destroy()
|
||||
const subdoc2 = yarray.get(0)
|
||||
t.assert(subdoc1 !== subdoc2)
|
||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc2))
|
||||
t.assert(lastEvent !== null && !lastEvent.loaded.has(subdoc2))
|
||||
// load
|
||||
subdoc2.load()
|
||||
t.assert(lastEvent !== null && !lastEvent.added.has(subdoc2))
|
||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc2))
|
||||
// apply from remote
|
||||
const ydoc2 = new Y.Doc()
|
||||
ydoc2.on('subdocs', event => {
|
||||
lastEvent = event
|
||||
})
|
||||
Y.applyUpdate(ydoc2, Y.encodeStateAsUpdate(ydoc))
|
||||
const subdoc3 = ydoc2.getArray().get(0)
|
||||
t.assert(subdoc1.shouldLoad)
|
||||
t.assert(subdoc1.autoLoad)
|
||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc3))
|
||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc3))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testSubdocsUndo = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const elems = ydoc.getXmlFragment()
|
||||
const undoManager = new Y.UndoManager(elems)
|
||||
const subdoc = new Y.Doc()
|
||||
// @ts-ignore
|
||||
elems.insert(0, [subdoc])
|
||||
undoManager.undo()
|
||||
undoManager.redo()
|
||||
t.assert(elems.length === 1)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testLoadDocsEvent = async _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
t.assert(ydoc.isLoaded === false)
|
||||
let loadedEvent = false
|
||||
ydoc.on('load', () => {
|
||||
loadedEvent = true
|
||||
})
|
||||
ydoc.emit('load', [ydoc])
|
||||
await ydoc.whenLoaded
|
||||
t.assert(loadedEvent)
|
||||
t.assert(ydoc.isLoaded)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testSyncDocsEvent = async _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
t.assert(ydoc.isLoaded === false)
|
||||
t.assert(ydoc.isSynced === false)
|
||||
let loadedEvent = false
|
||||
ydoc.once('load', () => {
|
||||
loadedEvent = true
|
||||
})
|
||||
let syncedEvent = false
|
||||
ydoc.once('sync', /** @param {any} isSynced */ (isSynced) => {
|
||||
syncedEvent = true
|
||||
t.assert(isSynced)
|
||||
})
|
||||
ydoc.emit('sync', [true, ydoc])
|
||||
await ydoc.whenLoaded
|
||||
const oldWhenSynced = ydoc.whenSynced
|
||||
await ydoc.whenSynced
|
||||
t.assert(loadedEvent)
|
||||
t.assert(syncedEvent)
|
||||
t.assert(ydoc.isLoaded)
|
||||
t.assert(ydoc.isSynced)
|
||||
let loadedEvent2 = false
|
||||
ydoc.on('load', () => {
|
||||
loadedEvent2 = true
|
||||
})
|
||||
let syncedEvent2 = false
|
||||
ydoc.on('sync', (isSynced) => {
|
||||
syncedEvent2 = true
|
||||
t.assert(isSynced === false)
|
||||
})
|
||||
ydoc.emit('sync', [false, ydoc])
|
||||
t.assert(!loadedEvent2)
|
||||
t.assert(syncedEvent2)
|
||||
t.assert(ydoc.isLoaded)
|
||||
t.assert(!ydoc.isSynced)
|
||||
t.assert(ydoc.whenSynced !== oldWhenSynced)
|
||||
}
|
108
tests/encoding.tests.js
Normal file
108
tests/encoding.tests.js
Normal file
@ -0,0 +1,108 @@
|
||||
import * as t from 'lib0/testing'
|
||||
import * as promise from 'lib0/promise'
|
||||
|
||||
import {
|
||||
contentRefs,
|
||||
readContentBinary,
|
||||
readContentDeleted,
|
||||
readContentString,
|
||||
readContentJSON,
|
||||
readContentEmbed,
|
||||
readContentType,
|
||||
readContentFormat,
|
||||
readContentAny,
|
||||
readContentDoc,
|
||||
Doc,
|
||||
PermanentUserData,
|
||||
encodeStateAsUpdate,
|
||||
applyUpdate
|
||||
} from '../src/internals.js'
|
||||
|
||||
import * as Y from '../src/index.js'
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testStructReferences = tc => {
|
||||
t.assert(contentRefs.length === 11)
|
||||
t.assert(contentRefs[1] === readContentDeleted)
|
||||
t.assert(contentRefs[2] === readContentJSON) // TODO: deprecate content json?
|
||||
t.assert(contentRefs[3] === readContentBinary)
|
||||
t.assert(contentRefs[4] === readContentString)
|
||||
t.assert(contentRefs[5] === readContentEmbed)
|
||||
t.assert(contentRefs[6] === readContentFormat)
|
||||
t.assert(contentRefs[7] === readContentType)
|
||||
t.assert(contentRefs[8] === readContentAny)
|
||||
t.assert(contentRefs[9] === readContentDoc)
|
||||
// contentRefs[10] is reserved for Skip structs
|
||||
}
|
||||
|
||||
/**
|
||||
* There is some custom encoding/decoding happening in PermanentUserData.
|
||||
* This is why it landed here.
|
||||
*
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testPermanentUserData = async tc => {
|
||||
const ydoc1 = new Doc()
|
||||
const ydoc2 = new Doc()
|
||||
const pd1 = new PermanentUserData(ydoc1)
|
||||
const pd2 = new PermanentUserData(ydoc2)
|
||||
pd1.setUserMapping(ydoc1, ydoc1.clientID, 'user a')
|
||||
pd2.setUserMapping(ydoc2, ydoc2.clientID, 'user b')
|
||||
ydoc1.getText().insert(0, 'xhi')
|
||||
ydoc1.getText().delete(0, 1)
|
||||
ydoc2.getText().insert(0, 'hxxi')
|
||||
ydoc2.getText().delete(1, 2)
|
||||
await promise.wait(10)
|
||||
applyUpdate(ydoc2, encodeStateAsUpdate(ydoc1))
|
||||
applyUpdate(ydoc1, encodeStateAsUpdate(ydoc2))
|
||||
|
||||
// now sync a third doc with same name as doc1 and then create PermanentUserData
|
||||
const ydoc3 = new Doc()
|
||||
applyUpdate(ydoc3, encodeStateAsUpdate(ydoc1))
|
||||
const pd3 = new PermanentUserData(ydoc3)
|
||||
pd3.setUserMapping(ydoc3, ydoc3.clientID, 'user a')
|
||||
}
|
||||
|
||||
/**
|
||||
* Reported here: https://github.com/yjs/yjs/issues/308
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testDiffStateVectorOfUpdateIsEmpty = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let sv = null
|
||||
ydoc.getText().insert(0, 'a')
|
||||
ydoc.on('update', update => {
|
||||
sv = Y.encodeStateVectorFromUpdate(update)
|
||||
})
|
||||
// should produce an update with an empty state vector (because previous ops are missing)
|
||||
ydoc.getText().insert(0, 'a')
|
||||
t.assert(sv !== null && sv.byteLength === 1 && sv[0] === 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* Reported here: https://github.com/yjs/yjs/issues/308
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testDiffStateVectorOfUpdateIgnoresSkips = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
/**
|
||||
* @type {Array<Uint8Array>}
|
||||
*/
|
||||
const updates = []
|
||||
ydoc.on('update', update => {
|
||||
updates.push(update)
|
||||
})
|
||||
ydoc.getText().insert(0, 'a')
|
||||
ydoc.getText().insert(0, 'b')
|
||||
ydoc.getText().insert(0, 'c')
|
||||
const update13 = Y.mergeUpdates([updates[0], updates[2]])
|
||||
const sv = Y.encodeStateVectorFromUpdate(update13)
|
||||
const state = Y.decodeStateVector(sv)
|
||||
t.assert(state.get(ydoc.clientID) === 1)
|
||||
t.assert(state.size === 1)
|
||||
}
|
42
tests/index.js
Normal file
42
tests/index.js
Normal file
@ -0,0 +1,42 @@
|
||||
/* eslint-env node */
|
||||
|
||||
import * as map from './y-map.tests.js'
|
||||
import * as array from './y-array.tests.js'
|
||||
import * as text from './y-text.tests.js'
|
||||
import * as xml from './y-xml.tests.js'
|
||||
import * as encoding from './encoding.tests.js'
|
||||
import * as undoredo from './undo-redo.tests.js'
|
||||
import * as compatibility from './compatibility.tests.js'
|
||||
import * as doc from './doc.tests.js'
|
||||
import * as snapshot from './snapshot.tests.js'
|
||||
import * as updates from './updates.tests.js'
|
||||
import * as relativePositions from './relativePositions.tests.js'
|
||||
|
||||
import { runTests } from 'lib0/testing'
|
||||
import { isBrowser, isNode } from 'lib0/environment'
|
||||
import * as log from 'lib0/logging'
|
||||
import { environment } from 'lib0'
|
||||
|
||||
if (isBrowser) {
|
||||
log.createVConsole(document.body)
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
const tests = {
|
||||
doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, updates, relativePositions
|
||||
}
|
||||
|
||||
const run = async () => {
|
||||
if (environment.isNode) {
|
||||
// tests.nodejs = await import('./node.tests.js')
|
||||
}
|
||||
|
||||
const success = await runTests(tests)
|
||||
/* istanbul ignore next */
|
||||
if (isNode) {
|
||||
process.exit(success ? 0 : 1)
|
||||
}
|
||||
}
|
||||
run()
|
145
tests/relativePositions.tests.js
Normal file
145
tests/relativePositions.tests.js
Normal file
@ -0,0 +1,145 @@
|
||||
import * as Y from '../src/index.js'
|
||||
import * as t from 'lib0/testing'
|
||||
|
||||
/**
|
||||
* @param {Y.Text} ytext
|
||||
*/
|
||||
const checkRelativePositions = ytext => {
|
||||
// test if all positions are encoded and restored correctly
|
||||
for (let i = 0; i < ytext.length; i++) {
|
||||
// for all types of associations..
|
||||
for (let assoc = -1; assoc < 2; assoc++) {
|
||||
const rpos = Y.createRelativePositionFromTypeIndex(ytext, i, assoc)
|
||||
const encodedRpos = Y.encodeRelativePosition(rpos)
|
||||
const decodedRpos = Y.decodeRelativePosition(encodedRpos)
|
||||
const absPos = /** @type {Y.AbsolutePosition} */ (Y.createAbsolutePositionFromRelativePosition(decodedRpos, /** @type {Y.Doc} */ (ytext.doc)))
|
||||
t.assert(absPos.index === i)
|
||||
t.assert(absPos.assoc === assoc)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
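Before the test cases below, a small sketch of what a relative position is for: it keeps a logical cursor stable while the document changes around it. The `Y.*` exports are the same ones the tests use; names and content are illustrative.

import * as Y from 'yjs'

const ydoc = new Y.Doc()
const ytext = ydoc.getText('t')
ytext.insert(0, 'hello')

// Anchor a cursor after the last character
const rel = Y.createRelativePositionFromTypeIndex(ytext, 5)
ytext.insert(0, 'say ') // an edit before the cursor shifts all absolute indexes

const abs = Y.createAbsolutePositionFromRelativePosition(rel, ydoc)
// abs.index === 9 — the cursor still points after 'hello'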
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionCase1 = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText()
|
||||
ytext.insert(0, '1')
|
||||
ytext.insert(0, 'abc')
|
||||
ytext.insert(0, 'z')
|
||||
ytext.insert(0, 'y')
|
||||
ytext.insert(0, 'x')
|
||||
checkRelativePositions(ytext)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionCase2 = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText()
|
||||
ytext.insert(0, 'abc')
|
||||
checkRelativePositions(ytext)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionCase3 = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText()
|
||||
ytext.insert(0, 'abc')
|
||||
ytext.insert(0, '1')
|
||||
ytext.insert(0, 'xyz')
|
||||
checkRelativePositions(ytext)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionCase4 = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText()
|
||||
ytext.insert(0, '1')
|
||||
checkRelativePositions(ytext)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionCase5 = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText()
|
||||
ytext.insert(0, '2')
|
||||
ytext.insert(0, '1')
|
||||
checkRelativePositions(ytext)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionCase6 = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText()
|
||||
checkRelativePositions(ytext)
|
||||
}
|
||||
|
||||
/**
|
||||
* Testing https://github.com/yjs/yjs/issues/657
|
||||
*
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionCase7 = tc => {
|
||||
const docA = new Y.Doc()
|
||||
const textA = docA.getText('text')
|
||||
textA.insert(0, 'abcde')
|
||||
// Create a relative position at index 2 in 'textA'
|
||||
const relativePosition = Y.createRelativePositionFromTypeIndex(textA, 2)
|
||||
// Verify that the absolutes positions on 'docA' are the same
|
||||
const absolutePositionWithFollow =
|
||||
Y.createAbsolutePositionFromRelativePosition(relativePosition, docA, true)
|
||||
const absolutePositionWithoutFollow =
|
||||
Y.createAbsolutePositionFromRelativePosition(relativePosition, docA, false)
|
||||
t.assert(absolutePositionWithFollow?.index === 2)
|
||||
t.assert(absolutePositionWithoutFollow?.index === 2)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionAssociationDifference = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText()
|
||||
ytext.insert(0, '2')
|
||||
ytext.insert(0, '1')
|
||||
const rposRight = Y.createRelativePositionFromTypeIndex(ytext, 1, 0)
|
||||
const rposLeft = Y.createRelativePositionFromTypeIndex(ytext, 1, -1)
|
||||
ytext.insert(1, 'x')
|
||||
const posRight = Y.createAbsolutePositionFromRelativePosition(rposRight, ydoc)
|
||||
const posLeft = Y.createAbsolutePositionFromRelativePosition(rposLeft, ydoc)
|
||||
t.assert(posRight != null && posRight.index === 2)
|
||||
t.assert(posLeft != null && posLeft.index === 1)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRelativePositionWithUndo = tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText()
|
||||
ytext.insert(0, 'hello world')
|
||||
const rpos = Y.createRelativePositionFromTypeIndex(ytext, 1)
|
||||
const um = new Y.UndoManager(ytext)
|
||||
ytext.delete(0, 6)
|
||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydoc)?.index === 0)
|
||||
um.undo()
|
||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydoc)?.index === 1)
|
||||
const posWithoutFollow = Y.createAbsolutePositionFromRelativePosition(rpos, ydoc, false)
|
||||
console.log({ posWithoutFollow })
|
||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydoc, false)?.index === 6)
|
||||
const ydocClone = new Y.Doc()
|
||||
Y.applyUpdate(ydocClone, Y.encodeStateAsUpdate(ydoc))
|
||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydocClone)?.index === 6)
|
||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydocClone, false)?.index === 6)
|
||||
}
|
223
tests/snapshot.tests.js
Normal file
223
tests/snapshot.tests.js
Normal file
@ -0,0 +1,223 @@
|
||||
import * as Y from '../src/index.js'
|
||||
import * as t from 'lib0/testing'
|
||||
import { init } from './testHelper.js'
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testBasic = _tc => {
|
||||
const ydoc = new Y.Doc({ gc: false })
|
||||
ydoc.getText().insert(0, 'world!')
|
||||
const snapshot = Y.snapshot(ydoc)
|
||||
ydoc.getText().insert(0, 'hello ')
|
||||
const restored = Y.createDocFromSnapshot(ydoc, snapshot)
|
||||
t.assert(restored.getText().toString() === 'world!')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testBasicXmlAttributes = _tc => {
|
||||
const ydoc = new Y.Doc({ gc: false })
|
||||
const yxml = ydoc.getMap().set('el', new Y.XmlElement('div'))
|
||||
const snapshot1 = Y.snapshot(ydoc)
|
||||
yxml.setAttribute('a', '1')
|
||||
const snapshot2 = Y.snapshot(ydoc)
|
||||
yxml.setAttribute('a', '2')
|
||||
t.compare(yxml.getAttributes(), { a: '2' })
|
||||
t.compare(yxml.getAttributes(snapshot2), { a: '1' })
|
||||
t.compare(yxml.getAttributes(snapshot1), {})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testBasicRestoreSnapshot = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
doc.getArray('array').insert(0, ['hello'])
|
||||
const snap = Y.snapshot(doc)
|
||||
doc.getArray('array').insert(1, ['world'])
|
||||
|
||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
||||
|
||||
t.compare(docRestored.getArray('array').toArray(), ['hello'])
|
||||
t.compare(doc.getArray('array').toArray(), ['hello', 'world'])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testEmptyRestoreSnapshot = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
const snap = Y.snapshot(doc)
|
||||
snap.sv.set(9999, 0)
|
||||
doc.getArray().insert(0, ['world'])
|
||||
|
||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
||||
|
||||
t.compare(docRestored.getArray().toArray(), [])
|
||||
t.compare(doc.getArray().toArray(), ['world'])
|
||||
|
||||
// now this snapshot reflects the latest state. It should still work.
|
||||
const snap2 = Y.snapshot(doc)
|
||||
const docRestored2 = Y.createDocFromSnapshot(doc, snap2)
|
||||
t.compare(docRestored2.getArray().toArray(), ['world'])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testRestoreSnapshotWithSubType = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
doc.getArray('array').insert(0, [new Y.Map()])
|
||||
const subMap = doc.getArray('array').get(0)
|
||||
subMap.set('key1', 'value1')
|
||||
|
||||
const snap = Y.snapshot(doc)
|
||||
subMap.set('key2', 'value2')
|
||||
|
||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
||||
|
||||
t.compare(docRestored.getArray('array').toJSON(), [{
|
||||
key1: 'value1'
|
||||
}])
|
||||
t.compare(doc.getArray('array').toJSON(), [{
|
||||
key1: 'value1',
|
||||
key2: 'value2'
|
||||
}])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testRestoreDeletedItem1 = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
doc.getArray('array').insert(0, ['item1', 'item2'])
|
||||
|
||||
const snap = Y.snapshot(doc)
|
||||
doc.getArray('array').delete(0)
|
||||
|
||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
||||
|
||||
t.compare(docRestored.getArray('array').toArray(), ['item1', 'item2'])
|
||||
t.compare(doc.getArray('array').toArray(), ['item2'])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testRestoreLeftItem = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
doc.getArray('array').insert(0, ['item1'])
|
||||
doc.getMap('map').set('test', 1)
|
||||
doc.getArray('array').insert(0, ['item0'])
|
||||
|
||||
const snap = Y.snapshot(doc)
|
||||
doc.getArray('array').delete(1)
|
||||
|
||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
||||
|
||||
t.compare(docRestored.getArray('array').toArray(), ['item0', 'item1'])
|
||||
t.compare(doc.getArray('array').toArray(), ['item0'])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testDeletedItemsBase = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
doc.getArray('array').insert(0, ['item1'])
|
||||
doc.getArray('array').delete(0)
|
||||
const snap = Y.snapshot(doc)
|
||||
doc.getArray('array').insert(0, ['item0'])
|
||||
|
||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
||||
|
||||
t.compare(docRestored.getArray('array').toArray(), [])
|
||||
t.compare(doc.getArray('array').toArray(), ['item0'])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testDeletedItems2 = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
doc.getArray('array').insert(0, ['item1', 'item2', 'item3'])
|
||||
doc.getArray('array').delete(1)
|
||||
const snap = Y.snapshot(doc)
|
||||
doc.getArray('array').insert(0, ['item0'])
|
||||
|
||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
||||
|
||||
t.compare(docRestored.getArray('array').toArray(), ['item1', 'item3'])
|
||||
t.compare(doc.getArray('array').toArray(), ['item0', 'item1', 'item3'])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testDependentChanges = tc => {
|
||||
const { array0, array1, testConnector } = init(tc, { users: 2 })
|
||||
|
||||
if (!array0.doc) {
|
||||
throw new Error('no document 0')
|
||||
}
|
||||
if (!array1.doc) {
|
||||
throw new Error('no document 1')
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {Y.Doc}
|
||||
*/
|
||||
const doc0 = array0.doc
|
||||
/**
|
||||
* @type {Y.Doc}
|
||||
*/
|
||||
const doc1 = array1.doc
|
||||
|
||||
doc0.gc = false
|
||||
doc1.gc = false
|
||||
|
||||
array0.insert(0, ['user1item1'])
|
||||
testConnector.syncAll()
|
||||
array1.insert(1, ['user2item1'])
|
||||
testConnector.syncAll()
|
||||
|
||||
const snap = Y.snapshot(array0.doc)
|
||||
|
||||
array0.insert(2, ['user1item2'])
|
||||
testConnector.syncAll()
|
||||
array1.insert(3, ['user2item2'])
|
||||
testConnector.syncAll()
|
||||
|
||||
const docRestored0 = Y.createDocFromSnapshot(array0.doc, snap)
|
||||
t.compare(docRestored0.getArray('array').toArray(), ['user1item1', 'user2item1'])
|
||||
|
||||
const docRestored1 = Y.createDocFromSnapshot(array1.doc, snap)
|
||||
t.compare(docRestored1.getArray('array').toArray(), ['user1item1', 'user2item1'])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testContainsUpdate = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
/**
|
||||
* @type {Array<Uint8Array>}
|
||||
*/
|
||||
const updates = []
|
||||
ydoc.on('update', update => {
|
||||
updates.push(update)
|
||||
})
|
||||
const yarr = ydoc.getArray()
|
||||
const snapshot1 = Y.snapshot(ydoc)
|
||||
yarr.insert(0, [1])
|
||||
const snapshot2 = Y.snapshot(ydoc)
|
||||
yarr.delete(0, 1)
|
||||
const snapshotFinal = Y.snapshot(ydoc)
|
||||
t.assert(!Y.snapshotContainsUpdate(snapshot1, updates[0]))
|
||||
t.assert(!Y.snapshotContainsUpdate(snapshot2, updates[1]))
|
||||
t.assert(Y.snapshotContainsUpdate(snapshot2, updates[0]))
|
||||
t.assert(Y.snapshotContainsUpdate(snapshotFinal, updates[0]))
|
||||
t.assert(Y.snapshotContainsUpdate(snapshotFinal, updates[1]))
|
||||
}
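/**
 * Illustrative sketch only (not one of the reported regression cases):
 * the basic snapshot/restore workflow exercised by the tests above.
 * Snapshots require the document to be created with `gc: false`.
 *
 * @param {t.TestCase} _tc
 */
export const _exampleSnapshotWorkflow = _tc => {
  const ydoc = new Y.Doc({ gc: false })
  ydoc.getArray('array').insert(0, ['a', 'b'])
  const snap = Y.snapshot(ydoc) // capture the current state
  ydoc.getArray('array').delete(0) // keep editing afterwards
  const restored = Y.createDocFromSnapshot(ydoc, snap)
  t.compare(restored.getArray('array').toArray(), ['a', 'b'])
  t.compare(ydoc.getArray('array').toArray(), ['b'])
}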
|
454
tests/testHelper.js
Normal file
@ -0,0 +1,454 @@
|
||||
import * as t from 'lib0/testing'
|
||||
import * as prng from 'lib0/prng'
|
||||
import * as encoding from 'lib0/encoding'
|
||||
import * as decoding from 'lib0/decoding'
|
||||
import * as syncProtocol from 'y-protocols/sync'
|
||||
import * as object from 'lib0/object'
|
||||
import * as map from 'lib0/map'
|
||||
import * as Y from '../src/index.js'
|
||||
export * from '../src/index.js'
|
||||
|
||||
if (typeof window !== 'undefined') {
|
||||
// @ts-ignore
|
||||
window.Y = Y // eslint-disable-line
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {TestYInstance} y // publish message created by `y` to all other online clients
|
||||
* @param {Uint8Array} m
|
||||
*/
|
||||
const broadcastMessage = (y, m) => {
|
||||
if (y.tc.onlineConns.has(y)) {
|
||||
y.tc.onlineConns.forEach(remoteYInstance => {
|
||||
if (remoteYInstance !== y) {
|
||||
remoteYInstance._receive(m, y)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export let useV2 = false
|
||||
|
||||
export const encV1 = {
|
||||
encodeStateAsUpdate: Y.encodeStateAsUpdate,
|
||||
mergeUpdates: Y.mergeUpdates,
|
||||
applyUpdate: Y.applyUpdate,
|
||||
logUpdate: Y.logUpdate,
|
||||
updateEventName: /** @type {'update'} */ ('update'),
|
||||
diffUpdate: Y.diffUpdate
|
||||
}
|
||||
|
||||
export const encV2 = {
|
||||
encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
|
||||
mergeUpdates: Y.mergeUpdatesV2,
|
||||
applyUpdate: Y.applyUpdateV2,
|
||||
logUpdate: Y.logUpdateV2,
|
||||
updateEventName: /** @type {'updateV2'} */ ('updateV2'),
|
||||
diffUpdate: Y.diffUpdateV2
|
||||
}
|
||||
|
||||
export let enc = encV1
|
||||
|
||||
const useV1Encoding = () => {
|
||||
useV2 = false
|
||||
enc = encV1
|
||||
}
|
||||
|
||||
const useV2Encoding = () => {
|
||||
console.error('sync protocol doesn\'t support the v2 protocol yet, falling back to v1 encoding') // @Todo
|
||||
useV2 = false
|
||||
enc = encV1
|
||||
}
|
||||
|
||||
export class TestYInstance extends Y.Doc {
|
||||
/**
|
||||
* @param {TestConnector} testConnector
|
||||
* @param {number} clientID
|
||||
*/
|
||||
constructor (testConnector, clientID) {
|
||||
super()
|
||||
this.userID = clientID // overwriting clientID
|
||||
/**
|
||||
* @type {TestConnector}
|
||||
*/
|
||||
this.tc = testConnector
|
||||
/**
|
||||
* @type {Map<TestYInstance, Array<Uint8Array>>}
|
||||
*/
|
||||
this.receiving = new Map()
|
||||
testConnector.allConns.add(this)
|
||||
/**
|
||||
* The list of received updates.
|
||||
* We are going to merge them later using Y.mergeUpdates and check if the resulting document is correct.
|
||||
* @type {Array<Uint8Array>}
|
||||
*/
|
||||
this.updates = []
|
||||
// set up observe on local model
|
||||
this.on(enc.updateEventName, /** @param {Uint8Array} update @param {any} origin */ (update, origin) => {
|
||||
if (origin !== testConnector) {
|
||||
const encoder = encoding.createEncoder()
|
||||
syncProtocol.writeUpdate(encoder, update)
|
||||
broadcastMessage(this, encoding.toUint8Array(encoder))
|
||||
}
|
||||
this.updates.push(update)
|
||||
})
|
||||
this.connect()
|
||||
}
|
||||
|
||||
/**
|
||||
* Disconnect from TestConnector.
|
||||
*/
|
||||
disconnect () {
|
||||
this.receiving = new Map()
|
||||
this.tc.onlineConns.delete(this)
|
||||
}
|
||||
|
||||
/**
|
||||
* Append yourself to the list of known Y instances in testconnector.
|
||||
* Also initiate sync with all clients.
|
||||
*/
|
||||
connect () {
|
||||
if (!this.tc.onlineConns.has(this)) {
|
||||
this.tc.onlineConns.add(this)
|
||||
const encoder = encoding.createEncoder()
|
||||
syncProtocol.writeSyncStep1(encoder, this)
|
||||
// publish SyncStep1
|
||||
broadcastMessage(this, encoding.toUint8Array(encoder))
|
||||
this.tc.onlineConns.forEach(remoteYInstance => {
|
||||
if (remoteYInstance !== this) {
|
||||
// the remote instance sends its SyncStep1 to this instance
|
||||
const encoder = encoding.createEncoder()
|
||||
syncProtocol.writeSyncStep1(encoder, remoteYInstance)
|
||||
this._receive(encoding.toUint8Array(encoder), remoteYInstance)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Receive a message from another client. This message is only appended to the list of receiving messages.
|
||||
* TestConnector decides when this client actually reads this message.
|
||||
*
|
||||
* @param {Uint8Array} message
|
||||
* @param {TestYInstance} remoteClient
|
||||
*/
|
||||
_receive (message, remoteClient) {
|
||||
map.setIfUndefined(this.receiving, remoteClient, () => /** @type {Array<Uint8Array>} */ ([])).push(message)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Keeps track of TestYInstances.
|
||||
*
|
||||
* The TestYInstances add/remove themselves from the list of connections maintained in this object.
|
||||
* I think it makes sense. Deal with it.
|
||||
*/
|
||||
export class TestConnector {
|
||||
/**
|
||||
* @param {prng.PRNG} gen
|
||||
*/
|
||||
constructor (gen) {
|
||||
/**
|
||||
* @type {Set<TestYInstance>}
|
||||
*/
|
||||
this.allConns = new Set()
|
||||
/**
|
||||
* @type {Set<TestYInstance>}
|
||||
*/
|
||||
this.onlineConns = new Set()
|
||||
/**
|
||||
* @type {prng.PRNG}
|
||||
*/
|
||||
this.prng = gen
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Y instance and add it to the list of connections
|
||||
* @param {number} clientID
|
||||
*/
|
||||
createY (clientID) {
|
||||
return new TestYInstance(this, clientID)
|
||||
}
|
||||
|
||||
/**
|
||||
* Choose random connection and flush a random message from a random sender.
|
||||
*
|
||||
* If there are no more messages to flush, this function returns false; otherwise it returns true.
|
||||
* @return {boolean}
|
||||
*/
|
||||
flushRandomMessage () {
|
||||
const gen = this.prng
|
||||
const conns = Array.from(this.onlineConns).filter(conn => conn.receiving.size > 0)
|
||||
if (conns.length > 0) {
|
||||
const receiver = prng.oneOf(gen, conns)
|
||||
const [sender, messages] = prng.oneOf(gen, Array.from(receiver.receiving))
|
||||
const m = messages.shift()
|
||||
if (messages.length === 0) {
|
||||
receiver.receiving.delete(sender)
|
||||
}
|
||||
if (m === undefined) {
|
||||
return this.flushRandomMessage()
|
||||
}
|
||||
const encoder = encoding.createEncoder()
|
||||
// console.log('receive (' + sender.userID + '->' + receiver.userID + '):\n', syncProtocol.stringifySyncMessage(decoding.createDecoder(m), receiver))
|
||||
// do not publish data created when this function is executed (could be ss2 or update message)
|
||||
syncProtocol.readSyncMessage(decoding.createDecoder(m), encoder, receiver, receiver.tc)
|
||||
if (encoding.length(encoder) > 0) {
|
||||
// send reply message
|
||||
sender._receive(encoding.toUint8Array(encoder), receiver)
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean} True iff this function actually flushed something
|
||||
*/
|
||||
flushAllMessages () {
|
||||
let didSomething = false
|
||||
while (this.flushRandomMessage()) {
|
||||
didSomething = true
|
||||
}
|
||||
return didSomething
|
||||
}
|
||||
|
||||
reconnectAll () {
|
||||
this.allConns.forEach(conn => conn.connect())
|
||||
}
|
||||
|
||||
disconnectAll () {
|
||||
this.allConns.forEach(conn => conn.disconnect())
|
||||
}
|
||||
|
||||
syncAll () {
|
||||
this.reconnectAll()
|
||||
this.flushAllMessages()
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean} Whether it was possible to disconnect a random connection.
|
||||
*/
|
||||
disconnectRandom () {
|
||||
if (this.onlineConns.size === 0) {
|
||||
return false
|
||||
}
|
||||
prng.oneOf(this.prng, Array.from(this.onlineConns)).disconnect()
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {boolean} Whether it was possible to reconnect a random connection.
|
||||
*/
|
||||
reconnectRandom () {
|
||||
/**
|
||||
* @type {Array<TestYInstance>}
|
||||
*/
|
||||
const reconnectable = []
|
||||
this.allConns.forEach(conn => {
|
||||
if (!this.onlineConns.has(conn)) {
|
||||
reconnectable.push(conn)
|
||||
}
|
||||
})
|
||||
if (reconnectable.length === 0) {
|
||||
return false
|
||||
}
|
||||
prng.oneOf(this.prng, reconnectable).connect()
|
||||
return true
|
||||
}
|
||||
}
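/**
 * Illustrative sketch only (hypothetical helper, not used by the tests):
 * how TestConnector and TestYInstance fit together. Most tests use the
 * `init` helper below instead of wiring this up by hand.
 *
 * @param {prng.PRNG} gen
 */
export const _exampleTwoClientSync = gen => {
  const connector = new TestConnector(gen)
  const y0 = connector.createY(0)
  const y1 = connector.createY(1)
  y0.getArray('array').insert(0, ['hello'])
  connector.syncAll() // reconnect everyone and flush all queued messages
  return y1.getArray('array').toArray() // => ['hello']
}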
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {t.TestCase} tc
|
||||
* @param {{users?:number}} conf
|
||||
* @param {InitTestObjectCallback<T>} [initTestObject]
|
||||
* @return {{testObjects:Array<any>,testConnector:TestConnector,users:Array<TestYInstance>,array0:Y.Array<any>,array1:Y.Array<any>,array2:Y.Array<any>,map0:Y.Map<any>,map1:Y.Map<any>,map2:Y.Map<any>,map3:Y.Map<any>,text0:Y.Text,text1:Y.Text,text2:Y.Text,xml0:Y.XmlElement,xml1:Y.XmlElement,xml2:Y.XmlElement}}
|
||||
*/
|
||||
export const init = (tc, { users = 5 } = {}, initTestObject) => {
|
||||
/**
|
||||
* @type {Object<string,any>}
|
||||
*/
|
||||
const result = {
|
||||
users: []
|
||||
}
|
||||
const gen = tc.prng
|
||||
// choose an encoding approach at random
|
||||
if (prng.bool(gen)) {
|
||||
useV2Encoding()
|
||||
} else {
|
||||
useV1Encoding()
|
||||
}
|
||||
|
||||
const testConnector = new TestConnector(gen)
|
||||
result.testConnector = testConnector
|
||||
for (let i = 0; i < users; i++) {
|
||||
const y = testConnector.createY(i)
|
||||
y.clientID = i
|
||||
result.users.push(y)
|
||||
result['array' + i] = y.getArray('array')
|
||||
result['map' + i] = y.getMap('map')
|
||||
result['xml' + i] = y.get('xml', Y.XmlElement)
|
||||
result['text' + i] = y.getText('text')
|
||||
}
|
||||
testConnector.syncAll()
|
||||
result.testObjects = result.users.map(initTestObject || (() => null))
|
||||
useV1Encoding()
|
||||
return /** @type {any} */ (result)
|
||||
}
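/**
 * Illustrative sketch only (hypothetical example): how `init` is typically
 * consumed by a test. It returns pre-synced users plus convenience handles
 * (array0, map0, text0, ...) for each user's shared types.
 *
 * @param {t.TestCase} tc
 */
export const _exampleInitUsage = tc => {
  const { testConnector, users, array0, array1 } = init(tc, { users: 2 })
  array0.insert(0, ['a'])
  testConnector.syncAll()
  t.compare(array1.toArray(), ['a'])
  return users
}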
|
||||
|
||||
/**
|
||||
* 1. reconnect and flush all
|
||||
* 2. user 0 gc
|
||||
* 3. get type content
|
||||
* 4. disconnect & reconnect all (so gc is propagated)
|
||||
* 5. compare os, ds, ss
|
||||
*
|
||||
* @param {Array<TestYInstance>} users
|
||||
*/
|
||||
export const compare = users => {
|
||||
users.forEach(u => u.connect())
|
||||
while (users[0].tc.flushAllMessages()) {} // eslint-disable-line
|
||||
// For each document, merge all received document updates with Y.mergeUpdates and create a new document which will be added to the list of "users"
|
||||
// This ensures that mergeUpdates works correctly
|
||||
const mergedDocs = users.map(user => {
|
||||
const ydoc = new Y.Doc()
|
||||
enc.applyUpdate(ydoc, enc.mergeUpdates(user.updates))
|
||||
return ydoc
|
||||
})
|
||||
users.push(.../** @type {any} */(mergedDocs))
|
||||
const userArrayValues = users.map(u => u.getArray('array').toJSON())
|
||||
const userMapValues = users.map(u => u.getMap('map').toJSON())
|
||||
const userXmlValues = users.map(u => u.get('xml', Y.XmlElement).toString())
|
||||
const userTextValues = users.map(u => u.getText('text').toDelta())
|
||||
for (const u of users) {
|
||||
t.assert(u.store.pendingDs === null)
|
||||
t.assert(u.store.pendingStructs === null)
|
||||
}
|
||||
// Test Array iterator
|
||||
t.compare(users[0].getArray('array').toArray(), Array.from(users[0].getArray('array')))
|
||||
// Test Map iterator
|
||||
const ymapkeys = Array.from(users[0].getMap('map').keys())
|
||||
t.assert(ymapkeys.length === Object.keys(userMapValues[0]).length)
|
||||
ymapkeys.forEach(key => t.assert(object.hasProperty(userMapValues[0], key)))
|
||||
/**
|
||||
* @type {Object<string,any>}
|
||||
*/
|
||||
const mapRes = {}
|
||||
for (const [k, v] of users[0].getMap('map')) {
|
||||
mapRes[k] = v instanceof Y.AbstractType ? v.toJSON() : v
|
||||
}
|
||||
t.compare(userMapValues[0], mapRes)
|
||||
// Compare all users
|
||||
for (let i = 0; i < users.length - 1; i++) {
|
||||
t.compare(userArrayValues[i].length, users[i].getArray('array').length)
|
||||
t.compare(userArrayValues[i], userArrayValues[i + 1])
|
||||
t.compare(userMapValues[i], userMapValues[i + 1])
|
||||
t.compare(userXmlValues[i], userXmlValues[i + 1])
|
||||
t.compare(userTextValues[i].map(/** @param {any} a */ a => typeof a.insert === 'string' ? a.insert : ' ').join('').length, users[i].getText('text').length)
|
||||
t.compare(userTextValues[i], userTextValues[i + 1], '', (_constructor, a, b) => {
|
||||
if (a instanceof Y.AbstractType) {
|
||||
t.compare(a.toJSON(), b.toJSON())
|
||||
} else if (a !== b) {
|
||||
t.fail('Deltas don\'t match')
|
||||
}
|
||||
return true
|
||||
})
|
||||
t.compare(Y.encodeStateVector(users[i]), Y.encodeStateVector(users[i + 1]))
|
||||
Y.equalDeleteSets(Y.createDeleteSetFromStructStore(users[i].store), Y.createDeleteSetFromStructStore(users[i + 1].store))
|
||||
compareStructStores(users[i].store, users[i + 1].store)
|
||||
t.compare(Y.encodeSnapshot(Y.snapshot(users[i])), Y.encodeSnapshot(Y.snapshot(users[i + 1])))
|
||||
}
|
||||
users.map(u => u.destroy())
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Y.Item?} a
|
||||
* @param {Y.Item?} b
|
||||
* @return {boolean}
|
||||
*/
|
||||
export const compareItemIDs = (a, b) => a === b || (a !== null && b != null && Y.compareIDs(a.id, b.id))
|
||||
|
||||
/**
|
||||
* @param {import('../src/internals.js').StructStore} ss1
|
||||
* @param {import('../src/internals.js').StructStore} ss2
|
||||
*/
|
||||
export const compareStructStores = (ss1, ss2) => {
|
||||
t.assert(ss1.clients.size === ss2.clients.size)
|
||||
for (const [client, structs1] of ss1.clients) {
|
||||
const structs2 = /** @type {Array<Y.AbstractStruct>} */ (ss2.clients.get(client))
|
||||
t.assert(structs2 !== undefined && structs1.length === structs2.length)
|
||||
for (let i = 0; i < structs1.length; i++) {
|
||||
const s1 = structs1[i]
|
||||
const s2 = structs2[i]
|
||||
// checks for abstract struct
|
||||
if (
|
||||
s1.constructor !== s2.constructor ||
|
||||
!Y.compareIDs(s1.id, s2.id) ||
|
||||
s1.deleted !== s2.deleted ||
|
||||
// @ts-ignore
|
||||
s1.length !== s2.length
|
||||
) {
|
||||
t.fail('Structs don\'t match')
|
||||
}
|
||||
if (s1 instanceof Y.Item) {
|
||||
if (
|
||||
!(s2 instanceof Y.Item) ||
|
||||
!((s1.left === null && s2.left === null) || (s1.left !== null && s2.left !== null && Y.compareIDs(s1.left.lastId, s2.left.lastId))) ||
|
||||
!compareItemIDs(s1.right, s2.right) ||
|
||||
!Y.compareIDs(s1.origin, s2.origin) ||
|
||||
!Y.compareIDs(s1.rightOrigin, s2.rightOrigin) ||
|
||||
s1.parentSub !== s2.parentSub
|
||||
) {
|
||||
return t.fail('Items don\'t match')
|
||||
}
|
||||
// make sure that items are connected correctly
|
||||
t.assert(s1.left === null || s1.left.right === s1)
|
||||
t.assert(s1.right === null || s1.right.left === s1)
|
||||
t.assert(s2.left === null || s2.left.right === s2)
|
||||
t.assert(s2.right === null || s2.right.left === s2)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @callback InitTestObjectCallback
|
||||
* @param {TestYInstance} y
|
||||
* @return {T}
|
||||
*/
|
||||
|
||||
/**
|
||||
* @template T
|
||||
* @param {t.TestCase} tc
|
||||
* @param {Array<function(Y.Doc,prng.PRNG,T):void>} mods
|
||||
* @param {number} iterations
|
||||
* @param {InitTestObjectCallback<T>} [initTestObject]
|
||||
*/
|
||||
export const applyRandomTests = (tc, mods, iterations, initTestObject) => {
|
||||
const gen = tc.prng
|
||||
const result = init(tc, { users: 5 }, initTestObject)
|
||||
const { testConnector, users } = result
|
||||
for (let i = 0; i < iterations; i++) {
|
||||
if (prng.int32(gen, 0, 100) <= 2) {
|
||||
// 2% chance to disconnect/reconnect a random user
|
||||
if (prng.bool(gen)) {
|
||||
testConnector.disconnectRandom()
|
||||
} else {
|
||||
testConnector.reconnectRandom()
|
||||
}
|
||||
} else if (prng.int32(gen, 0, 100) <= 1) {
|
||||
// 1% chance to flush all
|
||||
testConnector.flushAllMessages()
|
||||
} else if (prng.int32(gen, 0, 100) <= 50) {
|
||||
// 50% chance to flush a random message
|
||||
testConnector.flushRandomMessage()
|
||||
}
|
||||
const user = prng.int32(gen, 0, users.length - 1)
|
||||
const test = prng.oneOf(gen, mods)
|
||||
test(users[user], gen, result.testObjects[user])
|
||||
}
|
||||
compare(users)
|
||||
return result
|
||||
}
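/**
 * Illustrative sketch only (hypothetical example): how a randomized test
 * drives `applyRandomTests`. Each "mod" mutates one user's document; the
 * helper interleaves the mods with random disconnects, reconnects and
 * message flushes and finally calls `compare` on all users.
 *
 * @param {t.TestCase} tc
 */
export const _exampleRandomTestUsage = tc => {
  /**
   * @param {Y.Doc} user
   * @param {prng.PRNG} gen
   * @param {any} _testObject
   */
  const insertRandomNumber = (user, gen, _testObject) => {
    user.getArray('array').insert(0, [prng.int32(gen, 0, 10)])
  }
  applyRandomTests(tc, [insertRandomNumber], 20)
}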
|
813
tests/undo-redo.tests.js
Normal file
@ -0,0 +1,813 @@
|
||||
import { init } from './testHelper.js' // eslint-disable-line
|
||||
|
||||
import * as Y from '../src/index.js'
|
||||
import * as t from 'lib0/testing'
|
||||
|
||||
export const testInconsistentFormat = () => {
|
||||
/**
|
||||
* @param {Y.Doc} ydoc
|
||||
*/
|
||||
const testYjsMerge = ydoc => {
|
||||
const content = /** @type {Y.XmlText} */ (ydoc.get('text', Y.XmlText))
|
||||
content.format(0, 6, { bold: null })
|
||||
content.format(6, 4, { type: 'text' })
|
||||
t.compare(content.toDelta(), [
|
||||
{
|
||||
attributes: { type: 'text' },
|
||||
insert: 'Merge Test'
|
||||
},
|
||||
{
|
||||
attributes: { type: 'text', italic: true },
|
||||
insert: ' After'
|
||||
}
|
||||
])
|
||||
}
|
||||
const initializeYDoc = () => {
|
||||
const yDoc = new Y.Doc({ gc: false })
|
||||
|
||||
const content = /** @type {Y.XmlText} */ (yDoc.get('text', Y.XmlText))
|
||||
content.insert(0, ' After', { type: 'text', italic: true })
|
||||
content.insert(0, 'Test', { type: 'text' })
|
||||
content.insert(0, 'Merge ', { type: 'text', bold: true })
|
||||
return yDoc
|
||||
}
|
||||
{
|
||||
const yDoc = initializeYDoc()
|
||||
testYjsMerge(yDoc)
|
||||
}
|
||||
{
|
||||
const initialYDoc = initializeYDoc()
|
||||
const yDoc = new Y.Doc({ gc: false })
|
||||
Y.applyUpdate(yDoc, Y.encodeStateAsUpdate(initialYDoc))
|
||||
testYjsMerge(yDoc)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testInfiniteCaptureTimeout = tc => {
|
||||
const { array0 } = init(tc, { users: 3 })
|
||||
const undoManager = new Y.UndoManager(array0, { captureTimeout: Number.MAX_VALUE })
|
||||
array0.push([1, 2, 3])
|
||||
undoManager.stopCapturing()
|
||||
array0.push([4, 5, 6])
|
||||
undoManager.undo()
|
||||
t.compare(array0.toArray(), [1, 2, 3])
|
||||
}
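/**
 * Illustrative sketch only (hypothetical test, mirroring behaviour used
 * elsewhere in this file): with the default `captureTimeout`, edits made in
 * quick succession are merged into a single stack item; with
 * `captureTimeout: 0` every transaction becomes its own undo step, and
 * `stopCapturing()` forces a cut in either mode.
 *
 * @param {t.TestCase} _tc
 */
export const _exampleCaptureTimeoutSketch = _tc => {
  const doc = new Y.Doc()
  const text = doc.getText()
  const undoManager = new Y.UndoManager(text, { captureTimeout: 0 })
  text.insert(0, 'a')
  text.insert(1, 'b')
  undoManager.undo() // only the latest transaction is reverted
  t.assert(text.toString() === 'a')
}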
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testUndoText = tc => {
|
||||
const { testConnector, text0, text1 } = init(tc, { users: 3 })
|
||||
const undoManager = new Y.UndoManager(text0)
|
||||
|
||||
// items that are added & deleted in the same transaction won't be undone
|
||||
text0.insert(0, 'test')
|
||||
text0.delete(0, 4)
|
||||
undoManager.undo()
|
||||
t.assert(text0.toString() === '')
|
||||
|
||||
// follow redone items
|
||||
text0.insert(0, 'a')
|
||||
undoManager.stopCapturing()
|
||||
text0.delete(0, 1)
|
||||
undoManager.stopCapturing()
|
||||
undoManager.undo()
|
||||
t.assert(text0.toString() === 'a')
|
||||
undoManager.undo()
|
||||
t.assert(text0.toString() === '')
|
||||
|
||||
text0.insert(0, 'abc')
|
||||
text1.insert(0, 'xyz')
|
||||
testConnector.syncAll()
|
||||
undoManager.undo()
|
||||
t.assert(text0.toString() === 'xyz')
|
||||
undoManager.redo()
|
||||
t.assert(text0.toString() === 'abcxyz')
|
||||
testConnector.syncAll()
|
||||
text1.delete(0, 1)
|
||||
testConnector.syncAll()
|
||||
undoManager.undo()
|
||||
t.assert(text0.toString() === 'xyz')
|
||||
undoManager.redo()
|
||||
t.assert(text0.toString() === 'bcxyz')
|
||||
// test marks
|
||||
text0.format(1, 3, { bold: true })
|
||||
t.compare(text0.toDelta(), [{ insert: 'b' }, { insert: 'cxy', attributes: { bold: true } }, { insert: 'z' }])
|
||||
undoManager.undo()
|
||||
t.compare(text0.toDelta(), [{ insert: 'bcxyz' }])
|
||||
undoManager.redo()
|
||||
t.compare(text0.toDelta(), [{ insert: 'b' }, { insert: 'cxy', attributes: { bold: true } }, { insert: 'z' }])
|
||||
}
|
||||
|
||||
/**
|
||||
* Test case to fix #241
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testEmptyTypeScope = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const um = new Y.UndoManager([], { doc: ydoc })
|
||||
const yarray = ydoc.getArray()
|
||||
um.addToScope(yarray)
|
||||
yarray.insert(0, [1])
|
||||
um.undo()
|
||||
t.assert(yarray.length === 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testRejectUpdateExample = _tc => {
|
||||
const tmpydoc1 = new Y.Doc()
|
||||
tmpydoc1.getArray('restricted').insert(0, [1])
|
||||
tmpydoc1.getArray('public').insert(0, [1])
|
||||
const update1 = Y.encodeStateAsUpdate(tmpydoc1)
|
||||
const tmpydoc2 = new Y.Doc()
|
||||
tmpydoc2.getArray('public').insert(0, [2])
|
||||
const update2 = Y.encodeStateAsUpdate(tmpydoc2)
|
||||
|
||||
const ydoc = new Y.Doc()
|
||||
const restrictedType = ydoc.getArray('restricted')
|
||||
|
||||
/**
|
||||
* Assume this function handles incoming updates via a communication channel like websockets.
|
||||
* Changes to the `ydoc.getMap('restricted')` type should be rejected.
|
||||
*
|
||||
* - set up undo manager on the restricted types
|
||||
* - cache pending* updates from the Ydoc to avoid certain attacks
|
||||
* - apply received update and check whether the restricted type (or any of its children) has been changed.
|
||||
* - catch errors that might try to circumvent the restrictions
|
||||
* - undo changes on restricted types
|
||||
* - reapply pending* updates
|
||||
*
|
||||
* @param {Uint8Array} update
|
||||
*/
|
||||
const updateHandler = (update) => {
|
||||
// don't handle changes of the local undo manager, which is used to undo invalid changes
|
||||
const um = new Y.UndoManager(restrictedType, { trackedOrigins: new Set(['remote change']) })
|
||||
const beforePendingDs = ydoc.store.pendingDs
|
||||
const beforePendingStructs = ydoc.store.pendingStructs?.update
|
||||
try {
|
||||
Y.applyUpdate(ydoc, update, 'remote change')
|
||||
} finally {
|
||||
while (um.undoStack.length) {
|
||||
um.undo()
|
||||
}
|
||||
um.destroy()
|
||||
ydoc.store.pendingDs = beforePendingDs
|
||||
ydoc.store.pendingStructs = null
|
||||
if (beforePendingStructs) {
|
||||
Y.applyUpdateV2(ydoc, beforePendingStructs)
|
||||
}
|
||||
}
|
||||
}
|
||||
updateHandler(update1)
|
||||
updateHandler(update2)
|
||||
t.assert(restrictedType.length === 0)
|
||||
t.assert(ydoc.getArray('public').length === 2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Test case to fix #241
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testGlobalScope = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const um = new Y.UndoManager(ydoc)
|
||||
const yarray = ydoc.getArray()
|
||||
yarray.insert(0, [1])
|
||||
um.undo()
|
||||
t.assert(yarray.length === 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* Test case to fix #241
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testDoubleUndo = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const text = doc.getText()
|
||||
text.insert(0, '1221')
|
||||
|
||||
const manager = new Y.UndoManager(text)
|
||||
|
||||
text.insert(2, '3')
|
||||
text.insert(3, '3')
|
||||
|
||||
manager.undo()
|
||||
manager.undo()
|
||||
|
||||
text.insert(2, '3')
|
||||
|
||||
t.compareStrings(text.toString(), '12321')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testUndoMap = tc => {
|
||||
const { testConnector, map0, map1 } = init(tc, { users: 2 })
|
||||
map0.set('a', 0)
|
||||
const undoManager = new Y.UndoManager(map0)
|
||||
map0.set('a', 1)
|
||||
undoManager.undo()
|
||||
t.assert(map0.get('a') === 0)
|
||||
undoManager.redo()
|
||||
t.assert(map0.get('a') === 1)
|
||||
// test sub-types and whether a whole type can be restored
|
||||
const subType = new Y.Map()
|
||||
map0.set('a', subType)
|
||||
subType.set('x', 42)
|
||||
t.compare(map0.toJSON(), /** @type {any} */ ({ a: { x: 42 } }))
|
||||
undoManager.undo()
|
||||
t.assert(map0.get('a') === 1)
|
||||
undoManager.redo()
|
||||
t.compare(map0.toJSON(), /** @type {any} */ ({ a: { x: 42 } }))
|
||||
testConnector.syncAll()
|
||||
// if content is overwritten by another user, undo operations should be skipped
|
||||
map1.set('a', 44)
|
||||
testConnector.syncAll()
|
||||
undoManager.undo()
|
||||
t.assert(map0.get('a') === 44)
|
||||
undoManager.redo()
|
||||
t.assert(map0.get('a') === 44)
|
||||
|
||||
// test setting value multiple times
|
||||
map0.set('b', 'initial')
|
||||
undoManager.stopCapturing()
|
||||
map0.set('b', 'val1')
|
||||
map0.set('b', 'val2')
|
||||
undoManager.stopCapturing()
|
||||
undoManager.undo()
|
||||
t.assert(map0.get('b') === 'initial')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testUndoArray = tc => {
|
||||
const { testConnector, array0, array1 } = init(tc, { users: 3 })
|
||||
const undoManager = new Y.UndoManager(array0)
|
||||
array0.insert(0, [1, 2, 3])
|
||||
array1.insert(0, [4, 5, 6])
|
||||
testConnector.syncAll()
|
||||
t.compare(array0.toArray(), [1, 2, 3, 4, 5, 6])
|
||||
undoManager.undo()
|
||||
t.compare(array0.toArray(), [4, 5, 6])
|
||||
undoManager.redo()
|
||||
t.compare(array0.toArray(), [1, 2, 3, 4, 5, 6])
|
||||
testConnector.syncAll()
|
||||
array1.delete(0, 1) // user1 deletes [1]
|
||||
testConnector.syncAll()
|
||||
undoManager.undo()
|
||||
t.compare(array0.toArray(), [4, 5, 6])
|
||||
undoManager.redo()
|
||||
t.compare(array0.toArray(), [2, 3, 4, 5, 6])
|
||||
array0.delete(0, 5)
|
||||
// test nested structure
|
||||
const ymap = new Y.Map()
|
||||
array0.insert(0, [ymap])
|
||||
t.compare(array0.toJSON(), [{}])
|
||||
undoManager.stopCapturing()
|
||||
ymap.set('a', 1)
|
||||
t.compare(array0.toJSON(), [{ a: 1 }])
|
||||
undoManager.undo()
|
||||
t.compare(array0.toJSON(), [{}])
|
||||
undoManager.undo()
|
||||
t.compare(array0.toJSON(), [2, 3, 4, 5, 6])
|
||||
undoManager.redo()
|
||||
t.compare(array0.toJSON(), [{}])
|
||||
undoManager.redo()
|
||||
t.compare(array0.toJSON(), [{ a: 1 }])
|
||||
testConnector.syncAll()
|
||||
array1.get(0).set('b', 2)
|
||||
testConnector.syncAll()
|
||||
t.compare(array0.toJSON(), [{ a: 1, b: 2 }])
|
||||
undoManager.undo()
|
||||
t.compare(array0.toJSON(), [{ b: 2 }])
|
||||
undoManager.undo()
|
||||
t.compare(array0.toJSON(), [2, 3, 4, 5, 6])
|
||||
undoManager.redo()
|
||||
t.compare(array0.toJSON(), [{ b: 2 }])
|
||||
undoManager.redo()
|
||||
t.compare(array0.toJSON(), [{ a: 1, b: 2 }])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testUndoXml = tc => {
|
||||
const { xml0 } = init(tc, { users: 3 })
|
||||
const undoManager = new Y.UndoManager(xml0)
|
||||
const child = new Y.XmlElement('p')
|
||||
xml0.insert(0, [child])
|
||||
const textchild = new Y.XmlText('content')
|
||||
child.insert(0, [textchild])
|
||||
t.assert(xml0.toString() === '<undefined><p>content</p></undefined>')
|
||||
// format textchild and revert that change
|
||||
undoManager.stopCapturing()
|
||||
textchild.format(3, 4, { bold: {} })
|
||||
t.assert(xml0.toString() === '<undefined><p>con<bold>tent</bold></p></undefined>')
|
||||
undoManager.undo()
|
||||
t.assert(xml0.toString() === '<undefined><p>content</p></undefined>')
|
||||
undoManager.redo()
|
||||
t.assert(xml0.toString() === '<undefined><p>con<bold>tent</bold></p></undefined>')
|
||||
xml0.delete(0, 1)
|
||||
t.assert(xml0.toString() === '<undefined></undefined>')
|
||||
undoManager.undo()
|
||||
t.assert(xml0.toString() === '<undefined><p>con<bold>tent</bold></p></undefined>')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testUndoEvents = tc => {
|
||||
const { text0 } = init(tc, { users: 3 })
|
||||
const undoManager = new Y.UndoManager(text0)
|
||||
let counter = 0
|
||||
let receivedMetadata = -1
|
||||
undoManager.on('stack-item-added', /** @param {any} event */ event => {
|
||||
t.assert(event.type != null)
|
||||
t.assert(event.changedParentTypes != null && event.changedParentTypes.has(text0))
|
||||
event.stackItem.meta.set('test', counter++)
|
||||
})
|
||||
undoManager.on('stack-item-popped', /** @param {any} event */ event => {
|
||||
t.assert(event.type != null)
|
||||
t.assert(event.changedParentTypes != null && event.changedParentTypes.has(text0))
|
||||
receivedMetadata = event.stackItem.meta.get('test')
|
||||
})
|
||||
text0.insert(0, 'abc')
|
||||
undoManager.undo()
|
||||
t.assert(receivedMetadata === 0)
|
||||
undoManager.redo()
|
||||
t.assert(receivedMetadata === 1)
|
||||
}
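/**
 * Illustrative sketch only (hypothetical test): the stack-item meta map is
 * the place to attach application state. `stack-item-added` stores it (the
 * test above stores a counter; an editor might store cursor positions) and
 * `stack-item-popped` reads it back when the item is undone or redone.
 *
 * @param {t.TestCase} _tc
 */
export const _exampleStackItemMetaSketch = _tc => {
  const doc = new Y.Doc()
  const text = doc.getText('text')
  const undoManager = new Y.UndoManager(text)
  undoManager.on('stack-item-added', /** @param {any} event */ event => {
    event.stackItem.meta.set('saved', 'some app state') // hypothetical payload
  })
  let restored = null
  undoManager.on('stack-item-popped', /** @param {any} event */ event => {
    restored = event.stackItem.meta.get('saved')
  })
  text.insert(0, 'abc')
  undoManager.undo()
  t.assert(restored === 'some app state')
}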
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testTrackClass = tc => {
|
||||
const { users, text0 } = init(tc, { users: 3 })
|
||||
// only track origins that are numbers
|
||||
const undoManager = new Y.UndoManager(text0, { trackedOrigins: new Set([Number]) })
|
||||
users[0].transact(() => {
|
||||
text0.insert(0, 'abc')
|
||||
}, 42)
|
||||
t.assert(text0.toString() === 'abc')
|
||||
undoManager.undo()
|
||||
t.assert(text0.toString() === '')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testTypeScope = tc => {
|
||||
const { array0 } = init(tc, { users: 3 })
|
||||
// scope the undo managers to specific nested types
|
||||
const text0 = new Y.Text()
|
||||
const text1 = new Y.Text()
|
||||
array0.insert(0, [text0, text1])
|
||||
const undoManager = new Y.UndoManager(text0)
|
||||
const undoManagerBoth = new Y.UndoManager([text0, text1])
|
||||
text1.insert(0, 'abc')
|
||||
t.assert(undoManager.undoStack.length === 0)
|
||||
t.assert(undoManagerBoth.undoStack.length === 1)
|
||||
t.assert(text1.toString() === 'abc')
|
||||
undoManager.undo()
|
||||
t.assert(text1.toString() === 'abc')
|
||||
undoManagerBoth.undo()
|
||||
t.assert(text1.toString() === '')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testUndoInEmbed = tc => {
|
||||
const { text0 } = init(tc, { users: 3 })
|
||||
const undoManager = new Y.UndoManager(text0)
|
||||
const nestedText = new Y.Text('initial text')
|
||||
undoManager.stopCapturing()
|
||||
text0.insertEmbed(0, nestedText, { bold: true })
|
||||
t.assert(nestedText.toString() === 'initial text')
|
||||
undoManager.stopCapturing()
|
||||
nestedText.delete(0, nestedText.length)
|
||||
nestedText.insert(0, 'other text')
|
||||
t.assert(nestedText.toString() === 'other text')
|
||||
undoManager.undo()
|
||||
t.assert(nestedText.toString() === 'initial text')
|
||||
undoManager.undo()
|
||||
t.assert(text0.length === 0)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testUndoDeleteFilter = tc => {
|
||||
/**
|
||||
* @type {Y.Array<any>}
|
||||
*/
|
||||
const array0 = /** @type {any} */ (init(tc, { users: 3 }).array0)
|
||||
const undoManager = new Y.UndoManager(array0, { deleteFilter: item => !(item instanceof Y.Item) || (item.content instanceof Y.ContentType && item.content.type._map.size === 0) })
|
||||
const map0 = new Y.Map()
|
||||
map0.set('hi', 1)
|
||||
const map1 = new Y.Map()
|
||||
array0.insert(0, [map0, map1])
|
||||
undoManager.undo()
|
||||
t.assert(array0.length === 1)
|
||||
array0.get(0)
|
||||
t.assert(Array.from(array0.get(0).keys()).length === 1)
|
||||
}
|
||||
|
||||
/**
|
||||
* This issue has been reported in https://discuss.yjs.dev/t/undomanager-with-external-updates/454/6
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testUndoUntilChangePerformed = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const doc2 = new Y.Doc()
|
||||
doc.on('update', update => Y.applyUpdate(doc2, update))
|
||||
doc2.on('update', update => Y.applyUpdate(doc, update))
|
||||
|
||||
const yArray = doc.getArray('array')
|
||||
const yArray2 = doc2.getArray('array')
|
||||
const yMap = new Y.Map()
|
||||
yMap.set('hello', 'world')
|
||||
yArray.push([yMap])
|
||||
const yMap2 = new Y.Map()
|
||||
yMap2.set('key', 'value')
|
||||
yArray.push([yMap2])
|
||||
|
||||
const undoManager = new Y.UndoManager([yArray], { trackedOrigins: new Set([doc.clientID]) })
|
||||
const undoManager2 = new Y.UndoManager([doc2.get('array')], { trackedOrigins: new Set([doc2.clientID]) })
|
||||
|
||||
Y.transact(doc, () => yMap2.set('key', 'value modified'), doc.clientID)
|
||||
undoManager.stopCapturing()
|
||||
Y.transact(doc, () => yMap.set('hello', 'world modified'), doc.clientID)
|
||||
Y.transact(doc2, () => yArray2.delete(0), doc2.clientID)
|
||||
undoManager2.undo()
|
||||
undoManager.undo()
|
||||
t.compareStrings(yMap2.get('key'), 'value')
|
||||
}
|
||||
|
||||
/**
|
||||
* This issue has been reported in https://github.com/yjs/yjs/issues/317
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testUndoNestedUndoIssue = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
const design = doc.getMap()
|
||||
const undoManager = new Y.UndoManager(design, { captureTimeout: 0 })
|
||||
|
||||
/**
|
||||
* @type {Y.Map<any>}
|
||||
*/
|
||||
const text = new Y.Map()
|
||||
|
||||
const blocks1 = new Y.Array()
|
||||
const blocks1block = new Y.Map()
|
||||
|
||||
doc.transact(() => {
|
||||
blocks1block.set('text', 'Type Something')
|
||||
blocks1.push([blocks1block])
|
||||
text.set('blocks', blocks1block)
|
||||
design.set('text', text)
|
||||
})
|
||||
|
||||
const blocks2 = new Y.Array()
|
||||
const blocks2block = new Y.Map()
|
||||
doc.transact(() => {
|
||||
blocks2block.set('text', 'Something')
|
||||
blocks2.push([blocks2block])
|
||||
text.set('blocks', blocks2block)
|
||||
})
|
||||
|
||||
const blocks3 = new Y.Array()
|
||||
const blocks3block = new Y.Map()
|
||||
doc.transact(() => {
|
||||
blocks3block.set('text', 'Something Else')
|
||||
blocks3.push([blocks3block])
|
||||
text.set('blocks', blocks3block)
|
||||
})
|
||||
|
||||
t.compare(design.toJSON(), { text: { blocks: { text: 'Something Else' } } })
|
||||
undoManager.undo()
|
||||
t.compare(design.toJSON(), { text: { blocks: { text: 'Something' } } })
|
||||
undoManager.undo()
|
||||
t.compare(design.toJSON(), { text: { blocks: { text: 'Type Something' } } })
|
||||
undoManager.undo()
|
||||
t.compare(design.toJSON(), { })
|
||||
undoManager.redo()
|
||||
t.compare(design.toJSON(), { text: { blocks: { text: 'Type Something' } } })
|
||||
undoManager.redo()
|
||||
t.compare(design.toJSON(), { text: { blocks: { text: 'Something' } } })
|
||||
undoManager.redo()
|
||||
t.compare(design.toJSON(), { text: { blocks: { text: 'Something Else' } } })
|
||||
}
|
||||
|
||||
/**
|
||||
* This issue has been reported in https://github.com/yjs/yjs/issues/355
|
||||
*
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testConsecutiveRedoBug = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const yRoot = doc.getMap()
|
||||
const undoMgr = new Y.UndoManager(yRoot)
|
||||
|
||||
let yPoint = new Y.Map()
|
||||
yPoint.set('x', 0)
|
||||
yPoint.set('y', 0)
|
||||
yRoot.set('a', yPoint)
|
||||
undoMgr.stopCapturing()
|
||||
|
||||
yPoint.set('x', 100)
|
||||
yPoint.set('y', 100)
|
||||
undoMgr.stopCapturing()
|
||||
|
||||
yPoint.set('x', 200)
|
||||
yPoint.set('y', 200)
|
||||
undoMgr.stopCapturing()
|
||||
|
||||
yPoint.set('x', 300)
|
||||
yPoint.set('y', 300)
|
||||
undoMgr.stopCapturing()
|
||||
|
||||
t.compare(yPoint.toJSON(), { x: 300, y: 300 })
|
||||
|
||||
undoMgr.undo() // x=200, y=200
|
||||
t.compare(yPoint.toJSON(), { x: 200, y: 200 })
|
||||
undoMgr.undo() // x=100, y=100
|
||||
t.compare(yPoint.toJSON(), { x: 100, y: 100 })
|
||||
undoMgr.undo() // x=0, y=0
|
||||
t.compare(yPoint.toJSON(), { x: 0, y: 0 })
|
||||
undoMgr.undo() // nil
|
||||
t.compare(yRoot.get('a'), undefined)
|
||||
|
||||
undoMgr.redo() // x=0, y=0
|
||||
yPoint = yRoot.get('a')
|
||||
|
||||
t.compare(yPoint.toJSON(), { x: 0, y: 0 })
|
||||
undoMgr.redo() // x=100, y=100
|
||||
t.compare(yPoint.toJSON(), { x: 100, y: 100 })
|
||||
undoMgr.redo() // x=200, y=200
|
||||
t.compare(yPoint.toJSON(), { x: 200, y: 200 })
|
||||
undoMgr.redo() // expected x=300, y=300, actually nil
|
||||
t.compare(yPoint.toJSON(), { x: 300, y: 300 })
|
||||
}
|
||||
|
||||
/**
|
||||
* This issue has been reported in https://github.com/yjs/yjs/issues/304
|
||||
*
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testUndoXmlBug = _tc => {
|
||||
const origin = 'origin'
|
||||
const doc = new Y.Doc()
|
||||
const fragment = doc.getXmlFragment('t')
|
||||
const undoManager = new Y.UndoManager(fragment, {
|
||||
captureTimeout: 0,
|
||||
trackedOrigins: new Set([origin])
|
||||
})
|
||||
|
||||
// create element
|
||||
doc.transact(() => {
|
||||
const e = new Y.XmlElement('test-node')
|
||||
e.setAttribute('a', '100')
|
||||
e.setAttribute('b', '0')
|
||||
fragment.insert(fragment.length, [e])
|
||||
}, origin)
|
||||
|
||||
// change one attribute
|
||||
doc.transact(() => {
|
||||
const e = fragment.get(0)
|
||||
e.setAttribute('a', '200')
|
||||
}, origin)
|
||||
|
||||
// change both attributes
|
||||
doc.transact(() => {
|
||||
const e = fragment.get(0)
|
||||
e.setAttribute('a', '180')
|
||||
e.setAttribute('b', '50')
|
||||
}, origin)
|
||||
|
||||
undoManager.undo()
|
||||
undoManager.undo()
|
||||
undoManager.undo()
|
||||
|
||||
undoManager.redo()
|
||||
undoManager.redo()
|
||||
undoManager.redo()
|
||||
t.compare(fragment.toString(), '<test-node a="180" b="50"></test-node>')
|
||||
}
|
||||
|
||||
/**
|
||||
* This issue has been reported in https://github.com/yjs/yjs/issues/343
|
||||
*
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testUndoBlockBug = _tc => {
|
||||
const doc = new Y.Doc({ gc: false })
|
||||
const design = doc.getMap()
|
||||
|
||||
const undoManager = new Y.UndoManager(design, { captureTimeout: 0 })
|
||||
|
||||
const text = new Y.Map()
|
||||
|
||||
const blocks1 = new Y.Array()
|
||||
const blocks1block = new Y.Map()
|
||||
doc.transact(() => {
|
||||
blocks1block.set('text', '1')
|
||||
blocks1.push([blocks1block])
|
||||
|
||||
text.set('blocks', blocks1block)
|
||||
design.set('text', text)
|
||||
})
|
||||
|
||||
const blocks2 = new Y.Array()
|
||||
const blocks2block = new Y.Map()
|
||||
doc.transact(() => {
|
||||
blocks2block.set('text', '2')
|
||||
blocks2.push([blocks2block])
|
||||
text.set('blocks', blocks2block)
|
||||
})
|
||||
|
||||
const blocks3 = new Y.Array()
|
||||
const blocks3block = new Y.Map()
|
||||
doc.transact(() => {
|
||||
blocks3block.set('text', '3')
|
||||
blocks3.push([blocks3block])
|
||||
text.set('blocks', blocks3block)
|
||||
})
|
||||
|
||||
const blocks4 = new Y.Array()
|
||||
const blocks4block = new Y.Map()
|
||||
doc.transact(() => {
|
||||
blocks4block.set('text', '4')
|
||||
blocks4.push([blocks4block])
|
||||
text.set('blocks', blocks4block)
|
||||
})
|
||||
|
||||
// {"text":{"blocks":{"text":"4"}}}
|
||||
undoManager.undo() // {"text":{"blocks":{"text":"3"}}}
|
||||
undoManager.undo() // {"text":{"blocks":{"text":"2"}}}
|
||||
undoManager.undo() // {"text":{"blocks":{"text":"1"}}}
|
||||
undoManager.undo() // {}
|
||||
undoManager.redo() // {"text":{"blocks":{"text":"1"}}}
|
||||
undoManager.redo() // {"text":{"blocks":{"text":"2"}}}
|
||||
undoManager.redo() // {"text":{"blocks":{"text":"3"}}}
|
||||
undoManager.redo() // {"text":{}}
|
||||
t.compare(design.toJSON(), { text: { blocks: { text: '4' } } })
|
||||
}
|
||||
|
||||
/**
|
||||
* Undoing a text-formatting delete should not corrupt peer state.
|
||||
*
|
||||
* @see https://github.com/yjs/yjs/issues/392
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testUndoDeleteTextFormat = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const text = doc.getText()
|
||||
text.insert(0, 'Attack ships on fire off the shoulder of Orion.')
|
||||
const doc2 = new Y.Doc()
|
||||
const text2 = doc2.getText()
|
||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc))
|
||||
const undoManager = new Y.UndoManager(text)
|
||||
|
||||
text.format(13, 7, { bold: true })
|
||||
undoManager.stopCapturing()
|
||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc))
|
||||
|
||||
text.format(16, 4, { bold: null })
|
||||
undoManager.stopCapturing()
|
||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc))
|
||||
|
||||
undoManager.undo()
|
||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc))
|
||||
|
||||
const expect = [
|
||||
{ insert: 'Attack ships ' },
|
||||
{
|
||||
insert: 'on fire',
|
||||
attributes: { bold: true }
|
||||
},
|
||||
{ insert: ' off the shoulder of Orion.' }
|
||||
]
|
||||
t.compare(text.toDelta(), expect)
|
||||
t.compare(text2.toDelta(), expect)
|
||||
}
|
||||
|
||||
/**
|
||||
* Undoing local map changes with `ignoreRemoteMapChanges` enabled should not override concurrent remote changes.
|
||||
*
|
||||
* @see https://github.com/yjs/yjs/issues/392
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testBehaviorOfIgnoreremotemapchangesProperty = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const doc2 = new Y.Doc()
|
||||
doc.on('update', update => Y.applyUpdate(doc2, update, doc))
|
||||
doc2.on('update', update => Y.applyUpdate(doc, update, doc2))
|
||||
const map1 = doc.getMap()
|
||||
const map2 = doc2.getMap()
|
||||
const um1 = new Y.UndoManager(map1, { ignoreRemoteMapChanges: true })
|
||||
map1.set('x', 1)
|
||||
map2.set('x', 2)
|
||||
map1.set('x', 3)
|
||||
map2.set('x', 4)
|
||||
um1.undo()
|
||||
t.assert(map1.get('x') === 2)
|
||||
t.assert(map2.get('x') === 2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Special deletion case.
|
||||
*
|
||||
* @see https://github.com/yjs/yjs/issues/447
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testSpecialDeletionCase = _tc => {
|
||||
const origin = 'undoable'
|
||||
const doc = new Y.Doc()
|
||||
const fragment = doc.getXmlFragment()
|
||||
const undoManager = new Y.UndoManager(fragment, { trackedOrigins: new Set([origin]) })
|
||||
doc.transact(() => {
|
||||
const e = new Y.XmlElement('test')
|
||||
e.setAttribute('a', '1')
|
||||
e.setAttribute('b', '2')
|
||||
fragment.insert(0, [e])
|
||||
})
|
||||
t.compareStrings(fragment.toString(), '<test a="1" b="2"></test>')
|
||||
doc.transact(() => {
|
||||
// change attribute "b" and delete test-node
|
||||
const e = fragment.get(0)
|
||||
e.setAttribute('b', '3')
|
||||
fragment.delete(0)
|
||||
}, origin)
|
||||
t.compareStrings(fragment.toString(), '')
|
||||
undoManager.undo()
|
||||
t.compareStrings(fragment.toString(), '<test a="1" b="2"></test>')
|
||||
}
|
||||
|
||||
/**
|
||||
* Deleted entries in a map should be restored on undo.
|
||||
*
|
||||
* @see https://github.com/yjs/yjs/issues/500
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testUndoDeleteInMap = (tc) => {
|
||||
const { map0 } = init(tc, { users: 3 })
|
||||
const undoManager = new Y.UndoManager(map0, { captureTimeout: 0 })
|
||||
map0.set('a', 'a')
|
||||
map0.delete('a')
|
||||
map0.set('a', 'b')
|
||||
map0.delete('a')
|
||||
map0.set('a', 'c')
|
||||
map0.delete('a')
|
||||
map0.set('a', 'd')
|
||||
t.compare(map0.toJSON(), { a: 'd' })
|
||||
undoManager.undo()
|
||||
t.compare(map0.toJSON(), {})
|
||||
undoManager.undo()
|
||||
t.compare(map0.toJSON(), { a: 'c' })
|
||||
undoManager.undo()
|
||||
t.compare(map0.toJSON(), {})
|
||||
undoManager.undo()
|
||||
t.compare(map0.toJSON(), { a: 'b' })
|
||||
undoManager.undo()
|
||||
t.compare(map0.toJSON(), {})
|
||||
undoManager.undo()
|
||||
t.compare(map0.toJSON(), { a: 'a' })
|
||||
}
|
||||
|
||||
/**
|
||||
* It should expose the StackItem being processed while undoing or redoing
|
||||
*
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testUndoDoingStackItem = async (_tc) => {
|
||||
const doc = new Y.Doc()
|
||||
const text = doc.getText('text')
|
||||
const undoManager = new Y.UndoManager([text])
|
||||
undoManager.on('stack-item-added', /** @param {any} event */ event => {
|
||||
event.stackItem.meta.set('str', '42')
|
||||
})
|
||||
let metaUndo = /** @type {any} */ (null)
|
||||
let metaRedo = /** @type {any} */ (null)
|
||||
text.observe((event) => {
|
||||
const /** @type {Y.UndoManager} */ origin = event.transaction.origin
|
||||
if (origin === undoManager && origin.undoing) {
|
||||
metaUndo = origin.currStackItem?.meta.get('str')
|
||||
} else if (origin === undoManager && origin.redoing) {
|
||||
metaRedo = origin.currStackItem?.meta.get('str')
|
||||
}
|
||||
})
|
||||
text.insert(0, 'abc')
|
||||
undoManager.undo()
|
||||
undoManager.redo()
|
||||
t.compare(metaUndo, '42', 'currStackItem is accessible while undoing')
|
||||
t.compare(metaRedo, '42', 'currStackItem is accessible while redoing')
|
||||
t.compare(undoManager.currStackItem, null, 'currStackItem is null after observe/transaction')
|
||||
}
|
357
tests/updates.tests.js
Normal file
@ -0,0 +1,357 @@
|
||||
import * as t from 'lib0/testing'
|
||||
import { init, compare } from './testHelper.js' // eslint-disable-line
|
||||
import * as Y from '../src/index.js'
|
||||
import { readClientsStructRefs, readDeleteSet, UpdateDecoderV2, UpdateEncoderV2, writeDeleteSet } from '../src/internals.js'
|
||||
import * as encoding from 'lib0/encoding'
|
||||
import * as decoding from 'lib0/decoding'
|
||||
import * as object from 'lib0/object'
|
||||
|
||||
/**
|
||||
* @typedef {Object} Enc
|
||||
* @property {function(Array<Uint8Array>):Uint8Array} Enc.mergeUpdates
|
||||
* @property {function(Y.Doc):Uint8Array} Enc.encodeStateAsUpdate
|
||||
* @property {function(Y.Doc, Uint8Array):void} Enc.applyUpdate
|
||||
* @property {function(Uint8Array):void} Enc.logUpdate
|
||||
* @property {function(Uint8Array):{from:Map<number,number>,to:Map<number,number>}} Enc.parseUpdateMeta
|
||||
* @property {function(Y.Doc):Uint8Array} Enc.encodeStateVector
|
||||
* @property {function(Uint8Array):Uint8Array} Enc.encodeStateVectorFromUpdate
|
||||
* @property {'update'|'updateV2'} Enc.updateEventName
|
||||
* @property {string} Enc.description
|
||||
* @property {function(Uint8Array, Uint8Array):Uint8Array} Enc.diffUpdate
|
||||
*/
|
||||
|
||||
/**
|
||||
* @type {Enc}
|
||||
*/
|
||||
const encV1 = {
|
||||
mergeUpdates: Y.mergeUpdates,
|
||||
encodeStateAsUpdate: Y.encodeStateAsUpdate,
|
||||
applyUpdate: Y.applyUpdate,
|
||||
logUpdate: Y.logUpdate,
|
||||
parseUpdateMeta: Y.parseUpdateMeta,
|
||||
encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdate,
|
||||
encodeStateVector: Y.encodeStateVector,
|
||||
updateEventName: 'update',
|
||||
description: 'V1',
|
||||
diffUpdate: Y.diffUpdate
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {Enc}
|
||||
*/
|
||||
const encV2 = {
|
||||
mergeUpdates: Y.mergeUpdatesV2,
|
||||
encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
|
||||
applyUpdate: Y.applyUpdateV2,
|
||||
logUpdate: Y.logUpdateV2,
|
||||
parseUpdateMeta: Y.parseUpdateMetaV2,
|
||||
encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2,
|
||||
encodeStateVector: Y.encodeStateVector,
|
||||
updateEventName: 'updateV2',
|
||||
description: 'V2',
|
||||
diffUpdate: Y.diffUpdateV2
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {Enc}
|
||||
*/
|
||||
const encDoc = {
|
||||
mergeUpdates: (updates) => {
|
||||
const ydoc = new Y.Doc({ gc: false })
|
||||
updates.forEach(update => {
|
||||
Y.applyUpdateV2(ydoc, update)
|
||||
})
|
||||
return Y.encodeStateAsUpdateV2(ydoc)
|
||||
},
|
||||
encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
|
||||
applyUpdate: Y.applyUpdateV2,
|
||||
logUpdate: Y.logUpdateV2,
|
||||
parseUpdateMeta: Y.parseUpdateMetaV2,
|
||||
encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2,
|
||||
encodeStateVector: Y.encodeStateVector,
|
||||
updateEventName: 'updateV2',
|
||||
description: 'Merge via Y.Doc',
|
||||
/**
|
||||
* @param {Uint8Array} update
|
||||
* @param {Uint8Array} sv
|
||||
*/
|
||||
diffUpdate: (update, sv) => {
|
||||
const ydoc = new Y.Doc({ gc: false })
|
||||
Y.applyUpdateV2(ydoc, update)
|
||||
return Y.encodeStateAsUpdateV2(ydoc, sv)
|
||||
}
|
||||
}
|
||||
|
||||
const encoders = [encV1, encV2, encDoc]
|
||||
|
||||
/**
|
||||
* @param {Array<Y.Doc>} users
|
||||
* @param {Enc} enc
|
||||
*/
|
||||
const fromUpdates = (users, enc) => {
|
||||
const updates = users.map(user =>
|
||||
enc.encodeStateAsUpdate(user)
|
||||
)
|
||||
const ydoc = new Y.Doc()
|
||||
enc.applyUpdate(ydoc, enc.mergeUpdates(updates))
|
||||
return ydoc
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testMergeUpdates = tc => {
|
||||
const { users, array0, array1 } = init(tc, { users: 3 })
|
||||
|
||||
array0.insert(0, [1])
|
||||
array1.insert(0, [2])
|
||||
|
||||
compare(users)
|
||||
encoders.forEach(enc => {
|
||||
const merged = fromUpdates(users, enc)
|
||||
t.compareArrays(array0.toArray(), merged.getArray('array').toArray())
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testKeyEncoding = tc => {
|
||||
const { users, text0, text1 } = init(tc, { users: 2 })
|
||||
|
||||
text0.insert(0, 'a', { italic: true })
|
||||
text0.insert(0, 'b')
|
||||
text0.insert(0, 'c', { italic: true })
|
||||
|
||||
const update = Y.encodeStateAsUpdateV2(users[0])
|
||||
Y.applyUpdateV2(users[1], update)
|
||||
|
||||
t.compare(text1.toDelta(), [{ insert: 'c', attributes: { italic: true } }, { insert: 'b' }, { insert: 'a', attributes: { italic: true } }])
|
||||
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Y.Doc} ydoc
|
||||
* @param {Array<Uint8Array>} updates - expecting at least 4 updates
|
||||
* @param {Enc} enc
|
||||
* @param {boolean} hasDeletes
|
||||
*/
|
||||
const checkUpdateCases = (ydoc, updates, enc, hasDeletes) => {
|
||||
const cases = []
|
||||
// Case 1: Simple case, simply merge everything
|
||||
cases.push(enc.mergeUpdates(updates))
|
||||
|
||||
// Case 2: Overlapping updates
|
||||
cases.push(enc.mergeUpdates([
|
||||
enc.mergeUpdates(updates.slice(2)),
|
||||
enc.mergeUpdates(updates.slice(0, 2))
|
||||
]))
|
||||
|
||||
// Case 3: Overlapping updates
|
||||
cases.push(enc.mergeUpdates([
|
||||
enc.mergeUpdates(updates.slice(2)),
|
||||
enc.mergeUpdates(updates.slice(1, 3)),
|
||||
updates[0]
|
||||
]))
|
||||
|
||||
// Case 4: Separated updates (containing skips)
|
||||
cases.push(enc.mergeUpdates([
|
||||
enc.mergeUpdates([updates[0], updates[2]]),
|
||||
enc.mergeUpdates([updates[1], updates[3]]),
|
||||
enc.mergeUpdates(updates.slice(4))
|
||||
]))
|
||||
|
||||
// Case 5: overlapping with many duplicates
|
||||
cases.push(enc.mergeUpdates(cases))
|
||||
|
||||
// const targetState = enc.encodeStateAsUpdate(ydoc)
|
||||
// t.info('Target State: ')
|
||||
// enc.logUpdate(targetState)
|
||||
|
||||
cases.forEach((mergedUpdates) => {
|
||||
// t.info('State Case $' + i + ':')
|
||||
// enc.logUpdate(updates)
|
||||
const merged = new Y.Doc({ gc: false })
|
||||
enc.applyUpdate(merged, mergedUpdates)
|
||||
t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray())
|
||||
t.compare(enc.encodeStateVector(merged), enc.encodeStateVectorFromUpdate(mergedUpdates))
|
||||
|
||||
if (enc.updateEventName !== 'update') { // @todo should this also work on legacy updates?
|
||||
for (let j = 1; j < updates.length; j++) {
|
||||
const partMerged = enc.mergeUpdates(updates.slice(j))
|
||||
const partMeta = enc.parseUpdateMeta(partMerged)
|
||||
const targetSV = Y.encodeStateVectorFromUpdateV2(Y.mergeUpdatesV2(updates.slice(0, j)))
|
||||
const diffed = enc.diffUpdate(mergedUpdates, targetSV)
|
||||
const diffedMeta = enc.parseUpdateMeta(diffed)
|
||||
t.compare(partMeta, diffedMeta)
|
||||
{
|
||||
// We can't do the following
|
||||
// - t.compare(diffed, mergedDeletes)
|
||||
// because diffed contains the set of all deletes.
|
||||
// So we add all deletes from `diffed` to `partMerged` and compare the results
|
||||
const decoder = decoding.createDecoder(diffed)
|
||||
const updateDecoder = new UpdateDecoderV2(decoder)
|
||||
readClientsStructRefs(updateDecoder, new Y.Doc())
|
||||
const ds = readDeleteSet(updateDecoder)
|
||||
const updateEncoder = new UpdateEncoderV2()
|
||||
encoding.writeVarUint(updateEncoder.restEncoder, 0) // 0 structs
|
||||
writeDeleteSet(updateEncoder, ds)
|
||||
const deletesUpdate = updateEncoder.toUint8Array()
|
||||
const mergedDeletes = Y.mergeUpdatesV2([deletesUpdate, partMerged])
|
||||
if (!hasDeletes || enc !== encDoc) {
|
||||
// deletes will almost definitely lead to different encodings because of the mergeStruct feature that is present in encDoc
|
||||
t.compare(diffed, mergedDeletes)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const meta = enc.parseUpdateMeta(mergedUpdates)
|
||||
meta.from.forEach((clock, client) => t.assert(clock === 0))
|
||||
meta.to.forEach((clock, client) => {
|
||||
const structs = /** @type {Array<Y.Item>} */ (merged.store.clients.get(client))
|
||||
const lastStruct = structs[structs.length - 1]
|
||||
t.assert(lastStruct.id.clock + lastStruct.length === clock)
|
||||
})
|
||||
})
|
||||
}
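/**
 * Illustrative sketch only (hypothetical minimal case) of the property that
 * `checkUpdateCases` verifies in many combinations: however incremental
 * updates are grouped and merged, applying the result yields the same
 * document state.
 *
 * @param {t.TestCase} _tc
 */
export const _exampleMergeUpdatesSketch = _tc => {
  const ydoc = new Y.Doc()
  /**
   * @type {Array<Uint8Array>}
   */
  const updates = []
  ydoc.on('update', update => { updates.push(update) })
  ydoc.getArray().insert(0, [1])
  ydoc.getArray().insert(1, [2])
  const merged = new Y.Doc()
  Y.applyUpdate(merged, Y.mergeUpdates(updates))
  t.compareArrays(merged.getArray().toArray(), [1, 2])
}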
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testMergeUpdates1 = _tc => {
|
||||
encoders.forEach((enc) => {
|
||||
t.info(`Using encoder: ${enc.description}`)
|
||||
const ydoc = new Y.Doc({ gc: false })
|
||||
const updates = /** @type {Array<Uint8Array>} */ ([])
|
||||
ydoc.on(enc.updateEventName, update => { updates.push(update) })
|
||||
|
||||
const array = ydoc.getArray()
|
||||
array.insert(0, [1])
|
||||
array.insert(0, [2])
|
||||
array.insert(0, [3])
|
||||
array.insert(0, [4])
|
||||
|
||||
checkUpdateCases(ydoc, updates, enc, false)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testMergeUpdates2 = tc => {
|
||||
encoders.forEach((enc, i) => {
|
||||
t.info(`Using encoder: ${enc.description}`)
|
||||
const ydoc = new Y.Doc({ gc: false })
|
||||
const updates = /** @type {Array<Uint8Array>} */ ([])
|
||||
ydoc.on(enc.updateEventName, update => { updates.push(update) })
|
||||
|
||||
const array = ydoc.getArray()
|
||||
array.insert(0, [1, 2])
|
||||
array.delete(1, 1)
|
||||
array.insert(0, [3, 4])
|
||||
array.delete(1, 2)
|
||||
|
||||
checkUpdateCases(ydoc, updates, enc, true)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testMergePendingUpdates = tc => {
|
||||
const yDoc = new Y.Doc()
|
||||
/**
|
||||
* @type {Array<Uint8Array>}
|
||||
*/
|
||||
const serverUpdates = []
|
||||
yDoc.on('update', (update, origin, c) => {
|
||||
serverUpdates.splice(serverUpdates.length, 0, update)
|
||||
})
|
||||
const yText = yDoc.getText('textBlock')
|
||||
yText.applyDelta([{ insert: 'r' }])
|
||||
yText.applyDelta([{ insert: 'o' }])
|
||||
yText.applyDelta([{ insert: 'n' }])
|
||||
yText.applyDelta([{ insert: 'e' }])
|
||||
yText.applyDelta([{ insert: 'n' }])
|
||||
|
||||
const yDoc1 = new Y.Doc()
|
||||
Y.applyUpdate(yDoc1, serverUpdates[0])
|
||||
const update1 = Y.encodeStateAsUpdate(yDoc1)
|
||||
|
||||
const yDoc2 = new Y.Doc()
|
||||
Y.applyUpdate(yDoc2, update1)
|
||||
Y.applyUpdate(yDoc2, serverUpdates[1])
|
||||
const update2 = Y.encodeStateAsUpdate(yDoc2)
|
||||
|
||||
const yDoc3 = new Y.Doc()
|
||||
Y.applyUpdate(yDoc3, update2)
|
||||
Y.applyUpdate(yDoc3, serverUpdates[3])
|
||||
const update3 = Y.encodeStateAsUpdate(yDoc3)
|
||||
|
||||
const yDoc4 = new Y.Doc()
|
||||
Y.applyUpdate(yDoc4, update3)
|
||||
Y.applyUpdate(yDoc4, serverUpdates[2])
|
||||
const update4 = Y.encodeStateAsUpdate(yDoc4)
|
||||
|
||||
const yDoc5 = new Y.Doc()
|
||||
Y.applyUpdate(yDoc5, update4)
|
||||
Y.applyUpdate(yDoc5, serverUpdates[4])
|
||||
// @ts-ignore
|
||||
const _update5 = Y.encodeStateAsUpdate(yDoc5) // eslint-disable-line
|
||||
|
||||
const yText5 = yDoc5.getText('textBlock')
|
||||
t.compareStrings(yText5.toString(), 'nenor')
|
||||
}
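/**
 * Illustrative sketch only (hypothetical minimal case) of the pending-update
 * mechanism the test above relies on: an update whose dependencies are
 * missing is buffered on the store and only integrated once the missing
 * update arrives.
 *
 * @param {t.TestCase} _tc
 */
export const _examplePendingUpdateSketch = _tc => {
  const source = new Y.Doc()
  /**
   * @type {Array<Uint8Array>}
   */
  const updates = []
  source.on('update', update => { updates.push(update) })
  source.getText('t').insert(0, 'a')
  source.getText('t').insert(1, 'b')
  const target = new Y.Doc()
  Y.applyUpdate(target, updates[1]) // arrives out of order, kept pending
  t.compareStrings(target.getText('t').toString(), '')
  Y.applyUpdate(target, updates[0]) // the missing dependency arrives
  t.compareStrings(target.getText('t').toString(), 'ab')
}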
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testObfuscateUpdates = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = ydoc.getText('text')
|
||||
const ymap = ydoc.getMap('map')
|
||||
const yarray = ydoc.getArray('array')
|
||||
// test ytext
|
||||
ytext.applyDelta([{ insert: 'text', attributes: { bold: true } }, { insert: { href: 'supersecreturl' } }])
|
||||
// test ymap
|
||||
ymap.set('key', 'secret1')
|
||||
ymap.set('key', 'secret2')
|
||||
// test yarray with subtype & subdoc
|
||||
const subtype = new Y.XmlElement('secretnodename')
|
||||
const subdoc = new Y.Doc({ guid: 'secret' })
|
||||
subtype.setAttribute('attr', 'val')
|
||||
yarray.insert(0, ['teststring', 42, subtype, subdoc])
|
||||
// obfuscate the content and put it into a new document
|
||||
const obfuscatedUpdate = Y.obfuscateUpdate(Y.encodeStateAsUpdate(ydoc))
|
||||
const odoc = new Y.Doc()
|
||||
Y.applyUpdate(odoc, obfuscatedUpdate)
|
||||
const otext = odoc.getText('text')
|
||||
const omap = odoc.getMap('map')
|
||||
const oarray = odoc.getArray('array')
|
||||
// test ytext
|
||||
const delta = otext.toDelta()
|
||||
t.assert(delta.length === 2)
|
||||
t.assert(delta[0].insert !== 'text' && delta[0].insert.length === 4)
|
||||
t.assert(object.length(delta[0].attributes) === 1)
|
||||
t.assert(!object.hasProperty(delta[0].attributes, 'bold'))
|
||||
t.assert(object.length(delta[1]) === 1)
|
||||
t.assert(object.hasProperty(delta[1], 'insert'))
|
||||
// test ymap
|
||||
t.assert(omap.size === 1)
|
||||
t.assert(!omap.has('key'))
|
||||
// test yarray with subtype & subdoc
|
||||
const result = oarray.toArray()
|
||||
t.assert(result.length === 4)
|
||||
t.assert(result[0] !== 'teststring')
|
||||
t.assert(result[1] !== 42)
|
||||
const osubtype = /** @type {Y.XmlElement} */ (result[2])
|
||||
const osubdoc = result[3]
|
||||
// test subtype
|
||||
t.assert(osubtype.nodeName !== subtype.nodeName)
|
||||
t.assert(object.length(osubtype.getAttributes()) === 1)
|
||||
t.assert(osubtype.getAttribute('attr') === undefined)
|
||||
// test subdoc
|
||||
t.assert(osubdoc.guid !== subdoc.guid)
|
||||
}
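
/**
 * Illustrative sketch, not part of the original suite: Y.obfuscateUpdate can be used to
 * share a structure-preserving but content-free copy of a document, e.g. for bug reports.
 * It relies only on behavior exercised by the test above (root keys and content lengths
 * survive, the actual characters do not); the function name is made up.
 *
 * @param {t.TestCase} _tc
 */
export const testObfuscateUpdatesUsageSketch = _tc => {
  const ydoc = new Y.Doc()
  ydoc.getText('text').insert(0, 'confidential')
  const shareable = Y.obfuscateUpdate(Y.encodeStateAsUpdate(ydoc))
  const copy = new Y.Doc()
  Y.applyUpdate(copy, shareable)
  // the shape survives (same root key, same length), the characters do not
  t.assert(copy.getText('text').length === 'confidential'.length)
  t.assert(copy.getText('text').toString() !== 'confidential')
}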
|
691
tests/y-array.tests.js
Normal file
@@ -0,0 +1,691 @@
|
||||
import { init, compare, applyRandomTests, Doc } from './testHelper.js' // eslint-disable-line
|
||||
|
||||
import * as Y from '../src/index.js'
|
||||
import * as t from 'lib0/testing'
|
||||
import * as prng from 'lib0/prng'
|
||||
import * as math from 'lib0/math'
|
||||
import * as env from 'lib0/environment'
|
||||
|
||||
const isDevMode = env.getVariable('node_env') === 'development'
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testBasicUpdate = tc => {
|
||||
const doc1 = new Y.Doc()
|
||||
const doc2 = new Y.Doc()
|
||||
doc1.getArray('array').insert(0, ['hi'])
|
||||
const update = Y.encodeStateAsUpdate(doc1)
|
||||
Y.applyUpdate(doc2, update)
|
||||
t.compare(doc2.getArray('array').toArray(), ['hi'])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testFailsObjectManipulationInDevMode = tc => {
|
||||
if (isDevMode) {
|
||||
t.info('running in dev mode')
|
||||
const doc = new Y.Doc()
|
||||
const a = [1, 2, 3]
|
||||
const b = { o: 1 }
|
||||
doc.getArray('test').insert(0, [a])
|
||||
doc.getMap('map').set('k', b)
|
||||
t.fails(() => {
|
||||
a[0] = 42
|
||||
})
|
||||
t.fails(() => {
|
||||
b.o = 42
|
||||
})
|
||||
} else {
|
||||
t.info('not in dev mode')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testSlice = tc => {
|
||||
const doc1 = new Y.Doc()
|
||||
const arr = doc1.getArray('array')
|
||||
arr.insert(0, [1, 2, 3])
|
||||
t.compareArrays(arr.slice(0), [1, 2, 3])
|
||||
t.compareArrays(arr.slice(1), [2, 3])
|
||||
t.compareArrays(arr.slice(0, -1), [1, 2])
|
||||
arr.insert(0, [0])
|
||||
t.compareArrays(arr.slice(0), [0, 1, 2, 3])
|
||||
t.compareArrays(arr.slice(0, 2), [0, 1])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testArrayFrom = tc => {
|
||||
const doc1 = new Y.Doc()
|
||||
const db1 = doc1.getMap('root')
|
||||
const nestedArray1 = Y.Array.from([0, 1, 2])
|
||||
db1.set('array', nestedArray1)
|
||||
t.compare(nestedArray1.toArray(), [0, 1, 2])
|
||||
}
|
||||
|
||||
/**
|
||||
* Debugging yjs#297 - a critical bug connected to the search-marker approach
|
||||
*
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testLengthIssue = tc => {
|
||||
const doc1 = new Y.Doc()
|
||||
const arr = doc1.getArray('array')
|
||||
arr.push([0, 1, 2, 3])
|
||||
arr.delete(0)
|
||||
arr.insert(0, [0])
|
||||
t.assert(arr.length === arr.toArray().length)
|
||||
doc1.transact(() => {
|
||||
arr.delete(1)
|
||||
t.assert(arr.length === arr.toArray().length)
|
||||
arr.insert(1, [1])
|
||||
t.assert(arr.length === arr.toArray().length)
|
||||
arr.delete(2)
|
||||
t.assert(arr.length === arr.toArray().length)
|
||||
arr.insert(2, [2])
|
||||
t.assert(arr.length === arr.toArray().length)
|
||||
})
|
||||
t.assert(arr.length === arr.toArray().length)
|
||||
arr.delete(1)
|
||||
t.assert(arr.length === arr.toArray().length)
|
||||
arr.insert(1, [1])
|
||||
t.assert(arr.length === arr.toArray().length)
|
||||
}
|
||||
|
||||
/**
|
||||
* Debugging yjs#314
|
||||
*
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testLengthIssue2 = tc => {
|
||||
const doc = new Y.Doc()
|
||||
const next = doc.getArray()
|
||||
doc.transact(() => {
|
||||
next.insert(0, ['group2'])
|
||||
})
|
||||
doc.transact(() => {
|
||||
next.insert(1, ['rectangle3'])
|
||||
})
|
||||
doc.transact(() => {
|
||||
next.delete(0)
|
||||
next.insert(0, ['rectangle3'])
|
||||
})
|
||||
next.delete(1)
|
||||
doc.transact(() => {
|
||||
next.insert(1, ['ellipse4'])
|
||||
})
|
||||
doc.transact(() => {
|
||||
next.insert(2, ['ellipse3'])
|
||||
})
|
||||
doc.transact(() => {
|
||||
next.insert(3, ['ellipse2'])
|
||||
})
|
||||
doc.transact(() => {
|
||||
doc.transact(() => {
|
||||
t.fails(() => {
|
||||
next.insert(5, ['rectangle2'])
|
||||
})
|
||||
next.insert(4, ['rectangle2'])
|
||||
})
|
||||
doc.transact(() => {
|
||||
// this should not throw an error message
|
||||
next.delete(4)
|
||||
})
|
||||
})
|
||||
console.log(next.toArray())
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testDeleteInsert = tc => {
|
||||
const { users, array0 } = init(tc, { users: 2 })
|
||||
array0.delete(0, 0)
|
||||
t.describe('Does not throw when deleting zero elements with position 0')
|
||||
t.fails(() => {
|
||||
array0.delete(1, 1)
|
||||
})
|
||||
array0.insert(0, ['A'])
|
||||
array0.delete(1, 0)
|
||||
t.describe('Does not throw when deleting zero elements with valid position 1')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testInsertThreeElementsTryRegetProperty = tc => {
|
||||
const { testConnector, users, array0, array1 } = init(tc, { users: 2 })
|
||||
array0.insert(0, [1, true, false])
|
||||
t.compare(array0.toJSON(), [1, true, false], '.toJSON() works')
|
||||
testConnector.flushAllMessages()
|
||||
t.compare(array1.toJSON(), [1, true, false], '.toJSON() works after sync')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testConcurrentInsertWithThreeConflicts = tc => {
|
||||
const { users, array0, array1, array2 } = init(tc, { users: 3 })
|
||||
array0.insert(0, [0])
|
||||
array1.insert(0, [1])
|
||||
array2.insert(0, [2])
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testConcurrentInsertDeleteWithThreeConflicts = tc => {
|
||||
const { testConnector, users, array0, array1, array2 } = init(tc, { users: 3 })
|
||||
array0.insert(0, ['x', 'y', 'z'])
|
||||
testConnector.flushAllMessages()
|
||||
array0.insert(1, [0])
|
||||
array1.delete(0)
|
||||
array1.delete(1, 1)
|
||||
array2.insert(1, [2])
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testInsertionsInLateSync = tc => {
|
||||
const { testConnector, users, array0, array1, array2 } = init(tc, { users: 3 })
|
||||
array0.insert(0, ['x', 'y'])
|
||||
testConnector.flushAllMessages()
|
||||
users[1].disconnect()
|
||||
users[2].disconnect()
|
||||
array0.insert(1, ['user0'])
|
||||
array1.insert(1, ['user1'])
|
||||
array2.insert(1, ['user2'])
|
||||
users[1].connect()
|
||||
users[2].connect()
|
||||
testConnector.flushAllMessages()
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testDisconnectReallyPreventsSendingMessages = tc => {
|
||||
const { testConnector, users, array0, array1 } = init(tc, { users: 3 })
|
||||
array0.insert(0, ['x', 'y'])
|
||||
testConnector.flushAllMessages()
|
||||
users[1].disconnect()
|
||||
users[2].disconnect()
|
||||
array0.insert(1, ['user0'])
|
||||
array1.insert(1, ['user1'])
|
||||
t.compare(array0.toJSON(), ['x', 'user0', 'y'])
|
||||
t.compare(array1.toJSON(), ['x', 'user1', 'y'])
|
||||
users[1].connect()
|
||||
users[2].connect()
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testDeletionsInLateSync = tc => {
|
||||
const { testConnector, users, array0, array1 } = init(tc, { users: 2 })
|
||||
array0.insert(0, ['x', 'y'])
|
||||
testConnector.flushAllMessages()
|
||||
users[1].disconnect()
|
||||
array1.delete(1, 1)
|
||||
array0.delete(0, 2)
|
||||
users[1].connect()
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testInsertThenMergeDeleteOnSync = tc => {
|
||||
const { testConnector, users, array0, array1 } = init(tc, { users: 2 })
|
||||
array0.insert(0, ['x', 'y', 'z'])
|
||||
testConnector.flushAllMessages()
|
||||
users[0].disconnect()
|
||||
array1.delete(0, 3)
|
||||
users[0].connect()
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testInsertAndDeleteEvents = tc => {
|
||||
const { array0, users } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Object<string,any>?}
|
||||
*/
|
||||
let event = null
|
||||
array0.observe(e => {
|
||||
event = e
|
||||
})
|
||||
array0.insert(0, [0, 1, 2])
|
||||
t.assert(event !== null)
|
||||
event = null
|
||||
array0.delete(0)
|
||||
t.assert(event !== null)
|
||||
event = null
|
||||
array0.delete(0, 2)
|
||||
t.assert(event !== null)
|
||||
event = null
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testNestedObserverEvents = tc => {
|
||||
const { array0, users } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Array<number>}
|
||||
*/
|
||||
const vals = []
|
||||
array0.observe(e => {
|
||||
if (array0.length === 1) {
|
||||
// inserting, will call this observer again
|
||||
// we expect that this observer is called after this event handler finished
|
||||
array0.insert(1, [1])
|
||||
vals.push(0)
|
||||
} else {
|
||||
// this should be called the second time an element is inserted (above case)
|
||||
vals.push(1)
|
||||
}
|
||||
})
|
||||
array0.insert(0, [0])
|
||||
t.compareArrays(vals, [0, 1])
|
||||
t.compareArrays(array0.toArray(), [0, 1])
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testInsertAndDeleteEventsForTypes = tc => {
|
||||
const { array0, users } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Object<string,any>|null}
|
||||
*/
|
||||
let event = null
|
||||
array0.observe(e => {
|
||||
event = e
|
||||
})
|
||||
array0.insert(0, [new Y.Array()])
|
||||
t.assert(event !== null)
|
||||
event = null
|
||||
array0.delete(0)
|
||||
t.assert(event !== null)
|
||||
event = null
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* This issue has been reported in https://discuss.yjs.dev/t/order-in-which-events-yielded-by-observedeep-should-be-applied/261/2
|
||||
*
|
||||
* Deep observers generate multiple events. When an array adds an item at, say, position 0,
* and item 1 changes, then the array-add event should fire first so that the change-event
* path is correct. An array binding might otherwise end up in an inconsistent state.
|
||||
*
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testObserveDeepEventOrder = tc => {
|
||||
const { array0, users } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Array<any>}
|
||||
*/
|
||||
let events = []
|
||||
array0.observeDeep(e => {
|
||||
events = e
|
||||
})
|
||||
array0.insert(0, [new Y.Map()])
|
||||
users[0].transact(() => {
|
||||
array0.get(0).set('a', 'a')
|
||||
array0.insert(0, [0])
|
||||
})
|
||||
for (let i = 1; i < events.length; i++) {
|
||||
t.assert(events[i - 1].path.length <= events[i].path.length, 'path size increases, fire top-level events first')
|
||||
}
|
||||
}
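
/**
 * Illustrative sketch, not part of the original suite: a binding that wants to be
 * defensive about the ordering guarantee tested above can sort deep events
 * parents-first (shorter paths first) before applying them. The helper name is made up.
 *
 * @param {Array<any>} events - events as delivered by observeDeep
 * @return {Array<any>} the same events, ordered parents-first
 */
export const _sketchSortDeepEventsParentsFirst = events =>
  events.slice().sort((a, b) => a.path.length - b.path.length)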
|
||||
|
||||
/**
|
||||
* Correct index when computing event.path in observeDeep - https://github.com/yjs/yjs/issues/457
|
||||
*
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testObservedeepIndexes = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const map = doc.getMap()
|
||||
// Create a field with the array as value
|
||||
map.set('my-array', new Y.Array())
|
||||
// Fill the array with some strings and our Map
|
||||
map.get('my-array').push(['a', 'b', 'c', new Y.Map()])
|
||||
/**
|
||||
* @type {Array<any>}
|
||||
*/
|
||||
let eventPath = []
|
||||
map.observeDeep((events) => { eventPath = events[0].path })
|
||||
// set a value on the map inside of our array
|
||||
map.get('my-array').get(3).set('hello', 'world')
|
||||
console.log(eventPath)
|
||||
t.compare(eventPath, ['my-array', 3])
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testChangeEvent = tc => {
|
||||
const { array0, users } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let changes = null
|
||||
array0.observe(e => {
|
||||
changes = e.changes
|
||||
})
|
||||
const newArr = new Y.Array()
|
||||
array0.insert(0, [newArr, 4, 'dtrn'])
|
||||
t.assert(changes !== null && changes.added.size === 2 && changes.deleted.size === 0)
|
||||
t.compare(changes.delta, [{ insert: [newArr, 4, 'dtrn'] }])
|
||||
changes = null
|
||||
array0.delete(0, 2)
|
||||
t.assert(changes !== null && changes.added.size === 0 && changes.deleted.size === 2)
|
||||
t.compare(changes.delta, [{ delete: 2 }])
|
||||
changes = null
|
||||
array0.insert(1, [0.1])
|
||||
t.assert(changes !== null && changes.added.size === 1 && changes.deleted.size === 0)
|
||||
t.compare(changes.delta, [{ retain: 1 }, { insert: [0.1] }])
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testInsertAndDeleteEventsForTypes2 = tc => {
|
||||
const { array0, users } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Array<Object<string,any>>}
|
||||
*/
|
||||
const events = []
|
||||
array0.observe(e => {
|
||||
events.push(e)
|
||||
})
|
||||
array0.insert(0, ['hi', new Y.Map()])
|
||||
t.assert(events.length === 1, 'Event is triggered exactly once for insertion of two elements')
|
||||
array0.delete(1)
|
||||
t.assert(events.length === 2, 'Event is triggered exactly once for deletion')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* This issue has been reported here https://github.com/yjs/yjs/issues/155
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testNewChildDoesNotEmitEventInTransaction = tc => {
|
||||
const { array0, users } = init(tc, { users: 2 })
|
||||
let fired = false
|
||||
users[0].transact(() => {
|
||||
const newMap = new Y.Map()
|
||||
newMap.observe(() => {
|
||||
fired = true
|
||||
})
|
||||
array0.insert(0, [newMap])
|
||||
newMap.set('tst', 42)
|
||||
})
|
||||
t.assert(!fired, 'Event does not trigger')
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testGarbageCollector = tc => {
|
||||
const { testConnector, users, array0 } = init(tc, { users: 3 })
|
||||
array0.insert(0, ['x', 'y', 'z'])
|
||||
testConnector.flushAllMessages()
|
||||
users[0].disconnect()
|
||||
array0.delete(0, 3)
|
||||
users[0].connect()
|
||||
testConnector.flushAllMessages()
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testEventTargetIsSetCorrectlyOnLocal = tc => {
|
||||
const { array0, users } = init(tc, { users: 3 })
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let event
|
||||
array0.observe(e => {
|
||||
event = e
|
||||
})
|
||||
array0.insert(0, ['stuff'])
|
||||
t.assert(event.target === array0, '"target" property is set correctly')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testEventTargetIsSetCorrectlyOnRemote = tc => {
|
||||
const { testConnector, array0, array1, users } = init(tc, { users: 3 })
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let event
|
||||
array0.observe(e => {
|
||||
event = e
|
||||
})
|
||||
array1.insert(0, ['stuff'])
|
||||
testConnector.flushAllMessages()
|
||||
t.assert(event.target === array0, '"target" property is set correctly')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testIteratingArrayContainingTypes = tc => {
|
||||
const y = new Y.Doc()
|
||||
const arr = y.getArray('arr')
|
||||
const numItems = 10
|
||||
for (let i = 0; i < numItems; i++) {
|
||||
const map = new Y.Map()
|
||||
map.set('value', i)
|
||||
arr.push([map])
|
||||
}
|
||||
let cnt = 0
|
||||
for (const item of arr) {
|
||||
t.assert(item.get('value') === cnt++, 'value is correct')
|
||||
}
|
||||
y.destroy()
|
||||
}
|
||||
|
||||
let _uniqueNumber = 0
|
||||
const getUniqueNumber = () => _uniqueNumber++
|
||||
|
||||
/**
|
||||
* @type {Array<function(Doc,prng.PRNG,any):void>}
|
||||
*/
|
||||
const arrayTransactions = [
|
||||
function insert (user, gen) {
|
||||
const yarray = user.getArray('array')
|
||||
const uniqueNumber = getUniqueNumber()
|
||||
const content = []
|
||||
const len = prng.int32(gen, 1, 4)
|
||||
for (let i = 0; i < len; i++) {
|
||||
content.push(uniqueNumber)
|
||||
}
|
||||
const pos = prng.int32(gen, 0, yarray.length)
|
||||
const oldContent = yarray.toArray()
|
||||
yarray.insert(pos, content)
|
||||
oldContent.splice(pos, 0, ...content)
|
||||
t.compareArrays(yarray.toArray(), oldContent) // we want to make sure that fastSearch markers insert at the correct position
|
||||
},
|
||||
function insertTypeArray (user, gen) {
|
||||
const yarray = user.getArray('array')
|
||||
const pos = prng.int32(gen, 0, yarray.length)
|
||||
yarray.insert(pos, [new Y.Array()])
|
||||
const array2 = yarray.get(pos)
|
||||
array2.insert(0, [1, 2, 3, 4])
|
||||
},
|
||||
function insertTypeMap (user, gen) {
|
||||
const yarray = user.getArray('array')
|
||||
const pos = prng.int32(gen, 0, yarray.length)
|
||||
yarray.insert(pos, [new Y.Map()])
|
||||
const map = yarray.get(pos)
|
||||
map.set('someprop', 42)
|
||||
map.set('someprop', 43)
|
||||
map.set('someprop', 44)
|
||||
},
|
||||
function insertTypeNull (user, gen) {
|
||||
const yarray = user.getArray('array')
|
||||
const pos = prng.int32(gen, 0, yarray.length)
|
||||
yarray.insert(pos, [null])
|
||||
},
|
||||
function _delete (user, gen) {
|
||||
const yarray = user.getArray('array')
|
||||
const length = yarray.length
|
||||
if (length > 0) {
|
||||
let somePos = prng.int32(gen, 0, length - 1)
|
||||
let delLength = prng.int32(gen, 1, math.min(2, length - somePos))
|
||||
if (prng.bool(gen)) {
|
||||
const type = yarray.get(somePos)
|
||||
if (type instanceof Y.Array && type.length > 0) {
|
||||
somePos = prng.int32(gen, 0, type.length - 1)
|
||||
delLength = prng.int32(gen, 0, math.min(2, type.length - somePos))
|
||||
type.delete(somePos, delLength)
|
||||
}
|
||||
} else {
|
||||
const oldContent = yarray.toArray()
|
||||
yarray.delete(somePos, delLength)
|
||||
oldContent.splice(somePos, delLength)
|
||||
t.compareArrays(yarray.toArray(), oldContent)
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests6 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 6)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests40 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 40)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests42 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 42)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests43 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 43)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests44 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 44)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests45 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 45)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests46 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 46)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests300 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 300)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests400 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 400)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests500 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 500)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests600 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 600)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests1000 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 1000)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests1800 = tc => {
|
||||
applyRandomTests(tc, arrayTransactions, 1800)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests3000 = tc => {
|
||||
t.skip(!t.production)
|
||||
applyRandomTests(tc, arrayTransactions, 3000)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests5000 = tc => {
|
||||
t.skip(!t.production)
|
||||
applyRandomTests(tc, arrayTransactions, 5000)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYarrayTests30000 = tc => {
|
||||
t.skip(!t.production)
|
||||
applyRandomTests(tc, arrayTransactions, 30000)
|
||||
}
|
756
tests/y-map.tests.js
Normal file
@@ -0,0 +1,756 @@
|
||||
import { init, compare, applyRandomTests, Doc } from './testHelper.js' // eslint-disable-line
|
||||
|
||||
import {
|
||||
compareIDs
|
||||
} from '../src/internals.js'
|
||||
|
||||
import * as Y from '../src/index.js'
|
||||
import * as t from 'lib0/testing'
|
||||
import * as prng from 'lib0/prng'
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testIterators = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
/**
|
||||
* @type {Y.Map<number>}
|
||||
*/
|
||||
const ymap = ydoc.getMap()
|
||||
// we are only checking if the type assumptions are correct
|
||||
/**
|
||||
* @type {Array<number>}
|
||||
*/
|
||||
const vals = Array.from(ymap.values())
|
||||
/**
|
||||
* @type {Array<[string,number]>}
|
||||
*/
|
||||
const entries = Array.from(ymap.entries())
|
||||
/**
|
||||
* @type {Array<string>}
|
||||
*/
|
||||
const keys = Array.from(ymap.keys())
|
||||
console.log(vals, entries, keys)
|
||||
}
|
||||
|
||||
/**
|
||||
* Accessing the computed event changes after the transaction has completed should throw an error. See yjs#539
|
||||
*
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testMapEventError = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const ymap = doc.getMap()
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let event = null
|
||||
ymap.observe((e) => {
|
||||
event = e
|
||||
})
|
||||
t.fails(() => {
|
||||
t.info(event.keys)
|
||||
})
|
||||
t.fails(() => {
|
||||
t.info(event.keys)
|
||||
})
|
||||
}
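
/**
 * Illustrative sketch, not part of the original suite: the supported pattern is to read
 * (or copy) event.changes synchronously inside the observer instead of keeping the event
 * object around after the transaction has completed. The helper name is made up.
 *
 * @param {Y.Map<any>} ymap
 * @return {Array<Map<string, any>>} one snapshot of the changed keys per transaction
 */
export const _sketchCollectKeyChanges = ymap => {
  /**
   * @type {Array<Map<string, any>>}
   */
  const snapshots = []
  ymap.observe(e => {
    // e.changes is computed lazily; reading it here, inside the observer, is safe
    snapshots.push(new Map(e.changes.keys))
  })
  return snapshots
}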
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testMapHavingIterableAsConstructorParamTests = tc => {
|
||||
const { map0 } = init(tc, { users: 1 })
|
||||
|
||||
const m1 = new Y.Map(Object.entries({ number: 1, string: 'hello' }))
|
||||
map0.set('m1', m1)
|
||||
t.assert(m1.get('number') === 1)
|
||||
t.assert(m1.get('string') === 'hello')
|
||||
|
||||
const m2 = new Y.Map([
|
||||
['object', { x: 1 }],
|
||||
['boolean', true]
|
||||
])
|
||||
map0.set('m2', m2)
|
||||
t.assert(m2.get('object').x === 1)
|
||||
t.assert(m2.get('boolean') === true)
|
||||
|
||||
const m3 = new Y.Map([...m1, ...m2])
|
||||
map0.set('m3', m3)
|
||||
t.assert(m3.get('number') === 1)
|
||||
t.assert(m3.get('string') === 'hello')
|
||||
t.assert(m3.get('object').x === 1)
|
||||
t.assert(m3.get('boolean') === true)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testBasicMapTests = tc => {
|
||||
const { testConnector, users, map0, map1, map2 } = init(tc, { users: 3 })
|
||||
users[2].disconnect()
|
||||
|
||||
map0.set('null', null)
|
||||
map0.set('number', 1)
|
||||
map0.set('string', 'hello Y')
|
||||
map0.set('object', { key: { key2: 'value' } })
|
||||
map0.set('y-map', new Y.Map())
|
||||
map0.set('boolean1', true)
|
||||
map0.set('boolean0', false)
|
||||
const map = map0.get('y-map')
|
||||
map.set('y-array', new Y.Array())
|
||||
const array = map.get('y-array')
|
||||
array.insert(0, [0])
|
||||
array.insert(0, [-1])
|
||||
|
||||
t.assert(map0.get('null') === null, 'client 0 computed the change (null)')
|
||||
t.assert(map0.get('number') === 1, 'client 0 computed the change (number)')
|
||||
t.assert(map0.get('string') === 'hello Y', 'client 0 computed the change (string)')
|
||||
t.assert(map0.get('boolean0') === false, 'client 0 computed the change (boolean)')
|
||||
t.assert(map0.get('boolean1') === true, 'client 0 computed the change (boolean)')
|
||||
t.compare(map0.get('object'), { key: { key2: 'value' } }, 'client 0 computed the change (object)')
|
||||
t.assert(map0.get('y-map').get('y-array').get(0) === -1, 'client 0 computed the change (type)')
|
||||
t.assert(map0.size === 7, 'client 0 map has correct size')
|
||||
|
||||
users[2].connect()
|
||||
testConnector.flushAllMessages()
|
||||
|
||||
t.assert(map1.get('null') === null, 'client 1 received the update (null)')
|
||||
t.assert(map1.get('number') === 1, 'client 1 received the update (number)')
|
||||
t.assert(map1.get('string') === 'hello Y', 'client 1 received the update (string)')
|
||||
t.assert(map1.get('boolean0') === false, 'client 1 computed the change (boolean)')
|
||||
t.assert(map1.get('boolean1') === true, 'client 1 computed the change (boolean)')
|
||||
t.compare(map1.get('object'), { key: { key2: 'value' } }, 'client 1 received the update (object)')
|
||||
t.assert(map1.get('y-map').get('y-array').get(0) === -1, 'client 1 received the update (type)')
|
||||
t.assert(map1.size === 7, 'client 1 map has correct size')
|
||||
|
||||
// compare disconnected user
|
||||
t.assert(map2.get('null') === null, 'client 2 received the update (null) - was disconnected')
|
||||
t.assert(map2.get('number') === 1, 'client 2 received the update (number) - was disconnected')
|
||||
t.assert(map2.get('string') === 'hello Y', 'client 2 received the update (string) - was disconnected')
|
||||
t.assert(map2.get('boolean0') === false, 'client 2 computed the change (boolean)')
|
||||
t.assert(map2.get('boolean1') === true, 'client 2 computed the change (boolean)')
|
||||
t.compare(map2.get('object'), { key: { key2: 'value' } }, 'client 2 received the update (object) - was disconnected')
|
||||
t.assert(map2.get('y-map').get('y-array').get(0) === -1, 'client 2 received the update (type) - was disconnected')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testGetAndSetOfMapProperty = tc => {
|
||||
const { testConnector, users, map0 } = init(tc, { users: 2 })
|
||||
map0.set('stuff', 'stuffy')
|
||||
map0.set('undefined', undefined)
|
||||
map0.set('null', null)
|
||||
t.compare(map0.get('stuff'), 'stuffy')
|
||||
|
||||
testConnector.flushAllMessages()
|
||||
|
||||
for (const user of users) {
|
||||
const u = user.getMap('map')
|
||||
t.compare(u.get('stuff'), 'stuffy')
|
||||
t.assert(u.get('undefined') === undefined, 'undefined')
|
||||
t.compare(u.get('null'), null, 'null')
|
||||
}
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testYmapSetsYmap = tc => {
|
||||
const { users, map0 } = init(tc, { users: 2 })
|
||||
const map = map0.set('Map', new Y.Map())
|
||||
t.assert(map0.get('Map') === map)
|
||||
map.set('one', 1)
|
||||
t.compare(map.get('one'), 1)
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testYmapSetsYarray = tc => {
|
||||
const { users, map0 } = init(tc, { users: 2 })
|
||||
const array = map0.set('Array', new Y.Array())
|
||||
t.assert(array === map0.get('Array'))
|
||||
array.insert(0, [1, 2, 3])
|
||||
// @ts-ignore
|
||||
t.compare(map0.toJSON(), { Array: [1, 2, 3] })
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testGetAndSetOfMapPropertySyncs = tc => {
|
||||
const { testConnector, users, map0 } = init(tc, { users: 2 })
|
||||
map0.set('stuff', 'stuffy')
|
||||
t.compare(map0.get('stuff'), 'stuffy')
|
||||
testConnector.flushAllMessages()
|
||||
for (const user of users) {
|
||||
const u = user.getMap('map')
|
||||
t.compare(u.get('stuff'), 'stuffy')
|
||||
}
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testGetAndSetOfMapPropertyWithConflict = tc => {
|
||||
const { testConnector, users, map0, map1 } = init(tc, { users: 3 })
|
||||
map0.set('stuff', 'c0')
|
||||
map1.set('stuff', 'c1')
|
||||
testConnector.flushAllMessages()
|
||||
for (const user of users) {
|
||||
const u = user.getMap('map')
|
||||
t.compare(u.get('stuff'), 'c1')
|
||||
}
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testSizeAndDeleteOfMapProperty = tc => {
|
||||
const { map0 } = init(tc, { users: 1 })
|
||||
map0.set('stuff', 'c0')
|
||||
map0.set('otherstuff', 'c1')
|
||||
t.assert(map0.size === 2, `map size is ${map0.size}, expected 2`)
|
||||
map0.delete('stuff')
|
||||
t.assert(map0.size === 1, `map size after delete is ${map0.size}, expected 1`)
|
||||
map0.delete('otherstuff')
|
||||
t.assert(map0.size === 0, `map size after delete is ${map0.size}, expected 0`)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testGetAndSetAndDeleteOfMapProperty = tc => {
|
||||
const { testConnector, users, map0, map1 } = init(tc, { users: 3 })
|
||||
map0.set('stuff', 'c0')
|
||||
map1.set('stuff', 'c1')
|
||||
map1.delete('stuff')
|
||||
testConnector.flushAllMessages()
|
||||
for (const user of users) {
|
||||
const u = user.getMap('map')
|
||||
t.assert(u.get('stuff') === undefined)
|
||||
}
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testSetAndClearOfMapProperties = tc => {
|
||||
const { testConnector, users, map0 } = init(tc, { users: 1 })
|
||||
map0.set('stuff', 'c0')
|
||||
map0.set('otherstuff', 'c1')
|
||||
map0.clear()
|
||||
testConnector.flushAllMessages()
|
||||
for (const user of users) {
|
||||
const u = user.getMap('map')
|
||||
t.assert(u.get('stuff') === undefined)
|
||||
t.assert(u.get('otherstuff') === undefined)
|
||||
t.assert(u.size === 0, `map size after clear is ${u.size}, expected 0`)
|
||||
}
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testSetAndClearOfMapPropertiesWithConflicts = tc => {
|
||||
const { testConnector, users, map0, map1, map2, map3 } = init(tc, { users: 4 })
|
||||
map0.set('stuff', 'c0')
|
||||
map1.set('stuff', 'c1')
|
||||
map1.set('stuff', 'c2')
|
||||
map2.set('stuff', 'c3')
|
||||
testConnector.flushAllMessages()
|
||||
map0.set('otherstuff', 'c0')
|
||||
map1.set('otherstuff', 'c1')
|
||||
map2.set('otherstuff', 'c2')
|
||||
map3.set('otherstuff', 'c3')
|
||||
map3.clear()
|
||||
testConnector.flushAllMessages()
|
||||
for (const user of users) {
|
||||
const u = user.getMap('map')
|
||||
t.assert(u.get('stuff') === undefined)
|
||||
t.assert(u.get('otherstuff') === undefined)
|
||||
t.assert(u.size === 0, `map size after clear is ${u.size}, expected 0`)
|
||||
}
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testGetAndSetOfMapPropertyWithThreeConflicts = tc => {
|
||||
const { testConnector, users, map0, map1, map2 } = init(tc, { users: 3 })
|
||||
map0.set('stuff', 'c0')
|
||||
map1.set('stuff', 'c1')
|
||||
map1.set('stuff', 'c2')
|
||||
map2.set('stuff', 'c3')
|
||||
testConnector.flushAllMessages()
|
||||
for (const user of users) {
|
||||
const u = user.getMap('map')
|
||||
t.compare(u.get('stuff'), 'c3')
|
||||
}
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testGetAndSetAndDeleteOfMapPropertyWithThreeConflicts = tc => {
|
||||
const { testConnector, users, map0, map1, map2, map3 } = init(tc, { users: 4 })
|
||||
map0.set('stuff', 'c0')
|
||||
map1.set('stuff', 'c1')
|
||||
map1.set('stuff', 'c2')
|
||||
map2.set('stuff', 'c3')
|
||||
testConnector.flushAllMessages()
|
||||
map0.set('stuff', 'deleteme')
|
||||
map1.set('stuff', 'c1')
|
||||
map2.set('stuff', 'c2')
|
||||
map3.set('stuff', 'c3')
|
||||
map3.delete('stuff')
|
||||
testConnector.flushAllMessages()
|
||||
for (const user of users) {
|
||||
const u = user.getMap('map')
|
||||
t.assert(u.get('stuff') === undefined)
|
||||
}
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testObserveDeepProperties = tc => {
|
||||
const { testConnector, users, map1, map2, map3 } = init(tc, { users: 4 })
|
||||
const _map1 = map1.set('map', new Y.Map())
|
||||
let calls = 0
|
||||
let dmapid
|
||||
map1.observeDeep(events => {
|
||||
events.forEach(event => {
|
||||
calls++
|
||||
// @ts-ignore
|
||||
t.assert(event.keysChanged.has('deepmap'))
|
||||
t.assert(event.path.length === 1)
|
||||
t.assert(event.path[0] === 'map')
|
||||
// @ts-ignore
|
||||
dmapid = event.target.get('deepmap')._item.id
|
||||
})
|
||||
})
|
||||
testConnector.flushAllMessages()
|
||||
const _map3 = map3.get('map')
|
||||
_map3.set('deepmap', new Y.Map())
|
||||
testConnector.flushAllMessages()
|
||||
const _map2 = map2.get('map')
|
||||
_map2.set('deepmap', new Y.Map())
|
||||
testConnector.flushAllMessages()
|
||||
const dmap1 = _map1.get('deepmap')
|
||||
const dmap2 = _map2.get('deepmap')
|
||||
const dmap3 = _map3.get('deepmap')
|
||||
t.assert(calls > 0)
|
||||
t.assert(compareIDs(dmap1._item.id, dmap2._item.id))
|
||||
t.assert(compareIDs(dmap1._item.id, dmap3._item.id))
|
||||
// @ts-ignore we want the possibility of dmapid being undefined
|
||||
t.assert(compareIDs(dmap1._item.id, dmapid))
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testObserversUsingObservedeep = tc => {
|
||||
const { users, map0 } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Array<Array<string|number>>}
|
||||
*/
|
||||
const paths = []
|
||||
let calls = 0
|
||||
map0.observeDeep(events => {
|
||||
events.forEach(event => {
|
||||
paths.push(event.path)
|
||||
})
|
||||
calls++
|
||||
})
|
||||
map0.set('map', new Y.Map())
|
||||
map0.get('map').set('array', new Y.Array())
|
||||
map0.get('map').get('array').insert(0, ['content'])
|
||||
t.assert(calls === 3)
|
||||
t.compare(paths, [[], ['map'], ['map', 'array']])
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testPathsOfSiblingEvents = tc => {
|
||||
const { users, map0 } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Array<Array<string|number>>}
|
||||
*/
|
||||
const paths = []
|
||||
let calls = 0
|
||||
const doc = users[0]
|
||||
map0.set('map', new Y.Map())
|
||||
map0.get('map').set('text1', new Y.Text('initial'))
|
||||
map0.observeDeep(events => {
|
||||
events.forEach(event => {
|
||||
paths.push(event.path)
|
||||
})
|
||||
calls++
|
||||
})
|
||||
doc.transact(() => {
|
||||
map0.get('map').get('text1').insert(0, 'post-')
|
||||
map0.get('map').set('text2', new Y.Text('new'))
|
||||
})
|
||||
t.assert(calls === 1)
|
||||
t.compare(paths, [['map'], ['map', 'text1']])
|
||||
compare(users)
|
||||
}
|
||||
|
||||
// TODO: Test events in Y.Map
|
||||
/**
|
||||
* @param {Object<string,any>} is
|
||||
* @param {Object<string,any>} should
|
||||
*/
|
||||
const compareEvent = (is, should) => {
|
||||
for (const key in should) {
|
||||
t.compare(should[key], is[key])
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testThrowsAddAndUpdateAndDeleteEvents = tc => {
|
||||
const { users, map0 } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Object<string,any>}
|
||||
*/
|
||||
let event = {}
|
||||
map0.observe(e => {
|
||||
event = e // just keep a reference to it; the event is fired synchronously anyway
|
||||
})
|
||||
map0.set('stuff', 4)
|
||||
compareEvent(event, {
|
||||
target: map0,
|
||||
keysChanged: new Set(['stuff'])
|
||||
})
|
||||
// update, oldValue is in contents
|
||||
map0.set('stuff', new Y.Array())
|
||||
compareEvent(event, {
|
||||
target: map0,
|
||||
keysChanged: new Set(['stuff'])
|
||||
})
|
||||
// update, oldValue is in opContents
|
||||
map0.set('stuff', 5)
|
||||
// delete
|
||||
map0.delete('stuff')
|
||||
compareEvent(event, {
|
||||
keysChanged: new Set(['stuff']),
|
||||
target: map0
|
||||
})
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testThrowsDeleteEventsOnClear = tc => {
|
||||
const { users, map0 } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {Object<string,any>}
|
||||
*/
|
||||
let event = {}
|
||||
map0.observe(e => {
|
||||
event = e // just keep a reference to it; the event is fired synchronously anyway
|
||||
})
|
||||
// set values
|
||||
map0.set('stuff', 4)
|
||||
map0.set('otherstuff', new Y.Array())
|
||||
// clear
|
||||
map0.clear()
|
||||
compareEvent(event, {
|
||||
keysChanged: new Set(['stuff', 'otherstuff']),
|
||||
target: map0
|
||||
})
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testChangeEvent = tc => {
|
||||
const { map0, users } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let changes = null
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let keyChange = null
|
||||
map0.observe(e => {
|
||||
changes = e.changes
|
||||
})
|
||||
map0.set('a', 1)
|
||||
keyChange = changes.keys.get('a')
|
||||
t.assert(changes !== null && keyChange.action === 'add' && keyChange.oldValue === undefined)
|
||||
map0.set('a', 2)
|
||||
keyChange = changes.keys.get('a')
|
||||
t.assert(changes !== null && keyChange.action === 'update' && keyChange.oldValue === 1)
|
||||
users[0].transact(() => {
|
||||
map0.set('a', 3)
|
||||
map0.set('a', 4)
|
||||
})
|
||||
keyChange = changes.keys.get('a')
|
||||
t.assert(changes !== null && keyChange.action === 'update' && keyChange.oldValue === 2)
|
||||
users[0].transact(() => {
|
||||
map0.set('b', 1)
|
||||
map0.set('b', 2)
|
||||
})
|
||||
keyChange = changes.keys.get('b')
|
||||
t.assert(changes !== null && keyChange.action === 'add' && keyChange.oldValue === undefined)
|
||||
users[0].transact(() => {
|
||||
map0.set('c', 1)
|
||||
map0.delete('c')
|
||||
})
|
||||
t.assert(changes !== null && changes.keys.size === 0)
|
||||
users[0].transact(() => {
|
||||
map0.set('d', 1)
|
||||
map0.set('d', 2)
|
||||
})
|
||||
keyChange = changes.keys.get('d')
|
||||
t.assert(changes !== null && keyChange.action === 'add' && keyChange.oldValue === undefined)
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testYmapEventExceptionsShouldCompleteTransaction = _tc => {
|
||||
const doc = new Y.Doc()
|
||||
const map = doc.getMap('map')
|
||||
|
||||
let updateCalled = false
|
||||
let throwingObserverCalled = false
|
||||
let throwingDeepObserverCalled = false
|
||||
doc.on('update', () => {
|
||||
updateCalled = true
|
||||
})
|
||||
|
||||
const throwingObserver = () => {
|
||||
throwingObserverCalled = true
|
||||
throw new Error('Failure')
|
||||
}
|
||||
|
||||
const throwingDeepObserver = () => {
|
||||
throwingDeepObserverCalled = true
|
||||
throw new Error('Failure')
|
||||
}
|
||||
|
||||
map.observe(throwingObserver)
|
||||
map.observeDeep(throwingDeepObserver)
|
||||
|
||||
t.fails(() => {
|
||||
map.set('y', '2')
|
||||
})
|
||||
|
||||
t.assert(updateCalled)
|
||||
t.assert(throwingObserverCalled)
|
||||
t.assert(throwingDeepObserverCalled)
|
||||
|
||||
// check if it works again
|
||||
updateCalled = false
|
||||
throwingObserverCalled = false
|
||||
throwingDeepObserverCalled = false
|
||||
t.fails(() => {
|
||||
map.set('z', '3')
|
||||
})
|
||||
|
||||
t.assert(updateCalled)
|
||||
t.assert(throwingObserverCalled)
|
||||
t.assert(throwingDeepObserverCalled)
|
||||
|
||||
t.assert(map.get('z') === '3')
|
||||
}
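
/**
 * Illustrative sketch, not part of the original suite: the test above shows that Yjs
 * completes the transaction even when an observer throws. Application code that does not
 * want a faulty observer to propagate its error can wrap it like this; the helper name
 * is made up.
 *
 * @param {function(any, any): void} observer
 * @return {function(any, any): void}
 */
export const _sketchSafeObserver = observer => (event, transaction) => {
  try {
    observer(event, transaction)
  } catch (err) {
    console.error('observer failed', err)
  }
}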
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testYmapEventHasCorrectValueWhenSettingAPrimitive = tc => {
|
||||
const { users, map0 } = init(tc, { users: 3 })
|
||||
/**
|
||||
* @type {Object<string,any>}
|
||||
*/
|
||||
let event = {}
|
||||
map0.observe(e => {
|
||||
event = e
|
||||
})
|
||||
map0.set('stuff', 2)
|
||||
t.compare(event.value, event.target.get(event.name))
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testYmapEventHasCorrectValueWhenSettingAPrimitiveFromOtherUser = tc => {
|
||||
const { users, map0, map1, testConnector } = init(tc, { users: 3 })
|
||||
/**
|
||||
* @type {Object<string,any>}
|
||||
*/
|
||||
let event = {}
|
||||
map0.observe(e => {
|
||||
event = e
|
||||
})
|
||||
map1.set('stuff', 2)
|
||||
testConnector.flushAllMessages()
|
||||
t.compare(event.value, event.target.get(event.name))
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {Array<function(Doc,prng.PRNG):void>}
|
||||
*/
|
||||
const mapTransactions = [
|
||||
function set (user, gen) {
|
||||
const key = prng.oneOf(gen, ['one', 'two'])
|
||||
const value = prng.utf16String(gen)
|
||||
user.getMap('map').set(key, value)
|
||||
},
|
||||
function setType (user, gen) {
|
||||
const key = prng.oneOf(gen, ['one', 'two'])
|
||||
const type = prng.oneOf(gen, [new Y.Array(), new Y.Map()])
|
||||
user.getMap('map').set(key, type)
|
||||
if (type instanceof Y.Array) {
|
||||
type.insert(0, [1, 2, 3, 4])
|
||||
} else {
|
||||
type.set('deepkey', 'deepvalue')
|
||||
}
|
||||
},
|
||||
function _delete (user, gen) {
|
||||
const key = prng.oneOf(gen, ['one', 'two'])
|
||||
user.getMap('map').delete(key)
|
||||
}
|
||||
]
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests10 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 3)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests40 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 40)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests42 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 42)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests43 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 43)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests44 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 44)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests45 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 45)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests46 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 46)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests300 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 300)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests400 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 400)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests500 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 500)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests600 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 600)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests1000 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 1000)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests1800 = tc => {
|
||||
applyRandomTests(tc, mapTransactions, 1800)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests5000 = tc => {
|
||||
t.skip(!t.production)
|
||||
applyRandomTests(tc, mapTransactions, 5000)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests10000 = tc => {
|
||||
t.skip(!t.production)
|
||||
applyRandomTests(tc, mapTransactions, 10000)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testRepeatGeneratingYmapTests100000 = tc => {
|
||||
t.skip(!t.production)
|
||||
applyRandomTests(tc, mapTransactions, 100000)
|
||||
}
|
2606
tests/y-text.tests.js
Normal file
File diff suppressed because it is too large
223
tests/y-xml.tests.js
Normal file
@@ -0,0 +1,223 @@
|
||||
import { init, compare } from './testHelper.js'
|
||||
import * as Y from '../src/index.js'
|
||||
|
||||
import * as t from 'lib0/testing'
|
||||
|
||||
export const testCustomTypings = () => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ymap = ydoc.getMap()
|
||||
/**
|
||||
* @type {Y.XmlElement<{ num: number, str: string, [k:string]: object|number|string }>}
|
||||
*/
|
||||
const yxml = ymap.set('yxml', new Y.XmlElement('test'))
|
||||
/**
|
||||
* @type {number|undefined}
|
||||
*/
|
||||
const num = yxml.getAttribute('num')
|
||||
/**
|
||||
* @type {string|undefined}
|
||||
*/
|
||||
const str = yxml.getAttribute('str')
|
||||
/**
|
||||
* @type {object|number|string|undefined}
|
||||
*/
|
||||
const dtrn = yxml.getAttribute('dtrn')
|
||||
const attrs = yxml.getAttributes()
|
||||
/**
|
||||
* @type {object|number|string|undefined}
|
||||
*/
|
||||
const any = attrs.shouldBeAny
|
||||
console.log({ num, str, dtrn, attrs, any })
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testSetProperty = tc => {
|
||||
const { testConnector, users, xml0, xml1 } = init(tc, { users: 2 })
|
||||
xml0.setAttribute('height', '10')
|
||||
t.assert(xml0.getAttribute('height') === '10', 'Simple set+get works')
|
||||
testConnector.flushAllMessages()
|
||||
t.assert(xml1.getAttribute('height') === '10', 'Simple set+get works (remote)')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testHasProperty = tc => {
|
||||
const { testConnector, users, xml0, xml1 } = init(tc, { users: 2 })
|
||||
xml0.setAttribute('height', '10')
|
||||
t.assert(xml0.hasAttribute('height'), 'Simple set+has works')
|
||||
testConnector.flushAllMessages()
|
||||
t.assert(xml1.hasAttribute('height'), 'Simple set+has works (remote)')
|
||||
|
||||
xml0.removeAttribute('height')
|
||||
t.assert(!xml0.hasAttribute('height'), 'Simple set+remove+has works')
|
||||
testConnector.flushAllMessages()
|
||||
t.assert(!xml1.hasAttribute('height'), 'Simple set+remove+has works (remote)')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testEvents = tc => {
|
||||
const { testConnector, users, xml0, xml1 } = init(tc, { users: 2 })
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let event
|
||||
/**
|
||||
* @type {any}
|
||||
*/
|
||||
let remoteEvent
|
||||
xml0.observe(e => {
|
||||
event = e
|
||||
})
|
||||
xml1.observe(e => {
|
||||
remoteEvent = e
|
||||
})
|
||||
xml0.setAttribute('key', 'value')
|
||||
t.assert(event.attributesChanged.has('key'), 'YXmlEvent.attributesChanged on updated key')
|
||||
testConnector.flushAllMessages()
|
||||
t.assert(remoteEvent.attributesChanged.has('key'), 'YXmlEvent.attributesChanged on updated key (remote)')
|
||||
// check attributeRemoved
|
||||
xml0.removeAttribute('key')
|
||||
t.assert(event.attributesChanged.has('key'), 'YXmlEvent.attributesChanged on removed attribute')
|
||||
testConnector.flushAllMessages()
|
||||
t.assert(remoteEvent.attributesChanged.has('key'), 'YXmlEvent.attributesChanged on removed attribute (remote)')
|
||||
xml0.insert(0, [new Y.XmlText('some text')])
|
||||
t.assert(event.childListChanged, 'YXmlEvent.childListChanged on inserted element')
|
||||
testConnector.flushAllMessages()
|
||||
t.assert(remoteEvent.childListChanged, 'YXmlEvent.childListChanged on inserted element (remote)')
|
||||
// test childRemoved
|
||||
xml0.delete(0)
|
||||
t.assert(event.childListChanged, 'YXmlEvent.childListChanged on deleted element')
|
||||
testConnector.flushAllMessages()
|
||||
t.assert(remoteEvent.childListChanged, 'YXmlEvent.childListChanged on deleted element (remote)')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} tc
|
||||
*/
|
||||
export const testTreewalker = tc => {
|
||||
const { users, xml0 } = init(tc, { users: 3 })
|
||||
const paragraph1 = new Y.XmlElement('p')
|
||||
const paragraph2 = new Y.XmlElement('p')
|
||||
const text1 = new Y.XmlText('init')
|
||||
const text2 = new Y.XmlText('text')
|
||||
paragraph1.insert(0, [text1, text2])
|
||||
xml0.insert(0, [paragraph1, paragraph2, new Y.XmlElement('img')])
|
||||
const allParagraphs = xml0.querySelectorAll('p')
|
||||
t.assert(allParagraphs.length === 2, 'found exactly two paragraphs')
|
||||
t.assert(allParagraphs[0] === paragraph1, 'querySelectorAll found paragraph1')
|
||||
t.assert(allParagraphs[1] === paragraph2, 'querySelectorAll found paragraph2')
|
||||
t.assert(xml0.querySelector('p') === paragraph1, 'querySelector found paragraph1')
|
||||
compare(users)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testYtextAttributes = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const ytext = /** @type {Y.XmlText} */ (ydoc.get('', Y.XmlText))
|
||||
ytext.observe(event => {
|
||||
t.compare(event.changes.keys.get('test'), { action: 'add', oldValue: undefined })
|
||||
})
|
||||
ytext.setAttribute('test', 42)
|
||||
t.compare(ytext.getAttribute('test'), 42)
|
||||
t.compare(ytext.getAttributes(), { test: 42 })
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testSiblings = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const yxml = ydoc.getXmlFragment()
|
||||
const first = new Y.XmlText()
|
||||
const second = new Y.XmlElement('p')
|
||||
yxml.insert(0, [first, second])
|
||||
t.assert(first.nextSibling === second)
|
||||
t.assert(second.prevSibling === first)
|
||||
t.assert(first.parent === yxml)
|
||||
t.assert(yxml.parent === null)
|
||||
t.assert(yxml.firstChild === first)
|
||||
}
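
/**
 * Illustrative sketch, not part of the original suite: the firstChild/nextSibling links
 * exercised above allow walking a fragment's top-level nodes without calling toArray().
 * The helper name is made up.
 *
 * @param {Y.XmlFragment} fragment
 * @return {Array<Y.XmlElement|Y.XmlText>}
 */
export const _sketchTopLevelNodes = fragment => {
  /**
   * @type {Array<Y.XmlElement|Y.XmlText>}
   */
  const nodes = []
  for (let n = fragment.firstChild; n !== null; n = n.nextSibling) {
    nodes.push(n)
  }
  return nodes
}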
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testInsertafter = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const yxml = ydoc.getXmlFragment()
|
||||
const first = new Y.XmlText()
|
||||
const second = new Y.XmlElement('p')
|
||||
const third = new Y.XmlElement('p')
|
||||
|
||||
const deepsecond1 = new Y.XmlElement('span')
|
||||
const deepsecond2 = new Y.XmlText()
|
||||
second.insertAfter(null, [deepsecond1])
|
||||
second.insertAfter(deepsecond1, [deepsecond2])
|
||||
|
||||
yxml.insertAfter(null, [first, second])
|
||||
yxml.insertAfter(second, [third])
|
||||
|
||||
t.assert(yxml.length === 3)
|
||||
t.assert(second.get(0) === deepsecond1)
|
||||
t.assert(second.get(1) === deepsecond2)
|
||||
|
||||
t.compareArrays(yxml.toArray(), [first, second, third])
|
||||
|
||||
t.fails(() => {
|
||||
const el = new Y.XmlElement('p')
|
||||
el.insertAfter(deepsecond1, [new Y.XmlText()])
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testClone = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const yxml = ydoc.getXmlFragment()
|
||||
const first = new Y.XmlText('text')
|
||||
const second = new Y.XmlElement('p')
|
||||
const third = new Y.XmlElement('p')
|
||||
yxml.push([first, second, third])
|
||||
t.compareArrays(yxml.toArray(), [first, second, third])
|
||||
const cloneYxml = yxml.clone()
|
||||
ydoc.getArray('copyarr').insert(0, [cloneYxml])
|
||||
t.assert(cloneYxml.length === 3)
|
||||
t.compare(cloneYxml.toJSON(), yxml.toJSON())
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testFormattingBug = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const yxml = /** @type {Y.XmlText} */ (ydoc.get('', Y.XmlText))
|
||||
const delta = [
|
||||
{ insert: 'A', attributes: { em: {}, strong: {} } },
|
||||
{ insert: 'B', attributes: { em: {} } },
|
||||
{ insert: 'C', attributes: { em: {}, strong: {} } }
|
||||
]
|
||||
yxml.applyDelta(delta)
|
||||
t.compare(yxml.toDelta(), delta)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {t.TestCase} _tc
|
||||
*/
|
||||
export const testElement = _tc => {
|
||||
const ydoc = new Y.Doc()
|
||||
const yxmlel = ydoc.getXmlElement()
|
||||
const text1 = new Y.XmlText('text1')
|
||||
const text2 = new Y.XmlText('text2')
|
||||
yxmlel.insert(0, [text1, text2])
|
||||
t.compareArrays(yxmlel.toArray(), [text1, text2])
|
||||
}
|
21
tsconfig.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2021",
|
||||
"lib": ["ES2021", "dom"],
|
||||
"module": "node16",
|
||||
"allowJs": true,
|
||||
"checkJs": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"outDir": "./dist",
|
||||
"baseUrl": "./",
|
||||
"emitDeclarationOnly": true,
|
||||
"strict": true,
|
||||
"noImplicitAny": true,
|
||||
"moduleResolution": "nodenext",
|
||||
"paths": {
|
||||
"yjs": ["./src/index.js"]
|
||||
}
|
||||
},
|
||||
"include": ["./src/**/*.js", "./tests/**/*.js"]
|
||||
}
|