Compare commits
8 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bf4d33dba6 | ||
|
|
9502a4ae60 | ||
|
|
7769fde19d | ||
|
|
1c8e6d8280 | ||
|
|
e0b111510b | ||
|
|
0b769d67ac | ||
|
|
e07b0f4100 | ||
|
|
78c947273e |
@@ -5,9 +5,9 @@ name: Node.js CI
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [ main ]
|
branches: [ master ]
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [ main ]
|
branches: [ master ]
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
@@ -16,16 +16,16 @@ jobs:
|
|||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
node-version: [16.x, 20.x]
|
node-version: [10.x, 12.x, 13.x]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v2
|
||||||
- name: Use Node.js ${{ matrix.node-version }}
|
- name: Use Node.js ${{ matrix.node-version }}
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v1
|
||||||
with:
|
with:
|
||||||
node-version: ${{ matrix.node-version }}
|
node-version: ${{ matrix.node-version }}
|
||||||
- run: npm ci
|
- run: npm ci
|
||||||
- run: npm run lint
|
- run: npm run lint
|
||||||
- run: npm run test
|
- run: npm run test-extensive
|
||||||
env:
|
env:
|
||||||
CI: true
|
CI: true
|
||||||
12
.jsdoc.json
12
.jsdoc.json
@@ -17,13 +17,10 @@
|
|||||||
"useCollapsibles": true,
|
"useCollapsibles": true,
|
||||||
"collapse": true,
|
"collapse": true,
|
||||||
"resources": {
|
"resources": {
|
||||||
"yjs.dev": "Website",
|
"yjs.dev": "Yjs website"
|
||||||
"docs.yjs.dev": "Docs",
|
|
||||||
"discuss.yjs.dev": "Forum",
|
|
||||||
"https://gitter.im/Yjs/community": "Chat"
|
|
||||||
},
|
},
|
||||||
"logo": {
|
"logo": {
|
||||||
"url": "https://yjs.dev/images/logo/yjs-512x512.png",
|
"url": "https://user-images.githubusercontent.com/5553757/48975307-61efb100-f06d-11e8-9177-ee895e5916e5.png",
|
||||||
"width": "162px",
|
"width": "162px",
|
||||||
"height": "162px",
|
"height": "162px",
|
||||||
"link": "/"
|
"link": "/"
|
||||||
@@ -38,7 +35,7 @@
|
|||||||
],
|
],
|
||||||
"default": {
|
"default": {
|
||||||
"staticFiles": {
|
"staticFiles": {
|
||||||
"include": []
|
"include": ["examples/"]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -47,6 +44,7 @@
|
|||||||
"encoding": "utf8",
|
"encoding": "utf8",
|
||||||
"private": false,
|
"private": false,
|
||||||
"recurse": true,
|
"recurse": true,
|
||||||
"template": "./node_modules/tui-jsdoc-template"
|
"template": "./node_modules/tui-jsdoc-template",
|
||||||
|
"tutorials": "./examples"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
179
INTERNALS.md
179
INTERNALS.md
@@ -1,179 +0,0 @@
|
|||||||
# Yjs Internals
|
|
||||||
|
|
||||||
This document roughly explains how Yjs works internally. There is a complete
|
|
||||||
walkthrough of the Yjs codebase available as a recording:
|
|
||||||
https://youtu.be/0l5XgnQ6rB4
|
|
||||||
|
|
||||||
The Yjs CRDT algorithm is described in the [YATA
|
|
||||||
paper](https://www.researchgate.net/publication/310212186_Near_Real-Time_Peer-to-Peer_Shared_Editing_on_Extensible_Data_Types)
|
|
||||||
from 2016. For an algorithmic view of how it works, the paper is a reasonable
|
|
||||||
place to start. There are a handful of small improvements implemented in Yjs
|
|
||||||
which aren't described in the paper. The most notable is that items have an
|
|
||||||
`originRight` as well as an `origin` property, which improves performance when
|
|
||||||
many concurrent inserts happen after the same character.
|
|
||||||
|
|
||||||
At its heart, Yjs is a list CRDT. Everything is squeezed into a list in order to
|
|
||||||
reuse the CRDT resolution algorithm:
|
|
||||||
|
|
||||||
- Arrays are easy - they're lists of arbitrary items.
|
|
||||||
- Text is a list of characters, optionally punctuated by formatting markers and
|
|
||||||
embeds for rich text support. Several characters can be wrapped in a single
|
|
||||||
linked list `Item` (this is also known as the compound representation of
|
|
||||||
CRDTs). More information about this in [this blog
|
|
||||||
article](https://blog.kevinjahns.de/are-crdts-suitable-for-shared-editing/).
|
|
||||||
- Maps are lists of entries. The last inserted entry for each key is used, and
|
|
||||||
all other duplicates for each key are flagged as deleted.
|
|
||||||
|
|
||||||
Each client is assigned a unique *clientID* property on first insert. This is a
|
|
||||||
random 53-bit integer (53 bits because that fits in the javascript safe integer
|
|
||||||
range \[JavaScript uses IEEE 754 floats\]).
|
|
||||||
|
|
||||||
## List items
|
|
||||||
|
|
||||||
Each item in a Yjs list is made up of two objects:
|
|
||||||
|
|
||||||
- An `Item` (*src/structs/Item.js*). This is used to relate the item to other
|
|
||||||
adjacent items.
|
|
||||||
- An object in the `AbstractType` hierarchy (subclasses of
|
|
||||||
*src/types/AbstractType.js* - eg `YText`). This stores the actual content in
|
|
||||||
the Yjs document.
|
|
||||||
|
|
||||||
The item and type object pair have a 1-1 mapping. The item's `content` field
|
|
||||||
references the AbstractType object and the AbstractType object's `_item` field
|
|
||||||
references the item.
|
|
||||||
|
|
||||||
Everything inserted in a Yjs document is given a unique ID, formed from a
|
|
||||||
*ID(clientID, clock)* pair (also known as a [Lamport
|
|
||||||
Timestamp](https://en.wikipedia.org/wiki/Lamport_timestamp)). The clock counts
|
|
||||||
up from 0 with the first inserted character or item a client makes. This is
|
|
||||||
similar to automerge's operation IDs, but note that the clock is only
|
|
||||||
incremented by inserts. Deletes are handled in a very different way (see
|
|
||||||
below).
|
|
||||||
|
|
||||||
If a run of characters is inserted into a document (eg `"abc"`), the clock will
|
|
||||||
be incremented for each character (eg 3 times here). But Yjs will only add a
|
|
||||||
single `Item` into the list. This has no effect on the core CRDT algorithm, but
|
|
||||||
the optimization dramatically decreases the number of javascript objects
|
|
||||||
created during normal text editing. This optimization only applies if the
|
|
||||||
characters share the same clientID, they're inserted in order, and all
|
|
||||||
characters have either been deleted or all characters are not deleted. The item
|
|
||||||
will be split if the run is interrupted for any reason (eg a character in the
|
|
||||||
middle of the run is deleted).
|
|
||||||
|
|
||||||
When an item is created, it stores a reference to the IDs of the preceding and
|
|
||||||
succeeding item. These are stored in the item's `origin` and `originRight`
|
|
||||||
fields, respectively. These are used when peers concurrently insert at the same
|
|
||||||
location in a document. Though quite rare in practice, Yjs needs to make sure
|
|
||||||
the list items always resolve to the same order on all peers. The actual logic
|
|
||||||
is relatively simple - its only a couple dozen lines of code and it lives in
|
|
||||||
the `Item#integrate()` method. The YATA paper has much more detail on this
|
|
||||||
algorithm.
|
|
||||||
|
|
||||||
### Item Storage
|
|
||||||
|
|
||||||
The items themselves are stored in two data structures and a cache:
|
|
||||||
|
|
||||||
- The items are stored in a tree of doubly-linked lists in *document order*.
|
|
||||||
Each item has `left` and `right` properties linking to its siblings in the
|
|
||||||
document. Items also have a `parent` property to reference their parent in the
|
|
||||||
document tree (null at the root). (And you can access an item's children, if
|
|
||||||
any, through `item.content`).
|
|
||||||
- All items are referenced in *insertion order* inside the struct store
|
|
||||||
(*src/utils/StructStore.js*). This references the list of items inserted by
|
|
||||||
for each client, in chronological order. This is used to find an item in the
|
|
||||||
tree with a given ID (using a binary search). It is also used to efficiently
|
|
||||||
gather the operations a peer is missing during sync (more on this below).
|
|
||||||
|
|
||||||
When a local insert happens, Yjs needs to map the insert position in the
|
|
||||||
document (eg position 1000) to an ID. With just the linked list, this would
|
|
||||||
require a slow O(n) linear scan of the list. But when editing a document, most
|
|
||||||
inserts are either at the same position as the last insert, or nearby. To
|
|
||||||
improve performance, Yjs stores a cache of the 80 most recently looked up
|
|
||||||
insert positions in the document. This is consulted and updated when a position
|
|
||||||
is looked up to improve performance in the average case. The cache is updated
|
|
||||||
using a heuristic that is still changing (currently, it is updated when a new
|
|
||||||
position significantly diverges from existing markers in the cache). Internally
|
|
||||||
this is referred to as the skip list / fast search marker.
|
|
||||||
|
|
||||||
### Deletions
|
|
||||||
|
|
||||||
Deletions in Yjs are treated very differently from insertions. Insertions are
|
|
||||||
implemented as a sequential operation based CRDT, but deletions are treated as
|
|
||||||
a simpler state based CRDT.
|
|
||||||
|
|
||||||
When an item has been deleted by any peer, at any point in history, it is
|
|
||||||
flagged as deleted on the item. (Internally Yjs uses the `info` bitfield.) Yjs
|
|
||||||
does not record metadata about a deletion:
|
|
||||||
|
|
||||||
- No data is kept on *when* an item was deleted, or which user deleted it.
|
|
||||||
- The struct store does not contain deletion records
|
|
||||||
- The clientID's clock is not incremented
|
|
||||||
|
|
||||||
If garbage collection is enabled in Yjs, when an object is deleted its content
|
|
||||||
is discarded. If a deleted object contains children (eg a field is deleted in
|
|
||||||
an object), the content is replaced with a `GC` object (*src/structs/GC.js*).
|
|
||||||
This is a very lightweight structure - it only stores the length of the removed
|
|
||||||
content.
|
|
||||||
|
|
||||||
Yjs has some special logic to share which content in a document has been
|
|
||||||
deleted:
|
|
||||||
|
|
||||||
- When a delete happens, as well as marking the item, the deleted IDs are
|
|
||||||
listed locally within the transaction. (See below for more information about
|
|
||||||
transactions.) When a transaction has been committed locally, the set of
|
|
||||||
deleted items is appended to a transaction's update message.
|
|
||||||
- A snapshot (a marked point in time in the Yjs history) is specified using
|
|
||||||
both the set of (clientID, clock) pairs *and* the set of all deleted item
|
|
||||||
IDs. The deleted set is O(n), but because deletions usually happen in runs,
|
|
||||||
this data set is usually tiny in practice. (The real world editing trace from
|
|
||||||
the B4 benchmark document contains 182k inserts and 77k deleted characters. The
|
|
||||||
deleted set size in a snapshot is only 4.5Kb).
|
|
||||||
|
|
||||||
## Transactions
|
|
||||||
|
|
||||||
All updates in Yjs happen within a *transaction*. (Defined in
|
|
||||||
*src/utils/Transaction.js*.)
|
|
||||||
|
|
||||||
The transaction collects a set of updates to the Yjs document to be applied on
|
|
||||||
remote peers atomically. Once a transaction has been committed locally, it
|
|
||||||
generates a compressed *update message* which is broadcast to synchronized
|
|
||||||
remote peers to notify them of the local change. The update message contains:
|
|
||||||
|
|
||||||
- The set of newly inserted items
|
|
||||||
- The set of items deleted within the transaction.
|
|
||||||
|
|
||||||
## Network protocol
|
|
||||||
|
|
||||||
The network protocol is not really a part of Yjs. There are a few relevant
|
|
||||||
concepts that can be used to create a custom network protocol:
|
|
||||||
|
|
||||||
* `update`: The Yjs document can be encoded to an *update* object that can be
|
|
||||||
parsed to reconstruct the document. Also every change on the document fires
|
|
||||||
an incremental document update that allows clients to sync with each other.
|
|
||||||
The update object is a Uint8Array that efficiently encodes `Item` objects and
|
|
||||||
the delete set.
|
|
||||||
* `state vector`: A state vector defines the known state of each user (a set of
|
|
||||||
tuples `(client, clock)`). This object is also efficiently encoded as a
|
|
||||||
Uint8Array.
|
|
||||||
|
|
||||||
The client can ask a remote client for missing document updates by sending
|
|
||||||
their state vector (often referred to as *sync step 1*). The remote peer can
|
|
||||||
compute the missing `Item` objects using the `clocks` of the respective clients
|
|
||||||
and compute a minimal update message that reflects all missing updates (sync
|
|
||||||
step 2).
|
|
||||||
|
|
||||||
An implementation of the syncing process is in
|
|
||||||
[y-protocols](https://github.com/yjs/y-protocols).
|
|
||||||
|
|
||||||
## Snapshots
|
|
||||||
|
|
||||||
A snapshot can be used to restore an old document state. It is a `state vector`
|
|
||||||
\+ `delete set`. A client can restore an old document state by iterating through
|
|
||||||
the sequence CRDT and ignoring all Items that have an `id.clock >
|
|
||||||
stateVector[id.client].clock`. Instead of using `item.deleted` the client will
|
|
||||||
use the delete set to find out if an item was deleted or not.
|
|
||||||
|
|
||||||
It is not recommended to restore an old document state using snapshots,
|
|
||||||
although that would certainly be possible. Instead, the old state should be
|
|
||||||
computed by iterating through the newest state and using the additional
|
|
||||||
information from the state vector.
|
|
||||||
4
LICENSE
4
LICENSE
@@ -1,7 +1,7 @@
|
|||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
|
|
||||||
Copyright (c) 2023
|
Copyright (c) 2014
|
||||||
- Kevin Jahns <kevin.jahns@protonmail.com>.
|
- Kevin Jahns <kevin.jahns@rwth-aachen.de>.
|
||||||
- Chair of Computer Science 5 (Databases & Information Systems), RWTH Aachen University, Germany
|
- Chair of Computer Science 5 (Databases & Information Systems), RWTH Aachen University, Germany
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
|||||||
644
README.md
644
README.md
@@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
> A CRDT framework with a powerful abstraction of shared data
|
> A CRDT framework with a powerful abstraction of shared data
|
||||||
|
|
||||||
Yjs is a [CRDT implementation](#yjs-crdt-algorithm) that exposes its internal
|
Yjs is a [CRDT implementation](#Yjs-CRDT-Algorithm) that exposes its internal
|
||||||
data structure as *shared types*. Shared types are common data types like `Map`
|
data structure as *shared types*. Shared types are common data types like `Map`
|
||||||
or `Array` with superpowers: changes are automatically distributed to other
|
or `Array` with superpowers: changes are automatically distributed to other
|
||||||
peers and merged without merge conflicts.
|
peers and merged without merge conflicts.
|
||||||
@@ -15,132 +15,71 @@ suited for even large documents.
|
|||||||
|
|
||||||
* Demos: [https://github.com/yjs/yjs-demos](https://github.com/yjs/yjs-demos)
|
* Demos: [https://github.com/yjs/yjs-demos](https://github.com/yjs/yjs-demos)
|
||||||
* Discuss: [https://discuss.yjs.dev](https://discuss.yjs.dev)
|
* Discuss: [https://discuss.yjs.dev](https://discuss.yjs.dev)
|
||||||
* Chat: [Gitter](https://gitter.im/Yjs/community) | [Discord](https://discord.gg/T3nqMT6qbM)
|
|
||||||
* Benchmark Yjs vs. Automerge:
|
* Benchmark Yjs vs. Automerge:
|
||||||
[https://github.com/dmonad/crdt-benchmarks](https://github.com/dmonad/crdt-benchmarks)
|
[https://github.com/dmonad/crdt-benchmarks](https://github.com/dmonad/crdt-benchmarks)
|
||||||
* Podcast [**"Yjs Deep Dive into real time collaborative editing solutions":**](https://www.tag1consulting.com/blog/deep-dive-real-time-collaborative-editing-solutions-tagteamtalk-001-0)
|
* Podcast [**"Yjs Deep Dive into real time collaborative editing solutions":**](https://www.tag1consulting.com/blog/deep-dive-real-time-collaborative-editing-solutions-tagteamtalk-001-0)
|
||||||
* Podcast [**"Google Docs-style editing in Gutenberg with the YJS framework":**](https://publishpress.com/blog/yjs/)
|
* Podcast [**"Google Docs-style editing in Gutenberg with the YJS framework":**](https://publishpress.com/blog/yjs/)
|
||||||
|
|
||||||
:construction_worker_woman: If you are looking for professional support, please
|
:construction_worker_woman: If you are looking for professional (paid) support to
|
||||||
consider supporting this project via a "support contract" on
|
build collaborative or distributed applications ping us at
|
||||||
[GitHub Sponsors](https://github.com/sponsors/dmonad). I will attend your issues
|
<yjs@tag1consulting.com>. Otherwise you can find help on our
|
||||||
quicker and we can discuss questions and problems in regular video conferences.
|
[discussion board](https://discuss.yjs.dev).
|
||||||
Otherwise you can find help on our community [discussion board](https://discuss.yjs.dev).
|
|
||||||
|
|
||||||
## Sponsorship
|
## Sponsors
|
||||||
|
|
||||||
Please contribute to the project financially - especially if your company relies
|
I'm currently looking for sponsors that allow me to be less dependent on
|
||||||
on Yjs. [](https://github.com/sponsors/dmonad)
|
contracting work. These awesome backers already fund further development of
|
||||||
|
Yjs:
|
||||||
|
|
||||||
## Professional Support
|
[](https://github.com/vwall)
|
||||||
|
[<img src="https://user-images.githubusercontent.com/5553757/83337333-a7bcb380-a2ba-11ea-837b-e404eb35d318.png"
|
||||||
|
height="60px" />](https://input.com/)
|
||||||
|
[](https://github.com/canadaduane)
|
||||||
|
[](https://github.com/ISNIT0)
|
||||||
|
[<img src="https://room.sh/img/icons/android-chrome-192x192.png" height="60px" />](https://room.sh/)
|
||||||
|
[](https://github.com/journeyapps)
|
||||||
|
[](https://github.com/adabru)
|
||||||
|
[](https://github.com/NathanaelA)
|
||||||
|
[](https://github.com/gremloon)
|
||||||
|
|
||||||
* [Support Contract with the Maintainer](https://github.com/sponsors/dmonad) -
|
Sponsorship also comes with special perks! [](https://github.com/sponsors/dmonad)
|
||||||
By contributing financially to the open-source Yjs project, you can receive
|
|
||||||
professional support directly from the author. This includes the opportunity for
|
|
||||||
weekly video calls to discuss your specific challenges.
|
|
||||||
* [Synergy Codes](https://synergycodes.com/yjs-services/) - Specializing in
|
|
||||||
consulting and developing real-time collaborative editing solutions for visual
|
|
||||||
apps, Synergy Codes focuses on interactive diagrams, complex graphs, charts, and
|
|
||||||
various data visualization types. Their expertise empowers developers to build
|
|
||||||
engaging and interactive visual experiences leveraging the power of Yjs. See
|
|
||||||
their work in action at [Visual Collaboration
|
|
||||||
Showcase](https://yjs-diagram.synergy.codes/).
|
|
||||||
|
|
||||||
## Who is using Yjs
|
## Who is using Yjs
|
||||||
|
|
||||||
* [AFFiNE](https://affine.pro/) A local-first, privacy-first, open source
|
* [Relm](http://www.relm.us/) A collaborative gameworld for teamwork and
|
||||||
knowledge base. :star2:
|
community. :star2:
|
||||||
* [Huly](https://huly.io/) - Open Source All-in-One Project Management Platform :star2:
|
* [Input](https://input.com/) A collaborative note taking app. :star2:
|
||||||
* [Cargo](https://cargo.site/) Site builder for designers and artists :star2:
|
|
||||||
* [Gitbook](https://gitbook.com) Knowledge management for technical teams :star2:
|
|
||||||
* [Evernote](https://evernote.com) Note-taking app :star2:
|
|
||||||
* [Lessonspace](https://thelessonspace.com) Enterprise platform for virtual
|
|
||||||
classrooms and online training :star2:
|
|
||||||
* [Ellipsus](ellipsus.com) - Collaborative writing app for storytelling etc.
|
|
||||||
Supports versioning, change attribution, and "blame". A solution for the whole
|
|
||||||
publishing process (also selling) :star:
|
|
||||||
* [Dynaboard](https://dynaboard.com/) Build web apps collaboratively. :star:
|
|
||||||
* [Relm](https://www.relm.us/) A collaborative gameworld for teamwork and
|
|
||||||
community. :star:
|
|
||||||
* [Room.sh](https://room.sh/) A meeting application with integrated
|
* [Room.sh](https://room.sh/) A meeting application with integrated
|
||||||
collaborative drawing, editing, and coding tools. :star:
|
collaborative drawing, editing, and coding tools. :star:
|
||||||
|
* [http://coronavirustechhandbook.com/](https://coronavirustechhandbook.com/)
|
||||||
|
A collaborative wiki that is edited by thousands of different people to work
|
||||||
|
on a rapid and sophisticated response to the coronavirus outbreak and
|
||||||
|
subsequent impacts. :star:
|
||||||
* [Nimbus Note](https://nimbusweb.me/note.php) A note-taking app designed by
|
* [Nimbus Note](https://nimbusweb.me/note.php) A note-taking app designed by
|
||||||
Nimbus Web. :star:
|
Nimbus Web.
|
||||||
* [Pluxbox RadioManager](https://getradiomanager.com/) A web-based app to
|
* [JoeDocs](https://joedocs.com/) An open collaborative wiki.
|
||||||
collaboratively organize radio broadcasts. :star:
|
* [Pluxbox RadioManager](https://pluxbox.com/) A web-based app to
|
||||||
* [modyfi](https://www.modyfi.com) - Modyfi is the design platform built for
|
collaboratively organize radio broadcasts.
|
||||||
multidisciplinary designers. Design, generate, animate, and more — without
|
* [Cattaz](http://cattaz.io/) A wiki that can run custom applications in the
|
||||||
switching between apps. :star:
|
wiki pages.
|
||||||
* [Sana](https://sanalabs.com/) A learning platform with collaborative text
|
* [Evernote](https://evernote.com) A note-taking and task management application.
|
||||||
editing powered by Yjs.
|
|
||||||
* [Serenity Notes](https://www.serenity.re/en/notes) End-to-end encrypted
|
|
||||||
collaborative notes app.
|
|
||||||
* [PRSM](https://prsm.uk/) Collaborative mind-mapping and system visualisation.
|
|
||||||
*[(source)](https://github.com/micrology/prsm)*
|
|
||||||
* [Alldone](https://alldone.app/) A next-gen project management and
|
|
||||||
collaboration platform.
|
|
||||||
* [Living Spec](https://livingspec.com/) A modern way for product teams to collaborate.
|
|
||||||
* [Slidebeamer](https://slidebeamer.com/) Presentation app.
|
|
||||||
* [BlockSurvey](https://blocksurvey.io) End-to-end encryption for your forms/surveys.
|
|
||||||
* [Skiff](https://skiff.org/) Private, decentralized workspace.
|
|
||||||
* [JupyterLab](https://jupyter.org/) Collaborative computational Notebooks
|
|
||||||
* [JupyterCad](https://jupytercad.readthedocs.io/en/latest/) Extension to
|
|
||||||
JupyterLab that enables collaborative editing of 3d FreeCAD Models.
|
|
||||||
* [Hyperquery](https://hyperquery.ai/) A collaborative data workspace for
|
|
||||||
sharing analyses, documentation, spreadsheets, and dashboards.
|
|
||||||
* [Nosgestesclimat](https://nosgestesclimat.fr/groupe) The french carbon
|
|
||||||
footprint calculator has a group P2P mode based on yjs
|
|
||||||
* [oorja.io](https://oorja.io) Online meeting spaces extensible with
|
|
||||||
collaborative apps, end-to-end encrypted.
|
|
||||||
* [LegendKeeper](https://legendkeeper.com) Collaborative campaign planner and
|
|
||||||
worldbuilding app for tabletop RPGs.
|
|
||||||
* [IllumiDesk](https://illumidesk.com/) Build courses and content with A.I.
|
|
||||||
* [btw](https://www.btw.so) Open-source Medium alternative
|
|
||||||
* [AWS SageMaker](https://aws.amazon.com/sagemaker/) Tools for building Machine
|
|
||||||
Learning Models
|
|
||||||
* [linear](https://linear.app) Streamline issues, projects, and product roadmaps.
|
|
||||||
* [btw](https://www.btw.so) - Personal website builder
|
|
||||||
* [AWS SageMaker](https://aws.amazon.com/sagemaker/) - Machine Learning Service
|
|
||||||
* [Arkiter](https://www.arkiter.com/) - Live interview software
|
|
||||||
* [Appflowy](https://www.appflowy.io/) - They use Yrs
|
|
||||||
* [Multi.app](https://multi.app) - Multiplayer app sharing: Point, draw and edit
|
|
||||||
in shared apps as if they're on your computer. They are using Yrs.
|
|
||||||
* [AppMaster](https://appmaster.io) A No-Code platform for creating
|
|
||||||
production-ready applications with source code generation.
|
|
||||||
* [Synthesia](https://www.synthesia.io) - Collaborative Video Editor
|
|
||||||
* [thinkdeli](https://thinkdeli.com) - A fast and simple notes app powered by AI
|
|
||||||
* [ourboard](https://github.com/raimohanska/ourboard) - A collaborative whiteboard
|
|
||||||
application
|
|
||||||
* [Ellie.ai](https://ellie.ai) - Data Product Design and Collaboration
|
|
||||||
* [GoPeer](https://gopeer.org/) - Collaborative tutoring
|
|
||||||
* [screen.garden](https://screen.garden) - Collaborative backend for PKM apps.
|
|
||||||
* [NextCloud](https://nextcloud.com/) - Content Collaboration Platform
|
|
||||||
* [keystatic](https://github.com/Thinkmill/keystatic) - git-based CMS
|
|
||||||
* [QDAcity](https://qdacity.com) - Collaborative qualitative data analysis platform
|
|
||||||
* [Kanbert](https://kanbert.com) - Project management software
|
|
||||||
* [Eclipse Theia](https://github.com/eclipse-theia/theia) - A cloud & desktop
|
|
||||||
IDE that runs in the browser.
|
|
||||||
* [ScienHub](https://scienhub.com) - Collaborative LaTeX editor in the browser.
|
|
||||||
* [Open Collaboration Tools](https://www.open-collab.tools/) - Collaborative
|
|
||||||
editing for your IDE or custom editor
|
|
||||||
* [Typst](https://typst.app/) - Compose, edit, and automate technical documents
|
|
||||||
|
|
||||||
## Table of Contents
|
## Table of Contents
|
||||||
|
|
||||||
* [Overview](#overview)
|
* [Overview](#Overview)
|
||||||
* [Bindings](#bindings)
|
* [Bindings](#Bindings)
|
||||||
* [Providers](#providers)
|
* [Providers](#Providers)
|
||||||
* [Tooling](#tooling)
|
* [Getting Started](#Getting-Started)
|
||||||
* [Ports](#ports)
|
* [API](#API)
|
||||||
* [Getting Started](#getting-started)
|
* [Shared Types](#Shared-Types)
|
||||||
* [API](#api)
|
* [Y.Doc](#YDoc)
|
||||||
* [Shared Types](#shared-types)
|
* [Document Updates](#Document-Updates)
|
||||||
* [Y.Doc](#ydoc)
|
* [Relative Positions](#Relative-Positions)
|
||||||
* [Document Updates](#document-updates)
|
* [Y.UndoManager](#YUndoManager)
|
||||||
* [Relative Positions](#relative-positions)
|
* [Miscellaneous](#Miscellaneous)
|
||||||
* [Y.UndoManager](#yundomanager)
|
* [Typescript Declarations](#Typescript-Declarations)
|
||||||
* [Yjs CRDT Algorithm](#yjs-crdt-algorithm)
|
* [Yjs CRDT Algorithm](#Yjs-CRDT-Algorithm)
|
||||||
* [License and Author](#license-and-author)
|
* [License and Author](#License-and-Author)
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
@@ -152,19 +91,10 @@ are implemented in separate modules.
|
|||||||
|
|
||||||
| Name | Cursors | Binding | Demo |
|
| Name | Cursors | Binding | Demo |
|
||||||
|---|:-:|---|---|
|
|---|:-:|---|---|
|
||||||
| [ProseMirror](https://prosemirror.net/) | ✔ | [y-prosemirror](https://github.com/yjs/y-prosemirror) | [demo](https://demos.yjs.dev/prosemirror/prosemirror.html) |
|
| [ProseMirror](https://prosemirror.net/) | ✔ | [y-prosemirror](http://github.com/yjs/y-prosemirror) | [demo](https://demos.yjs.dev/prosemirror/prosemirror.html) |
|
||||||
| [Quill](https://quilljs.com/) | ✔ | [y-quill](https://github.com/yjs/y-quill) | [demo](https://demos.yjs.dev/quill/quill.html) |
|
| [Quill](https://quilljs.com/) | ✔ | [y-quill](http://github.com/yjs/y-quill) | [demo](https://demos.yjs.dev/quill/quill.html) |
|
||||||
| [CodeMirror](https://codemirror.net/) | ✔ | [y-codemirror](https://github.com/yjs/y-codemirror) | [demo](https://demos.yjs.dev/codemirror/codemirror.html) |
|
| [CodeMirror](https://codemirror.net/) | ✔ | [y-codemirror](http://github.com/yjs/y-codemirror) | [demo](https://demos.yjs.dev/codemirror/codemirror.html) |
|
||||||
| [Monaco](https://microsoft.github.io/monaco-editor/) | ✔ | [y-monaco](https://github.com/yjs/y-monaco) | [demo](https://demos.yjs.dev/monaco/monaco.html) |
|
| [Monaco](https://microsoft.github.io/monaco-editor/) | ✔ | [y-monaco](http://github.com/yjs/y-monaco) | [demo](https://demos.yjs.dev/monaco/monaco.html) |
|
||||||
| [Slate](https://github.com/ianstormtaylor/slate) | ✔ | [slate-yjs](https://github.com/bitphinix/slate-yjs) | [demo](https://bitphinix.github.io/slate-yjs-example) |
|
|
||||||
| [BlockSuite](https://github.com/toeverything/blocksuite) | ✔ | (native) | [demo](https://blocksuite-toeverything.vercel.app/?init) |
|
|
||||||
| [Lexical](https://lexical.dev/) | ✔ | (native) | [demo](https://lexical.dev/docs/collaboration/react#see-it-in-action) |
|
|
||||||
| [valtio](https://github.com/pmndrs/valtio) | | [valtio-yjs](https://github.com/dai-shi/valtio-yjs) | [demo](https://codesandbox.io/s/valtio-yjs-demo-ox3iy) |
|
|
||||||
| [immer](https://github.com/immerjs/immer) | | [immer-yjs](https://github.com/sep2/immer-yjs) | [demo](https://codesandbox.io/s/immer-yjs-demo-6e0znb) |
|
|
||||||
| React | | [react-yjs](https://github.com/nikgraf/react-yjs) | [demo](https://react-yjs-example.vercel.app/) |
|
|
||||||
| React / Vue / Svelte / MobX | | [SyncedStore](https://syncedstore.org) | [demo](https://syncedstore.org/docs/react) |
|
|
||||||
| [mobx-keystone](https://mobx-keystone.js.org/) | | [mobx-keystone-yjs](https://github.com/xaviergonz/mobx-keystone/tree/master/packages/mobx-keystone-yjs) | [demo](https://mobx-keystone.js.org/examples/yjs-binding) |
|
|
||||||
| [PSPDFKit](https://www.nutrient.io/) | | [yjs-pspdfkit](https://github.com/hoangqwe159/yjs-pspdfkit) | [demo](https://github.com/hoangqwe159/yjs-pspdfkit) |
|
|
||||||
|
|
||||||
### Providers
|
### Providers
|
||||||
|
|
||||||
@@ -173,158 +103,36 @@ and storing shared data for offline usage is quite a hassle. **Providers**
|
|||||||
manage all that for you and are the perfect starting point for your
|
manage all that for you and are the perfect starting point for your
|
||||||
collaborative app.
|
collaborative app.
|
||||||
|
|
||||||
> This list of providers is incomplete. Please open PRs to add your providers to
|
|
||||||
> this list!
|
|
||||||
|
|
||||||
#### Connection Providers
|
|
||||||
|
|
||||||
<dl>
|
<dl>
|
||||||
<dt><a href="https://github.com/yjs/y-websocket">y-websocket</a></dt>
|
<dt><a href="http://github.com/yjs/y-webrtc">y-webrtc</a></dt>
|
||||||
<dd>
|
|
||||||
A module that contains a simple websocket backend and a websocket client that
|
|
||||||
connects to that backend. <a href="https://github.com/yjs/y-redis/"><b>y-redis</b></a>,
|
|
||||||
<b>y-sweet</b>, <b>ypy-websocket</b> and <a href="https://tiptap.dev/docs/hocuspocus/introduction">
|
|
||||||
<b>Hocuspocus</b></a> (see below) are alternative
|
|
||||||
backends to y-websocket.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/yjs/y-webrtc">y-webrtc</a></dt>
|
|
||||||
<dd>
|
<dd>
|
||||||
Propagates document updates peer-to-peer using WebRTC. The peers exchange
|
Propagates document updates peer-to-peer using WebRTC. The peers exchange
|
||||||
signaling data over signaling servers. Publicly available signaling servers
|
signaling data over signaling servers. Publicly available signaling servers
|
||||||
are available. Communication over the signaling servers can be encrypted by
|
are available. Communication over the signaling servers can be encrypted by
|
||||||
providing a shared secret, keeping the connection information and the shared
|
providing a shared secret, keeping the connection information and the shared
|
||||||
document private.
|
document private.
|
||||||
</dd>
|
</dd>
|
||||||
<dt><a href="https://github.com/liveblocks/liveblocks">@liveblocks/yjs </a> 🌟</dt>
|
<dt><a href="http://github.com/yjs/y-websocket">y-websocket</a></dt>
|
||||||
<dd>
|
<dd>
|
||||||
<a href="https://liveblocks.io/document/yjs">Liveblocks Yjs</a> provides a fully
|
A module that contains a simple websocket backend and a websocket client that
|
||||||
hosted WebSocket infrastructure and persisted data store for Yjs
|
connects to that backend. The backend can be extended to persist updates in a
|
||||||
documents. No configuration or maintenance is required. It also features
|
leveldb database.
|
||||||
Yjs webhook events, REST API to read and update Yjs documents, and a
|
|
||||||
browser DevTools extension.
|
|
||||||
</dd>
|
</dd>
|
||||||
<dt><a href="https://github.com/drifting-in-space/y-sweet">y-sweet</a> ⭐</dt>
|
<dt><a href="http://github.com/yjs/y-indexeddb">y-indexeddb</a></dt>
|
||||||
<dd>
|
|
||||||
A standalone yjs server with persistence to S3 or filesystem. They offer a
|
|
||||||
<a href="https://y-sweet.cloud">cloud service</a> as well.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/ueberdosis/hocuspocus">Hocuspocus</a> ⭐</dt>
|
|
||||||
<dd>
|
|
||||||
A standalone extensible yjs server with sqlite persistence, webhooks, auth and more.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://docs.superviz.com/collaboration/integrations/YJS/overview">@superviz/yjs</a></dt>
|
|
||||||
<dd>
|
|
||||||
SuperViz Yjs Provider comes with a secure, scalable real-time infrastructure
|
|
||||||
for Yjs documents, fully compatible with a set of real-time
|
|
||||||
collaboration components offered by SuperViz. This solution ensures
|
|
||||||
synchronization, offline editing, and real-time updates, enabling
|
|
||||||
multiple users to collaborate effectively within shared workspaces.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://docs.partykit.io/reference/y-partykit-api/">PartyKit</a></dt>
|
|
||||||
<dd>
|
|
||||||
Cloud service for building multiplayer apps.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/marcopolo/y-libp2p">y-libp2p</a></dt>
|
|
||||||
<dd>
|
|
||||||
Uses <a href="https://libp2p.io/">libp2p</a> to propagate updates via
|
|
||||||
<a href="https://github.com/libp2p/specs/tree/master/pubsub/gossipsub">GossipSub</a>.
|
|
||||||
Also includes a peer-sync mechanism to catch up on missed updates.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/yjs/y-dat">y-dat</a></dt>
|
|
||||||
<dd>
|
|
||||||
[WIP] Write document updates efficiently to the dat network using
|
|
||||||
<a href="https://github.com/kappa-db/multifeed">multifeed</a>. Each client has
|
|
||||||
an append-only log of CRDT local updates (hypercore). Multifeed manages and syncs
|
|
||||||
hypercores and y-dat listens to changes and applies them to the Yjs document.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/yousefED/matrix-crdt">Matrix-CRDT</a></dt>
|
|
||||||
<dd>
|
|
||||||
Use <a href="https://www.matrix.org">Matrix</a> as an off-the-shelf backend for
|
|
||||||
Yjs by using the <a href="https://github.com/yousefED/matrix-crdt">MatrixProvider</a>.
|
|
||||||
Use Matrix as transport and storage of Yjs updates, so you can focus building
|
|
||||||
your client app and Matrix can provide powerful features like Authentication,
|
|
||||||
Authorization, Federation, hosting (self-hosting or SaaS) and even End-to-End
|
|
||||||
Encryption (E2EE).
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/y-crdt/yrb-actioncable">yrb-actioncable</a></dt>
|
|
||||||
<dd>
|
|
||||||
An ActionCable companion for Yjs clients. There is a fitting
|
|
||||||
<a href="https://github.com/y-crdt/yrb-redis">redis extension</a> as well.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/y-crdt/ypy-websocket">ypy-websocket</a></dt>
|
|
||||||
<dd>
|
|
||||||
Websocket backend, written in Python.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://tinybase.org/">Tinybase</a></dt>
|
|
||||||
<dd>
|
|
||||||
The reactive data store for local-first apps. They support multiple CRDTs and
|
|
||||||
different network technologies.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://codeberg.org/webxdc/y-webxdc">y-webxdc</a></dt>
|
|
||||||
<dd>
|
|
||||||
Provider for sharing data in <a href="https://webxdc.org">webxdc chat apps</a>.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://www.secsync.com/">secsync</a></dt>
|
|
||||||
<dd>
|
|
||||||
An architecture to relay end-to-end encrypted CRDTs over a central service.
|
|
||||||
</dd>
|
|
||||||
|
|
||||||
</dl>
|
|
||||||
|
|
||||||
#### Persistence Providers
|
|
||||||
|
|
||||||
<dl>
|
|
||||||
<dt><a href="https://github.com/yjs/y-indexeddb">y-indexeddb</a></dt>
|
|
||||||
<dd>
|
<dd>
|
||||||
Efficiently persists document updates to the browsers indexeddb database.
|
Efficiently persists document updates to the browsers indexeddb database.
|
||||||
The document is immediately available and only diffs need to be synced through the
|
The document is immediately available and only diffs need to be synced through the
|
||||||
network provider.
|
network provider.
|
||||||
</dd>
|
</dd>
|
||||||
<dt><a href="https://github.com/MaxNoetzold/y-mongodb-provider">y-mongodb-provider</a></dt>
|
<dt><a href="http://github.com/yjs/y-dat">y-dat</a></dt>
|
||||||
<dd>
|
<dd>
|
||||||
Adds persistent storage to a server with MongoDB. Can be used with the
|
[WIP] Write document updates efficiently to the dat network using
|
||||||
y-websocket provider.
|
<a href="https://github.com/kappa-db/multifeed">multifeed</a>. Each client has
|
||||||
</dd>
|
an append-only log of CRDT local updates (hypercore). Multifeed manages and syncs
|
||||||
<dt><a href="https://github.com/podraven/y-fire">y-fire</a></dt>
|
hypercores and y-dat listens to changes and applies them to the Yjs document.
|
||||||
<dd>
|
</dd>
|
||||||
A database and connection provider for Yjs based on Firestore.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/malte-j/y-op-sqlite">y-op-sqlite</a></dt>
|
|
||||||
<dd>
|
|
||||||
Persist YJS updates in your React Native app using
|
|
||||||
<a href="https://github.com/OP-Engineering/op-sqlite">op-sqlite</a>
|
|
||||||
, the fastest SQLite library for React Native.
|
|
||||||
</dd>
|
|
||||||
<dt><a href="https://github.com/MaxNoetzold/y-postgresql">y-postgresql</a></dt>
|
|
||||||
<dd>
|
|
||||||
Provides persistent storage for a web server using PostgreSQL and
|
|
||||||
is easily compatible with y-websocket.
|
|
||||||
</dd>
|
|
||||||
</dl>
|
</dl>
|
||||||
|
|
||||||
### Tooling
|
|
||||||
|
|
||||||
* [y-sweet debugger](https://docs.jamsocket.com/y-sweet/advanced/debugger)
|
|
||||||
* [liveblocks devtools](https://liveblocks.io/devtools)
|
|
||||||
* [Yjs inspector](https://inspector.yjs.dev)
|
|
||||||
|
|
||||||
### Ports
|
|
||||||
|
|
||||||
There are several Yjs-compatible ports to other programming languages.
|
|
||||||
|
|
||||||
* [y-octo](https://github.com/toeverything/y-octo) - Rust implementation by
|
|
||||||
[AFFiNE](https://affine.pro)
|
|
||||||
* [y-crdt](https://github.com/y-crdt/y-crdt) - Rust implementation with multiple
|
|
||||||
language bindings to other languages
|
|
||||||
* [yrs](https://github.com/y-crdt/y-crdt/tree/main/yrs) - Rust interface
|
|
||||||
* [ypy](https://github.com/y-crdt/ypy) - Python binding
|
|
||||||
* [yrb](https://github.com/y-crdt/yrb) - Ruby binding
|
|
||||||
* [yswift](https://github.com/y-crdt/yswift) - Swift binding
|
|
||||||
* [yffi](https://github.com/y-crdt/y-crdt/tree/main/yffi) - C-FFI
|
|
||||||
* [ywasm](https://github.com/y-crdt/y-crdt/tree/main/ywasm) - WASM binding
|
|
||||||
* [y_ex](https://github.com/satoren/y_ex) - Elixir bindings
|
|
||||||
* [ycs](https://github.com/yjs/ycs) - .Net compatible C# implementation.
|
|
||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
Install Yjs and a provider with your favorite package manager:
|
Install Yjs and a provider with your favorite package manager:
|
||||||
@@ -342,9 +150,6 @@ PORT=1234 node ./node_modules/y-websocket/bin/server.cjs
|
|||||||
### Example: Observe types
|
### Example: Observe types
|
||||||
|
|
||||||
```js
|
```js
|
||||||
import * as Y from 'yjs';
|
|
||||||
|
|
||||||
const doc = new Y.Doc();
|
|
||||||
const yarray = doc.getArray('my-array')
|
const yarray = doc.getArray('my-array')
|
||||||
yarray.observe(event => {
|
yarray.observe(event => {
|
||||||
console.log('yarray was modified')
|
console.log('yarray was modified')
|
||||||
@@ -394,7 +199,7 @@ const ydoc = new Y.Doc()
|
|||||||
|
|
||||||
// this allows you to instantly get the (cached) documents data
|
// this allows you to instantly get the (cached) documents data
|
||||||
const indexeddbProvider = new IndexeddbPersistence('count-demo', ydoc)
|
const indexeddbProvider = new IndexeddbPersistence('count-demo', ydoc)
|
||||||
indexeddbProvider.whenSynced.then(() => {
|
indexeddbProvider.whenSynced.then(() => {
|
||||||
console.log('loaded data from indexed db')
|
console.log('loaded data from indexed db')
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -437,48 +242,34 @@ necessary.
|
|||||||
</p>
|
</p>
|
||||||
<pre>const yarray = new Y.Array()</pre>
|
<pre>const yarray = new Y.Array()</pre>
|
||||||
<dl>
|
<dl>
|
||||||
<b><code>
|
<b><code>insert(index:number, content:Array<object|boolean|Array|string|number|Uint8Array|Y.Type>)</code></b>
|
||||||
Y.Array.from(Array<object|boolean|Array|string|number|null|Uint8Array|Y.Type>):
|
|
||||||
Y.Array
|
|
||||||
</code></b>
|
|
||||||
<dd>An alternative factory function to create a Y.Array based on existing content.</dd>
|
|
||||||
<b><code>parent:Y.AbstractType|null</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>insert(index:number, content:Array<object|boolean|Array|string|number|null|Uint8Array|Y.Type>)</code></b>
|
|
||||||
<dd>
|
<dd>
|
||||||
Insert content at <var>index</var>. Note that content is an array of elements.
|
Insert content at <var>index</var>. Note that content is an array of elements.
|
||||||
I.e. <code>array.insert(0, [1])</code> splices the list and inserts 1 at
|
I.e. <code>array.insert(0, [1])</code> splices the list and inserts 1 at
|
||||||
position 0.
|
position 0.
|
||||||
</dd>
|
</dd>
|
||||||
<b><code>push(Array<Object|boolean|Array|string|number|null|Uint8Array|Y.Type>)</code></b>
|
<b><code>push(Array<Object|boolean|Array|string|number|Uint8Array|Y.Type>)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>unshift(Array<Object|boolean|Array|string|number|null|Uint8Array|Y.Type>)</code></b>
|
<b><code>unshift(Array<Object|boolean|Array|string|number|Uint8Array|Y.Type>)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>delete(index:number, length:number)</code></b>
|
<b><code>delete(index:number, length:number)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>get(index:number)</code></b>
|
<b><code>get(index:number)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>slice(start:number, end:number):Array<Object|boolean|Array|string|number|null|Uint8Array|Y.Type></code></b>
|
|
||||||
<dd>Retrieve a range of content</dd>
|
|
||||||
<b><code>length:number</code></b>
|
<b><code>length:number</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b>
|
<b>
|
||||||
<code>
|
<code>
|
||||||
forEach(function(value:object|boolean|Array|string|number|null|Uint8Array|Y.Type,
|
forEach(function(value:object|boolean|Array|string|number|Uint8Array|Y.Type,
|
||||||
index:number, array: Y.Array))
|
index:number, array: Y.Array))
|
||||||
</code>
|
</code>
|
||||||
</b>
|
</b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>map(function(T, number, YArray):M):Array<M></code></b>
|
<b><code>map(function(T, number, YArray):M):Array<M></code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>clone(): Y.Array</code></b>
|
<b><code>toArray():Array<object|boolean|Array|string|number|Uint8Array|Y.Type></code></b>
|
||||||
<dd>
|
|
||||||
Clone all values into a fresh Y.Array instance. The returned type can be
|
|
||||||
included into the Yjs document.
|
|
||||||
</dd>
|
|
||||||
<b><code>toArray():Array<object|boolean|Array|string|number|null|Uint8Array|Y.Type></code></b>
|
|
||||||
<dd>Copies the content of this YArray to a new Array.</dd>
|
<dd>Copies the content of this YArray to a new Array.</dd>
|
||||||
<b><code>toJSON():Array<Object|boolean|Array|string|number|null></code></b>
|
<b><code>toJSON():Array<Object|boolean|Array|string|number></code></b>
|
||||||
<dd>
|
<dd>
|
||||||
Copies the content of this YArray to a new Array. It transforms all child types
|
Copies the content of this YArray to a new Array. It transforms all child types
|
||||||
to JSON using their <code>toJSON</code> method.
|
to JSON using their <code>toJSON</code> method.
|
||||||
@@ -520,28 +311,22 @@ or any of its children.
|
|||||||
</p>
|
</p>
|
||||||
<pre><code>const ymap = new Y.Map()</code></pre>
|
<pre><code>const ymap = new Y.Map()</code></pre>
|
||||||
<dl>
|
<dl>
|
||||||
<b><code>parent:Y.AbstractType|null</code></b>
|
<b><code>get(key:string):object|boolean|string|number|Uint8Array|Y.Type</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>size: number</code></b>
|
<b><code>set(key:string, value:object|boolean|string|number|Uint8Array|Y.Type)</code></b>
|
||||||
<dd>Total number of key/value pairs.</dd>
|
|
||||||
<b><code>get(key:string):object|boolean|string|number|null|Uint8Array|Y.Type</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>set(key:string, value:object|boolean|string|number|null|Uint8Array|Y.Type)</code></b>
|
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>delete(key:string)</code></b>
|
<b><code>delete(key:string)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>has(key:string):boolean</code></b>
|
<b><code>has(key:string):boolean</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>clear()</code></b>
|
<b><code>get(index:number)</code></b>
|
||||||
<dd>Removes all elements from this YMap.</dd>
|
<dd></dd>
|
||||||
<b><code>clone():Y.Map</code></b>
|
<b><code>toJSON():Object<string, Object|boolean|Array|string|number|Uint8Array></code></b>
|
||||||
<dd>Clone this type into a fresh Yjs type.</dd>
|
|
||||||
<b><code>toJSON():Object<string, Object|boolean|Array|string|number|null|Uint8Array></code></b>
|
|
||||||
<dd>
|
<dd>
|
||||||
Copies the <code>[key,value]</code> pairs of this YMap to a new Object. It
|
Copies the <code>[key,value]</code> pairs of this YMap to a new Object. It
|
||||||
transforms all child types to JSON using their <code>toJSON</code> method.
|
transforms all child types to JSON using their <code>toJSON</code> method.
|
||||||
</dd>
|
</dd>
|
||||||
<b><code>forEach(function(value:object|boolean|Array|string|number|null|Uint8Array|Y.Type,
|
<b><code>forEach(function(value:object|boolean|Array|string|number|Uint8Array|Y.Type,
|
||||||
key:string, map: Y.Map))</code></b>
|
key:string, map: Y.Map))</code></b>
|
||||||
<dd>
|
<dd>
|
||||||
Execute the provided function once for every key-value pair.
|
Execute the provided function once for every key-value pair.
|
||||||
@@ -603,8 +388,6 @@ YTextEvents compute changes as deltas.
|
|||||||
</p>
|
</p>
|
||||||
<pre>const ytext = new Y.Text()</pre>
|
<pre>const ytext = new Y.Text()</pre>
|
||||||
<dl>
|
<dl>
|
||||||
<b><code>parent:Y.AbstractType|null</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>insert(index:number, content:string, [formattingAttributes:Object<string,string>])</code></b>
|
<b><code>insert(index:number, content:string, [formattingAttributes:Object<string,string>])</code></b>
|
||||||
<dd>
|
<dd>
|
||||||
Insert a string at <var>index</var> and assign formatting attributes to it.
|
Insert a string at <var>index</var> and assign formatting attributes to it.
|
||||||
@@ -614,7 +397,7 @@ YTextEvents compute changes as deltas.
|
|||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>format(index:number, length:number, formattingAttributes:Object<string,string>)</code></b>
|
<b><code>format(index:number, length:number, formattingAttributes:Object<string,string>)</code></b>
|
||||||
<dd>Assign formatting attributes to a range in the text</dd>
|
<dd>Assign formatting attributes to a range in the text</dd>
|
||||||
<b><code>applyDelta(delta: Delta, opts:Object<string,any>)</code></b>
|
<b><code>applyDelta(delta, opts:Object<string,any>)</code></b>
|
||||||
<dd>
|
<dd>
|
||||||
See <a href="https://quilljs.com/docs/delta/">Quill Delta</a>
|
See <a href="https://quilljs.com/docs/delta/">Quill Delta</a>
|
||||||
Accepts an option to prevent removal of trailing newlines; the default is true.
|
Accepts an option to prevent removal of trailing newlines; the default is true.
|
||||||
@@ -663,22 +446,14 @@ or any of its children.
|
|||||||
</p>
|
</p>
|
||||||
<pre><code>const yxml = new Y.XmlFragment()</code></pre>
|
<pre><code>const yxml = new Y.XmlFragment()</code></pre>
|
||||||
<dl>
|
<dl>
|
||||||
<b><code>parent:Y.AbstractType|null</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>firstChild:Y.XmlElement|Y.XmlText|null</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>insert(index:number, content:Array<Y.XmlElement|Y.XmlText>)</code></b>
|
<b><code>insert(index:number, content:Array<Y.XmlElement|Y.XmlText>)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>delete(index:number, length:number)</code></b>
|
<b><code>delete(index:number, length:number)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>get(index:number)</code></b>
|
<b><code>get(index:number)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>slice(start:number, end:number):Array<Y.XmlElement|Y.XmlText></code></b>
|
|
||||||
<dd>Retrieve a range of content</dd>
|
|
||||||
<b><code>length:number</code></b>
|
<b><code>length:number</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>clone():Y.XmlFragment</code></b>
|
|
||||||
<dd>Clone this type into a fresh Yjs type.</dd>
|
|
||||||
<b><code>toArray():Array<Y.XmlElement|Y.XmlText></code></b>
|
<b><code>toArray():Array<Y.XmlElement|Y.XmlText></code></b>
|
||||||
<dd>Copies the children to a new Array.</dd>
|
<dd>Copies the children to a new Array.</dd>
|
||||||
<b><code>toDOM():DocumentFragment</code></b>
|
<b><code>toDOM():DocumentFragment</code></b>
|
||||||
@@ -687,8 +462,6 @@ or any of its children.
|
|||||||
<dd>Get the XML serialization of all descendants.</dd>
|
<dd>Get the XML serialization of all descendants.</dd>
|
||||||
<b><code>toJSON():string</code></b>
|
<b><code>toJSON():string</code></b>
|
||||||
<dd>See <code>toString</code>.</dd>
|
<dd>See <code>toString</code>.</dd>
|
||||||
<b><code>createTreeWalker(filter: function(AbstractType<any>):boolean):Iterable</code></b>
|
|
||||||
<dd>Create an Iterable that walks through the children.</dd>
|
|
||||||
<b><code>observe(function(YXmlEvent, Transaction):void)</code></b>
|
<b><code>observe(function(YXmlEvent, Transaction):void)</code></b>
|
||||||
<dd>
|
<dd>
|
||||||
Adds an event listener to this type that will be called synchronously every time
|
Adds an event listener to this type that will be called synchronously every time
|
||||||
@@ -724,14 +497,6 @@ content and be actually XML compliant.
|
|||||||
</p>
|
</p>
|
||||||
<pre><code>const yxml = new Y.XmlElement()</code></pre>
|
<pre><code>const yxml = new Y.XmlElement()</code></pre>
|
||||||
<dl>
|
<dl>
|
||||||
<b><code>parent:Y.AbstractType|null</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>firstChild:Y.XmlElement|Y.XmlText|null</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>nextSibling:Y.XmlElement|Y.XmlText|null</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>prevSibling:Y.XmlElement|Y.XmlText|null</code></b>
|
|
||||||
<dd></dd>
|
|
||||||
<b><code>insert(index:number, content:Array<Y.XmlElement|Y.XmlText>)</code></b>
|
<b><code>insert(index:number, content:Array<Y.XmlElement|Y.XmlText>)</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>delete(index:number, length:number)</code></b>
|
<b><code>delete(index:number, length:number)</code></b>
|
||||||
@@ -746,14 +511,8 @@ content and be actually XML compliant.
|
|||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>getAttribute(attributeName:string):string</code></b>
|
<b><code>getAttribute(attributeName:string):string</code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>getAttributes():Object<string,string></code></b>
|
<b><code>getAttributes(attributeName:string):Object<string,string></code></b>
|
||||||
<dd></dd>
|
<dd></dd>
|
||||||
<b><code>get(i:number):Y.XmlElement|Y.XmlText</code></b>
|
|
||||||
<dd>Retrieve the i-th element.</dd>
|
|
||||||
<b><code>slice(start:number, end:number):Array<Y.XmlElement|Y.XmlText></code></b>
|
|
||||||
<dd>Retrieve a range of content</dd>
|
|
||||||
<b><code>clone():Y.XmlElement</code></b>
|
|
||||||
<dd>Clone this type into a fresh Yjs type.</dd>
|
|
||||||
<b><code>toArray():Array<Y.XmlElement|Y.XmlText></code></b>
|
<b><code>toArray():Array<Y.XmlElement|Y.XmlText></code></b>
|
||||||
<dd>Copies the children to a new Array.</dd>
|
<dd>Copies the children to a new Array.</dd>
|
||||||
<b><code>toDOM():Element</code></b>
|
<b><code>toDOM():Element</code></b>
|
||||||
@@ -812,13 +571,6 @@ calls. I.e. <code>doc.transact(() => { yarray.insert(..); ymap.set(..) })</code>
|
|||||||
triggers a single change event. <br>You can specify an optional <code>origin</code>
|
triggers a single change event. <br>You can specify an optional <code>origin</code>
|
||||||
parameter that is stored on <code>transaction.origin</code> and
|
parameter that is stored on <code>transaction.origin</code> and
|
||||||
<code>on('update', (update, origin) => ..)</code>.
|
<code>on('update', (update, origin) => ..)</code>.
|
||||||
</dd>
|
|
||||||
<b><code>toJSON():any</code></b>
|
|
||||||
<dd>
|
|
||||||
Deprecated: It is recommended to call toJSON directly on the shared types.
|
|
||||||
Converts the entire document into a js object, recursively traversing each yjs
|
|
||||||
type. Doesn't log types that have not been defined (using
|
|
||||||
<code>ydoc.getType(..)</code>).
|
|
||||||
</dd>
|
</dd>
|
||||||
<b><code>get(string, Y.[TypeClass]):[Type]</code></b>
|
<b><code>get(string, Y.[TypeClass]):[Type]</code></b>
|
||||||
<dd>Define a shared type.</dd>
|
<dd>Define a shared type.</dd>
|
||||||
@@ -826,10 +578,6 @@ type. Doesn't log types that have not been defined (using
|
|||||||
<dd>Define a shared Y.Array type. Is equivalent to <code>y.get(string, Y.Array)</code>.</dd>
|
<dd>Define a shared Y.Array type. Is equivalent to <code>y.get(string, Y.Array)</code>.</dd>
|
||||||
<b><code>getMap(string):Y.Map</code></b>
|
<b><code>getMap(string):Y.Map</code></b>
|
||||||
<dd>Define a shared Y.Map type. Is equivalent to <code>y.get(string, Y.Map)</code>.</dd>
|
<dd>Define a shared Y.Map type. Is equivalent to <code>y.get(string, Y.Map)</code>.</dd>
|
||||||
<b><code>getText(string):Y.Text</code></b>
|
|
||||||
<dd>Define a shared Y.Text type. Is equivalent to <code>y.get(string, Y.Text)</code>.</dd>
|
|
||||||
<b><code>getXmlElement(string, string):Y.XmlElement</code></b>
|
|
||||||
<dd>Define a shared Y.XmlElement type. Is equivalent to <code>y.get(string, Y.XmlElement)</code>.</dd>
|
|
||||||
<b><code>getXmlFragment(string):Y.XmlFragment</code></b>
|
<b><code>getXmlFragment(string):Y.XmlFragment</code></b>
|
||||||
<dd>Define a shared Y.XmlFragment type. Is equivalent to <code>y.get(string, Y.XmlFragment)</code>.</dd>
|
<dd>Define a shared Y.XmlFragment type. Is equivalent to <code>y.get(string, Y.XmlFragment)</code>.</dd>
|
||||||
<b><code>on(string, function)</code></b>
|
<b><code>on(string, function)</code></b>
|
||||||
@@ -844,20 +592,12 @@ type. Doesn't log types that have not been defined (using
|
|||||||
<b><code>on('update', function(updateMessage:Uint8Array, origin:any, Y.Doc):void)</code></b>
|
<b><code>on('update', function(updateMessage:Uint8Array, origin:any, Y.Doc):void)</code></b>
|
||||||
<dd>
|
<dd>
|
||||||
Listen to document updates. Document updates must be transmitted to all other
|
Listen to document updates. Document updates must be transmitted to all other
|
||||||
peers. You can apply document updates in any order and multiple times. Use `updateV2`
|
peers. You can apply document updates in any order and multiple times.
|
||||||
to receive V2 events.
|
|
||||||
</dd>
|
</dd>
|
||||||
<b><code>on('beforeTransaction', function(Y.Transaction, Y.Doc):void)</code></b>
|
<b><code>on('beforeTransaction', function(Y.Transaction, Y.Doc):void)</code></b>
|
||||||
<dd>Emitted before each transaction.</dd>
|
<dd>Emitted before each transaction.</dd>
|
||||||
<b><code>on('afterTransaction', function(Y.Transaction, Y.Doc):void)</code></b>
|
<b><code>on('afterTransaction', function(Y.Transaction, Y.Doc):void)</code></b>
|
||||||
<dd>Emitted after each transaction.</dd>
|
<dd>Emitted after each transaction.</dd>
|
||||||
<b><code>on('beforeAllTransactions', function(Y.Doc):void)</code></b>
|
|
||||||
<dd>
|
|
||||||
Transactions can be nested (e.g. when an event within a transaction calls another
|
|
||||||
transaction). Emitted before the first transaction.
|
|
||||||
</dd>
|
|
||||||
<b><code>on('afterAllTransactions', function(Y.Doc, Array<Y.Transaction>):void)</code></b>
|
|
||||||
<dd>Emitted after the last transaction is cleaned up.</dd>
|
|
||||||
</dl>
|
</dl>
|
||||||
|
|
||||||
### Document Updates
|
### Document Updates
|
||||||
@@ -885,7 +625,7 @@ doc1.getArray('myarray').insert(0, ['Hello doc2, you got this?'])
|
|||||||
doc2.getArray('myarray').get(0) // => 'Hello doc2, you got this?'
|
doc2.getArray('myarray').get(0) // => 'Hello doc2, you got this?'
|
||||||
```
|
```
|
||||||
|
|
||||||
Yjs internally maintains a [state vector](#state-vector) that denotes the next
|
Yjs internally maintains a [state vector](#state-vector) that denotes the next
|
||||||
expected clock from each client. In a different interpretation it holds the
|
expected clock from each client. In a different interpretation it holds the
|
||||||
number of structs created by each client. When two clients sync, you can either
|
number of structs created by each client. When two clients sync, you can either
|
||||||
exchange the complete document structure or only the differences by sending the
|
exchange the complete document structure or only the differences by sending the
|
||||||
@@ -916,67 +656,6 @@ Y.applyUpdate(ydoc1, diff2)
|
|||||||
Y.applyUpdate(ydoc2, diff1)
|
Y.applyUpdate(ydoc2, diff1)
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Example: Syncing clients without loading the Y.Doc
|
|
||||||
|
|
||||||
It is possible to sync clients and compute delta updates without loading the Yjs
|
|
||||||
document to memory. Yjs exposes an API to compute the differences directly on the
|
|
||||||
binary document updates.
|
|
||||||
|
|
||||||
```js
|
|
||||||
// encode the current state as a binary buffer
|
|
||||||
let currentState1 = Y.encodeStateAsUpdate(ydoc1)
|
|
||||||
let currentState2 = Y.encodeStateAsUpdate(ydoc2)
|
|
||||||
// now we can continue syncing clients using state vectors without using the Y.Doc
|
|
||||||
ydoc1.destroy()
|
|
||||||
ydoc2.destroy()
|
|
||||||
|
|
||||||
const stateVector1 = Y.encodeStateVectorFromUpdate(currentState1)
|
|
||||||
const stateVector2 = Y.encodeStateVectorFromUpdate(currentState2)
|
|
||||||
const diff1 = Y.diffUpdate(currentState1, stateVector2)
|
|
||||||
const diff2 = Y.diffUpdate(currentState2, stateVector1)
|
|
||||||
|
|
||||||
// sync clients
|
|
||||||
currentState1 = Y.mergeUpdates([currentState1, diff2])
|
|
||||||
currentState2 = Y.mergeUpdates([currentState2, diff1])
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Obfuscating Updates
|
|
||||||
|
|
||||||
If one of your users runs into a weird bug (e.g. the rich-text editor throws
|
|
||||||
error messages), then you don't have to request the full document from your
|
|
||||||
user. Instead, they can obfuscate the document (i.e. replace the content with
|
|
||||||
meaningless generated content) before sending it to you. Note that someone might
|
|
||||||
still deduce the type of content by looking at the general structure of the
|
|
||||||
document. But this is much better than requesting the original document.
|
|
||||||
|
|
||||||
Obfuscated updates contain all the CRDT-related data that is required for
|
|
||||||
merging. So it is safe to merge obfuscated updates.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
// perform some changes..
|
|
||||||
ydoc.getText().insert(0, 'hello world')
|
|
||||||
const update = Y.encodeStateAsUpdate(ydoc)
|
|
||||||
// the below update contains scrambled data
|
|
||||||
const obfuscatedUpdate = Y.obfuscateUpdate(update)
|
|
||||||
const ydoc2 = new Y.Doc()
|
|
||||||
Y.applyUpdate(ydoc2, obfuscatedUpdate)
|
|
||||||
ydoc2.getText().toString() // => "00000000000"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Using V2 update format
|
|
||||||
|
|
||||||
Yjs implements two update formats. By default you are using the V1 update format.
|
|
||||||
You can opt-in into the V2 update format which provides much better compression.
|
|
||||||
It is not yet used by all providers. However, you can already use it if
|
|
||||||
you are building your own provider. All below functions are available with the
|
|
||||||
suffix "V2". E.g. `Y.applyUpdate` ⇒ `Y.applyUpdateV2`. Also when listening to updates
|
|
||||||
you specifically need to listen for V2 events e.g. `yDoc.on('updateV2', …)`.
|
|
||||||
We also support conversion functions between both formats:
|
|
||||||
`Y.convertUpdateFormatV1ToV2` & `Y.convertUpdateFormatV2ToV1`.
|
|
||||||
|
|
||||||
#### Update API
|
|
||||||
|
|
||||||
<dl>
|
<dl>
|
||||||
<b><code>Y.applyUpdate(Y.Doc, update:Uint8Array, [transactionOrigin:any])</code></b>
|
<b><code>Y.applyUpdate(Y.Doc, update:Uint8Array, [transactionOrigin:any])</code></b>
|
||||||
<dd>
|
<dd>
|
||||||
@@ -993,48 +672,21 @@ differences to the update message.
|
|||||||
</dd>
|
</dd>
|
||||||
<b><code>Y.encodeStateVector(Y.Doc):Uint8Array</code></b>
|
<b><code>Y.encodeStateVector(Y.Doc):Uint8Array</code></b>
|
||||||
<dd>Computes the state vector and encodes it into an Uint8Array.</dd>
|
<dd>Computes the state vector and encodes it into an Uint8Array.</dd>
|
||||||
<b><code>Y.mergeUpdates(Array<Uint8Array>)</code></b>
|
|
||||||
<dd>
|
|
||||||
Merge several document updates into a single document update while removing
|
|
||||||
duplicate information. The merged document update is always smaller than
|
|
||||||
the separate updates because of the compressed encoding.
|
|
||||||
</dd>
|
|
||||||
<b><code>Y.encodeStateVectorFromUpdate(Uint8Array): Uint8Array</code></b>
|
|
||||||
<dd>
|
|
||||||
Computes the state vector from a document update and encodes it into an Uint8Array.
|
|
||||||
</dd>
|
|
||||||
<b><code>Y.diffUpdate(update: Uint8Array, stateVector: Uint8Array): Uint8Array</code></b>
|
|
||||||
<dd>
|
|
||||||
Encode the missing differences to another update message. This function works
|
|
||||||
similarly to <code>Y.encodeStateAsUpdate(ydoc, stateVector)</code> but works
|
|
||||||
on updates instead.
|
|
||||||
</dd>
|
|
||||||
<b><code>convertUpdateFormatV1ToV2</code></b>
|
|
||||||
<dd>
|
|
||||||
Convert V1 update format to the V2 update format.
|
|
||||||
</dd>
|
|
||||||
<b><code>convertUpdateFormatV2ToV1</code></b>
|
|
||||||
<dd>
|
|
||||||
Convert V2 update format to the V1 update format.
|
|
||||||
</dd>
|
|
||||||
</dl>
|
</dl>
|
||||||
|
|
||||||
### Relative Positions
|
### Relative Positions
|
||||||
|
|
||||||
When working with collaborative documents, we often need to work with positions.
|
> This API is not stable yet
|
||||||
Positions may represent cursor locations, selection ranges, or even assign a
|
|
||||||
comment to a range of text. Normal index-positions (expressed as integers) are
|
|
||||||
not convenient to use because the index-range is invalidated as soon as a remote
|
|
||||||
change manipulates the document. Relative positions give you a powerful API to
|
|
||||||
express positions.
|
|
||||||
|
|
||||||
A relative position is fixated to an element in the shared document and is not
|
This feature is intended for managing selections / cursors. When working with
|
||||||
affected by remote changes. I.e. given the document `"a|c"`, the relative
|
other users that manipulate the shared document, you can't trust that an index
|
||||||
position is attached to `c`. When a remote user modifies the document by
|
position (an integer) will stay at the intended location. A *relative position*
|
||||||
inserting a character before the cursor, the cursor will stay attached to the
|
is fixated to an element in the shared document and is not affected by remote
|
||||||
character `c`. `insert(1, 'x')("a|c") = "ax|c"`. When the relative position is
|
changes. I.e. given the document `"a|c"`, the relative position is attached to
|
||||||
set to the end of the document, it will stay attached to the end of the
|
`c`. When a remote user modifies the document by inserting a character before
|
||||||
document.
|
the cursor, the cursor will stay attached to the character `c`. `insert(1,
|
||||||
|
'x')("a|c") = "ax|c"`. When the *relative position* is set to the end of the
|
||||||
|
document, it will stay attached to the end of the document.
|
||||||
|
|
||||||
#### Example: Transform to RelativePosition and back
|
#### Example: Transform to RelativePosition and back
|
||||||
|
|
||||||
@@ -1070,35 +722,14 @@ pos.index === 2 // => true
|
|||||||
```
|
```
|
||||||
|
|
||||||
<dl>
|
<dl>
|
||||||
<b><code>
|
<b><code>Y.createRelativePositionFromTypeIndex(Uint8Array|Y.Type, number)</code></b>
|
||||||
Y.createRelativePositionFromTypeIndex(type:Uint8Array|Y.Type, index: number
|
<dd></dd>
|
||||||
[, assoc=0])
|
<b><code>Y.createAbsolutePositionFromRelativePosition(RelativePosition, Y.Doc)</code></b>
|
||||||
</code></b>
|
<dd></dd>
|
||||||
<dd>
|
<b><code>Y.encodeRelativePosition(RelativePosition):Uint8Array</code></b>
|
||||||
Create a relative position fixated to the i-th element in any sequence-like
|
<dd></dd>
|
||||||
shared type (if <code>assoc >= 0</code>). By default, the position associates
|
|
||||||
with the character that comes after the specified index position. If
|
|
||||||
<code>assoc < 0</code>, then the relative position associates with the character
|
|
||||||
before the specified index position.
|
|
||||||
</dd>
|
|
||||||
<b><code>
|
|
||||||
Y.createAbsolutePositionFromRelativePosition(RelativePosition, Y.Doc):
|
|
||||||
{ type: Y.AbstractType, index: number, assoc: number } | null
|
|
||||||
</code></b>
|
|
||||||
<dd>
|
|
||||||
Create an absolute position from a relative position. If the relative position
|
|
||||||
cannot be referenced, or the type is deleted, then the result is null.
|
|
||||||
</dd>
|
|
||||||
<b><code>
|
|
||||||
Y.encodeRelativePosition(RelativePosition):Uint8Array
|
|
||||||
</code></b>
|
|
||||||
<dd>
|
|
||||||
Encode a relative position to an Uint8Array. Binary data is the preferred
|
|
||||||
encoding format for document updates. If you prefer JSON encoding, you can
|
|
||||||
simply JSON.stringify / JSON.parse the relative position instead.
|
|
||||||
</dd>
|
|
||||||
<b><code>Y.decodeRelativePosition(Uint8Array):RelativePosition</code></b>
|
<b><code>Y.decodeRelativePosition(Uint8Array):RelativePosition</code></b>
|
||||||
<dd>Decode a binary-encoded relative position to a RelativePosition object.</dd>
|
<dd></dd>
|
||||||
</dl>
|
</dl>
|
||||||
|
|
||||||
### Y.UndoManager
|
### Y.UndoManager
|
||||||
@@ -1139,16 +770,6 @@ undo- or the redo-stack.
|
|||||||
</dd>
|
</dd>
|
||||||
<b>
|
<b>
|
||||||
<code>
|
<code>
|
||||||
on('stack-item-updated', { stackItem: { meta: Map<any,any> }, type: 'undo'
|
|
||||||
| 'redo' })
|
|
||||||
</code>
|
|
||||||
</b>
|
|
||||||
<dd>
|
|
||||||
Register an event that is called when an existing <code>StackItem</code> is updated.
|
|
||||||
This happens when two changes happen within a "captureInterval".
|
|
||||||
</dd>
|
|
||||||
<b>
|
|
||||||
<code>
|
|
||||||
on('stack-item-popped', { stackItem: { meta: Map<any,any> }, type: 'undo'
|
on('stack-item-popped', { stackItem: { meta: Map<any,any> }, type: 'undo'
|
||||||
| 'redo' })
|
| 'redo' })
|
||||||
</code>
|
</code>
|
||||||
@@ -1157,14 +778,6 @@ on('stack-item-popped', { stackItem: { meta: Map<any,any> }, type: 'undo'
|
|||||||
Register an event that is called when a <code>StackItem</code> is popped from
|
Register an event that is called when a <code>StackItem</code> is popped from
|
||||||
the undo- or the redo-stack.
|
the undo- or the redo-stack.
|
||||||
</dd>
|
</dd>
|
||||||
<b>
|
|
||||||
<code>
|
|
||||||
on('stack-cleared', { undoStackCleared: boolean, redoStackCleared: boolean })
|
|
||||||
</code>
|
|
||||||
</b>
|
|
||||||
<dd>
|
|
||||||
Register an event that is called when the undo- and/or the redo-stack is cleared.
|
|
||||||
</dd>
|
|
||||||
</dl>
|
</dl>
|
||||||
|
|
||||||
#### Example: Stop Capturing
|
#### Example: Stop Capturing
|
||||||
@@ -1218,7 +831,7 @@ doc.transact(() => {
|
|||||||
ytext.insert(0, 'abc')
|
ytext.insert(0, 'abc')
|
||||||
}, 41)
|
}, 41)
|
||||||
undoManager.undo()
|
undoManager.undo()
|
||||||
ytext.toString() // => 'abc' (not tracked because 41 is not an instance of
|
ytext.toString() // => '' (not tracked because 41 is not an instance of
|
||||||
// `trackedTransactionorigins`)
|
// `trackedTransactionorigins`)
|
||||||
ytext.delete(0, 3) // revert change
|
ytext.delete(0, 3) // revert change
|
||||||
|
|
||||||
@@ -1253,11 +866,29 @@ undoManager.on('stack-item-popped', event => {
|
|||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Miscellaneous
|
||||||
|
|
||||||
|
### Typescript Declarations
|
||||||
|
|
||||||
|
Yjs has type descriptions. But until [this
|
||||||
|
ticket](https://github.com/Microsoft/TypeScript/issues/7546) is fixed, this is
|
||||||
|
how you can make use of Yjs type declarations.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"allowJs": true,
|
||||||
|
"checkJs": true,
|
||||||
|
},
|
||||||
|
"maxNodeModuleJsDepth": 5
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## Yjs CRDT Algorithm
|
## Yjs CRDT Algorithm
|
||||||
|
|
||||||
*Conflict-free replicated data types* (CRDT) for collaborative editing are an
|
*Conflict-free replicated data types* (CRDT) for collaborative editing are an
|
||||||
alternative approach to *operational transformation* (OT). A very simple
|
alternative approach to *operational transformation* (OT). A very simple
|
||||||
differentiation between the two approaches is that OT attempts to transform
|
differenciation between the two approaches is that OT attempts to transform
|
||||||
index positions to ensure convergence (all clients end up with the same
|
index positions to ensure convergence (all clients end up with the same
|
||||||
content), while CRDTs use mathematical models that usually do not involve index
|
content), while CRDTs use mathematical models that usually do not involve index
|
||||||
transformations, like linked lists. OT is currently the de-facto standard for
|
transformations, like linked lists. OT is currently the de-facto standard for
|
||||||
@@ -1269,29 +900,25 @@ do not require a central source of truth.
|
|||||||
|
|
||||||
Yjs implements a modified version of the algorithm described in [this
|
Yjs implements a modified version of the algorithm described in [this
|
||||||
paper](https://www.researchgate.net/publication/310212186_Near_Real-Time_Peer-to-Peer_Shared_Editing_on_Extensible_Data_Types).
|
paper](https://www.researchgate.net/publication/310212186_Near_Real-Time_Peer-to-Peer_Shared_Editing_on_Extensible_Data_Types).
|
||||||
This [article](https://blog.kevinjahns.de/are-crdts-suitable-for-shared-editing/)
|
I will eventually publish a paper that describes why this approach works so well
|
||||||
explains a simple optimization on the CRDT model and
|
in practice. Note: Since operations make up the document structure, we prefer
|
||||||
gives more insight about the performance characteristics in Yjs.
|
the term *struct* now.
|
||||||
More information about the specific implementation is available in
|
|
||||||
[INTERNALS.md](./INTERNALS.md) and in
|
|
||||||
[this walkthrough of the Yjs codebase](https://youtu.be/0l5XgnQ6rB4).
|
|
||||||
|
|
||||||
CRDTs that are suitable for shared text editing suffer from the fact that they
|
CRDTs suitable for shared text editing suffer from the fact that they only grow
|
||||||
only grow in size. There are CRDTs that do not grow in size, but they do not
|
in size. There are CRDTs that do not grow in size, but they do not have the
|
||||||
have the characteristics that are beneficial for shared text editing (like
|
characteristics that are benificial for shared text editing (like intention
|
||||||
intention preservation). Yjs implements many improvements to the original
|
preservation). Yjs implements many improvements to the original algorithm that
|
||||||
algorithm that diminish the trade-off that the document only grows in size. We
|
diminish the trade-off that the document only grows in size. We can't garbage
|
||||||
can't garbage collect deleted structs (tombstones) while ensuring a unique
|
collect deleted structs (tombstones) while ensuring a unique order of the
|
||||||
order of the structs. But we can 1. merge preceding structs into a single
|
structs. But we can 1. merge preceeding structs into a single struct to reduce
|
||||||
struct to reduce the amount of meta information, 2. we can delete content from
|
the amount of meta information, 2. we can delete content from the struct if it
|
||||||
the struct if it is deleted, and 3. we can garbage collect tombstones if we
|
is deleted, and 3. we can garbage collect tombstones if we don't care about the
|
||||||
don't care about the order of the structs anymore (e.g. if the parent was
|
order of the structs anymore (e.g. if the parent was deleted).
|
||||||
deleted).
|
|
||||||
|
|
||||||
**Examples:**
|
**Examples:**
|
||||||
|
|
||||||
1. If a user inserts elements in sequence, the struct will be merged into a
|
1. If a user inserts elements in sequence, the struct will be merged into a
|
||||||
single struct. E.g. `text.insert(0, 'a'), text.insert(1, 'b');` is
|
single struct. E.g. `array.insert(0, ['a']), array.insert(0, ['b']);` is
|
||||||
first represented as two structs (`[{id: {client, clock: 0}, content: 'a'},
|
first represented as two structs (`[{id: {client, clock: 0}, content: 'a'},
|
||||||
{id: {client, clock: 1}, content: 'b'}`) and then merged into a single
|
{id: {client, clock: 1}, content: 'b'}`) and then merged into a single
|
||||||
struct: `[{id: {client, clock: 0}, content: 'ab'}]`.
|
struct: `[{id: {client, clock: 0}, content: 'ab'}]`.
|
||||||
@@ -1330,6 +957,5 @@ Yjs and all related projects are [**MIT licensed**](./LICENSE).
|
|||||||
Yjs is based on my research as a student at the [RWTH
|
Yjs is based on my research as a student at the [RWTH
|
||||||
i5](http://dbis.rwth-aachen.de/). Now I am working on Yjs in my spare time.
|
i5](http://dbis.rwth-aachen.de/). Now I am working on Yjs in my spare time.
|
||||||
|
|
||||||
Fund this project by donating on [GitHub Sponsors](https://github.com/sponsors/dmonad)
|
Fund this project by donating on [Patreon](https://www.patreon.com/dmonad) or
|
||||||
or hiring [me](https://github.com/dmonad) as a contractor for your collaborative
|
hiring [me](https://github.com/dmonad) for professional support.
|
||||||
app.
|
|
||||||
|
|||||||
143
funding.json
143
funding.json
@@ -1,143 +0,0 @@
|
|||||||
{
|
|
||||||
"version": "v1.0.0",
|
|
||||||
"entity": {
|
|
||||||
"type": "group",
|
|
||||||
"role": "steward",
|
|
||||||
"name": "Kevin Jahns",
|
|
||||||
"email": "kevin.jahns@protonmail.com",
|
|
||||||
"phone": "",
|
|
||||||
"description": "OSS Developer",
|
|
||||||
"webpageUrl": {
|
|
||||||
"url": "https://github.com/yjs"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"projects": [
|
|
||||||
{
|
|
||||||
"guid": "yjs",
|
|
||||||
"name": "Yjs",
|
|
||||||
"description": "A library for building collaborative applications. #p2p #local-first #CRDT Funding this project will also enable me to maintain the other Yjs-related technologies.",
|
|
||||||
"webpageUrl": {
|
|
||||||
"url": "https://github.com/yjs/yjs"
|
|
||||||
},
|
|
||||||
"repositoryUrl": {
|
|
||||||
"url": "https://github.com/yjs/yjs"
|
|
||||||
},
|
|
||||||
"licenses": [
|
|
||||||
"spdx:MIT"
|
|
||||||
],
|
|
||||||
"tags": [
|
|
||||||
"collaboration",
|
|
||||||
"p2p",
|
|
||||||
"CRDT",
|
|
||||||
"rich-text",
|
|
||||||
"real-time"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"guid": "ystream",
|
|
||||||
"name": "Y/Stream",
|
|
||||||
"description": "A provider for syncing millions of docs efficiently with other peers. This will become the foundation for building real local-first apps with Yjs.",
|
|
||||||
"webpageUrl": {
|
|
||||||
"url": "https://github.com/yjs/ystream",
|
|
||||||
"wellKnown": "https://github.com/yjs/ystream/blob/main/.well-known/funding-manifest-urls"
|
|
||||||
},
|
|
||||||
"repositoryUrl": {
|
|
||||||
"url": "https://github.com/yjs/ystream",
|
|
||||||
"wellKnown": "https://github.com/yjs/ystream/blob/main/.well-known/funding-manifest-urls"
|
|
||||||
},
|
|
||||||
"licenses": [
|
|
||||||
"spdx:MIT",
|
|
||||||
"spdx:GPL-3.0"
|
|
||||||
],
|
|
||||||
"tags": [
|
|
||||||
"privacy",
|
|
||||||
"collaboration",
|
|
||||||
"p2p",
|
|
||||||
"CRDT",
|
|
||||||
"rich-text",
|
|
||||||
"real-time",
|
|
||||||
"web-development"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"funding": {
|
|
||||||
"channels": [
|
|
||||||
{
|
|
||||||
"guid": "github-sponsors",
|
|
||||||
"type": "payment-provider",
|
|
||||||
"address": "",
|
|
||||||
"description": "For funding of the Yjs project"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"guid": "y-collective",
|
|
||||||
"type": "payment-provider",
|
|
||||||
"address": "https://opencollective.com/y-collective",
|
|
||||||
"description": "For funding the Y-CRDT - the Rust implementation of Yjs and other listed projects."
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"plans": [
|
|
||||||
{
|
|
||||||
"guid": "supporter",
|
|
||||||
"status": "active",
|
|
||||||
"name": "Supporter",
|
|
||||||
"description": "",
|
|
||||||
"amount": 0,
|
|
||||||
"currency": "USD",
|
|
||||||
"frequency": "monthly",
|
|
||||||
"channels": [
|
|
||||||
"github-sponsors",
|
|
||||||
"y-collective"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"guid": "ystream-funding",
|
|
||||||
"status": "active",
|
|
||||||
"name": "YStream Funding",
|
|
||||||
"description": "Fund the next generation of local-first providers.",
|
|
||||||
"amount": 30000,
|
|
||||||
"currency": "USD",
|
|
||||||
"frequency": "one-time",
|
|
||||||
"channels": [
|
|
||||||
"github-sponsors"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"guid": "bronze-sponsor",
|
|
||||||
"status": "active",
|
|
||||||
"name": "Bronze Sponsor",
|
|
||||||
"description": "This is the recommended plan for companies that use Yjs.",
|
|
||||||
"amount": 500,
|
|
||||||
"currency": "USD",
|
|
||||||
"frequency": "monthly",
|
|
||||||
"channels": [
|
|
||||||
"github-sponsors"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"guid": "silver-sponsor",
|
|
||||||
"status": "active",
|
|
||||||
"name": "Silver Sponsor",
|
|
||||||
"description": "This is the recommended plan for large/successfull companies that use Yjs.",
|
|
||||||
"amount": 1000,
|
|
||||||
"currency": "USD",
|
|
||||||
"frequency": "monthly",
|
|
||||||
"channels": [
|
|
||||||
"github-sponsors"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"guid": "gold-sponsor",
|
|
||||||
"status": "active",
|
|
||||||
"name": "Gold Sponsor",
|
|
||||||
"description": "This is the recommended plan for successful companies that build their entire product around Yjs-related technologies.",
|
|
||||||
"amount": 3000,
|
|
||||||
"currency": "USD",
|
|
||||||
"frequency": "monthly",
|
|
||||||
"channels": [
|
|
||||||
"github-sponsors"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"history": null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
5847
package-lock.json
generated
5847
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
64
package.json
64
package.json
@@ -1,51 +1,36 @@
|
|||||||
{
|
{
|
||||||
"name": "yjs",
|
"name": "yjs",
|
||||||
"version": "13.6.24",
|
"version": "13.2.0",
|
||||||
"description": "Shared Editing Library",
|
"description": "Shared Editing Library",
|
||||||
"main": "./dist/yjs.cjs",
|
"main": "./dist/yjs.cjs",
|
||||||
"module": "./dist/yjs.mjs",
|
"module": "./dist/yjs.mjs",
|
||||||
"types": "./dist/src/index.d.ts",
|
"types": "./dist/src/index.d.ts",
|
||||||
"type": "module",
|
|
||||||
"sideEffects": false,
|
"sideEffects": false,
|
||||||
"funding": {
|
"funding": {
|
||||||
"type": "GitHub Sponsors ❤",
|
"type": "GitHub Sponsors ❤",
|
||||||
"url": "https://github.com/sponsors/dmonad"
|
"url": "https://github.com/sponsors/dmonad"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"clean": "rm -rf dist docs",
|
"test": "npm run dist && node ./dist/tests.cjs --repitition-time 50",
|
||||||
"test": "npm run dist && NODE_ENV=development node ./dist/tests.cjs --repetition-time 50",
|
"test-extensive": "npm run lint && npm run dist && node ./dist/tests.cjs --production --repitition-time 10000",
|
||||||
"test-extensive": "npm run lint && npm run dist && node ./dist/tests.cjs --production --repetition-time 10000",
|
"dist": "rm -rf dist && rollup -c && tsc",
|
||||||
"dist": "npm run clean && rollup -c && tsc",
|
|
||||||
"watch": "rollup -wc",
|
"watch": "rollup -wc",
|
||||||
"lint": "markdownlint README.md && standard && tsc",
|
"lint": "markdownlint README.md && standard && tsc",
|
||||||
"docs": "rm -rf docs; jsdoc --configure ./.jsdoc.json --verbose --readme ./README.md --package ./package.json || true",
|
"docs": "rm -rf docs; jsdoc --configure ./.jsdoc.json --verbose --readme ./README.md --package ./package.json || true",
|
||||||
"serve-docs": "npm run docs && http-server ./docs/",
|
"serve-docs": "npm run docs && http-server ./docs/",
|
||||||
"preversion": "npm run lint && PRODUCTION=1 npm run dist && npm run docs && node ./dist/tests.cjs --repetition-time 1000 && test -e dist/src/index.d.ts && test -e dist/yjs.cjs && test -e dist/yjs.cjs",
|
"preversion": "npm run lint && PRODUCTION=1 npm run dist && npm run docs && node ./dist/tests.cjs --repitition-time 1000",
|
||||||
"debug": "concurrently 'http-server -o test.html' 'npm run watch'",
|
"debug": "concurrently 'http-server -o test.html' 'npm run watch'",
|
||||||
"trace-deopt": "clear && rollup -c && node --trace-deopt dist/test.cjs",
|
"trace-deopt": "clear && rollup -c && node --trace-deopt dist/test.cjs",
|
||||||
"trace-opt": "clear && rollup -c && node --trace-opt dist/test.cjs"
|
"trace-opt": "clear && rollup -c && node --trace-opt dist/test.cjs"
|
||||||
},
|
},
|
||||||
"exports": {
|
|
||||||
".": {
|
|
||||||
"types": "./dist/src/index.d.ts",
|
|
||||||
"module": "./dist/yjs.mjs",
|
|
||||||
"import": "./dist/yjs.mjs",
|
|
||||||
"require": "./dist/yjs.cjs"
|
|
||||||
},
|
|
||||||
"./src/index.js": "./src/index.js",
|
|
||||||
"./tests/testHelper.js": "./tests/testHelper.js",
|
|
||||||
"./testHelper": "./dist/testHelper.mjs",
|
|
||||||
"./package.json": "./package.json"
|
|
||||||
},
|
|
||||||
"files": [
|
"files": [
|
||||||
"dist/yjs.*",
|
"dist/*",
|
||||||
"dist/src",
|
"src/*",
|
||||||
"src",
|
"tests/*",
|
||||||
"tests/testHelper.js",
|
"docs/*"
|
||||||
"dist/testHelper.mjs",
|
|
||||||
"sponsor-y.js"
|
|
||||||
],
|
],
|
||||||
"dictionaries": {
|
"dictionaries": {
|
||||||
|
"doc": "docs",
|
||||||
"test": "tests"
|
"test": "tests"
|
||||||
},
|
},
|
||||||
"standard": {
|
"standard": {
|
||||||
@@ -63,8 +48,7 @@
|
|||||||
"Yjs",
|
"Yjs",
|
||||||
"CRDT",
|
"CRDT",
|
||||||
"offline",
|
"offline",
|
||||||
"offline-first",
|
"shared editing",
|
||||||
"shared-editing",
|
|
||||||
"concurrency",
|
"concurrency",
|
||||||
"collaboration"
|
"collaboration"
|
||||||
],
|
],
|
||||||
@@ -74,26 +58,22 @@
|
|||||||
"bugs": {
|
"bugs": {
|
||||||
"url": "https://github.com/yjs/yjs/issues"
|
"url": "https://github.com/yjs/yjs/issues"
|
||||||
},
|
},
|
||||||
"homepage": "https://docs.yjs.dev",
|
"homepage": "https://yjs.dev",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"lib0": "^0.2.99"
|
"lib0": "^0.2.27"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@rollup/plugin-commonjs": "^24.0.1",
|
"@rollup/plugin-commonjs": "^11.1.0",
|
||||||
"@rollup/plugin-node-resolve": "^15.0.1",
|
"@rollup/plugin-node-resolve": "^7.1.3",
|
||||||
"@types/node": "^18.15.5",
|
|
||||||
"concurrently": "^3.6.1",
|
"concurrently": "^3.6.1",
|
||||||
"http-server": "^0.12.3",
|
"http-server": "^0.12.3",
|
||||||
"jsdoc": "^3.6.7",
|
"jsdoc": "^3.6.4",
|
||||||
"markdownlint-cli": "^0.41.0",
|
"markdownlint-cli": "^0.23.1",
|
||||||
"rollup": "^3.20.0",
|
"rollup": "^1.32.1",
|
||||||
"standard": "^16.0.4",
|
"rollup-cli": "^1.0.9",
|
||||||
|
"standard": "^14.3.4",
|
||||||
"tui-jsdoc-template": "^1.2.2",
|
"tui-jsdoc-template": "^1.2.2",
|
||||||
"typescript": "^4.9.5",
|
"typescript": "^3.9.3",
|
||||||
"y-protocols": "^1.0.5"
|
"y-protocols": "^0.2.3"
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"npm": ">=8.0.0",
|
|
||||||
"node": ">=16.0.0"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -42,7 +42,13 @@ export default [{
|
|||||||
name: 'Y',
|
name: 'Y',
|
||||||
file: 'dist/yjs.cjs',
|
file: 'dist/yjs.cjs',
|
||||||
format: 'cjs',
|
format: 'cjs',
|
||||||
sourcemap: true
|
sourcemap: true,
|
||||||
|
paths: path => {
|
||||||
|
if (/^lib0\//.test(path)) {
|
||||||
|
return `lib0/dist/${path.slice(5, -3)}.cjs`
|
||||||
|
}
|
||||||
|
return path
|
||||||
|
}
|
||||||
},
|
},
|
||||||
external: id => /^lib0\//.test(id)
|
external: id => /^lib0\//.test(id)
|
||||||
}, {
|
}, {
|
||||||
@@ -54,23 +60,6 @@ export default [{
|
|||||||
sourcemap: true
|
sourcemap: true
|
||||||
},
|
},
|
||||||
external: id => /^lib0\//.test(id)
|
external: id => /^lib0\//.test(id)
|
||||||
}, {
|
|
||||||
input: './tests/testHelper.js',
|
|
||||||
output: {
|
|
||||||
name: 'Y',
|
|
||||||
file: 'dist/testHelper.mjs',
|
|
||||||
format: 'esm',
|
|
||||||
sourcemap: true
|
|
||||||
},
|
|
||||||
external: id => /^lib0\//.test(id) || id === 'yjs',
|
|
||||||
plugins: [{
|
|
||||||
resolveId (importee) {
|
|
||||||
if (importee === '../src/index.js') {
|
|
||||||
return 'yjs'
|
|
||||||
}
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}]
|
|
||||||
}, {
|
}, {
|
||||||
input: './tests/index.js',
|
input: './tests/index.js',
|
||||||
output: {
|
output: {
|
||||||
@@ -82,7 +71,7 @@ export default [{
|
|||||||
plugins: [
|
plugins: [
|
||||||
debugResolve,
|
debugResolve,
|
||||||
nodeResolve({
|
nodeResolve({
|
||||||
mainFields: ['browser', 'module', 'main']
|
mainFields: ['module', 'browser', 'main']
|
||||||
}),
|
}),
|
||||||
commonjs()
|
commonjs()
|
||||||
]
|
]
|
||||||
@@ -97,10 +86,9 @@ export default [{
|
|||||||
plugins: [
|
plugins: [
|
||||||
debugResolve,
|
debugResolve,
|
||||||
nodeResolve({
|
nodeResolve({
|
||||||
mainFields: ['node', 'module', 'main'],
|
mainFields: ['module', 'main']
|
||||||
exportConditions: ['node', 'module', 'import', 'default']
|
|
||||||
}),
|
}),
|
||||||
commonjs()
|
commonjs()
|
||||||
],
|
],
|
||||||
external: id => /^lib0\//.test(id)
|
external: ['isomorphic.js']
|
||||||
}]
|
}]
|
||||||
|
|||||||
80
src/index.js
80
src/index.js
@@ -1,4 +1,3 @@
|
|||||||
/** eslint-env browser */
|
|
||||||
|
|
||||||
export {
|
export {
|
||||||
Doc,
|
Doc,
|
||||||
@@ -13,15 +12,12 @@ export {
|
|||||||
YXmlEvent,
|
YXmlEvent,
|
||||||
YMapEvent,
|
YMapEvent,
|
||||||
YArrayEvent,
|
YArrayEvent,
|
||||||
YTextEvent,
|
|
||||||
YEvent,
|
YEvent,
|
||||||
Item,
|
Item,
|
||||||
AbstractStruct,
|
AbstractStruct,
|
||||||
GC,
|
GC,
|
||||||
Skip,
|
|
||||||
ContentBinary,
|
ContentBinary,
|
||||||
ContentDeleted,
|
ContentDeleted,
|
||||||
ContentDoc,
|
|
||||||
ContentEmbed,
|
ContentEmbed,
|
||||||
ContentFormat,
|
ContentFormat,
|
||||||
ContentJSON,
|
ContentJSON,
|
||||||
@@ -29,13 +25,14 @@ export {
|
|||||||
ContentString,
|
ContentString,
|
||||||
ContentType,
|
ContentType,
|
||||||
AbstractType,
|
AbstractType,
|
||||||
|
RelativePosition,
|
||||||
getTypeChildren,
|
getTypeChildren,
|
||||||
createRelativePositionFromTypeIndex,
|
createRelativePositionFromTypeIndex,
|
||||||
createRelativePositionFromJSON,
|
createRelativePositionFromJSON,
|
||||||
createAbsolutePositionFromRelativePosition,
|
createAbsolutePositionFromRelativePosition,
|
||||||
compareRelativePositions,
|
compareRelativePositions,
|
||||||
AbsolutePosition,
|
writeRelativePosition,
|
||||||
RelativePosition,
|
readRelativePosition,
|
||||||
ID,
|
ID,
|
||||||
createID,
|
createID,
|
||||||
compareIDs,
|
compareIDs,
|
||||||
@@ -44,92 +41,23 @@ export {
|
|||||||
createSnapshot,
|
createSnapshot,
|
||||||
createDeleteSet,
|
createDeleteSet,
|
||||||
createDeleteSetFromStructStore,
|
createDeleteSetFromStructStore,
|
||||||
cleanupYTextFormatting,
|
|
||||||
snapshot,
|
snapshot,
|
||||||
emptySnapshot,
|
emptySnapshot,
|
||||||
findRootTypeKey,
|
findRootTypeKey,
|
||||||
findIndexSS,
|
|
||||||
getItem,
|
|
||||||
getItemCleanStart,
|
|
||||||
getItemCleanEnd,
|
|
||||||
typeListToArraySnapshot,
|
typeListToArraySnapshot,
|
||||||
typeMapGetSnapshot,
|
typeMapGetSnapshot,
|
||||||
typeMapGetAllSnapshot,
|
|
||||||
createDocFromSnapshot,
|
|
||||||
iterateDeletedStructs,
|
iterateDeletedStructs,
|
||||||
applyUpdate,
|
applyUpdate,
|
||||||
applyUpdateV2,
|
|
||||||
readUpdate,
|
readUpdate,
|
||||||
readUpdateV2,
|
|
||||||
encodeStateAsUpdate,
|
encodeStateAsUpdate,
|
||||||
encodeStateAsUpdateV2,
|
|
||||||
encodeStateVector,
|
encodeStateVector,
|
||||||
UndoManager,
|
UndoManager,
|
||||||
decodeSnapshot,
|
decodeSnapshot,
|
||||||
encodeSnapshot,
|
encodeSnapshot,
|
||||||
decodeSnapshotV2,
|
|
||||||
encodeSnapshotV2,
|
|
||||||
decodeStateVector,
|
|
||||||
logUpdate,
|
|
||||||
logUpdateV2,
|
|
||||||
decodeUpdate,
|
|
||||||
decodeUpdateV2,
|
|
||||||
relativePositionToJSON,
|
|
||||||
isDeleted,
|
isDeleted,
|
||||||
isParentOf,
|
isParentOf,
|
||||||
equalSnapshots,
|
equalSnapshots,
|
||||||
PermanentUserData, // @TODO experimental
|
PermanentUserData, // @TODO experimental
|
||||||
tryGc,
|
tryGc,
|
||||||
transact,
|
transact
|
||||||
AbstractConnector,
|
|
||||||
logType,
|
|
||||||
mergeUpdates,
|
|
||||||
mergeUpdatesV2,
|
|
||||||
parseUpdateMeta,
|
|
||||||
parseUpdateMetaV2,
|
|
||||||
encodeStateVectorFromUpdate,
|
|
||||||
encodeStateVectorFromUpdateV2,
|
|
||||||
encodeRelativePosition,
|
|
||||||
decodeRelativePosition,
|
|
||||||
diffUpdate,
|
|
||||||
diffUpdateV2,
|
|
||||||
convertUpdateFormatV1ToV2,
|
|
||||||
convertUpdateFormatV2ToV1,
|
|
||||||
obfuscateUpdate,
|
|
||||||
obfuscateUpdateV2,
|
|
||||||
UpdateEncoderV1,
|
|
||||||
UpdateEncoderV2,
|
|
||||||
UpdateDecoderV1,
|
|
||||||
UpdateDecoderV2,
|
|
||||||
equalDeleteSets,
|
|
||||||
mergeDeleteSets,
|
|
||||||
snapshotContainsUpdate
|
|
||||||
} from './internals.js'
|
} from './internals.js'
|
||||||
|
|
||||||
const glo = /** @type {any} */ (typeof globalThis !== 'undefined'
|
|
||||||
? globalThis
|
|
||||||
: typeof window !== 'undefined'
|
|
||||||
? window
|
|
||||||
// @ts-ignore
|
|
||||||
: typeof global !== 'undefined' ? global : {})
|
|
||||||
|
|
||||||
const importIdentifier = '__ $YJS$ __'
|
|
||||||
|
|
||||||
if (glo[importIdentifier] === true) {
|
|
||||||
/**
|
|
||||||
* Dear reader of this message. Please take this seriously.
|
|
||||||
*
|
|
||||||
* If you see this message, make sure that you only import one version of Yjs. In many cases,
|
|
||||||
* your package manager installs two versions of Yjs that are used by different packages within your project.
|
|
||||||
* Another reason for this message is that some parts of your project use the commonjs version of Yjs
|
|
||||||
* and others use the EcmaScript version of Yjs.
|
|
||||||
*
|
|
||||||
* This often leads to issues that are hard to debug. We often need to perform constructor checks,
|
|
||||||
* e.g. `struct instanceof GC`. If you imported different versions of Yjs, it is impossible for us to
|
|
||||||
* do the constructor checks anymore - which might break the CRDT algorithm.
|
|
||||||
*
|
|
||||||
* https://github.com/yjs/yjs/issues/438
|
|
||||||
*/
|
|
||||||
console.error('Yjs was already imported. This breaks constructor checks and will lead to issues! - https://github.com/yjs/yjs/issues/438')
|
|
||||||
}
|
|
||||||
glo[importIdentifier] = true
|
|
||||||
|
|||||||
@@ -1,20 +1,16 @@
|
|||||||
export * from './utils/AbstractConnector.js'
|
|
||||||
export * from './utils/DeleteSet.js'
|
export * from './utils/DeleteSet.js'
|
||||||
export * from './utils/Doc.js'
|
export * from './utils/Doc.js'
|
||||||
export * from './utils/UpdateDecoder.js'
|
|
||||||
export * from './utils/UpdateEncoder.js'
|
|
||||||
export * from './utils/encoding.js'
|
export * from './utils/encoding.js'
|
||||||
export * from './utils/EventHandler.js'
|
export * from './utils/EventHandler.js'
|
||||||
export * from './utils/ID.js'
|
export * from './utils/ID.js'
|
||||||
export * from './utils/isParentOf.js'
|
export * from './utils/isParentOf.js'
|
||||||
export * from './utils/logging.js'
|
|
||||||
export * from './utils/PermanentUserData.js'
|
export * from './utils/PermanentUserData.js'
|
||||||
export * from './utils/RelativePosition.js'
|
export * from './utils/RelativePosition.js'
|
||||||
export * from './utils/Snapshot.js'
|
export * from './utils/Snapshot.js'
|
||||||
export * from './utils/StructStore.js'
|
export * from './utils/StructStore.js'
|
||||||
export * from './utils/Transaction.js'
|
export * from './utils/Transaction.js'
|
||||||
export * from './utils/UndoManager.js'
|
export * from './utils/UndoManager.js'
|
||||||
export * from './utils/updates.js'
|
|
||||||
export * from './utils/YEvent.js'
|
export * from './utils/YEvent.js'
|
||||||
|
|
||||||
export * from './types/AbstractType.js'
|
export * from './types/AbstractType.js'
|
||||||
@@ -31,7 +27,6 @@ export * from './structs/AbstractStruct.js'
|
|||||||
export * from './structs/GC.js'
|
export * from './structs/GC.js'
|
||||||
export * from './structs/ContentBinary.js'
|
export * from './structs/ContentBinary.js'
|
||||||
export * from './structs/ContentDeleted.js'
|
export * from './structs/ContentDeleted.js'
|
||||||
export * from './structs/ContentDoc.js'
|
|
||||||
export * from './structs/ContentEmbed.js'
|
export * from './structs/ContentEmbed.js'
|
||||||
export * from './structs/ContentFormat.js'
|
export * from './structs/ContentFormat.js'
|
||||||
export * from './structs/ContentJSON.js'
|
export * from './structs/ContentJSON.js'
|
||||||
@@ -39,4 +34,3 @@ export * from './structs/ContentAny.js'
|
|||||||
export * from './structs/ContentString.js'
|
export * from './structs/ContentString.js'
|
||||||
export * from './structs/ContentType.js'
|
export * from './structs/ContentType.js'
|
||||||
export * from './structs/Item.js'
|
export * from './structs/Item.js'
|
||||||
export * from './structs/Skip.js'
|
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
UpdateEncoderV1, UpdateEncoderV2, ID, Transaction // eslint-disable-line
|
StructStore, ID, Transaction // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as error from 'lib0/error'
|
import * as encoding from 'lib0/encoding.js' // eslint-disable-line
|
||||||
|
import * as error from 'lib0/error.js'
|
||||||
|
|
||||||
export class AbstractStruct {
|
export class AbstractStruct {
|
||||||
/**
|
/**
|
||||||
@@ -26,14 +28,14 @@ export class AbstractStruct {
|
|||||||
* This method is already assuming that `this.id.clock + this.length === this.id.clock`.
|
* This method is already assuming that `this.id.clock + this.length === this.id.clock`.
|
||||||
* Also this method does *not* remove right from StructStore!
|
* Also this method does *not* remove right from StructStore!
|
||||||
* @param {AbstractStruct} right
|
* @param {AbstractStruct} right
|
||||||
* @return {boolean} whether this merged with right
|
* @return {boolean} wether this merged with right
|
||||||
*/
|
*/
|
||||||
mergeWith (right) {
|
mergeWith (right) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
* @param {encoding.Encoder} encoder The encoder to write data to.
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
* @param {number} encodingRef
|
* @param {number} encodingRef
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
import {
|
import {
|
||||||
UpdateEncoderV1, UpdateEncoderV2, UpdateDecoderV1, UpdateDecoderV2, Transaction, Item, StructStore // eslint-disable-line
|
Transaction, Item, StructStore // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as env from 'lib0/environment'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as object from 'lib0/object'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
|
||||||
const isDevMode = env.getVariable('node_env') === 'development'
|
|
||||||
|
|
||||||
export class ContentAny {
|
export class ContentAny {
|
||||||
/**
|
/**
|
||||||
@@ -16,7 +14,6 @@ export class ContentAny {
|
|||||||
* @type {Array<any>}
|
* @type {Array<any>}
|
||||||
*/
|
*/
|
||||||
this.arr = arr
|
this.arr = arr
|
||||||
isDevMode && object.deepFreeze(arr)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -80,15 +77,15 @@ export class ContentAny {
|
|||||||
*/
|
*/
|
||||||
gc (store) {}
|
gc (store) {}
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
const len = this.arr.length
|
const len = this.arr.length
|
||||||
encoder.writeLen(len - offset)
|
encoding.writeVarUint(encoder, len - offset)
|
||||||
for (let i = offset; i < len; i++) {
|
for (let i = offset; i < len; i++) {
|
||||||
const c = this.arr[i]
|
const c = this.arr[i]
|
||||||
encoder.writeAny(c)
|
encoding.writeAny(encoder, c)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -101,14 +98,14 @@ export class ContentAny {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {ContentAny}
|
* @return {ContentAny}
|
||||||
*/
|
*/
|
||||||
export const readContentAny = decoder => {
|
export const readContentAny = decoder => {
|
||||||
const len = decoder.readLen()
|
const len = decoding.readVarUint(decoder)
|
||||||
const cs = []
|
const cs = []
|
||||||
for (let i = 0; i < len; i++) {
|
for (let i = 0; i < len; i++) {
|
||||||
cs.push(decoder.readAny())
|
cs.push(decoding.readAny(decoder))
|
||||||
}
|
}
|
||||||
return new ContentAny(cs)
|
return new ContentAny(cs)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,11 @@
|
|||||||
import {
|
import {
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
|
StructStore, Item, Transaction // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as error from 'lib0/error'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
import * as buffer from 'lib0/buffer.js'
|
||||||
|
import * as error from 'lib0/error.js'
|
||||||
|
|
||||||
export class ContentBinary {
|
export class ContentBinary {
|
||||||
/**
|
/**
|
||||||
@@ -70,11 +73,11 @@ export class ContentBinary {
|
|||||||
*/
|
*/
|
||||||
gc (store) {}
|
gc (store) {}
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
encoder.writeBuf(this.content)
|
encoding.writeVarUint8Array(encoder, this.content)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -86,7 +89,7 @@ export class ContentBinary {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {ContentBinary}
|
* @return {ContentBinary}
|
||||||
*/
|
*/
|
||||||
export const readContentBinary = decoder => new ContentBinary(decoder.readBuf())
|
export const readContentBinary = decoder => new ContentBinary(buffer.copyUint8Array(decoding.readVarUint8Array(decoder)))
|
||||||
|
|||||||
@@ -1,8 +1,12 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
addToDeleteSet,
|
addToDeleteSet,
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
|
StructStore, Item, Transaction // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
|
||||||
export class ContentDeleted {
|
export class ContentDeleted {
|
||||||
/**
|
/**
|
||||||
* @param {number} len
|
* @param {number} len
|
||||||
@@ -63,7 +67,7 @@ export class ContentDeleted {
|
|||||||
* @param {Item} item
|
* @param {Item} item
|
||||||
*/
|
*/
|
||||||
integrate (transaction, item) {
|
integrate (transaction, item) {
|
||||||
addToDeleteSet(transaction.deleteSet, item.id.client, item.id.clock, this.len)
|
addToDeleteSet(transaction.deleteSet, item.id, this.len)
|
||||||
item.markDeleted()
|
item.markDeleted()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -76,11 +80,11 @@ export class ContentDeleted {
|
|||||||
*/
|
*/
|
||||||
gc (store) {}
|
gc (store) {}
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
encoder.writeLen(this.len - offset)
|
encoding.writeVarUint(encoder, this.len - offset)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -94,7 +98,7 @@ export class ContentDeleted {
|
|||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
*
|
*
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2 } decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {ContentDeleted}
|
* @return {ContentDeleted}
|
||||||
*/
|
*/
|
||||||
export const readContentDeleted = decoder => new ContentDeleted(decoder.readLen())
|
export const readContentDeleted = decoder => new ContentDeleted(decoding.readVarUint(decoder))
|
||||||
|
|||||||
@@ -1,140 +0,0 @@
|
|||||||
import {
|
|
||||||
Doc, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
|
||||||
|
|
||||||
import * as error from 'lib0/error'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {string} guid
|
|
||||||
* @param {Object<string, any>} opts
|
|
||||||
*/
|
|
||||||
const createDocFromOpts = (guid, opts) => new Doc({ guid, ...opts, shouldLoad: opts.shouldLoad || opts.autoLoad || false })
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @private
|
|
||||||
*/
|
|
||||||
export class ContentDoc {
|
|
||||||
/**
|
|
||||||
* @param {Doc} doc
|
|
||||||
*/
|
|
||||||
constructor (doc) {
|
|
||||||
if (doc._item) {
|
|
||||||
console.error('This document was already integrated as a sub-document. You should create a second instance instead with the same guid.')
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* @type {Doc}
|
|
||||||
*/
|
|
||||||
this.doc = doc
|
|
||||||
/**
|
|
||||||
* @type {any}
|
|
||||||
*/
|
|
||||||
const opts = {}
|
|
||||||
this.opts = opts
|
|
||||||
if (!doc.gc) {
|
|
||||||
opts.gc = false
|
|
||||||
}
|
|
||||||
if (doc.autoLoad) {
|
|
||||||
opts.autoLoad = true
|
|
||||||
}
|
|
||||||
if (doc.meta !== null) {
|
|
||||||
opts.meta = doc.meta
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
getLength () {
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {Array<any>}
|
|
||||||
*/
|
|
||||||
getContent () {
|
|
||||||
return [this.doc]
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {boolean}
|
|
||||||
*/
|
|
||||||
isCountable () {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {ContentDoc}
|
|
||||||
*/
|
|
||||||
copy () {
|
|
||||||
return new ContentDoc(createDocFromOpts(this.doc.guid, this.opts))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} offset
|
|
||||||
* @return {ContentDoc}
|
|
||||||
*/
|
|
||||||
splice (offset) {
|
|
||||||
throw error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ContentDoc} right
|
|
||||||
* @return {boolean}
|
|
||||||
*/
|
|
||||||
mergeWith (right) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Transaction} transaction
|
|
||||||
* @param {Item} item
|
|
||||||
*/
|
|
||||||
integrate (transaction, item) {
|
|
||||||
// this needs to be reflected in doc.destroy as well
|
|
||||||
this.doc._item = item
|
|
||||||
transaction.subdocsAdded.add(this.doc)
|
|
||||||
if (this.doc.shouldLoad) {
|
|
||||||
transaction.subdocsLoaded.add(this.doc)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Transaction} transaction
|
|
||||||
*/
|
|
||||||
delete (transaction) {
|
|
||||||
if (transaction.subdocsAdded.has(this.doc)) {
|
|
||||||
transaction.subdocsAdded.delete(this.doc)
|
|
||||||
} else {
|
|
||||||
transaction.subdocsRemoved.add(this.doc)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {StructStore} store
|
|
||||||
*/
|
|
||||||
gc (store) { }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
|
||||||
* @param {number} offset
|
|
||||||
*/
|
|
||||||
write (encoder, offset) {
|
|
||||||
encoder.writeString(this.doc.guid)
|
|
||||||
encoder.writeAny(this.opts)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
getRef () {
|
|
||||||
return 9
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @private
|
|
||||||
*
|
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
|
||||||
* @return {ContentDoc}
|
|
||||||
*/
|
|
||||||
export const readContentDoc = decoder => new ContentDoc(createDocFromOpts(decoder.readString(), decoder.readAny()))
|
|
||||||
@@ -1,8 +1,11 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Item, Transaction // eslint-disable-line
|
StructStore, Item, Transaction // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as error from 'lib0/error'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
import * as error from 'lib0/error.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
@@ -73,11 +76,11 @@ export class ContentEmbed {
|
|||||||
*/
|
*/
|
||||||
gc (store) {}
|
gc (store) {}
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
encoder.writeJSON(this.embed)
|
encoding.writeVarString(encoder, JSON.stringify(this.embed))
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -91,7 +94,7 @@ export class ContentEmbed {
|
|||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
*
|
*
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {ContentEmbed}
|
* @return {ContentEmbed}
|
||||||
*/
|
*/
|
||||||
export const readContentEmbed = decoder => new ContentEmbed(decoder.readJSON())
|
export const readContentEmbed = decoder => new ContentEmbed(JSON.parse(decoding.readVarString(decoder)))
|
||||||
|
|||||||
@@ -1,8 +1,11 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
YText, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Item, StructStore, Transaction // eslint-disable-line
|
Item, StructStore, Transaction // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as error from 'lib0/error'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
import * as error from 'lib0/error.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
@@ -46,32 +49,26 @@ export class ContentFormat {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {number} _offset
|
* @param {number} offset
|
||||||
* @return {ContentFormat}
|
* @return {ContentFormat}
|
||||||
*/
|
*/
|
||||||
splice (_offset) {
|
splice (offset) {
|
||||||
throw error.methodUnimplemented()
|
throw error.methodUnimplemented()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {ContentFormat} _right
|
* @param {ContentFormat} right
|
||||||
* @return {boolean}
|
* @return {boolean}
|
||||||
*/
|
*/
|
||||||
mergeWith (_right) {
|
mergeWith (right) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Transaction} _transaction
|
* @param {Transaction} transaction
|
||||||
* @param {Item} item
|
* @param {Item} item
|
||||||
*/
|
*/
|
||||||
integrate (_transaction, item) {
|
integrate (transaction, item) {}
|
||||||
// @todo searchmarker are currently unsupported for rich text documents
|
|
||||||
const p = /** @type {YText} */ (item.parent)
|
|
||||||
p._searchMarker = null
|
|
||||||
p._hasFormatting = true
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
*/
|
*/
|
||||||
@@ -81,12 +78,12 @@ export class ContentFormat {
|
|||||||
*/
|
*/
|
||||||
gc (store) {}
|
gc (store) {}
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
encoder.writeKey(this.key)
|
encoding.writeVarString(encoder, this.key)
|
||||||
encoder.writeJSON(this.value)
|
encoding.writeVarString(encoder, JSON.stringify(this.value))
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -98,7 +95,7 @@ export class ContentFormat {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {ContentFormat}
|
* @return {ContentFormat}
|
||||||
*/
|
*/
|
||||||
export const readContentFormat = decoder => new ContentFormat(decoder.readKey(), decoder.readJSON())
|
export const readContentFormat = decoder => new ContentFormat(decoding.readVarString(decoder), JSON.parse(decoding.readVarString(decoder)))
|
||||||
|
|||||||
@@ -1,7 +1,10 @@
|
|||||||
import {
|
import {
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line
|
Transaction, Item, StructStore // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
*/
|
*/
|
||||||
@@ -77,15 +80,15 @@ export class ContentJSON {
|
|||||||
*/
|
*/
|
||||||
gc (store) {}
|
gc (store) {}
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
const len = this.arr.length
|
const len = this.arr.length
|
||||||
encoder.writeLen(len - offset)
|
encoding.writeVarUint(encoder, len - offset)
|
||||||
for (let i = offset; i < len; i++) {
|
for (let i = offset; i < len; i++) {
|
||||||
const c = this.arr[i]
|
const c = this.arr[i]
|
||||||
encoder.writeString(c === undefined ? 'undefined' : JSON.stringify(c))
|
encoding.writeVarString(encoder, c === undefined ? 'undefined' : JSON.stringify(c))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -100,14 +103,14 @@ export class ContentJSON {
|
|||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
*
|
*
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {ContentJSON}
|
* @return {ContentJSON}
|
||||||
*/
|
*/
|
||||||
export const readContentJSON = decoder => {
|
export const readContentJSON = decoder => {
|
||||||
const len = decoder.readLen()
|
const len = decoding.readVarUint(decoder)
|
||||||
const cs = []
|
const cs = []
|
||||||
for (let i = 0; i < len; i++) {
|
for (let i = 0; i < len; i++) {
|
||||||
const c = decoder.readString()
|
const c = decoding.readVarString(decoder)
|
||||||
if (c === 'undefined') {
|
if (c === 'undefined') {
|
||||||
cs.push(undefined)
|
cs.push(undefined)
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -1,7 +1,10 @@
|
|||||||
import {
|
import {
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Transaction, Item, StructStore // eslint-disable-line
|
Transaction, Item, StructStore // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
*/
|
*/
|
||||||
@@ -51,17 +54,6 @@ export class ContentString {
|
|||||||
splice (offset) {
|
splice (offset) {
|
||||||
const right = new ContentString(this.str.slice(offset))
|
const right = new ContentString(this.str.slice(offset))
|
||||||
this.str = this.str.slice(0, offset)
|
this.str = this.str.slice(0, offset)
|
||||||
|
|
||||||
// Prevent encoding invalid documents because of splitting of surrogate pairs: https://github.com/yjs/yjs/issues/248
|
|
||||||
const firstCharCode = this.str.charCodeAt(offset - 1)
|
|
||||||
if (firstCharCode >= 0xD800 && firstCharCode <= 0xDBFF) {
|
|
||||||
// Last character of the left split is the start of a surrogate utf16/ucs2 pair.
|
|
||||||
// We don't support splitting of surrogate pairs because this may lead to invalid documents.
|
|
||||||
// Replace the invalid character with a unicode replacement character (<28> / U+FFFD)
|
|
||||||
this.str = this.str.slice(0, offset - 1) + '<27>'
|
|
||||||
// replace right as well
|
|
||||||
right.str = '<27>' + right.str.slice(1)
|
|
||||||
}
|
|
||||||
return right
|
return right
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -88,11 +80,11 @@ export class ContentString {
|
|||||||
*/
|
*/
|
||||||
gc (store) {}
|
gc (store) {}
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
encoder.writeString(offset === 0 ? this.str : this.str.slice(offset))
|
encoding.writeVarString(encoder, offset === 0 ? this.str : this.str.slice(offset))
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -106,7 +98,7 @@ export class ContentString {
|
|||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
*
|
*
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {ContentString}
|
* @return {ContentString}
|
||||||
*/
|
*/
|
||||||
export const readContentString = decoder => new ContentString(decoder.readString())
|
export const readContentString = decoder => new ContentString(decoding.readVarString(decoder))
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
readYArray,
|
readYArray,
|
||||||
readYMap,
|
readYMap,
|
||||||
@@ -6,13 +7,15 @@ import {
|
|||||||
readYXmlFragment,
|
readYXmlFragment,
|
||||||
readYXmlHook,
|
readYXmlHook,
|
||||||
readYXmlText,
|
readYXmlText,
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line
|
ID, StructStore, Transaction, Item, YEvent, AbstractType // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as error from 'lib0/error'
|
import * as encoding from 'lib0/encoding.js' // eslint-disable-line
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
import * as error from 'lib0/error.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @type {Array<function(UpdateDecoderV1 | UpdateDecoderV2):AbstractType<any>>}
|
* @type {Array<function(decoding.Decoder):AbstractType<any>>}
|
||||||
* @private
|
* @private
|
||||||
*/
|
*/
|
||||||
export const typeRefs = [
|
export const typeRefs = [
|
||||||
@@ -38,7 +41,7 @@ export const YXmlTextRefID = 6
|
|||||||
*/
|
*/
|
||||||
export class ContentType {
|
export class ContentType {
|
||||||
/**
|
/**
|
||||||
* @param {AbstractType<any>} type
|
* @param {AbstractType<YEvent>} type
|
||||||
*/
|
*/
|
||||||
constructor (type) {
|
constructor (type) {
|
||||||
/**
|
/**
|
||||||
@@ -107,8 +110,8 @@ export class ContentType {
|
|||||||
while (item !== null) {
|
while (item !== null) {
|
||||||
if (!item.deleted) {
|
if (!item.deleted) {
|
||||||
item.delete(transaction)
|
item.delete(transaction)
|
||||||
} else if (item.id.clock < (transaction.beforeState.get(item.id.client) || 0)) {
|
} else {
|
||||||
// This will be gc'd later and we want to merge it if possible
|
// Whis will be gc'd later and we want to merge it if possible
|
||||||
// We try to merge all deleted items after each transaction,
|
// We try to merge all deleted items after each transaction,
|
||||||
// but we have no knowledge about that this needs to be merged
|
// but we have no knowledge about that this needs to be merged
|
||||||
// since it is not in transaction.ds. Hence we add it to transaction._mergeStructs
|
// since it is not in transaction.ds. Hence we add it to transaction._mergeStructs
|
||||||
@@ -119,7 +122,7 @@ export class ContentType {
|
|||||||
this.type._map.forEach(item => {
|
this.type._map.forEach(item => {
|
||||||
if (!item.deleted) {
|
if (!item.deleted) {
|
||||||
item.delete(transaction)
|
item.delete(transaction)
|
||||||
} else if (item.id.clock < (transaction.beforeState.get(item.id.client) || 0)) {
|
} else {
|
||||||
// same as above
|
// same as above
|
||||||
transaction._mergeStructs.push(item)
|
transaction._mergeStructs.push(item)
|
||||||
}
|
}
|
||||||
@@ -147,7 +150,7 @@ export class ContentType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
@@ -165,7 +168,7 @@ export class ContentType {
|
|||||||
/**
|
/**
|
||||||
* @private
|
* @private
|
||||||
*
|
*
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {ContentType}
|
* @return {ContentType}
|
||||||
*/
|
*/
|
||||||
export const readContentType = decoder => new ContentType(typeRefs[decoder.readTypeRef()](decoder))
|
export const readContentType = decoder => new ContentType(typeRefs[decoding.readVarUint(decoder)](decoder))
|
||||||
|
|||||||
@@ -1,9 +1,12 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
AbstractStruct,
|
AbstractStruct,
|
||||||
addStruct,
|
addStruct,
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line
|
StructStore, Transaction, ID // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
|
||||||
export const structGCRefNumber = 0
|
export const structGCRefNumber = 0
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -21,9 +24,6 @@ export class GC extends AbstractStruct {
|
|||||||
* @return {boolean}
|
* @return {boolean}
|
||||||
*/
|
*/
|
||||||
mergeWith (right) {
|
mergeWith (right) {
|
||||||
if (this.constructor !== right.constructor) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
this.length += right.length
|
this.length += right.length
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
@@ -41,12 +41,12 @@ export class GC extends AbstractStruct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
encoder.writeInfo(structGCRefNumber)
|
encoding.writeUint8(encoder, structGCRefNumber)
|
||||||
encoder.writeLen(this.length - offset)
|
encoding.writeVarUint(encoder, this.length - offset)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -1,4 +1,7 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
|
readID,
|
||||||
|
writeID,
|
||||||
GC,
|
GC,
|
||||||
getState,
|
getState,
|
||||||
AbstractStruct,
|
AbstractStruct,
|
||||||
@@ -16,18 +19,19 @@ import {
|
|||||||
readContentAny,
|
readContentAny,
|
||||||
readContentString,
|
readContentString,
|
||||||
readContentEmbed,
|
readContentEmbed,
|
||||||
readContentDoc,
|
|
||||||
createID,
|
createID,
|
||||||
readContentFormat,
|
readContentFormat,
|
||||||
readContentType,
|
readContentType,
|
||||||
addChangedTypeToTransaction,
|
addChangedTypeToTransaction,
|
||||||
isDeleted,
|
Doc, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line
|
||||||
StackItem, DeleteSet, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, ContentType, ContentDeleted, StructStore, ID, AbstractType, Transaction // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as error from 'lib0/error'
|
import * as error from 'lib0/error.js'
|
||||||
import * as binary from 'lib0/binary'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as array from 'lib0/array'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
import * as maplib from 'lib0/map.js'
|
||||||
|
import * as set from 'lib0/set.js'
|
||||||
|
import * as binary from 'lib0/binary.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @todo This should return several items
|
* @todo This should return several items
|
||||||
@@ -120,27 +124,18 @@ export const splitItem = (transaction, leftItem, diff) => {
|
|||||||
return rightItem
|
return rightItem
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Array<StackItem>} stack
|
|
||||||
* @param {ID} id
|
|
||||||
*/
|
|
||||||
const isDeletedByUndoStack = (stack, id) => array.some(stack, /** @param {StackItem} s */ s => isDeleted(s.deletions, id))
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Redoes the effect of this operation.
|
* Redoes the effect of this operation.
|
||||||
*
|
*
|
||||||
* @param {Transaction} transaction The Yjs instance.
|
* @param {Transaction} transaction The Yjs instance.
|
||||||
* @param {Item} item
|
* @param {Item} item
|
||||||
* @param {Set<Item>} redoitems
|
* @param {Set<Item>} redoitems
|
||||||
* @param {DeleteSet} itemsToDelete
|
|
||||||
* @param {boolean} ignoreRemoteMapChanges
|
|
||||||
* @param {import('../utils/UndoManager.js').UndoManager} um
|
|
||||||
*
|
*
|
||||||
* @return {Item|null}
|
* @return {Item|null}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
*/
|
*/
|
||||||
export const redoItem = (transaction, item, redoitems, itemsToDelete, ignoreRemoteMapChanges, um) => {
|
export const redoItem = (transaction, item, redoitems) => {
|
||||||
const doc = transaction.doc
|
const doc = transaction.doc
|
||||||
const store = doc.store
|
const store = doc.store
|
||||||
const ownClientID = doc.clientID
|
const ownClientID = doc.clientID
|
||||||
@@ -152,27 +147,42 @@ export const redoItem = (transaction, item, redoitems, itemsToDelete, ignoreRemo
|
|||||||
/**
|
/**
|
||||||
* @type {Item|null}
|
* @type {Item|null}
|
||||||
*/
|
*/
|
||||||
let left = null
|
let left
|
||||||
/**
|
/**
|
||||||
* @type {Item|null}
|
* @type {Item|null}
|
||||||
*/
|
*/
|
||||||
let right
|
let right
|
||||||
// make sure that parent is redone
|
|
||||||
if (parentItem !== null && parentItem.deleted === true) {
|
|
||||||
// try to undo parent if it will be undone anyway
|
|
||||||
if (parentItem.redone === null && (!redoitems.has(parentItem) || redoItem(transaction, parentItem, redoitems, itemsToDelete, ignoreRemoteMapChanges, um) === null)) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
while (parentItem.redone !== null) {
|
|
||||||
parentItem = getItemCleanStart(transaction, parentItem.redone)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const parentType = parentItem === null ? /** @type {AbstractType<any>} */ (item.parent) : /** @type {ContentType} */ (parentItem.content).type
|
|
||||||
|
|
||||||
if (item.parentSub === null) {
|
if (item.parentSub === null) {
|
||||||
// Is an array item. Insert at the old position
|
// Is an array item. Insert at the old position
|
||||||
left = item.left
|
left = item.left
|
||||||
right = item
|
right = item
|
||||||
|
} else {
|
||||||
|
// Is a map item. Insert as current value
|
||||||
|
left = item
|
||||||
|
while (left.right !== null) {
|
||||||
|
left = left.right
|
||||||
|
if (left.id.client !== ownClientID) {
|
||||||
|
// It is not possible to redo this item because it conflicts with a
|
||||||
|
// change from another client
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (left.right !== null) {
|
||||||
|
left = /** @type {Item} */ (/** @type {AbstractType<any>} */ (item.parent)._map.get(item.parentSub))
|
||||||
|
}
|
||||||
|
right = null
|
||||||
|
}
|
||||||
|
// make sure that parent is redone
|
||||||
|
if (parentItem !== null && parentItem.deleted === true && parentItem.redone === null) {
|
||||||
|
// try to undo parent if it will be undone anyway
|
||||||
|
if (!redoitems.has(parentItem) || redoItem(transaction, parentItem, redoitems) === null) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (parentItem !== null && parentItem.redone !== null) {
|
||||||
|
while (parentItem.redone !== null) {
|
||||||
|
parentItem = getItemCleanStart(transaction, parentItem.redone)
|
||||||
|
}
|
||||||
// find next cloned_redo items
|
// find next cloned_redo items
|
||||||
while (left !== null) {
|
while (left !== null) {
|
||||||
/**
|
/**
|
||||||
@@ -204,25 +214,6 @@ export const redoItem = (transaction, item, redoitems, itemsToDelete, ignoreRemo
|
|||||||
}
|
}
|
||||||
right = right.right
|
right = right.right
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
right = null
|
|
||||||
if (item.right && !ignoreRemoteMapChanges) {
|
|
||||||
left = item
|
|
||||||
// Iterate right while right is in itemsToDelete
|
|
||||||
// If it is intended to delete right while item is redone, we can expect that item should replace right.
|
|
||||||
while (left !== null && left.right !== null && (left.right.redone || isDeleted(itemsToDelete, left.right.id) || isDeletedByUndoStack(um.undoStack, left.right.id) || isDeletedByUndoStack(um.redoStack, left.right.id))) {
|
|
||||||
left = left.right
|
|
||||||
// follow redone
|
|
||||||
while (left.redone) left = getItemCleanStart(transaction, left.redone)
|
|
||||||
}
|
|
||||||
if (left && left.right !== null) {
|
|
||||||
// It is not possible to redo this item because it conflicts with a
|
|
||||||
// change from another client
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
left = parentType._map.get(item.parentSub) || null
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
const nextClock = getState(store, ownClientID)
|
const nextClock = getState(store, ownClientID)
|
||||||
const nextId = createID(ownClientID, nextClock)
|
const nextId = createID(ownClientID, nextClock)
|
||||||
@@ -230,7 +221,7 @@ export const redoItem = (transaction, item, redoitems, itemsToDelete, ignoreRemo
|
|||||||
nextId,
|
nextId,
|
||||||
left, left && left.lastId,
|
left, left && left.lastId,
|
||||||
right, right && right.id,
|
right, right && right.id,
|
||||||
parentType,
|
parentItem === null ? item.parent : /** @type {ContentType} */ (parentItem.content).type,
|
||||||
item.parentSub,
|
item.parentSub,
|
||||||
item.content.copy()
|
item.content.copy()
|
||||||
)
|
)
|
||||||
@@ -289,7 +280,7 @@ export class Item extends AbstractStruct {
|
|||||||
*/
|
*/
|
||||||
this.parentSub = parentSub
|
this.parentSub = parentSub
|
||||||
/**
|
/**
|
||||||
* If this type's effect is redone this type refers to the type that undid
|
* If this type's effect is reundone this type refers to the type that undid
|
||||||
* this operation.
|
* this operation.
|
||||||
* @type {ID | null}
|
* @type {ID | null}
|
||||||
*/
|
*/
|
||||||
@@ -298,31 +289,9 @@ export class Item extends AbstractStruct {
|
|||||||
* @type {AbstractContent}
|
* @type {AbstractContent}
|
||||||
*/
|
*/
|
||||||
this.content = content
|
this.content = content
|
||||||
/**
|
|
||||||
* bit1: keep
|
|
||||||
* bit2: countable
|
|
||||||
* bit3: deleted
|
|
||||||
* bit4: mark - mark node as fast-search-marker
|
|
||||||
* @type {number} byte
|
|
||||||
*/
|
|
||||||
this.info = this.content.isCountable() ? binary.BIT2 : 0
|
this.info = this.content.isCountable() ? binary.BIT2 : 0
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* This is used to mark the item as an indexed fast-search marker
|
|
||||||
*
|
|
||||||
* @type {boolean}
|
|
||||||
*/
|
|
||||||
set marker (isMarked) {
|
|
||||||
if (((this.info & binary.BIT4) > 0) !== isMarked) {
|
|
||||||
this.info ^= binary.BIT4
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
get marker () {
|
|
||||||
return (this.info & binary.BIT4) > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* If true, do not garbage collect this Item.
|
* If true, do not garbage collect this Item.
|
||||||
*/
|
*/
|
||||||
@@ -386,14 +355,13 @@ export class Item extends AbstractStruct {
|
|||||||
this.right = getItemCleanStart(transaction, this.rightOrigin)
|
this.right = getItemCleanStart(transaction, this.rightOrigin)
|
||||||
this.rightOrigin = this.right.id
|
this.rightOrigin = this.right.id
|
||||||
}
|
}
|
||||||
if ((this.left && this.left.constructor === GC) || (this.right && this.right.constructor === GC)) {
|
// only set parent if this shouldn't be garbage collected
|
||||||
this.parent = null
|
if (!this.parent) {
|
||||||
} else if (!this.parent) {
|
|
||||||
// only set parent if this shouldn't be garbage collected
|
|
||||||
if (this.left && this.left.constructor === Item) {
|
if (this.left && this.left.constructor === Item) {
|
||||||
this.parent = this.left.parent
|
this.parent = this.left.parent
|
||||||
this.parentSub = this.left.parentSub
|
this.parentSub = this.left.parentSub
|
||||||
} else if (this.right && this.right.constructor === Item) {
|
}
|
||||||
|
if (this.right && this.right.constructor === Item) {
|
||||||
this.parent = this.right.parent
|
this.parent = this.right.parent
|
||||||
this.parentSub = this.right.parentSub
|
this.parentSub = this.right.parentSub
|
||||||
}
|
}
|
||||||
@@ -464,14 +432,10 @@ export class Item extends AbstractStruct {
|
|||||||
if (o.id.client < this.id.client) {
|
if (o.id.client < this.id.client) {
|
||||||
left = o
|
left = o
|
||||||
conflictingItems.clear()
|
conflictingItems.clear()
|
||||||
} else if (compareIDs(this.rightOrigin, o.rightOrigin)) {
|
}
|
||||||
// this and o are conflicting and point to the same integration points. The id decides which item comes first.
|
} else if (o.origin !== null && itemsBeforeOrigin.has(getItem(transaction.doc.store, o.origin))) {
|
||||||
// Since this is to the left of o, we can break here
|
|
||||||
break
|
|
||||||
} // else, o might be integrated before an item that this conflicts with. If so, we will find it in the next iterations
|
|
||||||
} else if (o.origin !== null && itemsBeforeOrigin.has(getItem(transaction.doc.store, o.origin))) { // use getItem instead of getItemCleanEnd because we don't want / need to split items.
|
|
||||||
// case 2
|
// case 2
|
||||||
if (!conflictingItems.has(getItem(transaction.doc.store, o.origin))) {
|
if (o.origin === null || !conflictingItems.has(getItem(transaction.doc.store, o.origin))) {
|
||||||
left = o
|
left = o
|
||||||
conflictingItems.clear()
|
conflictingItems.clear()
|
||||||
}
|
}
|
||||||
@@ -518,7 +482,7 @@ export class Item extends AbstractStruct {
|
|||||||
this.content.integrate(transaction, this)
|
this.content.integrate(transaction, this)
|
||||||
// add parent to transaction.changed
|
// add parent to transaction.changed
|
||||||
addChangedTypeToTransaction(transaction, /** @type {AbstractType<any>} */ (this.parent), this.parentSub)
|
addChangedTypeToTransaction(transaction, /** @type {AbstractType<any>} */ (this.parent), this.parentSub)
|
||||||
if ((/** @type {AbstractType<any>} */ (this.parent)._item !== null && /** @type {AbstractType<any>} */ (this.parent)._item.deleted) || (this.parentSub !== null && this.right !== null)) {
|
if ((/** @type {AbstractType<any>} */ (this.parent)._item !== null && /** @type {AbstractType<any>} */ (this.parent)._item.deleted) || (this.right !== null && this.parentSub !== null)) {
|
||||||
// delete if parent is deleted or if this is not the current attribute value of parent
|
// delete if parent is deleted or if this is not the current attribute value of parent
|
||||||
this.delete(transaction)
|
this.delete(transaction)
|
||||||
}
|
}
|
||||||
@@ -566,7 +530,6 @@ export class Item extends AbstractStruct {
|
|||||||
*/
|
*/
|
||||||
mergeWith (right) {
|
mergeWith (right) {
|
||||||
if (
|
if (
|
||||||
this.constructor === right.constructor &&
|
|
||||||
compareIDs(right.origin, this.lastId) &&
|
compareIDs(right.origin, this.lastId) &&
|
||||||
this.right === right &&
|
this.right === right &&
|
||||||
compareIDs(this.rightOrigin, right.rightOrigin) &&
|
compareIDs(this.rightOrigin, right.rightOrigin) &&
|
||||||
@@ -578,19 +541,6 @@ export class Item extends AbstractStruct {
|
|||||||
this.content.constructor === right.content.constructor &&
|
this.content.constructor === right.content.constructor &&
|
||||||
this.content.mergeWith(right.content)
|
this.content.mergeWith(right.content)
|
||||||
) {
|
) {
|
||||||
const searchMarker = /** @type {AbstractType<any>} */ (this.parent)._searchMarker
|
|
||||||
if (searchMarker) {
|
|
||||||
searchMarker.forEach(marker => {
|
|
||||||
if (marker.p === right) {
|
|
||||||
// right is going to be "forgotten" so we need to update the marker
|
|
||||||
marker.p = this
|
|
||||||
// adjust marker index
|
|
||||||
if (!this.deleted && this.countable) {
|
|
||||||
marker.index -= this.length
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
if (right.keep) {
|
if (right.keep) {
|
||||||
this.keep = true
|
this.keep = true
|
||||||
}
|
}
|
||||||
@@ -617,8 +567,8 @@ export class Item extends AbstractStruct {
|
|||||||
parent._length -= this.length
|
parent._length -= this.length
|
||||||
}
|
}
|
||||||
this.markDeleted()
|
this.markDeleted()
|
||||||
addToDeleteSet(transaction.deleteSet, this.id.client, this.id.clock, this.length)
|
addToDeleteSet(transaction.deleteSet, this.id, this.length)
|
||||||
addChangedTypeToTransaction(transaction, parent, this.parentSub)
|
maplib.setIfUndefined(transaction.changed, parent, set.create).add(this.parentSub)
|
||||||
this.content.delete(transaction)
|
this.content.delete(transaction)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -645,7 +595,7 @@ export class Item extends AbstractStruct {
|
|||||||
*
|
*
|
||||||
* This is called when this Item is sent to a remote peer.
|
* This is called when this Item is sent to a remote peer.
|
||||||
*
|
*
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
* @param {encoding.Encoder} encoder The encoder to write data to.
|
||||||
* @param {number} offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (encoder, offset) {
|
write (encoder, offset) {
|
||||||
@@ -656,38 +606,28 @@ export class Item extends AbstractStruct {
|
|||||||
(origin === null ? 0 : binary.BIT8) | // origin is defined
|
(origin === null ? 0 : binary.BIT8) | // origin is defined
|
||||||
(rightOrigin === null ? 0 : binary.BIT7) | // right origin is defined
|
(rightOrigin === null ? 0 : binary.BIT7) | // right origin is defined
|
||||||
(parentSub === null ? 0 : binary.BIT6) // parentSub is non-null
|
(parentSub === null ? 0 : binary.BIT6) // parentSub is non-null
|
||||||
encoder.writeInfo(info)
|
encoding.writeUint8(encoder, info)
|
||||||
if (origin !== null) {
|
if (origin !== null) {
|
||||||
encoder.writeLeftID(origin)
|
writeID(encoder, origin)
|
||||||
}
|
}
|
||||||
if (rightOrigin !== null) {
|
if (rightOrigin !== null) {
|
||||||
encoder.writeRightID(rightOrigin)
|
writeID(encoder, rightOrigin)
|
||||||
}
|
}
|
||||||
if (origin === null && rightOrigin === null) {
|
if (origin === null && rightOrigin === null) {
|
||||||
const parent = /** @type {AbstractType<any>} */ (this.parent)
|
const parent = /** @type {AbstractType<any>} */ (this.parent)
|
||||||
if (parent._item !== undefined) {
|
const parentItem = parent._item
|
||||||
const parentItem = parent._item
|
if (parentItem === null) {
|
||||||
if (parentItem === null) {
|
// parent type on y._map
|
||||||
// parent type on y._map
|
// find the correct key
|
||||||
// find the correct key
|
const ykey = findRootTypeKey(parent)
|
||||||
const ykey = findRootTypeKey(parent)
|
encoding.writeVarUint(encoder, 1) // write parentYKey
|
||||||
encoder.writeParentInfo(true) // write parentYKey
|
encoding.writeVarString(encoder, ykey)
|
||||||
encoder.writeString(ykey)
|
|
||||||
} else {
|
|
||||||
encoder.writeParentInfo(false) // write parent id
|
|
||||||
encoder.writeLeftID(parentItem.id)
|
|
||||||
}
|
|
||||||
} else if (parent.constructor === String) { // this edge case was added by differential updates
|
|
||||||
encoder.writeParentInfo(true) // write parentYKey
|
|
||||||
encoder.writeString(parent)
|
|
||||||
} else if (parent.constructor === ID) {
|
|
||||||
encoder.writeParentInfo(false) // write parent id
|
|
||||||
encoder.writeLeftID(parent)
|
|
||||||
} else {
|
} else {
|
||||||
error.unexpectedCase()
|
encoding.writeVarUint(encoder, 0) // write parent id
|
||||||
|
writeID(encoder, parentItem.id)
|
||||||
}
|
}
|
||||||
if (parentSub !== null) {
|
if (parentSub !== null) {
|
||||||
encoder.writeString(parentSub)
|
encoding.writeVarString(encoder, parentSub)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
this.content.write(encoder, offset)
|
this.content.write(encoder, offset)
|
||||||
@@ -695,28 +635,26 @@ export class Item extends AbstractStruct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @param {number} info
|
* @param {number} info
|
||||||
*/
|
*/
|
||||||
export const readItemContent = (decoder, info) => contentRefs[info & binary.BITS5](decoder)
|
const readItemContent = (decoder, info) => contentRefs[info & binary.BITS5](decoder)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A lookup map for reading Item content.
|
* A lookup map for reading Item content.
|
||||||
*
|
*
|
||||||
* @type {Array<function(UpdateDecoderV1 | UpdateDecoderV2):AbstractContent>}
|
* @type {Array<function(decoding.Decoder):AbstractContent>}
|
||||||
*/
|
*/
|
||||||
export const contentRefs = [
|
export const contentRefs = [
|
||||||
() => { error.unexpectedCase() }, // GC is not ItemContent
|
() => { throw error.unexpectedCase() }, // GC is not ItemContent
|
||||||
readContentDeleted, // 1
|
readContentDeleted,
|
||||||
readContentJSON, // 2
|
readContentJSON,
|
||||||
readContentBinary, // 3
|
readContentBinary,
|
||||||
readContentString, // 4
|
readContentString,
|
||||||
readContentEmbed, // 5
|
readContentEmbed,
|
||||||
readContentFormat, // 6
|
readContentFormat,
|
||||||
readContentType, // 7
|
readContentType,
|
||||||
readContentAny, // 8
|
readContentAny
|
||||||
readContentDoc, // 9
|
|
||||||
() => { error.unexpectedCase() } // 10 - Skip is not ItemContent
|
|
||||||
]
|
]
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -758,48 +696,48 @@ export class AbstractContent {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {number} _offset
|
* @param {number} offset
|
||||||
* @return {AbstractContent}
|
* @return {AbstractContent}
|
||||||
*/
|
*/
|
||||||
splice (_offset) {
|
splice (offset) {
|
||||||
throw error.methodUnimplemented()
|
throw error.methodUnimplemented()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {AbstractContent} _right
|
* @param {AbstractContent} right
|
||||||
* @return {boolean}
|
* @return {boolean}
|
||||||
*/
|
*/
|
||||||
mergeWith (_right) {
|
mergeWith (right) {
|
||||||
throw error.methodUnimplemented()
|
throw error.methodUnimplemented()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Transaction} _transaction
|
* @param {Transaction} transaction
|
||||||
* @param {Item} _item
|
* @param {Item} item
|
||||||
*/
|
*/
|
||||||
integrate (_transaction, _item) {
|
integrate (transaction, item) {
|
||||||
throw error.methodUnimplemented()
|
throw error.methodUnimplemented()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Transaction} _transaction
|
* @param {Transaction} transaction
|
||||||
*/
|
*/
|
||||||
delete (_transaction) {
|
delete (transaction) {
|
||||||
throw error.methodUnimplemented()
|
throw error.methodUnimplemented()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {StructStore} _store
|
* @param {StructStore} store
|
||||||
*/
|
*/
|
||||||
gc (_store) {
|
gc (store) {
|
||||||
throw error.methodUnimplemented()
|
throw error.methodUnimplemented()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} _encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {number} _offset
|
* @param {number} offset
|
||||||
*/
|
*/
|
||||||
write (_encoder, _offset) {
|
write (encoder, offset) {
|
||||||
throw error.methodUnimplemented()
|
throw error.methodUnimplemented()
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -810,3 +748,38 @@ export class AbstractContent {
|
|||||||
throw error.methodUnimplemented()
|
throw error.methodUnimplemented()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {decoding.Decoder} decoder
|
||||||
|
* @param {ID} id
|
||||||
|
* @param {number} info
|
||||||
|
* @param {Doc} doc
|
||||||
|
*/
|
||||||
|
export const readItem = (decoder, id, info, doc) => {
|
||||||
|
/**
|
||||||
|
* The item that was originally to the left of this item.
|
||||||
|
* @type {ID | null}
|
||||||
|
*/
|
||||||
|
const origin = (info & binary.BIT8) === binary.BIT8 ? readID(decoder) : null
|
||||||
|
/**
|
||||||
|
* The item that was originally to the right of this item.
|
||||||
|
* @type {ID | null}
|
||||||
|
*/
|
||||||
|
const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? readID(decoder) : null
|
||||||
|
const canCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
|
||||||
|
const hasParentYKey = canCopyParentInfo ? decoding.readVarUint(decoder) === 1 : false
|
||||||
|
/**
|
||||||
|
* If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
|
||||||
|
* and we read the next string as parentYKey.
|
||||||
|
* It indicates how we store/retrieve parent from `y.share`
|
||||||
|
* @type {string|null}
|
||||||
|
*/
|
||||||
|
const parentYKey = canCopyParentInfo && hasParentYKey ? decoding.readVarString(decoder) : null
|
||||||
|
|
||||||
|
return new Item(
|
||||||
|
id, null, origin, null, rightOrigin,
|
||||||
|
canCopyParentInfo && !hasParentYKey ? readID(decoder) : (parentYKey ? doc.get(parentYKey) : null), // parent
|
||||||
|
canCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoding.readVarString(decoder) : null, // parentSub
|
||||||
|
/** @type {AbstractContent} */ (readItemContent(decoder, info)) // item content
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,59 +0,0 @@
|
|||||||
import {
|
|
||||||
AbstractStruct,
|
|
||||||
UpdateEncoderV1, UpdateEncoderV2, StructStore, Transaction, ID // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
|
||||||
import * as error from 'lib0/error'
|
|
||||||
import * as encoding from 'lib0/encoding'
|
|
||||||
|
|
||||||
export const structSkipRefNumber = 10
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @private
|
|
||||||
*/
|
|
||||||
export class Skip extends AbstractStruct {
|
|
||||||
get deleted () {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
delete () {}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Skip} right
|
|
||||||
* @return {boolean}
|
|
||||||
*/
|
|
||||||
mergeWith (right) {
|
|
||||||
if (this.constructor !== right.constructor) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
this.length += right.length
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Transaction} transaction
|
|
||||||
* @param {number} offset
|
|
||||||
*/
|
|
||||||
integrate (transaction, offset) {
|
|
||||||
// skip structs cannot be integrated
|
|
||||||
error.unexpectedCase()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
|
||||||
* @param {number} offset
|
|
||||||
*/
|
|
||||||
write (encoder, offset) {
|
|
||||||
encoder.writeInfo(structSkipRefNumber)
|
|
||||||
// write as VarUint because Skips can't make use of predictable length-encoding
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, this.length - offset)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Transaction} transaction
|
|
||||||
* @param {StructStore} store
|
|
||||||
* @return {null | number}
|
|
||||||
*/
|
|
||||||
getMissing (transaction, store) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,3 +1,4 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
removeEventHandlerListener,
|
removeEventHandlerListener,
|
||||||
callEventHandlerListeners,
|
callEventHandlerListeners,
|
||||||
@@ -10,209 +11,13 @@ import {
|
|||||||
ContentAny,
|
ContentAny,
|
||||||
ContentBinary,
|
ContentBinary,
|
||||||
getItemCleanStart,
|
getItemCleanStart,
|
||||||
ContentDoc, YText, YArray, UpdateEncoderV1, UpdateEncoderV2, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line
|
ID, Doc, Snapshot, Transaction, EventHandler, YEvent, Item, // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as map from 'lib0/map'
|
import * as map from 'lib0/map.js'
|
||||||
import * as iterator from 'lib0/iterator'
|
import * as iterator from 'lib0/iterator.js'
|
||||||
import * as error from 'lib0/error'
|
import * as error from 'lib0/error.js'
|
||||||
import * as math from 'lib0/math'
|
import * as encoding from 'lib0/encoding.js' // eslint-disable-line
|
||||||
import * as log from 'lib0/logging'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* https://docs.yjs.dev/getting-started/working-with-shared-types#caveats
|
|
||||||
*/
|
|
||||||
export const warnPrematureAccess = () => { log.warn('Invalid access: Add Yjs type to a document before reading data.') }
|
|
||||||
|
|
||||||
const maxSearchMarker = 80
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A unique timestamp that identifies each marker.
|
|
||||||
*
|
|
||||||
* Time is relative,.. this is more like an ever-increasing clock.
|
|
||||||
*
|
|
||||||
* @type {number}
|
|
||||||
*/
|
|
||||||
let globalSearchMarkerTimestamp = 0
|
|
||||||
|
|
||||||
export class ArraySearchMarker {
|
|
||||||
/**
|
|
||||||
* @param {Item} p
|
|
||||||
* @param {number} index
|
|
||||||
*/
|
|
||||||
constructor (p, index) {
|
|
||||||
p.marker = true
|
|
||||||
this.p = p
|
|
||||||
this.index = index
|
|
||||||
this.timestamp = globalSearchMarkerTimestamp++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ArraySearchMarker} marker
|
|
||||||
*/
|
|
||||||
const refreshMarkerTimestamp = marker => { marker.timestamp = globalSearchMarkerTimestamp++ }
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This is rather complex so this function is the only thing that should overwrite a marker
|
|
||||||
*
|
|
||||||
* @param {ArraySearchMarker} marker
|
|
||||||
* @param {Item} p
|
|
||||||
* @param {number} index
|
|
||||||
*/
|
|
||||||
const overwriteMarker = (marker, p, index) => {
|
|
||||||
marker.p.marker = false
|
|
||||||
marker.p = p
|
|
||||||
p.marker = true
|
|
||||||
marker.index = index
|
|
||||||
marker.timestamp = globalSearchMarkerTimestamp++
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Array<ArraySearchMarker>} searchMarker
|
|
||||||
* @param {Item} p
|
|
||||||
* @param {number} index
|
|
||||||
*/
|
|
||||||
const markPosition = (searchMarker, p, index) => {
|
|
||||||
if (searchMarker.length >= maxSearchMarker) {
|
|
||||||
// override oldest marker (we don't want to create more objects)
|
|
||||||
const marker = searchMarker.reduce((a, b) => a.timestamp < b.timestamp ? a : b)
|
|
||||||
overwriteMarker(marker, p, index)
|
|
||||||
return marker
|
|
||||||
} else {
|
|
||||||
// create new marker
|
|
||||||
const pm = new ArraySearchMarker(p, index)
|
|
||||||
searchMarker.push(pm)
|
|
||||||
return pm
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Search marker help us to find positions in the associative array faster.
|
|
||||||
*
|
|
||||||
* They speed up the process of finding a position without much bookkeeping.
|
|
||||||
*
|
|
||||||
* A maximum of `maxSearchMarker` objects are created.
|
|
||||||
*
|
|
||||||
* This function always returns a refreshed marker (updated timestamp)
|
|
||||||
*
|
|
||||||
* @param {AbstractType<any>} yarray
|
|
||||||
* @param {number} index
|
|
||||||
*/
|
|
||||||
export const findMarker = (yarray, index) => {
|
|
||||||
if (yarray._start === null || index === 0 || yarray._searchMarker === null) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
const marker = yarray._searchMarker.length === 0 ? null : yarray._searchMarker.reduce((a, b) => math.abs(index - a.index) < math.abs(index - b.index) ? a : b)
|
|
||||||
let p = yarray._start
|
|
||||||
let pindex = 0
|
|
||||||
if (marker !== null) {
|
|
||||||
p = marker.p
|
|
||||||
pindex = marker.index
|
|
||||||
refreshMarkerTimestamp(marker) // we used it, we might need to use it again
|
|
||||||
}
|
|
||||||
// iterate to right if possible
|
|
||||||
while (p.right !== null && pindex < index) {
|
|
||||||
if (!p.deleted && p.countable) {
|
|
||||||
if (index < pindex + p.length) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
pindex += p.length
|
|
||||||
}
|
|
||||||
p = p.right
|
|
||||||
}
|
|
||||||
// iterate to left if necessary (might be that pindex > index)
|
|
||||||
while (p.left !== null && pindex > index) {
|
|
||||||
p = p.left
|
|
||||||
if (!p.deleted && p.countable) {
|
|
||||||
pindex -= p.length
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// we want to make sure that p can't be merged with left, because that would screw up everything
|
|
||||||
// in that cas just return what we have (it is most likely the best marker anyway)
|
|
||||||
// iterate to left until p can't be merged with left
|
|
||||||
while (p.left !== null && p.left.id.client === p.id.client && p.left.id.clock + p.left.length === p.id.clock) {
|
|
||||||
p = p.left
|
|
||||||
if (!p.deleted && p.countable) {
|
|
||||||
pindex -= p.length
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// @todo remove!
|
|
||||||
// assure position
|
|
||||||
// {
|
|
||||||
// let start = yarray._start
|
|
||||||
// let pos = 0
|
|
||||||
// while (start !== p) {
|
|
||||||
// if (!start.deleted && start.countable) {
|
|
||||||
// pos += start.length
|
|
||||||
// }
|
|
||||||
// start = /** @type {Item} */ (start.right)
|
|
||||||
// }
|
|
||||||
// if (pos !== pindex) {
|
|
||||||
// debugger
|
|
||||||
// throw new Error('Gotcha position fail!')
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
// if (marker) {
|
|
||||||
// if (window.lengths == null) {
|
|
||||||
// window.lengths = []
|
|
||||||
// window.getLengths = () => window.lengths.sort((a, b) => a - b)
|
|
||||||
// }
|
|
||||||
// window.lengths.push(marker.index - pindex)
|
|
||||||
// console.log('distance', marker.index - pindex, 'len', p && p.parent.length)
|
|
||||||
// }
|
|
||||||
if (marker !== null && math.abs(marker.index - pindex) < /** @type {YText|YArray<any>} */ (p.parent).length / maxSearchMarker) {
|
|
||||||
// adjust existing marker
|
|
||||||
overwriteMarker(marker, p, pindex)
|
|
||||||
return marker
|
|
||||||
} else {
|
|
||||||
// create new marker
|
|
||||||
return markPosition(yarray._searchMarker, p, pindex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Update markers when a change happened.
|
|
||||||
*
|
|
||||||
* This should be called before doing a deletion!
|
|
||||||
*
|
|
||||||
* @param {Array<ArraySearchMarker>} searchMarker
|
|
||||||
* @param {number} index
|
|
||||||
* @param {number} len If insertion, len is positive. If deletion, len is negative.
|
|
||||||
*/
|
|
||||||
export const updateMarkerChanges = (searchMarker, index, len) => {
|
|
||||||
for (let i = searchMarker.length - 1; i >= 0; i--) {
|
|
||||||
const m = searchMarker[i]
|
|
||||||
if (len > 0) {
|
|
||||||
/**
|
|
||||||
* @type {Item|null}
|
|
||||||
*/
|
|
||||||
let p = m.p
|
|
||||||
p.marker = false
|
|
||||||
// Ideally we just want to do a simple position comparison, but this will only work if
|
|
||||||
// search markers don't point to deleted items for formats.
|
|
||||||
// Iterate marker to prev undeleted countable position so we know what to do when updating a position
|
|
||||||
while (p && (p.deleted || !p.countable)) {
|
|
||||||
p = p.left
|
|
||||||
if (p && !p.deleted && p.countable) {
|
|
||||||
// adjust position. the loop should break now
|
|
||||||
m.index -= p.length
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (p === null || p.marker === true) {
|
|
||||||
// remove search marker if updated position is null or if position is already marked
|
|
||||||
searchMarker.splice(i, 1)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
m.p = p
|
|
||||||
p.marker = true
|
|
||||||
}
|
|
||||||
if (index < m.index || (len > 0 && index === m.index)) { // a simple index <= m.index check would actually suffice
|
|
||||||
m.index = math.max(index, m.index + len)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Accumulate all (list) children of a type and return them as an Array.
|
* Accumulate all (list) children of a type and return them as an Array.
|
||||||
@@ -221,7 +26,6 @@ export const updateMarkerChanges = (searchMarker, index, len) => {
|
|||||||
* @return {Array<Item>}
|
* @return {Array<Item>}
|
||||||
*/
|
*/
|
||||||
export const getTypeChildren = t => {
|
export const getTypeChildren = t => {
|
||||||
t.doc ?? warnPrematureAccess()
|
|
||||||
let s = t._start
|
let s = t._start
|
||||||
const arr = []
|
const arr = []
|
||||||
while (s) {
|
while (s) {
|
||||||
@@ -284,20 +88,9 @@ export class AbstractType {
|
|||||||
this._eH = createEventHandler()
|
this._eH = createEventHandler()
|
||||||
/**
|
/**
|
||||||
* Deep event handlers
|
* Deep event handlers
|
||||||
* @type {EventHandler<Array<YEvent<any>>,Transaction>}
|
* @type {EventHandler<Array<YEvent>,Transaction>}
|
||||||
*/
|
*/
|
||||||
this._dEH = createEventHandler()
|
this._dEH = createEventHandler()
|
||||||
/**
|
|
||||||
* @type {null | Array<ArraySearchMarker>}
|
|
||||||
*/
|
|
||||||
this._searchMarker = null
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {AbstractType<any>|null}
|
|
||||||
*/
|
|
||||||
get parent () {
|
|
||||||
return this._item ? /** @type {AbstractType<any>} */ (this._item.parent) : null
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -323,20 +116,9 @@ export class AbstractType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Makes a copy of this data type that can be included somewhere else.
|
* @param {encoding.Encoder} encoder
|
||||||
*
|
|
||||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
|
||||||
*
|
|
||||||
* @return {AbstractType<EventType>}
|
|
||||||
*/
|
*/
|
||||||
clone () {
|
_write (encoder) { }
|
||||||
throw error.methodUnimplemented()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} _encoder
|
|
||||||
*/
|
|
||||||
_write (_encoder) { }
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The first non-deleted item
|
* The first non-deleted item
|
||||||
@@ -354,13 +136,9 @@ export class AbstractType {
|
|||||||
* Must be implemented by each type.
|
* Must be implemented by each type.
|
||||||
*
|
*
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
* @param {Set<null|string>} _parentSubs Keys changed on this type. `null` if list was modified.
|
* @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
|
||||||
*/
|
*/
|
||||||
_callObserver (transaction, _parentSubs) {
|
_callObserver (transaction, parentSubs) { /* skip if no type is specified */ }
|
||||||
if (!transaction.local && this._searchMarker) {
|
|
||||||
this._searchMarker.length = 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Observe all events that are created on this type.
|
* Observe all events that are created on this type.
|
||||||
@@ -374,7 +152,7 @@ export class AbstractType {
|
|||||||
/**
|
/**
|
||||||
* Observe all events that are created by this type and its children.
|
* Observe all events that are created by this type and its children.
|
||||||
*
|
*
|
||||||
* @param {function(Array<YEvent<any>>,Transaction):void} f Observer function
|
* @param {function(Array<YEvent>,Transaction):void} f Observer function
|
||||||
*/
|
*/
|
||||||
observeDeep (f) {
|
observeDeep (f) {
|
||||||
addEventHandlerListener(this._dEH, f)
|
addEventHandlerListener(this._dEH, f)
|
||||||
@@ -392,7 +170,7 @@ export class AbstractType {
|
|||||||
/**
|
/**
|
||||||
* Unregister an observer function.
|
* Unregister an observer function.
|
||||||
*
|
*
|
||||||
* @param {function(Array<YEvent<any>>,Transaction):void} f Observer function
|
* @param {function(Array<YEvent>,Transaction):void} f Observer function
|
||||||
*/
|
*/
|
||||||
unobserveDeep (f) {
|
unobserveDeep (f) {
|
||||||
removeEventHandlerListener(this._dEH, f)
|
removeEventHandlerListener(this._dEH, f)
|
||||||
@@ -405,44 +183,6 @@ export class AbstractType {
|
|||||||
toJSON () {}
|
toJSON () {}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {AbstractType<any>} type
|
|
||||||
* @param {number} start
|
|
||||||
* @param {number} end
|
|
||||||
* @return {Array<any>}
|
|
||||||
*
|
|
||||||
* @private
|
|
||||||
* @function
|
|
||||||
*/
|
|
||||||
export const typeListSlice = (type, start, end) => {
|
|
||||||
type.doc ?? warnPrematureAccess()
|
|
||||||
if (start < 0) {
|
|
||||||
start = type._length + start
|
|
||||||
}
|
|
||||||
if (end < 0) {
|
|
||||||
end = type._length + end
|
|
||||||
}
|
|
||||||
let len = end - start
|
|
||||||
const cs = []
|
|
||||||
let n = type._start
|
|
||||||
while (n !== null && len > 0) {
|
|
||||||
if (n.countable && !n.deleted) {
|
|
||||||
const c = n.content.getContent()
|
|
||||||
if (c.length <= start) {
|
|
||||||
start -= c.length
|
|
||||||
} else {
|
|
||||||
for (let i = start; i < c.length && len > 0; i++) {
|
|
||||||
cs.push(c[i])
|
|
||||||
len--
|
|
||||||
}
|
|
||||||
start = 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
n = n.right
|
|
||||||
}
|
|
||||||
return cs
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {AbstractType<any>} type
|
* @param {AbstractType<any>} type
|
||||||
* @return {Array<any>}
|
* @return {Array<any>}
|
||||||
@@ -451,7 +191,6 @@ export const typeListSlice = (type, start, end) => {
|
|||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const typeListToArray = type => {
|
export const typeListToArray = type => {
|
||||||
type.doc ?? warnPrematureAccess()
|
|
||||||
const cs = []
|
const cs = []
|
||||||
let n = type._start
|
let n = type._start
|
||||||
while (n !== null) {
|
while (n !== null) {
|
||||||
@@ -490,7 +229,7 @@ export const typeListToArraySnapshot = (type, snapshot) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Executes a provided function on once on every element of this YArray.
|
* Executes a provided function on once on overy element of this YArray.
|
||||||
*
|
*
|
||||||
* @param {AbstractType<any>} type
|
* @param {AbstractType<any>} type
|
||||||
* @param {function(any,number,any):void} f A function to execute on every element of this YArray.
|
* @param {function(any,number,any):void} f A function to execute on every element of this YArray.
|
||||||
@@ -501,7 +240,6 @@ export const typeListToArraySnapshot = (type, snapshot) => {
|
|||||||
export const typeListForEach = (type, f) => {
|
export const typeListForEach = (type, f) => {
|
||||||
let index = 0
|
let index = 0
|
||||||
let n = type._start
|
let n = type._start
|
||||||
type.doc ?? warnPrematureAccess()
|
|
||||||
while (n !== null) {
|
while (n !== null) {
|
||||||
if (n.countable && !n.deleted) {
|
if (n.countable && !n.deleted) {
|
||||||
const c = n.content.getContent()
|
const c = n.content.getContent()
|
||||||
@@ -583,7 +321,7 @@ export const typeListCreateIterator = type => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Executes a provided function on once on every element of this YArray.
|
* Executes a provided function on once on overy element of this YArray.
|
||||||
* Operates on a snapshotted state of the document.
|
* Operates on a snapshotted state of the document.
|
||||||
*
|
*
|
||||||
* @param {AbstractType<any>} type
|
* @param {AbstractType<any>} type
|
||||||
@@ -616,14 +354,7 @@ export const typeListForEachSnapshot = (type, f, snapshot) => {
|
|||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const typeListGet = (type, index) => {
|
export const typeListGet = (type, index) => {
|
||||||
type.doc ?? warnPrematureAccess()
|
for (let n = type._start; n !== null; n = n.right) {
|
||||||
const marker = findMarker(type, index)
|
|
||||||
let n = type._start
|
|
||||||
if (marker !== null) {
|
|
||||||
n = marker.p
|
|
||||||
index -= marker.index
|
|
||||||
}
|
|
||||||
for (; n !== null; n = n.right) {
|
|
||||||
if (!n.deleted && n.countable) {
|
if (!n.deleted && n.countable) {
|
||||||
if (index < n.length) {
|
if (index < n.length) {
|
||||||
return n.content.getContent()[index]
|
return n.content.getContent()[index]
|
||||||
@@ -637,7 +368,7 @@ export const typeListGet = (type, index) => {
|
|||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
* @param {AbstractType<any>} parent
|
* @param {AbstractType<any>} parent
|
||||||
* @param {Item?} referenceItem
|
* @param {Item?} referenceItem
|
||||||
* @param {Array<Object<string,any>|Array<any>|boolean|number|null|string|Uint8Array>} content
|
* @param {Array<Object<string,any>|Array<any>|boolean|number|string|Uint8Array>} content
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
@@ -649,7 +380,7 @@ export const typeListInsertGenericsAfter = (transaction, parent, referenceItem,
|
|||||||
const store = doc.store
|
const store = doc.store
|
||||||
const right = referenceItem === null ? parent._start : referenceItem.right
|
const right = referenceItem === null ? parent._start : referenceItem.right
|
||||||
/**
|
/**
|
||||||
* @type {Array<Object|Array<any>|number|null>}
|
* @type {Array<Object|Array<any>|number>}
|
||||||
*/
|
*/
|
||||||
let jsonContent = []
|
let jsonContent = []
|
||||||
const packJsonContent = () => {
|
const packJsonContent = () => {
|
||||||
@@ -660,77 +391,49 @@ export const typeListInsertGenericsAfter = (transaction, parent, referenceItem,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
content.forEach(c => {
|
content.forEach(c => {
|
||||||
if (c === null) {
|
switch (c.constructor) {
|
||||||
jsonContent.push(c)
|
case Number:
|
||||||
} else {
|
case Object:
|
||||||
switch (c.constructor) {
|
case Boolean:
|
||||||
case Number:
|
case Array:
|
||||||
case Object:
|
case String:
|
||||||
case Boolean:
|
jsonContent.push(c)
|
||||||
case Array:
|
break
|
||||||
case String:
|
default:
|
||||||
jsonContent.push(c)
|
packJsonContent()
|
||||||
break
|
switch (c.constructor) {
|
||||||
default:
|
case Uint8Array:
|
||||||
packJsonContent()
|
case ArrayBuffer:
|
||||||
switch (c.constructor) {
|
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentBinary(new Uint8Array(/** @type {Uint8Array} */ (c))))
|
||||||
case Uint8Array:
|
left.integrate(transaction, 0)
|
||||||
case ArrayBuffer:
|
break
|
||||||
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentBinary(new Uint8Array(/** @type {Uint8Array} */ (c))))
|
default:
|
||||||
|
if (c instanceof AbstractType) {
|
||||||
|
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentType(c))
|
||||||
left.integrate(transaction, 0)
|
left.integrate(transaction, 0)
|
||||||
break
|
} else {
|
||||||
case Doc:
|
throw new Error('Unexpected content type in insert operation')
|
||||||
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentDoc(/** @type {Doc} */ (c)))
|
}
|
||||||
left.integrate(transaction, 0)
|
}
|
||||||
break
|
|
||||||
default:
|
|
||||||
if (c instanceof AbstractType) {
|
|
||||||
left = new Item(createID(ownClientId, getState(store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentType(c))
|
|
||||||
left.integrate(transaction, 0)
|
|
||||||
} else {
|
|
||||||
throw new Error('Unexpected content type in insert operation')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
packJsonContent()
|
packJsonContent()
|
||||||
}
|
}
|
||||||
|
|
||||||
const lengthExceeded = () => error.create('Length exceeded!')
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
* @param {AbstractType<any>} parent
|
* @param {AbstractType<any>} parent
|
||||||
* @param {number} index
|
* @param {number} index
|
||||||
* @param {Array<Object<string,any>|Array<any>|number|null|string|Uint8Array>} content
|
* @param {Array<Object<string,any>|Array<any>|number|string|Uint8Array>} content
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const typeListInsertGenerics = (transaction, parent, index, content) => {
|
export const typeListInsertGenerics = (transaction, parent, index, content) => {
|
||||||
if (index > parent._length) {
|
|
||||||
throw lengthExceeded()
|
|
||||||
}
|
|
||||||
if (index === 0) {
|
if (index === 0) {
|
||||||
if (parent._searchMarker) {
|
|
||||||
updateMarkerChanges(parent._searchMarker, index, content.length)
|
|
||||||
}
|
|
||||||
return typeListInsertGenericsAfter(transaction, parent, null, content)
|
return typeListInsertGenericsAfter(transaction, parent, null, content)
|
||||||
}
|
}
|
||||||
const startIndex = index
|
|
||||||
const marker = findMarker(parent, index)
|
|
||||||
let n = parent._start
|
let n = parent._start
|
||||||
if (marker !== null) {
|
|
||||||
n = marker.p
|
|
||||||
index -= marker.index
|
|
||||||
// we need to iterate one to the left so that the algorithm works
|
|
||||||
if (index === 0) {
|
|
||||||
// @todo refactor this as it actually doesn't consider formats
|
|
||||||
n = n.prev // important! get the left undeleted item so that we can actually decrease index
|
|
||||||
index += (n && n.countable && !n.deleted) ? n.length : 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (; n !== null; n = n.right) {
|
for (; n !== null; n = n.right) {
|
||||||
if (!n.deleted && n.countable) {
|
if (!n.deleted && n.countable) {
|
||||||
if (index <= n.length) {
|
if (index <= n.length) {
|
||||||
@@ -743,32 +446,6 @@ export const typeListInsertGenerics = (transaction, parent, index, content) => {
|
|||||||
index -= n.length
|
index -= n.length
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (parent._searchMarker) {
|
|
||||||
updateMarkerChanges(parent._searchMarker, startIndex, content.length)
|
|
||||||
}
|
|
||||||
return typeListInsertGenericsAfter(transaction, parent, n, content)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Pushing content is special as we generally want to push after the last item. So we don't have to update
|
|
||||||
* the search marker.
|
|
||||||
*
|
|
||||||
* @param {Transaction} transaction
|
|
||||||
* @param {AbstractType<any>} parent
|
|
||||||
* @param {Array<Object<string,any>|Array<any>|number|null|string|Uint8Array>} content
|
|
||||||
*
|
|
||||||
* @private
|
|
||||||
* @function
|
|
||||||
*/
|
|
||||||
export const typeListPushGenerics = (transaction, parent, content) => {
|
|
||||||
// Use the marker with the highest index and iterate to the right.
|
|
||||||
const marker = (parent._searchMarker || []).reduce((maxMarker, currMarker) => currMarker.index > maxMarker.index ? currMarker : maxMarker, { index: 0, p: parent._start })
|
|
||||||
let n = marker.p
|
|
||||||
if (n) {
|
|
||||||
while (n.right) {
|
|
||||||
n = n.right
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return typeListInsertGenericsAfter(transaction, parent, n, content)
|
return typeListInsertGenericsAfter(transaction, parent, n, content)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -783,14 +460,7 @@ export const typeListPushGenerics = (transaction, parent, content) => {
|
|||||||
*/
|
*/
|
||||||
export const typeListDelete = (transaction, parent, index, length) => {
|
export const typeListDelete = (transaction, parent, index, length) => {
|
||||||
if (length === 0) { return }
|
if (length === 0) { return }
|
||||||
const startIndex = index
|
|
||||||
const startLength = length
|
|
||||||
const marker = findMarker(parent, index)
|
|
||||||
let n = parent._start
|
let n = parent._start
|
||||||
if (marker !== null) {
|
|
||||||
n = marker.p
|
|
||||||
index -= marker.index
|
|
||||||
}
|
|
||||||
// compute the first item to be deleted
|
// compute the first item to be deleted
|
||||||
for (; n !== null && index > 0; n = n.right) {
|
for (; n !== null && index > 0; n = n.right) {
|
||||||
if (!n.deleted && n.countable) {
|
if (!n.deleted && n.countable) {
|
||||||
@@ -812,10 +482,7 @@ export const typeListDelete = (transaction, parent, index, length) => {
|
|||||||
n = n.right
|
n = n.right
|
||||||
}
|
}
|
||||||
if (length > 0) {
|
if (length > 0) {
|
||||||
throw lengthExceeded()
|
throw error.create('array length exceeded')
|
||||||
}
|
|
||||||
if (parent._searchMarker) {
|
|
||||||
updateMarkerChanges(parent._searchMarker, startIndex, -startLength + length /* in case we remove the above exception */)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -838,7 +505,7 @@ export const typeMapDelete = (transaction, parent, key) => {
|
|||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
* @param {AbstractType<any>} parent
|
* @param {AbstractType<any>} parent
|
||||||
* @param {string} key
|
* @param {string} key
|
||||||
* @param {Object|number|null|Array<any>|string|Uint8Array|AbstractType<any>} value
|
* @param {Object|number|Array<any>|string|Uint8Array|AbstractType<any>} value
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
@@ -862,9 +529,6 @@ export const typeMapSet = (transaction, parent, key, value) => {
|
|||||||
case Uint8Array:
|
case Uint8Array:
|
||||||
content = new ContentBinary(/** @type {Uint8Array} */ (value))
|
content = new ContentBinary(/** @type {Uint8Array} */ (value))
|
||||||
break
|
break
|
||||||
case Doc:
|
|
||||||
content = new ContentDoc(/** @type {Doc} */ (value))
|
|
||||||
break
|
|
||||||
default:
|
default:
|
||||||
if (value instanceof AbstractType) {
|
if (value instanceof AbstractType) {
|
||||||
content = new ContentType(value)
|
content = new ContentType(value)
|
||||||
@@ -879,20 +543,19 @@ export const typeMapSet = (transaction, parent, key, value) => {
|
|||||||
/**
|
/**
|
||||||
* @param {AbstractType<any>} parent
|
* @param {AbstractType<any>} parent
|
||||||
* @param {string} key
|
* @param {string} key
|
||||||
* @return {Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined}
|
* @return {Object<string,any>|number|Array<any>|string|Uint8Array|AbstractType<any>|undefined}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const typeMapGet = (parent, key) => {
|
export const typeMapGet = (parent, key) => {
|
||||||
parent.doc ?? warnPrematureAccess()
|
|
||||||
const val = parent._map.get(key)
|
const val = parent._map.get(key)
|
||||||
return val !== undefined && !val.deleted ? val.content.getContent()[val.length - 1] : undefined
|
return val !== undefined && !val.deleted ? val.content.getContent()[val.length - 1] : undefined
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {AbstractType<any>} parent
|
* @param {AbstractType<any>} parent
|
||||||
* @return {Object<string,Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined>}
|
* @return {Object<string,Object<string,any>|number|Array<any>|string|Uint8Array|AbstractType<any>|undefined>}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
@@ -902,12 +565,11 @@ export const typeMapGetAll = (parent) => {
|
|||||||
* @type {Object<string,any>}
|
* @type {Object<string,any>}
|
||||||
*/
|
*/
|
||||||
const res = {}
|
const res = {}
|
||||||
parent.doc ?? warnPrematureAccess()
|
for (const [key, value] of parent._map) {
|
||||||
parent._map.forEach((value, key) => {
|
|
||||||
if (!value.deleted) {
|
if (!value.deleted) {
|
||||||
res[key] = value.content.getContent()[value.length - 1]
|
res[key] = value.content.getContent()[value.length - 1]
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
return res
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -920,7 +582,6 @@ export const typeMapGetAll = (parent) => {
|
|||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const typeMapHas = (parent, key) => {
|
export const typeMapHas = (parent, key) => {
|
||||||
parent.doc ?? warnPrematureAccess()
|
|
||||||
const val = parent._map.get(key)
|
const val = parent._map.get(key)
|
||||||
return val !== undefined && !val.deleted
|
return val !== undefined && !val.deleted
|
||||||
}
|
}
|
||||||
@@ -929,7 +590,7 @@ export const typeMapHas = (parent, key) => {
|
|||||||
* @param {AbstractType<any>} parent
|
* @param {AbstractType<any>} parent
|
||||||
* @param {string} key
|
* @param {string} key
|
||||||
* @param {Snapshot} snapshot
|
* @param {Snapshot} snapshot
|
||||||
* @return {Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined}
|
* @return {Object<string,any>|number|Array<any>|string|Uint8Array|AbstractType<any>|undefined}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
@@ -943,41 +604,10 @@ export const typeMapGetSnapshot = (parent, key, snapshot) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {AbstractType<any>} parent
|
* @param {Map<string,Item>} map
|
||||||
* @param {Snapshot} snapshot
|
|
||||||
* @return {Object<string,Object<string,any>|number|null|Array<any>|string|Uint8Array|AbstractType<any>|undefined>}
|
|
||||||
*
|
|
||||||
* @private
|
|
||||||
* @function
|
|
||||||
*/
|
|
||||||
export const typeMapGetAllSnapshot = (parent, snapshot) => {
|
|
||||||
/**
|
|
||||||
* @type {Object<string,any>}
|
|
||||||
*/
|
|
||||||
const res = {}
|
|
||||||
parent._map.forEach((value, key) => {
|
|
||||||
/**
|
|
||||||
* @type {Item|null}
|
|
||||||
*/
|
|
||||||
let v = value
|
|
||||||
while (v !== null && (!snapshot.sv.has(v.id.client) || v.id.clock >= (snapshot.sv.get(v.id.client) || 0))) {
|
|
||||||
v = v.left
|
|
||||||
}
|
|
||||||
if (v !== null && isVisible(v, snapshot)) {
|
|
||||||
res[key] = v.content.getContent()[v.length - 1]
|
|
||||||
}
|
|
||||||
})
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {AbstractType<any> & { _map: Map<string, Item> }} type
|
|
||||||
* @return {IterableIterator<Array<any>>}
|
* @return {IterableIterator<Array<any>>}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const createMapIterator = type => {
|
export const createMapIterator = map => iterator.iteratorFilter(map.entries(), /** @param {any} entry */ entry => !entry[1].deleted)
|
||||||
type.doc ?? warnPrematureAccess()
|
|
||||||
return iterator.iteratorFilter(type._map.entries(), /** @param {any} entry */ entry => !entry[1].deleted)
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -10,23 +10,31 @@ import {
|
|||||||
typeListForEach,
|
typeListForEach,
|
||||||
typeListCreateIterator,
|
typeListCreateIterator,
|
||||||
typeListInsertGenerics,
|
typeListInsertGenerics,
|
||||||
typeListPushGenerics,
|
|
||||||
typeListDelete,
|
typeListDelete,
|
||||||
typeListMap,
|
typeListMap,
|
||||||
YArrayRefID,
|
YArrayRefID,
|
||||||
callTypeObservers,
|
callTypeObservers,
|
||||||
transact,
|
transact,
|
||||||
warnPrematureAccess,
|
Doc, Transaction, Item // eslint-disable-line
|
||||||
ArraySearchMarker, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
import { typeListSlice } from './AbstractType.js'
|
|
||||||
|
import * as decoding from 'lib0/decoding.js' // eslint-disable-line
|
||||||
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Event that describes the changes on a YArray
|
* Event that describes the changes on a YArray
|
||||||
* @template T
|
* @template T
|
||||||
* @extends YEvent<YArray<T>>
|
|
||||||
*/
|
*/
|
||||||
export class YArrayEvent extends YEvent {}
|
export class YArrayEvent extends YEvent {
|
||||||
|
/**
|
||||||
|
* @param {YArray<T>} yarray The changed type
|
||||||
|
* @param {Transaction} transaction The transaction object
|
||||||
|
*/
|
||||||
|
constructor (yarray, transaction) {
|
||||||
|
super(yarray, transaction)
|
||||||
|
this._transaction = transaction
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A shared Array implementation.
|
* A shared Array implementation.
|
||||||
@@ -42,25 +50,6 @@ export class YArray extends AbstractType {
|
|||||||
* @private
|
* @private
|
||||||
*/
|
*/
|
||||||
this._prelimContent = []
|
this._prelimContent = []
|
||||||
/**
|
|
||||||
* @type {Array<ArraySearchMarker>}
|
|
||||||
*/
|
|
||||||
this._searchMarker = []
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Construct a new YArray containing the specified items.
|
|
||||||
* @template {Object<string,any>|Array<any>|number|null|string|Uint8Array} T
|
|
||||||
* @param {Array<T>} items
|
|
||||||
* @return {YArray<T>}
|
|
||||||
*/
|
|
||||||
static from (items) {
|
|
||||||
/**
|
|
||||||
* @type {YArray<T>}
|
|
||||||
*/
|
|
||||||
const a = new YArray()
|
|
||||||
a.push(items)
|
|
||||||
return a
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -79,34 +68,12 @@ export class YArray extends AbstractType {
|
|||||||
this._prelimContent = null
|
this._prelimContent = null
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {YArray<T>}
|
|
||||||
*/
|
|
||||||
_copy () {
|
_copy () {
|
||||||
return new YArray()
|
return new YArray()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Makes a copy of this data type that can be included somewhere else.
|
|
||||||
*
|
|
||||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
|
||||||
*
|
|
||||||
* @return {YArray<T>}
|
|
||||||
*/
|
|
||||||
clone () {
|
|
||||||
/**
|
|
||||||
* @type {YArray<T>}
|
|
||||||
*/
|
|
||||||
const arr = new YArray()
|
|
||||||
arr.insert(0, this.toArray().map(el =>
|
|
||||||
el instanceof AbstractType ? /** @type {typeof el} */ (el.clone()) : el
|
|
||||||
))
|
|
||||||
return arr
|
|
||||||
}
|
|
||||||
|
|
||||||
get length () {
|
get length () {
|
||||||
this.doc ?? warnPrematureAccess()
|
return this._prelimContent === null ? this._length : this._prelimContent.length
|
||||||
return this._length
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -116,7 +83,6 @@ export class YArray extends AbstractType {
|
|||||||
* @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
|
* @param {Set<null|string>} parentSubs Keys changed on this type. `null` if list was modified.
|
||||||
*/
|
*/
|
||||||
_callObserver (transaction, parentSubs) {
|
_callObserver (transaction, parentSubs) {
|
||||||
super._callObserver(transaction, parentSubs)
|
|
||||||
callTypeObservers(this, transaction, new YArrayEvent(this, transaction))
|
callTypeObservers(this, transaction, new YArrayEvent(this, transaction))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -139,7 +105,7 @@ export class YArray extends AbstractType {
|
|||||||
insert (index, content) {
|
insert (index, content) {
|
||||||
if (this.doc !== null) {
|
if (this.doc !== null) {
|
||||||
transact(this.doc, transaction => {
|
transact(this.doc, transaction => {
|
||||||
typeListInsertGenerics(transaction, this, index, /** @type {any} */ (content))
|
typeListInsertGenerics(transaction, this, index, content)
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
/** @type {Array<any>} */ (this._prelimContent).splice(index, 0, ...content)
|
/** @type {Array<any>} */ (this._prelimContent).splice(index, 0, ...content)
|
||||||
@@ -150,23 +116,15 @@ export class YArray extends AbstractType {
|
|||||||
* Appends content to this YArray.
|
* Appends content to this YArray.
|
||||||
*
|
*
|
||||||
* @param {Array<T>} content Array of content to append.
|
* @param {Array<T>} content Array of content to append.
|
||||||
*
|
|
||||||
* @todo Use the following implementation in all types.
|
|
||||||
*/
|
*/
|
||||||
push (content) {
|
push (content) {
|
||||||
if (this.doc !== null) {
|
this.insert(this.length, content)
|
||||||
transact(this.doc, transaction => {
|
|
||||||
typeListPushGenerics(transaction, this, /** @type {any} */ (content))
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
/** @type {Array<any>} */ (this._prelimContent).push(...content)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Prepends content to this YArray.
|
* Preppends content to this YArray.
|
||||||
*
|
*
|
||||||
* @param {Array<T>} content Array of content to prepend.
|
* @param {Array<T>} content Array of content to preppend.
|
||||||
*/
|
*/
|
||||||
unshift (content) {
|
unshift (content) {
|
||||||
this.insert(0, content)
|
this.insert(0, content)
|
||||||
@@ -207,18 +165,6 @@ export class YArray extends AbstractType {
|
|||||||
return typeListToArray(this)
|
return typeListToArray(this)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a portion of this YArray into a JavaScript Array selected
|
|
||||||
* from start to end (end not included).
|
|
||||||
*
|
|
||||||
* @param {number} [start]
|
|
||||||
* @param {number} [end]
|
|
||||||
* @return {Array<T>}
|
|
||||||
*/
|
|
||||||
slice (start = 0, end = this.length) {
|
|
||||||
return typeListSlice(this, start, end)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Transforms this Shared Type to a JSON object.
|
* Transforms this Shared Type to a JSON object.
|
||||||
*
|
*
|
||||||
@@ -232,7 +178,7 @@ export class YArray extends AbstractType {
|
|||||||
* Returns an Array with the result of calling a provided function on every
|
* Returns an Array with the result of calling a provided function on every
|
||||||
* element of this YArray.
|
* element of this YArray.
|
||||||
*
|
*
|
||||||
* @template M
|
* @template T,M
|
||||||
* @param {function(T,number,YArray<T>):M} f Function that produces an element of the new Array
|
* @param {function(T,number,YArray<T>):M} f Function that produces an element of the new Array
|
||||||
* @return {Array<M>} A new array with each element being the result of the
|
* @return {Array<M>} A new array with each element being the result of the
|
||||||
* callback function
|
* callback function
|
||||||
@@ -242,7 +188,7 @@ export class YArray extends AbstractType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Executes a provided function once on every element of this YArray.
|
* Executes a provided function on once on overy element of this YArray.
|
||||||
*
|
*
|
||||||
* @param {function(T,number,YArray<T>):void} f A function to execute on every element of this YArray.
|
* @param {function(T,number,YArray<T>):void} f A function to execute on every element of this YArray.
|
||||||
*/
|
*/
|
||||||
@@ -258,17 +204,17 @@ export class YArray extends AbstractType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
*/
|
*/
|
||||||
_write (encoder) {
|
_write (encoder) {
|
||||||
encoder.writeTypeRef(YArrayRefID)
|
encoding.writeVarUint(encoder, YArrayRefID)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} _decoder
|
* @param {decoding.Decoder} decoder
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readYArray = _decoder => new YArray()
|
export const readYArray = decoder => new YArray()
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* @module YMap
|
* @module YMap
|
||||||
*/
|
*/
|
||||||
@@ -13,15 +14,15 @@ import {
|
|||||||
YMapRefID,
|
YMapRefID,
|
||||||
callTypeObservers,
|
callTypeObservers,
|
||||||
transact,
|
transact,
|
||||||
warnPrematureAccess,
|
Doc, Transaction, Item // eslint-disable-line
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Transaction, Item // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as iterator from 'lib0/iterator'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js' // eslint-disable-line
|
||||||
|
import * as iterator from 'lib0/iterator.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @template T
|
* @template T
|
||||||
* @extends YEvent<YMap<T>>
|
|
||||||
* Event that describes the changes on a YMap.
|
* Event that describes the changes on a YMap.
|
||||||
*/
|
*/
|
||||||
export class YMapEvent extends YEvent {
|
export class YMapEvent extends YEvent {
|
||||||
@@ -37,11 +38,11 @@ export class YMapEvent extends YEvent {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @template MapType
|
* @template T number|string|Object|Array|Uint8Array
|
||||||
* A shared Map implementation.
|
* A shared Map implementation.
|
||||||
*
|
*
|
||||||
* @extends AbstractType<YMapEvent<MapType>>
|
* @extends AbstractType<YMapEvent<T>>
|
||||||
* @implements {Iterable<[string, MapType]>}
|
* @implements {Iterable<T>}
|
||||||
*/
|
*/
|
||||||
export class YMap extends AbstractType {
|
export class YMap extends AbstractType {
|
||||||
/**
|
/**
|
||||||
@@ -75,37 +76,16 @@ export class YMap extends AbstractType {
|
|||||||
*/
|
*/
|
||||||
_integrate (y, item) {
|
_integrate (y, item) {
|
||||||
super._integrate(y, item)
|
super._integrate(y, item)
|
||||||
;/** @type {Map<string, any>} */ (this._prelimContent).forEach((value, key) => {
|
for (const [key, value] of /** @type {Map<string, any>} */ (this._prelimContent)) {
|
||||||
this.set(key, value)
|
this.set(key, value)
|
||||||
})
|
}
|
||||||
this._prelimContent = null
|
this._prelimContent = null
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {YMap<MapType>}
|
|
||||||
*/
|
|
||||||
_copy () {
|
_copy () {
|
||||||
return new YMap()
|
return new YMap()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Makes a copy of this data type that can be included somewhere else.
|
|
||||||
*
|
|
||||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
|
||||||
*
|
|
||||||
* @return {YMap<MapType>}
|
|
||||||
*/
|
|
||||||
clone () {
|
|
||||||
/**
|
|
||||||
* @type {YMap<MapType>}
|
|
||||||
*/
|
|
||||||
const map = new YMap()
|
|
||||||
this.forEach((value, key) => {
|
|
||||||
map.set(key, value instanceof AbstractType ? /** @type {typeof value} */ (value.clone()) : value)
|
|
||||||
})
|
|
||||||
return map
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates YMapEvent and calls observers.
|
* Creates YMapEvent and calls observers.
|
||||||
*
|
*
|
||||||
@@ -119,20 +99,19 @@ export class YMap extends AbstractType {
|
|||||||
/**
|
/**
|
||||||
* Transforms this Shared Type to a JSON object.
|
* Transforms this Shared Type to a JSON object.
|
||||||
*
|
*
|
||||||
* @return {Object<string,any>}
|
* @return {Object<string,T>}
|
||||||
*/
|
*/
|
||||||
toJSON () {
|
toJSON () {
|
||||||
this.doc ?? warnPrematureAccess()
|
|
||||||
/**
|
/**
|
||||||
* @type {Object<string,MapType>}
|
* @type {Object<string,T>}
|
||||||
*/
|
*/
|
||||||
const map = {}
|
const map = {}
|
||||||
this._map.forEach((item, key) => {
|
for (const [key, item] of this._map) {
|
||||||
if (!item.deleted) {
|
if (!item.deleted) {
|
||||||
const v = item.content.getContent()[item.length - 1]
|
const v = item.content.getContent()[item.length - 1]
|
||||||
map[key] = v instanceof AbstractType ? v.toJSON() : v
|
map[key] = v instanceof AbstractType ? v.toJSON() : v
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
return map
|
return map
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -142,7 +121,7 @@ export class YMap extends AbstractType {
|
|||||||
* @return {number}
|
* @return {number}
|
||||||
*/
|
*/
|
||||||
get size () {
|
get size () {
|
||||||
return [...createMapIterator(this)].length
|
return [...createMapIterator(this._map)].length
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -151,45 +130,47 @@ export class YMap extends AbstractType {
|
|||||||
* @return {IterableIterator<string>}
|
* @return {IterableIterator<string>}
|
||||||
*/
|
*/
|
||||||
keys () {
|
keys () {
|
||||||
return iterator.iteratorMap(createMapIterator(this), /** @param {any} v */ v => v[0])
|
return iterator.iteratorMap(createMapIterator(this._map), /** @param {any} v */ v => v[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the values for each element in the YMap Type.
|
* Returns the keys for each element in the YMap Type.
|
||||||
*
|
*
|
||||||
* @return {IterableIterator<MapType>}
|
* @return {IterableIterator<string>}
|
||||||
*/
|
*/
|
||||||
values () {
|
values () {
|
||||||
return iterator.iteratorMap(createMapIterator(this), /** @param {any} v */ v => v[1].content.getContent()[v[1].length - 1])
|
return iterator.iteratorMap(createMapIterator(this._map), /** @param {any} v */ v => v[1].content.getContent()[v[1].length - 1])
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns an Iterator of [key, value] pairs
|
* Returns an Iterator of [key, value] pairs
|
||||||
*
|
*
|
||||||
* @return {IterableIterator<[string, MapType]>}
|
* @return {IterableIterator<any>}
|
||||||
*/
|
*/
|
||||||
entries () {
|
entries () {
|
||||||
return iterator.iteratorMap(createMapIterator(this), /** @param {any} v */ v => /** @type {any} */ ([v[0], v[1].content.getContent()[v[1].length - 1]]))
|
return iterator.iteratorMap(createMapIterator(this._map), /** @param {any} v */ v => [v[0], v[1].content.getContent()[v[1].length - 1]])
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Executes a provided function on once on every key-value pair.
|
* Executes a provided function on once on every key-value pair.
|
||||||
*
|
*
|
||||||
* @param {function(MapType,string,YMap<MapType>):void} f A function to execute on every element of this YArray.
|
* @param {function(T,string,YMap<T>):void} f A function to execute on every element of this YArray.
|
||||||
*/
|
*/
|
||||||
forEach (f) {
|
forEach (f) {
|
||||||
this.doc ?? warnPrematureAccess()
|
/**
|
||||||
this._map.forEach((item, key) => {
|
* @type {Object<string,T>}
|
||||||
|
*/
|
||||||
|
const map = {}
|
||||||
|
for (const [key, item] of this._map) {
|
||||||
if (!item.deleted) {
|
if (!item.deleted) {
|
||||||
f(item.content.getContent()[item.length - 1], key, this)
|
f(item.content.getContent()[item.length - 1], key, this)
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
|
return map
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns an Iterator of [key, value] pairs
|
* @return {IterableIterator<T>}
|
||||||
*
|
|
||||||
* @return {IterableIterator<[string, MapType]>}
|
|
||||||
*/
|
*/
|
||||||
[Symbol.iterator] () {
|
[Symbol.iterator] () {
|
||||||
return this.entries()
|
return this.entries()
|
||||||
@@ -212,16 +193,14 @@ export class YMap extends AbstractType {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Adds or updates an element with a specified key and value.
|
* Adds or updates an element with a specified key and value.
|
||||||
* @template {MapType} VAL
|
|
||||||
*
|
*
|
||||||
* @param {string} key The key of the element to add to this YMap
|
* @param {string} key The key of the element to add to this YMap
|
||||||
* @param {VAL} value The value of the element to add
|
* @param {T} value The value of the element to add
|
||||||
* @return {VAL}
|
|
||||||
*/
|
*/
|
||||||
set (key, value) {
|
set (key, value) {
|
||||||
if (this.doc !== null) {
|
if (this.doc !== null) {
|
||||||
transact(this.doc, transaction => {
|
transact(this.doc, transaction => {
|
||||||
typeMapSet(transaction, this, key, /** @type {any} */ (value))
|
typeMapSet(transaction, this, key, value)
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
/** @type {Map<string, any>} */ (this._prelimContent).set(key, value)
|
/** @type {Map<string, any>} */ (this._prelimContent).set(key, value)
|
||||||
@@ -233,7 +212,7 @@ export class YMap extends AbstractType {
|
|||||||
* Returns a specified element from this YMap.
|
* Returns a specified element from this YMap.
|
||||||
*
|
*
|
||||||
* @param {string} key
|
* @param {string} key
|
||||||
* @return {MapType|undefined}
|
* @return {T|undefined}
|
||||||
*/
|
*/
|
||||||
get (key) {
|
get (key) {
|
||||||
return /** @type {any} */ (typeMapGet(this, key))
|
return /** @type {any} */ (typeMapGet(this, key))
|
||||||
@@ -250,32 +229,17 @@ export class YMap extends AbstractType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Removes all elements from this YMap.
|
* @param {encoding.Encoder} encoder
|
||||||
*/
|
|
||||||
clear () {
|
|
||||||
if (this.doc !== null) {
|
|
||||||
transact(this.doc, transaction => {
|
|
||||||
this.forEach(function (_value, key, map) {
|
|
||||||
typeMapDelete(transaction, map, key)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
/** @type {Map<string, any>} */ (this._prelimContent).clear()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
|
||||||
*/
|
*/
|
||||||
_write (encoder) {
|
_write (encoder) {
|
||||||
encoder.writeTypeRef(YMapRefID)
|
encoding.writeVarUint(encoder, YMapRefID)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} _decoder
|
* @param {decoding.Decoder} decoder
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readYMap = _decoder => new YMap()
|
export const readYMap = decoder => new YMap()
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,31 +1,25 @@
|
|||||||
import * as object from 'lib0/object'
|
|
||||||
|
|
||||||
import {
|
import {
|
||||||
YXmlFragment,
|
YXmlFragment,
|
||||||
transact,
|
transact,
|
||||||
typeMapDelete,
|
typeMapDelete,
|
||||||
typeMapHas,
|
|
||||||
typeMapSet,
|
typeMapSet,
|
||||||
typeMapGet,
|
typeMapGet,
|
||||||
typeMapGetAll,
|
typeMapGetAll,
|
||||||
typeMapGetAllSnapshot,
|
|
||||||
typeListForEach,
|
typeListForEach,
|
||||||
YXmlElementRefID,
|
YXmlElementRefID,
|
||||||
Snapshot, YXmlText, ContentType, AbstractType, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, Item // eslint-disable-line
|
Snapshot, Doc, Item // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
/**
|
import * as encoding from 'lib0/encoding.js'
|
||||||
* @typedef {Object|number|null|Array<any>|string|Uint8Array|AbstractType<any>} ValueTypes
|
import * as decoding from 'lib0/decoding.js'
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* An YXmlElement imitates the behavior of a
|
* An YXmlElement imitates the behavior of a
|
||||||
* https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element
|
* {@link https://developer.mozilla.org/en-US/docs/Web/API/Element|Dom Element}.
|
||||||
*
|
*
|
||||||
* * An YXmlElement has attributes (key value pairs)
|
* * An YXmlElement has attributes (key value pairs)
|
||||||
* * An YXmlElement has childElements that must inherit from YXmlElement
|
* * An YXmlElement has childElements that must inherit from YXmlElement
|
||||||
*
|
|
||||||
* @template {{ [key: string]: ValueTypes }} [KV={ [key: string]: string }]
|
|
||||||
*/
|
*/
|
||||||
export class YXmlElement extends YXmlFragment {
|
export class YXmlElement extends YXmlFragment {
|
||||||
constructor (nodeName = 'UNDEFINED') {
|
constructor (nodeName = 'UNDEFINED') {
|
||||||
@@ -37,22 +31,6 @@ export class YXmlElement extends YXmlFragment {
|
|||||||
this._prelimAttrs = new Map()
|
this._prelimAttrs = new Map()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @type {YXmlElement|YXmlText|null}
|
|
||||||
*/
|
|
||||||
get nextSibling () {
|
|
||||||
const n = this._item ? this._item.next : null
|
|
||||||
return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @type {YXmlElement|YXmlText|null}
|
|
||||||
*/
|
|
||||||
get prevSibling () {
|
|
||||||
const n = this._item ? this._item.prev : null
|
|
||||||
return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Integrate this type into the Yjs instance.
|
* Integrate this type into the Yjs instance.
|
||||||
*
|
*
|
||||||
@@ -80,29 +58,6 @@ export class YXmlElement extends YXmlFragment {
|
|||||||
return new YXmlElement(this.nodeName)
|
return new YXmlElement(this.nodeName)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Makes a copy of this data type that can be included somewhere else.
|
|
||||||
*
|
|
||||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
|
||||||
*
|
|
||||||
* @return {YXmlElement<KV>}
|
|
||||||
*/
|
|
||||||
clone () {
|
|
||||||
/**
|
|
||||||
* @type {YXmlElement<KV>}
|
|
||||||
*/
|
|
||||||
const el = new YXmlElement(this.nodeName)
|
|
||||||
const attrs = this.getAttributes()
|
|
||||||
object.forEach(attrs, (value, key) => {
|
|
||||||
if (typeof value === 'string') {
|
|
||||||
el.setAttribute(key, value)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
// @ts-ignore
|
|
||||||
el.insert(0, this.toArray().map(item => item instanceof AbstractType ? item.clone() : item))
|
|
||||||
return el
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the XML serialization of this YXmlElement.
|
* Returns the XML serialization of this YXmlElement.
|
||||||
* The attributes are ordered by attribute-name, so you can easily use this
|
* The attributes are ordered by attribute-name, so you can easily use this
|
||||||
@@ -133,7 +88,7 @@ export class YXmlElement extends YXmlFragment {
|
|||||||
/**
|
/**
|
||||||
* Removes an attribute from this YXmlElement.
|
* Removes an attribute from this YXmlElement.
|
||||||
*
|
*
|
||||||
* @param {string} attributeName The attribute name that is to be removed.
|
* @param {String} attributeName The attribute name that is to be removed.
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
@@ -150,10 +105,8 @@ export class YXmlElement extends YXmlFragment {
|
|||||||
/**
|
/**
|
||||||
* Sets or updates an attribute.
|
* Sets or updates an attribute.
|
||||||
*
|
*
|
||||||
* @template {keyof KV & string} KEY
|
* @param {String} attributeName The attribute name that is to be set.
|
||||||
*
|
* @param {String} attributeValue The attribute value that is to be set.
|
||||||
* @param {KEY} attributeName The attribute name that is to be set.
|
|
||||||
* @param {KV[KEY]} attributeValue The attribute value that is to be set.
|
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
@@ -170,11 +123,9 @@ export class YXmlElement extends YXmlFragment {
|
|||||||
/**
|
/**
|
||||||
* Returns an attribute value that belongs to the attribute name.
|
* Returns an attribute value that belongs to the attribute name.
|
||||||
*
|
*
|
||||||
* @template {keyof KV & string} KEY
|
* @param {String} attributeName The attribute name that identifies the
|
||||||
*
|
|
||||||
* @param {KEY} attributeName The attribute name that identifies the
|
|
||||||
* queried value.
|
* queried value.
|
||||||
* @return {KV[KEY]|undefined} The queried attribute value.
|
* @return {String} The queried attribute value.
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
@@ -182,28 +133,16 @@ export class YXmlElement extends YXmlFragment {
|
|||||||
return /** @type {any} */ (typeMapGet(this, attributeName))
|
return /** @type {any} */ (typeMapGet(this, attributeName))
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns whether an attribute exists
|
|
||||||
*
|
|
||||||
* @param {string} attributeName The attribute name to check for existence.
|
|
||||||
* @return {boolean} whether the attribute exists.
|
|
||||||
*
|
|
||||||
* @public
|
|
||||||
*/
|
|
||||||
hasAttribute (attributeName) {
|
|
||||||
return /** @type {any} */ (typeMapHas(this, attributeName))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns all attribute name/value pairs in a JSON Object.
|
* Returns all attribute name/value pairs in a JSON Object.
|
||||||
*
|
*
|
||||||
* @param {Snapshot} [snapshot]
|
* @param {Snapshot} [snapshot]
|
||||||
* @return {{ [Key in Extract<keyof KV,string>]?: KV[Key]}} A JSON Object that describes the attributes.
|
* @return {Object<string, any>} A JSON Object that describes the attributes.
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
getAttributes (snapshot) {
|
getAttributes (snapshot) {
|
||||||
return /** @type {any} */ (snapshot ? typeMapGetAllSnapshot(this, snapshot) : typeMapGetAll(this))
|
return typeMapGetAll(this)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -225,10 +164,7 @@ export class YXmlElement extends YXmlFragment {
|
|||||||
const dom = _document.createElement(this.nodeName)
|
const dom = _document.createElement(this.nodeName)
|
||||||
const attrs = this.getAttributes()
|
const attrs = this.getAttributes()
|
||||||
for (const key in attrs) {
|
for (const key in attrs) {
|
||||||
const value = attrs[key]
|
dom.setAttribute(key, attrs[key])
|
||||||
if (typeof value === 'string') {
|
|
||||||
dom.setAttribute(key, value)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
typeListForEach(this, yxml => {
|
typeListForEach(this, yxml => {
|
||||||
dom.appendChild(yxml.toDOM(_document, hooks, binding))
|
dom.appendChild(yxml.toDOM(_document, hooks, binding))
|
||||||
@@ -245,18 +181,18 @@ export class YXmlElement extends YXmlFragment {
|
|||||||
*
|
*
|
||||||
* This is called when this Item is sent to a remote peer.
|
* This is called when this Item is sent to a remote peer.
|
||||||
*
|
*
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
* @param {encoding.Encoder} encoder The encoder to write data to.
|
||||||
*/
|
*/
|
||||||
_write (encoder) {
|
_write (encoder) {
|
||||||
encoder.writeTypeRef(YXmlElementRefID)
|
encoding.writeVarUint(encoder, YXmlElementRefID)
|
||||||
encoder.writeKey(this.nodeName)
|
encoding.writeVarString(encoder, this.nodeName)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {YXmlElement}
|
* @return {YXmlElement}
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readYXmlElement = decoder => new YXmlElement(decoder.readKey())
|
export const readYXmlElement = decoder => new YXmlElement(decoding.readVarString(decoder))
|
||||||
|
|||||||
@@ -1,18 +1,18 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
YEvent,
|
YEvent,
|
||||||
YXmlText, YXmlElement, YXmlFragment, Transaction // eslint-disable-line
|
YXmlElement, YXmlFragment, Transaction // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @extends YEvent<YXmlElement|YXmlText|YXmlFragment>
|
|
||||||
* An Event that describes changes on a YXml Element or Yxml Fragment
|
* An Event that describes changes on a YXml Element or Yxml Fragment
|
||||||
*/
|
*/
|
||||||
export class YXmlEvent extends YEvent {
|
export class YXmlEvent extends YEvent {
|
||||||
/**
|
/**
|
||||||
* @param {YXmlElement|YXmlText|YXmlFragment} target The target on which the event is created.
|
* @param {YXmlElement|YXmlFragment} target The target on which the event is created.
|
||||||
* @param {Set<string|null>} subs The set of changed attributes. `null` is included if the
|
* @param {Set<string|null>} subs The set of changed attributes. `null` is included if the
|
||||||
* child list changed.
|
* child list changed.
|
||||||
* @param {Transaction} transaction The transaction instance with which the
|
* @param {Transaction} transaction The transaction instance with wich the
|
||||||
* change was created.
|
* change was created.
|
||||||
*/
|
*/
|
||||||
constructor (target, subs, transaction) {
|
constructor (target, subs, transaction) {
|
||||||
@@ -25,7 +25,7 @@ export class YXmlEvent extends YEvent {
|
|||||||
this.childListChanged = false
|
this.childListChanged = false
|
||||||
/**
|
/**
|
||||||
* Set of all changed attributes.
|
* Set of all changed attributes.
|
||||||
* @type {Set<string>}
|
* @type {Set<string|null>}
|
||||||
*/
|
*/
|
||||||
this.attributesChanged = new Set()
|
this.attributesChanged = new Set()
|
||||||
subs.forEach((sub) => {
|
subs.forEach((sub) => {
|
||||||
|
|||||||
@@ -9,20 +9,16 @@ import {
|
|||||||
typeListMap,
|
typeListMap,
|
||||||
typeListForEach,
|
typeListForEach,
|
||||||
typeListInsertGenerics,
|
typeListInsertGenerics,
|
||||||
typeListInsertGenericsAfter,
|
|
||||||
typeListDelete,
|
typeListDelete,
|
||||||
typeListToArray,
|
typeListToArray,
|
||||||
YXmlFragmentRefID,
|
YXmlFragmentRefID,
|
||||||
callTypeObservers,
|
callTypeObservers,
|
||||||
transact,
|
transact,
|
||||||
typeListGet,
|
Doc, ContentType, Transaction, Item, YXmlText, YXmlHook, Snapshot // eslint-disable-line
|
||||||
typeListSlice,
|
|
||||||
warnPrematureAccess,
|
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, Doc, ContentType, Transaction, Item, YXmlText, YXmlHook // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as error from 'lib0/error'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as array from 'lib0/array'
|
import * as decoding from 'lib0/decoding.js' // eslint-disable-line
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Define the elements to which a set of CSS queries apply.
|
* Define the elements to which a set of CSS queries apply.
|
||||||
@@ -67,7 +63,6 @@ export class YXmlTreeWalker {
|
|||||||
*/
|
*/
|
||||||
this._currentNode = /** @type {Item} */ (root._start)
|
this._currentNode = /** @type {Item} */ (root._start)
|
||||||
this._firstCall = true
|
this._firstCall = true
|
||||||
root.doc ?? warnPrematureAccess()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
[Symbol.iterator] () {
|
[Symbol.iterator] () {
|
||||||
@@ -86,7 +81,7 @@ export class YXmlTreeWalker {
|
|||||||
* @type {Item|null}
|
* @type {Item|null}
|
||||||
*/
|
*/
|
||||||
let n = this._currentNode
|
let n = this._currentNode
|
||||||
let type = n && n.content && /** @type {any} */ (n.content).type
|
let type = /** @type {any} */ (n.content).type
|
||||||
if (n !== null && (!this._firstCall || n.deleted || !this._filter(type))) { // if first call, we check if we can use the first item
|
if (n !== null && (!this._firstCall || n.deleted || !this._filter(type))) { // if first call, we check if we can use the first item
|
||||||
do {
|
do {
|
||||||
type = /** @type {any} */ (n.content).type
|
type = /** @type {any} */ (n.content).type
|
||||||
@@ -96,12 +91,8 @@ export class YXmlTreeWalker {
|
|||||||
} else {
|
} else {
|
||||||
// walk right or up in the tree
|
// walk right or up in the tree
|
||||||
while (n !== null) {
|
while (n !== null) {
|
||||||
/**
|
if (n.right !== null) {
|
||||||
* @type {Item | null}
|
n = n.right
|
||||||
*/
|
|
||||||
const nxt = n.next
|
|
||||||
if (nxt !== null) {
|
|
||||||
n = nxt
|
|
||||||
break
|
break
|
||||||
} else if (n.parent === this._root) {
|
} else if (n.parent === this._root) {
|
||||||
n = null
|
n = null
|
||||||
@@ -140,14 +131,6 @@ export class YXmlFragment extends AbstractType {
|
|||||||
this._prelimContent = []
|
this._prelimContent = []
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @type {YXmlElement|YXmlText|null}
|
|
||||||
*/
|
|
||||||
get firstChild () {
|
|
||||||
const first = this._first
|
|
||||||
return first ? first.content.getContent()[0] : null
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Integrate this type into the Yjs instance.
|
* Integrate this type into the Yjs instance.
|
||||||
*
|
*
|
||||||
@@ -168,22 +151,7 @@ export class YXmlFragment extends AbstractType {
|
|||||||
return new YXmlFragment()
|
return new YXmlFragment()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Makes a copy of this data type that can be included somewhere else.
|
|
||||||
*
|
|
||||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
|
||||||
*
|
|
||||||
* @return {YXmlFragment}
|
|
||||||
*/
|
|
||||||
clone () {
|
|
||||||
const el = new YXmlFragment()
|
|
||||||
// @ts-ignore
|
|
||||||
el.insert(0, this.toArray().map(item => item instanceof AbstractType ? item.clone() : item))
|
|
||||||
return el
|
|
||||||
}
|
|
||||||
|
|
||||||
get length () {
|
get length () {
|
||||||
this.doc ?? warnPrematureAccess()
|
|
||||||
return this._prelimContent === null ? this._length : this._prelimContent.length
|
return this._prelimContent === null ? this._length : this._prelimContent.length
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -249,7 +217,7 @@ export class YXmlFragment extends AbstractType {
|
|||||||
querySelectorAll (query) {
|
querySelectorAll (query) {
|
||||||
query = query.toUpperCase()
|
query = query.toUpperCase()
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
return array.from(new YXmlTreeWalker(this, element => element.nodeName && element.nodeName.toUpperCase() === query))
|
return Array.from(new YXmlTreeWalker(this, element => element.nodeName && element.nodeName.toUpperCase() === query))
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -325,32 +293,6 @@ export class YXmlFragment extends AbstractType {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Inserts new content at an index.
|
|
||||||
*
|
|
||||||
* @example
|
|
||||||
* // Insert character 'a' at position 0
|
|
||||||
* xml.insert(0, [new Y.XmlText('text')])
|
|
||||||
*
|
|
||||||
* @param {null|Item|YXmlElement|YXmlText} ref The index to insert content at
|
|
||||||
* @param {Array<YXmlElement|YXmlText>} content The array of content
|
|
||||||
*/
|
|
||||||
insertAfter (ref, content) {
|
|
||||||
if (this.doc !== null) {
|
|
||||||
transact(this.doc, transaction => {
|
|
||||||
const refItem = (ref && ref instanceof AbstractType) ? ref._item : ref
|
|
||||||
typeListInsertGenericsAfter(transaction, this, refItem, content)
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
const pc = /** @type {Array<any>} */ (this._prelimContent)
|
|
||||||
const index = ref === null ? 0 : pc.findIndex(el => el === ref) + 1
|
|
||||||
if (index === 0 && ref !== null) {
|
|
||||||
throw error.create('Reference item not found')
|
|
||||||
}
|
|
||||||
pc.splice(index, 0, ...content)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Deletes elements starting from an index.
|
* Deletes elements starting from an index.
|
||||||
*
|
*
|
||||||
@@ -377,73 +319,24 @@ export class YXmlFragment extends AbstractType {
|
|||||||
return typeListToArray(this)
|
return typeListToArray(this)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Appends content to this YArray.
|
|
||||||
*
|
|
||||||
* @param {Array<YXmlElement|YXmlText>} content Array of content to append.
|
|
||||||
*/
|
|
||||||
push (content) {
|
|
||||||
this.insert(this.length, content)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Prepends content to this YArray.
|
|
||||||
*
|
|
||||||
* @param {Array<YXmlElement|YXmlText>} content Array of content to prepend.
|
|
||||||
*/
|
|
||||||
unshift (content) {
|
|
||||||
this.insert(0, content)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the i-th element from a YArray.
|
|
||||||
*
|
|
||||||
* @param {number} index The index of the element to return from the YArray
|
|
||||||
* @return {YXmlElement|YXmlText}
|
|
||||||
*/
|
|
||||||
get (index) {
|
|
||||||
return typeListGet(this, index)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns a portion of this YXmlFragment into a JavaScript Array selected
|
|
||||||
* from start to end (end not included).
|
|
||||||
*
|
|
||||||
* @param {number} [start]
|
|
||||||
* @param {number} [end]
|
|
||||||
* @return {Array<YXmlElement|YXmlText>}
|
|
||||||
*/
|
|
||||||
slice (start = 0, end = this.length) {
|
|
||||||
return typeListSlice(this, start, end)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Executes a provided function on once on every child element.
|
|
||||||
*
|
|
||||||
* @param {function(YXmlElement|YXmlText,number, typeof self):void} f A function to execute on every element of this YArray.
|
|
||||||
*/
|
|
||||||
forEach (f) {
|
|
||||||
typeListForEach(this, f)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Transform the properties of this type to binary and write it to an
|
* Transform the properties of this type to binary and write it to an
|
||||||
* BinaryEncoder.
|
* BinaryEncoder.
|
||||||
*
|
*
|
||||||
* This is called when this Item is sent to a remote peer.
|
* This is called when this Item is sent to a remote peer.
|
||||||
*
|
*
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
* @param {encoding.Encoder} encoder The encoder to write data to.
|
||||||
*/
|
*/
|
||||||
_write (encoder) {
|
_write (encoder) {
|
||||||
encoder.writeTypeRef(YXmlFragmentRefID)
|
encoding.writeVarUint(encoder, YXmlFragmentRefID)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} _decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {YXmlFragment}
|
* @return {YXmlFragment}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readYXmlFragment = _decoder => new YXmlFragment()
|
export const readYXmlFragment = decoder => new YXmlFragment()
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
YMap,
|
YMap,
|
||||||
YXmlHookRefID,
|
YXmlHookRefID
|
||||||
UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2 // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* You can manage binding to a custom type with YXmlHook.
|
* You can manage binding to a custom type with YXmlHook.
|
||||||
@@ -28,21 +30,6 @@ export class YXmlHook extends YMap {
|
|||||||
return new YXmlHook(this.hookName)
|
return new YXmlHook(this.hookName)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Makes a copy of this data type that can be included somewhere else.
|
|
||||||
*
|
|
||||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
|
||||||
*
|
|
||||||
* @return {YXmlHook}
|
|
||||||
*/
|
|
||||||
clone () {
|
|
||||||
const el = new YXmlHook(this.hookName)
|
|
||||||
this.forEach((value, key) => {
|
|
||||||
el.set(key, value)
|
|
||||||
})
|
|
||||||
return el
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates a Dom Element that mirrors this YXmlElement.
|
* Creates a Dom Element that mirrors this YXmlElement.
|
||||||
*
|
*
|
||||||
@@ -79,20 +66,21 @@ export class YXmlHook extends YMap {
|
|||||||
*
|
*
|
||||||
* This is called when this Item is sent to a remote peer.
|
* This is called when this Item is sent to a remote peer.
|
||||||
*
|
*
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder The encoder to write data to.
|
* @param {encoding.Encoder} encoder The encoder to write data to.
|
||||||
*/
|
*/
|
||||||
_write (encoder) {
|
_write (encoder) {
|
||||||
encoder.writeTypeRef(YXmlHookRefID)
|
super._write(encoder)
|
||||||
encoder.writeKey(this.hookName)
|
encoding.writeVarUint(encoder, YXmlHookRefID)
|
||||||
|
encoding.writeVarString(encoder, this.hookName)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {YXmlHook}
|
* @return {YXmlHook}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readYXmlHook = decoder =>
|
export const readYXmlHook = decoder =>
|
||||||
new YXmlHook(decoder.readKey())
|
new YXmlHook(decoding.readVarString(decoder))
|
||||||
|
|||||||
@@ -1,47 +1,18 @@
|
|||||||
import {
|
|
||||||
YText,
|
import { YText, YXmlTextRefID } from '../internals.js'
|
||||||
YXmlTextRefID,
|
|
||||||
ContentType, YXmlElement, UpdateDecoderV1, UpdateDecoderV2, UpdateEncoderV1, UpdateEncoderV2, // eslint-disable-line
|
import * as encoding from 'lib0/encoding.js'
|
||||||
} from '../internals.js'
|
import * as decoding from 'lib0/decoding.js' // eslint-disable-line
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Represents text in a Dom Element. In the future this type will also handle
|
* Represents text in a Dom Element. In the future this type will also handle
|
||||||
* simple formatting information like bold and italic.
|
* simple formatting information like bold and italic.
|
||||||
*/
|
*/
|
||||||
export class YXmlText extends YText {
|
export class YXmlText extends YText {
|
||||||
/**
|
|
||||||
* @type {YXmlElement|YXmlText|null}
|
|
||||||
*/
|
|
||||||
get nextSibling () {
|
|
||||||
const n = this._item ? this._item.next : null
|
|
||||||
return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @type {YXmlElement|YXmlText|null}
|
|
||||||
*/
|
|
||||||
get prevSibling () {
|
|
||||||
const n = this._item ? this._item.prev : null
|
|
||||||
return n ? /** @type {YXmlElement|YXmlText} */ (/** @type {ContentType} */ (n.content).type) : null
|
|
||||||
}
|
|
||||||
|
|
||||||
_copy () {
|
_copy () {
|
||||||
return new YXmlText()
|
return new YXmlText()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Makes a copy of this data type that can be included somewhere else.
|
|
||||||
*
|
|
||||||
* Note that the content is only readable _after_ it has been included somewhere in the Ydoc.
|
|
||||||
*
|
|
||||||
* @return {YXmlText}
|
|
||||||
*/
|
|
||||||
clone () {
|
|
||||||
const text = new YXmlText()
|
|
||||||
text.applyDelta(this.toDelta())
|
|
||||||
return text
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates a Dom Element that mirrors this YXmlText.
|
* Creates a Dom Element that mirrors this YXmlText.
|
||||||
*
|
*
|
||||||
@@ -107,15 +78,15 @@ export class YXmlText extends YText {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
*/
|
*/
|
||||||
_write (encoder) {
|
_write (encoder) {
|
||||||
encoder.writeTypeRef(YXmlTextRefID)
|
encoding.writeVarUint(encoder, YXmlTextRefID)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {YXmlText}
|
* @return {YXmlText}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
|
|||||||
@@ -1,25 +0,0 @@
|
|||||||
import { ObservableV2 } from 'lib0/observable'
|
|
||||||
|
|
||||||
import {
|
|
||||||
Doc // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This is an abstract interface that all Connectors should implement to keep them interchangeable.
|
|
||||||
*
|
|
||||||
* @note This interface is experimental and it is not advised to actually inherit this class.
|
|
||||||
* It just serves as typing information.
|
|
||||||
*
|
|
||||||
* @extends {ObservableV2<any>}
|
|
||||||
*/
|
|
||||||
export class AbstractConnector extends ObservableV2 {
|
|
||||||
/**
|
|
||||||
* @param {Doc} ydoc
|
|
||||||
* @param {any} awareness
|
|
||||||
*/
|
|
||||||
constructor (ydoc, awareness) {
|
|
||||||
super()
|
|
||||||
this.doc = ydoc
|
|
||||||
this.awareness = awareness
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,17 +1,18 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
findIndexSS,
|
findIndexSS,
|
||||||
getState,
|
getState,
|
||||||
splitItem,
|
splitItem,
|
||||||
|
createID,
|
||||||
iterateStructs,
|
iterateStructs,
|
||||||
UpdateEncoderV2,
|
Item, AbstractStruct, GC, StructStore, Transaction, ID // eslint-disable-line
|
||||||
DSDecoderV1, DSEncoderV1, DSDecoderV2, DSEncoderV2, Item, GC, StructStore, Transaction, ID // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as array from 'lib0/array'
|
import * as array from 'lib0/array.js'
|
||||||
import * as math from 'lib0/math'
|
import * as math from 'lib0/math.js'
|
||||||
import * as map from 'lib0/map'
|
import * as map from 'lib0/map.js'
|
||||||
import * as encoding from 'lib0/encoding'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as decoding from 'lib0/decoding'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
|
||||||
export class DeleteItem {
|
export class DeleteItem {
|
||||||
/**
|
/**
|
||||||
@@ -121,8 +122,8 @@ export const sortAndMergeDeleteSet = ds => {
|
|||||||
for (i = 1, j = 1; i < dels.length; i++) {
|
for (i = 1, j = 1; i < dels.length; i++) {
|
||||||
const left = dels[j - 1]
|
const left = dels[j - 1]
|
||||||
const right = dels[i]
|
const right = dels[i]
|
||||||
if (left.clock + left.len >= right.clock) {
|
if (left.clock + left.len === right.clock) {
|
||||||
left.len = math.max(left.len, right.clock + right.len - left.clock)
|
left.len += right.len
|
||||||
} else {
|
} else {
|
||||||
if (j < i) {
|
if (j < i) {
|
||||||
dels[j] = right
|
dels[j] = right
|
||||||
@@ -162,15 +163,14 @@ export const mergeDeleteSets = dss => {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {DeleteSet} ds
|
* @param {DeleteSet} ds
|
||||||
* @param {number} client
|
* @param {ID} id
|
||||||
* @param {number} clock
|
|
||||||
* @param {number} length
|
* @param {number} length
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const addToDeleteSet = (ds, client, clock, length) => {
|
export const addToDeleteSet = (ds, id, length) => {
|
||||||
map.setIfUndefined(ds.clients, client, () => /** @type {Array<DeleteItem>} */ ([])).push(new DeleteItem(clock, length))
|
map.setIfUndefined(ds.clients, id.client, () => []).push(new DeleteItem(id.clock, length))
|
||||||
}
|
}
|
||||||
|
|
||||||
export const createDeleteSet = () => new DeleteSet()
|
export const createDeleteSet = () => new DeleteSet()
|
||||||
@@ -195,7 +195,7 @@ export const createDeleteSetFromStructStore = ss => {
|
|||||||
const clock = struct.id.clock
|
const clock = struct.id.clock
|
||||||
let len = struct.length
|
let len = struct.length
|
||||||
if (i + 1 < structs.length) {
|
if (i + 1 < structs.length) {
|
||||||
for (let next = structs[i + 1]; i + 1 < structs.length && next.deleted; next = structs[++i + 1]) {
|
for (let next = structs[i + 1]; i + 1 < structs.length && next.id.clock === clock + len && next.deleted; next = structs[++i + 1]) {
|
||||||
len += next.length
|
len += next.length
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -210,33 +210,28 @@ export const createDeleteSetFromStructStore = ss => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {DSEncoderV1 | DSEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {DeleteSet} ds
|
* @param {DeleteSet} ds
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const writeDeleteSet = (encoder, ds) => {
|
export const writeDeleteSet = (encoder, ds) => {
|
||||||
encoding.writeVarUint(encoder.restEncoder, ds.clients.size)
|
encoding.writeVarUint(encoder, ds.clients.size)
|
||||||
|
ds.clients.forEach((dsitems, client) => {
|
||||||
// Ensure that the delete set is written in a deterministic order
|
encoding.writeVarUint(encoder, client)
|
||||||
array.from(ds.clients.entries())
|
const len = dsitems.length
|
||||||
.sort((a, b) => b[0] - a[0])
|
encoding.writeVarUint(encoder, len)
|
||||||
.forEach(([client, dsitems]) => {
|
for (let i = 0; i < len; i++) {
|
||||||
encoder.resetDsCurVal()
|
const item = dsitems[i]
|
||||||
encoding.writeVarUint(encoder.restEncoder, client)
|
encoding.writeVarUint(encoder, item.clock)
|
||||||
const len = dsitems.length
|
encoding.writeVarUint(encoder, item.len)
|
||||||
encoding.writeVarUint(encoder.restEncoder, len)
|
}
|
||||||
for (let i = 0; i < len; i++) {
|
})
|
||||||
const item = dsitems[i]
|
|
||||||
encoder.writeDsClock(item.clock)
|
|
||||||
encoder.writeDsLen(item.len)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {DSDecoderV1 | DSDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {DeleteSet}
|
* @return {DeleteSet}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
@@ -244,49 +239,39 @@ export const writeDeleteSet = (encoder, ds) => {
|
|||||||
*/
|
*/
|
||||||
export const readDeleteSet = decoder => {
|
export const readDeleteSet = decoder => {
|
||||||
const ds = new DeleteSet()
|
const ds = new DeleteSet()
|
||||||
const numClients = decoding.readVarUint(decoder.restDecoder)
|
const numClients = decoding.readVarUint(decoder)
|
||||||
for (let i = 0; i < numClients; i++) {
|
for (let i = 0; i < numClients; i++) {
|
||||||
decoder.resetDsCurVal()
|
const client = decoding.readVarUint(decoder)
|
||||||
const client = decoding.readVarUint(decoder.restDecoder)
|
const numberOfDeletes = decoding.readVarUint(decoder)
|
||||||
const numberOfDeletes = decoding.readVarUint(decoder.restDecoder)
|
for (let i = 0; i < numberOfDeletes; i++) {
|
||||||
if (numberOfDeletes > 0) {
|
addToDeleteSet(ds, createID(client, decoding.readVarUint(decoder)), decoding.readVarUint(decoder))
|
||||||
const dsField = map.setIfUndefined(ds.clients, client, () => /** @type {Array<DeleteItem>} */ ([]))
|
|
||||||
for (let i = 0; i < numberOfDeletes; i++) {
|
|
||||||
dsField.push(new DeleteItem(decoder.readDsClock(), decoder.readDsLen()))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return ds
|
return ds
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @todo YDecoder also contains references to String and other Decoders. Would make sense to exchange YDecoder.toUint8Array for YDecoder.DsToUint8Array()..
|
* @param {decoding.Decoder} decoder
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {DSDecoderV1 | DSDecoderV2} decoder
|
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
* @param {StructStore} store
|
* @param {StructStore} store
|
||||||
* @return {Uint8Array|null} Returns a v2 update containing all deletes that couldn't be applied yet; or null if all deletes were applied successfully.
|
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readAndApplyDeleteSet = (decoder, transaction, store) => {
|
export const readAndApplyDeleteSet = (decoder, transaction, store) => {
|
||||||
const unappliedDS = new DeleteSet()
|
const unappliedDS = new DeleteSet()
|
||||||
const numClients = decoding.readVarUint(decoder.restDecoder)
|
const numClients = decoding.readVarUint(decoder)
|
||||||
for (let i = 0; i < numClients; i++) {
|
for (let i = 0; i < numClients; i++) {
|
||||||
decoder.resetDsCurVal()
|
const client = decoding.readVarUint(decoder)
|
||||||
const client = decoding.readVarUint(decoder.restDecoder)
|
const numberOfDeletes = decoding.readVarUint(decoder)
|
||||||
const numberOfDeletes = decoding.readVarUint(decoder.restDecoder)
|
|
||||||
const structs = store.clients.get(client) || []
|
const structs = store.clients.get(client) || []
|
||||||
const state = getState(store, client)
|
const state = getState(store, client)
|
||||||
for (let i = 0; i < numberOfDeletes; i++) {
|
for (let i = 0; i < numberOfDeletes; i++) {
|
||||||
const clock = decoder.readDsClock()
|
const clock = decoding.readVarUint(decoder)
|
||||||
const clockEnd = clock + decoder.readDsLen()
|
const len = decoding.readVarUint(decoder)
|
||||||
if (clock < state) {
|
if (clock < state) {
|
||||||
if (state < clockEnd) {
|
if (state < clock + len) {
|
||||||
addToDeleteSet(unappliedDS, client, state, clockEnd - state)
|
addToDeleteSet(unappliedDS, createID(client, state), clock + len - state)
|
||||||
}
|
}
|
||||||
let index = findIndexSS(structs, clock)
|
let index = findIndexSS(structs, clock)
|
||||||
/**
|
/**
|
||||||
@@ -303,10 +288,10 @@ export const readAndApplyDeleteSet = (decoder, transaction, store) => {
|
|||||||
while (index < structs.length) {
|
while (index < structs.length) {
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
struct = structs[index++]
|
struct = structs[index++]
|
||||||
if (struct.id.clock < clockEnd) {
|
if (struct.id.clock < clock + len) {
|
||||||
if (!struct.deleted) {
|
if (!struct.deleted) {
|
||||||
if (clockEnd < struct.id.clock + struct.length) {
|
if (clock + len < struct.id.clock + struct.length) {
|
||||||
structs.splice(index, 0, splitItem(transaction, struct, clockEnd - struct.id.clock))
|
structs.splice(index, 0, splitItem(transaction, struct, clock + len - struct.id.clock))
|
||||||
}
|
}
|
||||||
struct.delete(transaction)
|
struct.delete(transaction)
|
||||||
}
|
}
|
||||||
@@ -315,35 +300,14 @@ export const readAndApplyDeleteSet = (decoder, transaction, store) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
addToDeleteSet(unappliedDS, client, clock, clockEnd - clock)
|
addToDeleteSet(unappliedDS, createID(client, clock), len)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (unappliedDS.clients.size > 0) {
|
if (unappliedDS.clients.size > 0) {
|
||||||
const ds = new UpdateEncoderV2()
|
// TODO: no need for encoding+decoding ds anymore
|
||||||
encoding.writeVarUint(ds.restEncoder, 0) // encode 0 structs
|
const unappliedDSEncoder = encoding.createEncoder()
|
||||||
writeDeleteSet(ds, unappliedDS)
|
writeDeleteSet(unappliedDSEncoder, unappliedDS)
|
||||||
return ds.toUint8Array()
|
store.pendingDeleteReaders.push(decoding.createDecoder(encoding.toUint8Array(unappliedDSEncoder)))
|
||||||
}
|
}
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {DeleteSet} ds1
|
|
||||||
* @param {DeleteSet} ds2
|
|
||||||
*/
|
|
||||||
export const equalDeleteSets = (ds1, ds2) => {
|
|
||||||
if (ds1.clients.size !== ds2.clients.size) return false
|
|
||||||
for (const [client, deleteItems1] of ds1.clients.entries()) {
|
|
||||||
const deleteItems2 = /** @type {Array<import('../internals.js').DeleteItem>} */ (ds2.clients.get(client))
|
|
||||||
if (deleteItems2 === undefined || deleteItems1.length !== deleteItems2.length) return false
|
|
||||||
for (let i = 0; i < deleteItems1.length; i++) {
|
|
||||||
const di1 = deleteItems1[i]
|
|
||||||
const di2 = deleteItems2[i]
|
|
||||||
if (di1.clock !== di2.clock || di1.len !== di2.len) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
}
|
||||||
|
|||||||
272
src/utils/Doc.js
272
src/utils/Doc.js
@@ -8,64 +8,34 @@ import {
|
|||||||
YArray,
|
YArray,
|
||||||
YText,
|
YText,
|
||||||
YMap,
|
YMap,
|
||||||
YXmlElement,
|
|
||||||
YXmlFragment,
|
YXmlFragment,
|
||||||
transact,
|
transact,
|
||||||
ContentDoc, Item, Transaction, YEvent // eslint-disable-line
|
Item, Transaction, YEvent // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import { ObservableV2 } from 'lib0/observable'
|
import { Observable } from 'lib0/observable.js'
|
||||||
import * as random from 'lib0/random'
|
import * as random from 'lib0/random.js'
|
||||||
import * as map from 'lib0/map'
|
import * as map from 'lib0/map.js'
|
||||||
import * as array from 'lib0/array'
|
|
||||||
import * as promise from 'lib0/promise'
|
|
||||||
|
|
||||||
export const generateNewClientId = random.uint32
|
export const generateNewClientId = random.uint32
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {Object} DocOpts
|
|
||||||
* @property {boolean} [DocOpts.gc=true] Disable garbage collection (default: gc=true)
|
|
||||||
* @property {function(Item):boolean} [DocOpts.gcFilter] Will be called before an Item is garbage collected. Return false to keep the Item.
|
|
||||||
* @property {string} [DocOpts.guid] Define a globally unique identifier for this document
|
|
||||||
* @property {string | null} [DocOpts.collectionid] Associate this document with a collection. This only plays a role if your provider has a concept of collection.
|
|
||||||
* @property {any} [DocOpts.meta] Any kind of meta information you want to associate with this document. If this is a subdocument, remote peers will store the meta information as well.
|
|
||||||
* @property {boolean} [DocOpts.autoLoad] If a subdocument, automatically load document. If this is a subdocument, remote peers will load the document as well automatically.
|
|
||||||
* @property {boolean} [DocOpts.shouldLoad] Whether the document should be synced by the provider now. This is toggled to true when you call ydoc.load()
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {Object} DocEvents
|
|
||||||
* @property {function(Doc):void} DocEvents.destroy
|
|
||||||
* @property {function(Doc):void} DocEvents.load
|
|
||||||
* @property {function(boolean, Doc):void} DocEvents.sync
|
|
||||||
* @property {function(Uint8Array, any, Doc, Transaction):void} DocEvents.update
|
|
||||||
* @property {function(Uint8Array, any, Doc, Transaction):void} DocEvents.updateV2
|
|
||||||
* @property {function(Doc):void} DocEvents.beforeAllTransactions
|
|
||||||
* @property {function(Transaction, Doc):void} DocEvents.beforeTransaction
|
|
||||||
* @property {function(Transaction, Doc):void} DocEvents.beforeObserverCalls
|
|
||||||
* @property {function(Transaction, Doc):void} DocEvents.afterTransaction
|
|
||||||
* @property {function(Transaction, Doc):void} DocEvents.afterTransactionCleanup
|
|
||||||
* @property {function(Doc, Array<Transaction>):void} DocEvents.afterAllTransactions
|
|
||||||
* @property {function({ loaded: Set<Doc>, added: Set<Doc>, removed: Set<Doc> }, Doc, Transaction):void} DocEvents.subdocs
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A Yjs instance handles the state of shared data.
|
* A Yjs instance handles the state of shared data.
|
||||||
* @extends ObservableV2<DocEvents>
|
* @extends Observable<string>
|
||||||
*/
|
*/
|
||||||
export class Doc extends ObservableV2 {
|
export class Doc extends Observable {
|
||||||
/**
|
/**
|
||||||
* @param {DocOpts} opts configuration
|
* @param {Object} conf configuration
|
||||||
|
* @param {boolean} [conf.gc] Disable garbage collection (default: gc=true)
|
||||||
|
* @param {function(Item):boolean} [conf.gcFilter] Will be called before an Item is garbage collected. Return false to keep the Item.
|
||||||
*/
|
*/
|
||||||
constructor ({ guid = random.uuidv4(), collectionid = null, gc = true, gcFilter = () => true, meta = null, autoLoad = false, shouldLoad = true } = {}) {
|
constructor ({ gc = true, gcFilter = () => true } = {}) {
|
||||||
super()
|
super()
|
||||||
this.gc = gc
|
this.gc = gc
|
||||||
this.gcFilter = gcFilter
|
this.gcFilter = gcFilter
|
||||||
this.clientID = generateNewClientId()
|
this.clientID = generateNewClientId()
|
||||||
this.guid = guid
|
|
||||||
this.collectionid = collectionid
|
|
||||||
/**
|
/**
|
||||||
* @type {Map<string, AbstractType<YEvent<any>>>}
|
* @type {Map<string, AbstractType<YEvent>>}
|
||||||
*/
|
*/
|
||||||
this.share = new Map()
|
this.share = new Map()
|
||||||
this.store = new StructStore()
|
this.store = new StructStore()
|
||||||
@@ -77,95 +47,6 @@ export class Doc extends ObservableV2 {
|
|||||||
* @type {Array<Transaction>}
|
* @type {Array<Transaction>}
|
||||||
*/
|
*/
|
||||||
this._transactionCleanups = []
|
this._transactionCleanups = []
|
||||||
/**
|
|
||||||
* @type {Set<Doc>}
|
|
||||||
*/
|
|
||||||
this.subdocs = new Set()
|
|
||||||
/**
|
|
||||||
* If this document is a subdocument - a document integrated into another document - then _item is defined.
|
|
||||||
* @type {Item?}
|
|
||||||
*/
|
|
||||||
this._item = null
|
|
||||||
this.shouldLoad = shouldLoad
|
|
||||||
this.autoLoad = autoLoad
|
|
||||||
this.meta = meta
|
|
||||||
/**
|
|
||||||
* This is set to true when the persistence provider loaded the document from the database or when the `sync` event fires.
|
|
||||||
* Note that not all providers implement this feature. Provider authors are encouraged to fire the `load` event when the doc content is loaded from the database.
|
|
||||||
*
|
|
||||||
* @type {boolean}
|
|
||||||
*/
|
|
||||||
this.isLoaded = false
|
|
||||||
/**
|
|
||||||
* This is set to true when the connection provider has successfully synced with a backend.
|
|
||||||
* Note that when using peer-to-peer providers this event may not provide very useful.
|
|
||||||
* Also note that not all providers implement this feature. Provider authors are encouraged to fire
|
|
||||||
* the `sync` event when the doc has been synced (with `true` as a parameter) or if connection is
|
|
||||||
* lost (with false as a parameter).
|
|
||||||
*/
|
|
||||||
this.isSynced = false
|
|
||||||
this.isDestroyed = false
|
|
||||||
/**
|
|
||||||
* Promise that resolves once the document has been loaded from a persistence provider.
|
|
||||||
*/
|
|
||||||
this.whenLoaded = promise.create(resolve => {
|
|
||||||
this.on('load', () => {
|
|
||||||
this.isLoaded = true
|
|
||||||
resolve(this)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
const provideSyncedPromise = () => promise.create(resolve => {
|
|
||||||
/**
|
|
||||||
* @param {boolean} isSynced
|
|
||||||
*/
|
|
||||||
const eventHandler = (isSynced) => {
|
|
||||||
if (isSynced === undefined || isSynced === true) {
|
|
||||||
this.off('sync', eventHandler)
|
|
||||||
resolve()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
this.on('sync', eventHandler)
|
|
||||||
})
|
|
||||||
this.on('sync', isSynced => {
|
|
||||||
if (isSynced === false && this.isSynced) {
|
|
||||||
this.whenSynced = provideSyncedPromise()
|
|
||||||
}
|
|
||||||
this.isSynced = isSynced === undefined || isSynced === true
|
|
||||||
if (this.isSynced && !this.isLoaded) {
|
|
||||||
this.emit('load', [this])
|
|
||||||
}
|
|
||||||
})
|
|
||||||
/**
|
|
||||||
* Promise that resolves once the document has been synced with a backend.
|
|
||||||
* This promise is recreated when the connection is lost.
|
|
||||||
* Note the documentation about the `isSynced` property.
|
|
||||||
*/
|
|
||||||
this.whenSynced = provideSyncedPromise()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Notify the parent document that you request to load data into this subdocument (if it is a subdocument).
|
|
||||||
*
|
|
||||||
* `load()` might be used in the future to request any provider to load the most current data.
|
|
||||||
*
|
|
||||||
* It is safe to call `load()` multiple times.
|
|
||||||
*/
|
|
||||||
load () {
|
|
||||||
const item = this._item
|
|
||||||
if (item !== null && !this.shouldLoad) {
|
|
||||||
transact(/** @type {any} */ (item.parent).doc, transaction => {
|
|
||||||
transaction.subdocsLoaded.add(this)
|
|
||||||
}, null, true)
|
|
||||||
}
|
|
||||||
this.shouldLoad = true
|
|
||||||
}
|
|
||||||
|
|
||||||
getSubdocs () {
|
|
||||||
return this.subdocs
|
|
||||||
}
|
|
||||||
|
|
||||||
getSubdocGuids () {
|
|
||||||
return new Set(array.from(this.subdocs).map(doc => doc.guid))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -174,45 +55,42 @@ export class Doc extends ObservableV2 {
|
|||||||
* that happened inside of the transaction are sent as one message to the
|
* that happened inside of the transaction are sent as one message to the
|
||||||
* other peers.
|
* other peers.
|
||||||
*
|
*
|
||||||
* @template T
|
* @param {function(Transaction):void} f The function that should be executed as a transaction
|
||||||
* @param {function(Transaction):T} f The function that should be executed as a transaction
|
|
||||||
* @param {any} [origin] Origin of who started the transaction. Will be stored on transaction.origin
|
* @param {any} [origin] Origin of who started the transaction. Will be stored on transaction.origin
|
||||||
* @return T
|
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
transact (f, origin = null) {
|
transact (f, origin = null) {
|
||||||
return transact(this, f, origin)
|
transact(this, f, origin)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Define a shared data type.
|
* Get a shared data type by name. If it does not yet exist, define its type.
|
||||||
*
|
*
|
||||||
* Multiple calls of `ydoc.get(name, TypeConstructor)` yield the same result
|
* Multiple calls of `y.get(name, TypeConstructor)` yield the same result
|
||||||
* and do not overwrite each other. I.e.
|
* and do not overwrite each other, i.e.
|
||||||
* `ydoc.get(name, Y.Array) === ydoc.get(name, Y.Array)`
|
* `y.get(name, Y.Array) === y.get(name, Y.Array)`
|
||||||
*
|
*
|
||||||
* After this method is called, the type is also available on `ydoc.share.get(name)`.
|
* After this method is called, the type is also available on `y.share.get(name)`.
|
||||||
*
|
*
|
||||||
* *Best Practices:*
|
* *Best Practices:*
|
||||||
* Define all types right after the Y.Doc instance is created and store them in a separate object.
|
* Define all types right after the Yjs instance is created and store them in a separate object.
|
||||||
* Also use the typed methods `getText(name)`, `getArray(name)`, ..
|
* Also use the typed methods `getText(name)`, `getArray(name)`, `getMap(name)`, etc.
|
||||||
*
|
*
|
||||||
* @template {typeof AbstractType<any>} Type
|
|
||||||
* @example
|
* @example
|
||||||
* const ydoc = new Y.Doc(..)
|
* const y = new Y(..)
|
||||||
* const appState = {
|
* const appState = {
|
||||||
* document: ydoc.getText('document')
|
* document: y.getText('document')
|
||||||
* comments: ydoc.getArray('comments')
|
* comments: y.getArray('comments')
|
||||||
* }
|
* }
|
||||||
*
|
*
|
||||||
* @param {string} name
|
* @param {string} name
|
||||||
* @param {Type} TypeConstructor The constructor of the type definition. E.g. Y.Text, Y.Array, Y.Map, ...
|
* @param {Function} TypeConstructor The constructor of the type definition. E.g. Y.Text, Y.Array, Y.Map, ...
|
||||||
* @return {InstanceType<Type>} The created type. Constructed with TypeConstructor
|
* @return {AbstractType<any>} The created type. Constructed with TypeConstructor
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
get (name, TypeConstructor = /** @type {any} */ (AbstractType)) {
|
get (name, TypeConstructor = AbstractType) {
|
||||||
const type = map.setIfUndefined(this.share, name, () => {
|
const type = map.setIfUndefined(this.share, name, () => {
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
const t = new TypeConstructor()
|
const t = new TypeConstructor()
|
||||||
@@ -238,110 +116,80 @@ export class Doc extends ObservableV2 {
|
|||||||
t._length = type._length
|
t._length = type._length
|
||||||
this.share.set(name, t)
|
this.share.set(name, t)
|
||||||
t._integrate(this, null)
|
t._integrate(this, null)
|
||||||
return /** @type {InstanceType<Type>} */ (t)
|
return t
|
||||||
} else {
|
} else {
|
||||||
throw new Error(`Type with the name ${name} has already been defined with a different constructor`)
|
throw new Error(`Type with the name ${name} has already been defined with a different constructor`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return /** @type {InstanceType<Type>} */ (type)
|
return type
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @template T
|
* @template T
|
||||||
* @param {string} [name]
|
* @param {string} name
|
||||||
* @return {YArray<T>}
|
* @return {YArray<T>}
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
getArray (name = '') {
|
getArray (name) {
|
||||||
return /** @type {YArray<T>} */ (this.get(name, YArray))
|
// @ts-ignore
|
||||||
|
return this.get(name, YArray)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {string} [name]
|
* @param {string} name
|
||||||
* @return {YText}
|
* @return {YText}
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
getText (name = '') {
|
getText (name) {
|
||||||
|
// @ts-ignore
|
||||||
return this.get(name, YText)
|
return this.get(name, YText)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @template T
|
* @param {string} name
|
||||||
* @param {string} [name]
|
* @return {YMap<any>}
|
||||||
* @return {YMap<T>}
|
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
getMap (name = '') {
|
getMap (name) {
|
||||||
return /** @type {YMap<T>} */ (this.get(name, YMap))
|
// @ts-ignore
|
||||||
|
return this.get(name, YMap)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {string} [name]
|
* @param {string} name
|
||||||
* @return {YXmlElement}
|
|
||||||
*
|
|
||||||
* @public
|
|
||||||
*/
|
|
||||||
getXmlElement (name = '') {
|
|
||||||
return /** @type {YXmlElement<{[key:string]:string}>} */ (this.get(name, YXmlElement))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {string} [name]
|
|
||||||
* @return {YXmlFragment}
|
* @return {YXmlFragment}
|
||||||
*
|
*
|
||||||
* @public
|
* @public
|
||||||
*/
|
*/
|
||||||
getXmlFragment (name = '') {
|
getXmlFragment (name) {
|
||||||
|
// @ts-ignore
|
||||||
return this.get(name, YXmlFragment)
|
return this.get(name, YXmlFragment)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Converts the entire document into a js object, recursively traversing each yjs type
|
|
||||||
* Doesn't log types that have not been defined (using ydoc.getType(..)).
|
|
||||||
*
|
|
||||||
* @deprecated Do not use this method and rather call toJSON directly on the shared types.
|
|
||||||
*
|
|
||||||
* @return {Object<string, any>}
|
|
||||||
*/
|
|
||||||
toJSON () {
|
|
||||||
/**
|
|
||||||
* @type {Object<string, any>}
|
|
||||||
*/
|
|
||||||
const doc = {}
|
|
||||||
|
|
||||||
this.share.forEach((value, key) => {
|
|
||||||
doc[key] = value.toJSON()
|
|
||||||
})
|
|
||||||
|
|
||||||
return doc
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Emit `destroy` event and unregister all event handlers.
|
* Emit `destroy` event and unregister all event handlers.
|
||||||
*/
|
*/
|
||||||
destroy () {
|
destroy () {
|
||||||
this.isDestroyed = true
|
this.emit('destroyed', [true])
|
||||||
array.from(this.subdocs).forEach(subdoc => subdoc.destroy())
|
|
||||||
const item = this._item
|
|
||||||
if (item !== null) {
|
|
||||||
this._item = null
|
|
||||||
const content = /** @type {ContentDoc} */ (item.content)
|
|
||||||
content.doc = new Doc({ guid: this.guid, ...content.opts, shouldLoad: false })
|
|
||||||
content.doc._item = item
|
|
||||||
transact(/** @type {any} */ (item).parent.doc, transaction => {
|
|
||||||
const doc = content.doc
|
|
||||||
if (!item.deleted) {
|
|
||||||
transaction.subdocsAdded.add(doc)
|
|
||||||
}
|
|
||||||
transaction.subdocsRemoved.add(this)
|
|
||||||
}, null, true)
|
|
||||||
}
|
|
||||||
// @ts-ignore
|
|
||||||
this.emit('destroyed', [true]) // DEPRECATED!
|
|
||||||
this.emit('destroy', [this])
|
|
||||||
super.destroy()
|
super.destroy()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {string} eventName
|
||||||
|
* @param {function} f
|
||||||
|
*/
|
||||||
|
on (eventName, f) {
|
||||||
|
super.on(eventName, f)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {string} eventName
|
||||||
|
* @param {function} f
|
||||||
|
*/
|
||||||
|
off (eventName, f) {
|
||||||
|
super.off(eventName, f)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import * as f from 'lib0/function'
|
import * as f from 'lib0/function.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* General event handler implementation.
|
* General event handler implementation.
|
||||||
@@ -51,12 +51,7 @@ export const addEventHandlerListener = (eventHandler, f) =>
|
|||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const removeEventHandlerListener = (eventHandler, f) => {
|
export const removeEventHandlerListener = (eventHandler, f) => {
|
||||||
const l = eventHandler.l
|
eventHandler.l = eventHandler.l.filter(g => f !== g)
|
||||||
const len = l.length
|
|
||||||
eventHandler.l = l.filter(g => f !== g)
|
|
||||||
if (len === eventHandler.l.length) {
|
|
||||||
console.error('[yjs] Tried to remove event handler that doesn\'t exist.')
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
|
|
||||||
import { AbstractType } from '../internals.js' // eslint-disable-line
|
import { AbstractType } from '../internals.js' // eslint-disable-line
|
||||||
|
|
||||||
import * as decoding from 'lib0/decoding'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
import * as encoding from 'lib0/encoding'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as error from 'lib0/error'
|
import * as error from 'lib0/error.js'
|
||||||
|
|
||||||
export class ID {
|
export class ID {
|
||||||
/**
|
/**
|
||||||
@@ -80,7 +81,7 @@ export const readID = decoder =>
|
|||||||
*/
|
*/
|
||||||
export const findRootTypeKey = type => {
|
export const findRootTypeKey = type => {
|
||||||
// @ts-ignore _y must be defined, otherwise unexpected case
|
// @ts-ignore _y must be defined, otherwise unexpected case
|
||||||
for (const [key, value] of type.doc.share.entries()) {
|
for (const [key, value] of type.doc.share) {
|
||||||
if (value === type) {
|
if (value === type) {
|
||||||
return key
|
return key
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,14 +1,15 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
YArray,
|
YArray,
|
||||||
YMap,
|
YMap,
|
||||||
readDeleteSet,
|
readDeleteSet,
|
||||||
writeDeleteSet,
|
writeDeleteSet,
|
||||||
createDeleteSet,
|
createDeleteSet,
|
||||||
DSEncoderV1, DSDecoderV1, ID, DeleteSet, YArrayEvent, Transaction, Doc // eslint-disable-line
|
ID, DeleteSet, YArrayEvent, Transaction, Doc // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as decoding from 'lib0/decoding'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import { mergeDeleteSets, isDeleted } from './DeleteSet.js'
|
import { mergeDeleteSets, isDeleted } from './DeleteSet.js'
|
||||||
|
|
||||||
export class PermanentUserData {
|
export class PermanentUserData {
|
||||||
@@ -45,12 +46,12 @@ export class PermanentUserData {
|
|||||||
event.changes.added.forEach(item => {
|
event.changes.added.forEach(item => {
|
||||||
item.content.getContent().forEach(encodedDs => {
|
item.content.getContent().forEach(encodedDs => {
|
||||||
if (encodedDs instanceof Uint8Array) {
|
if (encodedDs instanceof Uint8Array) {
|
||||||
this.dss.set(userDescription, mergeDeleteSets([this.dss.get(userDescription) || createDeleteSet(), readDeleteSet(new DSDecoderV1(decoding.createDecoder(encodedDs)))]))
|
this.dss.set(userDescription, mergeDeleteSets([this.dss.get(userDescription) || createDeleteSet(), readDeleteSet(decoding.createDecoder(encodedDs))]))
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
this.dss.set(userDescription, mergeDeleteSets(ds.map(encodedDs => readDeleteSet(new DSDecoderV1(decoding.createDecoder(encodedDs))))))
|
this.dss.set(userDescription, mergeDeleteSets(ds.map(encodedDs => readDeleteSet(decoding.createDecoder(encodedDs)))))
|
||||||
ids.observe(/** @param {YArrayEvent<any>} event */ event =>
|
ids.observe(/** @param {YArrayEvent<any>} event */ event =>
|
||||||
event.changes.added.forEach(item => item.content.getContent().forEach(addClientId))
|
event.changes.added.forEach(item => item.content.getContent().forEach(addClientId))
|
||||||
)
|
)
|
||||||
@@ -62,7 +63,7 @@ export class PermanentUserData {
|
|||||||
initUser(storeType.get(userDescription), userDescription)
|
initUser(storeType.get(userDescription), userDescription)
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
// add initial data
|
// add intial data
|
||||||
storeType.forEach(initUser)
|
storeType.forEach(initUser)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -70,7 +71,7 @@ export class PermanentUserData {
|
|||||||
* @param {Doc} doc
|
* @param {Doc} doc
|
||||||
* @param {number} clientid
|
* @param {number} clientid
|
||||||
* @param {string} userDescription
|
* @param {string} userDescription
|
||||||
* @param {Object} conf
|
* @param {Object} [conf]
|
||||||
* @param {function(Transaction, DeleteSet):boolean} [conf.filter]
|
* @param {function(Transaction, DeleteSet):boolean} [conf.filter]
|
||||||
*/
|
*/
|
||||||
setUserMapping (doc, clientid, userDescription, { filter = () => true } = {}) {
|
setUserMapping (doc, clientid, userDescription, { filter = () => true } = {}) {
|
||||||
@@ -83,7 +84,7 @@ export class PermanentUserData {
|
|||||||
users.set(userDescription, user)
|
users.set(userDescription, user)
|
||||||
}
|
}
|
||||||
user.get('ids').push([clientid])
|
user.get('ids').push([clientid])
|
||||||
users.observe(_event => {
|
users.observe(event => {
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
const userOverwrite = users.get(userDescription)
|
const userOverwrite = users.get(userDescription)
|
||||||
if (userOverwrite !== user) {
|
if (userOverwrite !== user) {
|
||||||
@@ -96,11 +97,11 @@ export class PermanentUserData {
|
|||||||
user.get('ids').push([clientid])
|
user.get('ids').push([clientid])
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
const encoder = new DSEncoderV1()
|
const encoder = encoding.createEncoder()
|
||||||
const ds = this.dss.get(userDescription)
|
const ds = this.dss.get(userDescription)
|
||||||
if (ds) {
|
if (ds) {
|
||||||
writeDeleteSet(encoder, ds)
|
writeDeleteSet(encoder, ds)
|
||||||
user.get('ds').push([encoder.toUint8Array()])
|
user.get('ds').push([encoding.toUint8Array(encoder)])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}, 0)
|
}, 0)
|
||||||
@@ -110,9 +111,9 @@ export class PermanentUserData {
|
|||||||
const yds = user.get('ds')
|
const yds = user.get('ds')
|
||||||
const ds = transaction.deleteSet
|
const ds = transaction.deleteSet
|
||||||
if (transaction.local && ds.clients.size > 0 && filter(transaction, ds)) {
|
if (transaction.local && ds.clients.size > 0 && filter(transaction, ds)) {
|
||||||
const encoder = new DSEncoderV1()
|
const encoder = encoding.createEncoder()
|
||||||
writeDeleteSet(encoder, ds)
|
writeDeleteSet(encoder, ds)
|
||||||
yds.push([encoder.toUint8Array()])
|
yds.push([encoding.toUint8Array(encoder)])
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
@@ -131,7 +132,7 @@ export class PermanentUserData {
|
|||||||
* @return {string | null}
|
* @return {string | null}
|
||||||
*/
|
*/
|
||||||
getUserByDeletedId (id) {
|
getUserByDeletedId (id) {
|
||||||
for (const [userDescription, ds] of this.dss.entries()) {
|
for (const [userDescription, ds] of this.dss) {
|
||||||
if (isDeleted(ds, id)) {
|
if (isDeleted(ds, id)) {
|
||||||
return userDescription
|
return userDescription
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
writeID,
|
writeID,
|
||||||
readID,
|
readID,
|
||||||
@@ -8,13 +9,12 @@ import {
|
|||||||
createID,
|
createID,
|
||||||
ContentType,
|
ContentType,
|
||||||
followRedone,
|
followRedone,
|
||||||
getItem,
|
ID, Doc, AbstractType // eslint-disable-line
|
||||||
StructStore, ID, Doc, AbstractType, // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as encoding from 'lib0/encoding'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as decoding from 'lib0/decoding'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
import * as error from 'lib0/error'
|
import * as error from 'lib0/error.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A relative position is based on the Yjs model and is not affected by document changes.
|
* A relative position is based on the Yjs model and is not affected by document changes.
|
||||||
@@ -45,9 +45,8 @@ export class RelativePosition {
|
|||||||
* @param {ID|null} type
|
* @param {ID|null} type
|
||||||
* @param {string|null} tname
|
* @param {string|null} tname
|
||||||
* @param {ID|null} item
|
* @param {ID|null} item
|
||||||
* @param {number} assoc
|
|
||||||
*/
|
*/
|
||||||
constructor (type, tname, item, assoc = 0) {
|
constructor (type, tname, item) {
|
||||||
/**
|
/**
|
||||||
* @type {ID|null}
|
* @type {ID|null}
|
||||||
*/
|
*/
|
||||||
@@ -60,57 +59,23 @@ export class RelativePosition {
|
|||||||
* @type {ID | null}
|
* @type {ID | null}
|
||||||
*/
|
*/
|
||||||
this.item = item
|
this.item = item
|
||||||
/**
|
|
||||||
* A relative position is associated to a specific character. By default
|
|
||||||
* assoc >= 0, the relative position is associated to the character
|
|
||||||
* after the meant position.
|
|
||||||
* I.e. position 1 in 'ab' is associated to character 'b'.
|
|
||||||
*
|
|
||||||
* If assoc < 0, then the relative position is associated to the character
|
|
||||||
* before the meant position.
|
|
||||||
*
|
|
||||||
* @type {number}
|
|
||||||
*/
|
|
||||||
this.assoc = assoc
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {RelativePosition} rpos
|
|
||||||
* @return {any}
|
|
||||||
*/
|
|
||||||
export const relativePositionToJSON = rpos => {
|
|
||||||
const json = {}
|
|
||||||
if (rpos.type) {
|
|
||||||
json.type = rpos.type
|
|
||||||
}
|
|
||||||
if (rpos.tname) {
|
|
||||||
json.tname = rpos.tname
|
|
||||||
}
|
|
||||||
if (rpos.item) {
|
|
||||||
json.item = rpos.item
|
|
||||||
}
|
|
||||||
if (rpos.assoc != null) {
|
|
||||||
json.assoc = rpos.assoc
|
|
||||||
}
|
|
||||||
return json
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {any} json
|
* @param {any} json
|
||||||
* @return {RelativePosition}
|
* @return {RelativePosition}
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const createRelativePositionFromJSON = json => new RelativePosition(json.type == null ? null : createID(json.type.client, json.type.clock), json.tname ?? null, json.item == null ? null : createID(json.item.client, json.item.clock), json.assoc == null ? 0 : json.assoc)
|
export const createRelativePositionFromJSON = json => new RelativePosition(json.type == null ? null : createID(json.type.client, json.type.clock), json.tname || null, json.item == null ? null : createID(json.item.client, json.item.clock))
|
||||||
|
|
||||||
export class AbsolutePosition {
|
export class AbsolutePosition {
|
||||||
/**
|
/**
|
||||||
* @param {AbstractType<any>} type
|
* @param {AbstractType<any>} type
|
||||||
* @param {number} index
|
* @param {number} index
|
||||||
* @param {number} [assoc]
|
|
||||||
*/
|
*/
|
||||||
constructor (type, index, assoc = 0) {
|
constructor (type, index) {
|
||||||
/**
|
/**
|
||||||
* @type {AbstractType<any>}
|
* @type {AbstractType<any>}
|
||||||
*/
|
*/
|
||||||
@@ -119,27 +84,24 @@ export class AbsolutePosition {
|
|||||||
* @type {number}
|
* @type {number}
|
||||||
*/
|
*/
|
||||||
this.index = index
|
this.index = index
|
||||||
this.assoc = assoc
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {AbstractType<any>} type
|
* @param {AbstractType<any>} type
|
||||||
* @param {number} index
|
* @param {number} index
|
||||||
* @param {number} [assoc]
|
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const createAbsolutePosition = (type, index, assoc = 0) => new AbsolutePosition(type, index, assoc)
|
export const createAbsolutePosition = (type, index) => new AbsolutePosition(type, index)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {AbstractType<any>} type
|
* @param {AbstractType<any>} type
|
||||||
* @param {ID|null} item
|
* @param {ID|null} item
|
||||||
* @param {number} [assoc]
|
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const createRelativePosition = (type, item, assoc) => {
|
export const createRelativePosition = (type, item) => {
|
||||||
let typeid = null
|
let typeid = null
|
||||||
let tname = null
|
let tname = null
|
||||||
if (type._item === null) {
|
if (type._item === null) {
|
||||||
@@ -147,7 +109,7 @@ export const createRelativePosition = (type, item, assoc) => {
|
|||||||
} else {
|
} else {
|
||||||
typeid = createID(type._item.id.client, type._item.id.clock)
|
typeid = createID(type._item.id.client, type._item.id.clock)
|
||||||
}
|
}
|
||||||
return new RelativePosition(typeid, tname, item, assoc)
|
return new RelativePosition(typeid, tname, item)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -155,35 +117,23 @@ export const createRelativePosition = (type, item, assoc) => {
|
|||||||
*
|
*
|
||||||
* @param {AbstractType<any>} type The base type (e.g. YText or YArray).
|
* @param {AbstractType<any>} type The base type (e.g. YText or YArray).
|
||||||
* @param {number} index The absolute position.
|
* @param {number} index The absolute position.
|
||||||
* @param {number} [assoc]
|
|
||||||
* @return {RelativePosition}
|
* @return {RelativePosition}
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const createRelativePositionFromTypeIndex = (type, index, assoc = 0) => {
|
export const createRelativePositionFromTypeIndex = (type, index) => {
|
||||||
let t = type._start
|
let t = type._start
|
||||||
if (assoc < 0) {
|
|
||||||
// associated to the left character or the beginning of a type, increment index if possible.
|
|
||||||
if (index === 0) {
|
|
||||||
return createRelativePosition(type, null, assoc)
|
|
||||||
}
|
|
||||||
index--
|
|
||||||
}
|
|
||||||
while (t !== null) {
|
while (t !== null) {
|
||||||
if (!t.deleted && t.countable) {
|
if (!t.deleted && t.countable) {
|
||||||
if (t.length > index) {
|
if (t.length > index) {
|
||||||
// case 1: found position somewhere in the linked list
|
// case 1: found position somewhere in the linked list
|
||||||
return createRelativePosition(type, createID(t.id.client, t.id.clock + index), assoc)
|
return createRelativePosition(type, createID(t.id.client, t.id.clock + index))
|
||||||
}
|
}
|
||||||
index -= t.length
|
index -= t.length
|
||||||
}
|
}
|
||||||
if (t.right === null && assoc < 0) {
|
|
||||||
// left-associated position, return last available id
|
|
||||||
return createRelativePosition(type, t.lastId, assoc)
|
|
||||||
}
|
|
||||||
t = t.right
|
t = t.right
|
||||||
}
|
}
|
||||||
return createRelativePosition(type, null, assoc)
|
return createRelativePosition(type, null)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -193,7 +143,7 @@ export const createRelativePositionFromTypeIndex = (type, index, assoc = 0) => {
|
|||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const writeRelativePosition = (encoder, rpos) => {
|
export const writeRelativePosition = (encoder, rpos) => {
|
||||||
const { type, tname, item, assoc } = rpos
|
const { type, tname, item } = rpos
|
||||||
if (item !== null) {
|
if (item !== null) {
|
||||||
encoding.writeVarUint(encoder, 0)
|
encoding.writeVarUint(encoder, 0)
|
||||||
writeID(encoder, item)
|
writeID(encoder, item)
|
||||||
@@ -208,7 +158,6 @@ export const writeRelativePosition = (encoder, rpos) => {
|
|||||||
} else {
|
} else {
|
||||||
throw error.unexpectedCase()
|
throw error.unexpectedCase()
|
||||||
}
|
}
|
||||||
encoding.writeVarInt(encoder, assoc)
|
|
||||||
return encoder
|
return encoder
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -224,7 +173,7 @@ export const encodeRelativePosition = rpos => {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {decoding.Decoder} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {RelativePosition}
|
* @return {RelativePosition|null}
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
@@ -246,66 +195,41 @@ export const readRelativePosition = decoder => {
|
|||||||
type = readID(decoder)
|
type = readID(decoder)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
const assoc = decoding.hasContent(decoder) ? decoding.readVarInt(decoder) : 0
|
return new RelativePosition(type, tname, itemID)
|
||||||
return new RelativePosition(type, tname, itemID, assoc)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Uint8Array} uint8Array
|
* @param {Uint8Array} uint8Array
|
||||||
* @return {RelativePosition}
|
* @return {RelativePosition|null}
|
||||||
*/
|
*/
|
||||||
export const decodeRelativePosition = uint8Array => readRelativePosition(decoding.createDecoder(uint8Array))
|
export const decodeRelativePosition = uint8Array => readRelativePosition(decoding.createDecoder(uint8Array))
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {StructStore} store
|
|
||||||
* @param {ID} id
|
|
||||||
*/
|
|
||||||
const getItemWithOffset = (store, id) => {
|
|
||||||
const item = getItem(store, id)
|
|
||||||
const diff = id.clock - item.id.clock
|
|
||||||
return {
|
|
||||||
item, diff
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Transform a relative position to an absolute position.
|
|
||||||
*
|
|
||||||
* If you want to share the relative position with other users, you should set
|
|
||||||
* `followUndoneDeletions` to false to get consistent results across all clients.
|
|
||||||
*
|
|
||||||
* When calculating the absolute position, we try to follow the "undone deletions". This yields
|
|
||||||
* better results for the user who performed undo. However, only the user who performed the undo
|
|
||||||
* will get the better results, the other users don't know which operations recreated a deleted
|
|
||||||
* range of content. There is more information in this ticket: https://github.com/yjs/yjs/issues/638
|
|
||||||
*
|
|
||||||
* @param {RelativePosition} rpos
|
* @param {RelativePosition} rpos
|
||||||
* @param {Doc} doc
|
* @param {Doc} doc
|
||||||
* @param {boolean} followUndoneDeletions - whether to follow undone deletions - see https://github.com/yjs/yjs/issues/638
|
|
||||||
* @return {AbsolutePosition|null}
|
* @return {AbsolutePosition|null}
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const createAbsolutePositionFromRelativePosition = (rpos, doc, followUndoneDeletions = true) => {
|
export const createAbsolutePositionFromRelativePosition = (rpos, doc) => {
|
||||||
const store = doc.store
|
const store = doc.store
|
||||||
const rightID = rpos.item
|
const rightID = rpos.item
|
||||||
const typeID = rpos.type
|
const typeID = rpos.type
|
||||||
const tname = rpos.tname
|
const tname = rpos.tname
|
||||||
const assoc = rpos.assoc
|
|
||||||
let type = null
|
let type = null
|
||||||
let index = 0
|
let index = 0
|
||||||
if (rightID !== null) {
|
if (rightID !== null) {
|
||||||
if (getState(store, rightID.client) <= rightID.clock) {
|
if (getState(store, rightID.client) <= rightID.clock) {
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
const res = followUndoneDeletions ? followRedone(store, rightID) : getItemWithOffset(store, rightID)
|
const res = followRedone(store, rightID)
|
||||||
const right = res.item
|
const right = res.item
|
||||||
if (!(right instanceof Item)) {
|
if (!(right instanceof Item)) {
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
type = /** @type {AbstractType<any>} */ (right.parent)
|
type = /** @type {AbstractType<any>} */ (right.parent)
|
||||||
if (type._item === null || !type._item.deleted) {
|
if (type._item === null || !type._item.deleted) {
|
||||||
index = (right.deleted || !right.countable) ? 0 : (res.diff + (assoc >= 0 ? 0 : 1)) // adjust position based on left association if necessary
|
index = right.deleted || !right.countable ? 0 : res.diff
|
||||||
let n = right.left
|
let n = right.left
|
||||||
while (n !== null) {
|
while (n !== null) {
|
||||||
if (!n.deleted && n.countable) {
|
if (!n.deleted && n.countable) {
|
||||||
@@ -322,7 +246,7 @@ export const createAbsolutePositionFromRelativePosition = (rpos, doc, followUndo
|
|||||||
// type does not exist yet
|
// type does not exist yet
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
const { item } = followUndoneDeletions ? followRedone(store, typeID) : { item: getItem(store, typeID) }
|
const { item } = followRedone(store, typeID)
|
||||||
if (item instanceof Item && item.content instanceof ContentType) {
|
if (item instanceof Item && item.content instanceof ContentType) {
|
||||||
type = item.content.type
|
type = item.content.type
|
||||||
} else {
|
} else {
|
||||||
@@ -332,22 +256,17 @@ export const createAbsolutePositionFromRelativePosition = (rpos, doc, followUndo
|
|||||||
} else {
|
} else {
|
||||||
throw error.unexpectedCase()
|
throw error.unexpectedCase()
|
||||||
}
|
}
|
||||||
if (assoc >= 0) {
|
index = type._length
|
||||||
index = type._length
|
|
||||||
} else {
|
|
||||||
index = 0
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return createAbsolutePosition(type, index, rpos.assoc)
|
return createAbsolutePosition(type, index)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {RelativePosition|null} a
|
* @param {RelativePosition|null} a
|
||||||
* @param {RelativePosition|null} b
|
* @param {RelativePosition|null} b
|
||||||
* @return {boolean}
|
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const compareRelativePositions = (a, b) => a === b || (
|
export const compareRelativePositions = (a, b) => a === b || (
|
||||||
a !== null && b !== null && a.tname === b.tname && compareIDs(a.item, b.item) && compareIDs(a.type, b.type) && a.assoc === b.assoc
|
a !== null && b !== null && a.tname === b.tname && compareIDs(a.item, b.item) && compareIDs(a.type, b.type)
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
isDeleted,
|
isDeleted,
|
||||||
createDeleteSetFromStructStore,
|
createDeleteSetFromStructStore,
|
||||||
@@ -11,19 +12,13 @@ import {
|
|||||||
createDeleteSet,
|
createDeleteSet,
|
||||||
createID,
|
createID,
|
||||||
getState,
|
getState,
|
||||||
findIndexSS,
|
Transaction, Doc, DeleteSet, Item // eslint-disable-line
|
||||||
UpdateEncoderV2,
|
|
||||||
applyUpdateV2,
|
|
||||||
LazyStructReader,
|
|
||||||
equalDeleteSets,
|
|
||||||
UpdateDecoderV1, UpdateDecoderV2, DSEncoderV1, DSEncoderV2, DSDecoderV1, DSDecoderV2, Transaction, Doc, DeleteSet, Item, // eslint-disable-line
|
|
||||||
mergeDeleteSets
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as map from 'lib0/map'
|
import * as map from 'lib0/map.js'
|
||||||
import * as set from 'lib0/set'
|
import * as set from 'lib0/set.js'
|
||||||
import * as decoding from 'lib0/decoding'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as encoding from 'lib0/encoding'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
|
||||||
export class Snapshot {
|
export class Snapshot {
|
||||||
/**
|
/**
|
||||||
@@ -56,12 +51,12 @@ export const equalSnapshots = (snap1, snap2) => {
|
|||||||
if (sv1.size !== sv2.size || ds1.size !== ds2.size) {
|
if (sv1.size !== sv2.size || ds1.size !== ds2.size) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
for (const [key, value] of sv1.entries()) {
|
for (const [key, value] of sv1) {
|
||||||
if (sv2.get(key) !== value) {
|
if (sv2.get(key) !== value) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for (const [client, dsitems1] of ds1.entries()) {
|
for (const [client, dsitems1] of ds1) {
|
||||||
const dsitems2 = ds2.get(client) || []
|
const dsitems2 = ds2.get(client) || []
|
||||||
if (dsitems1.length !== dsitems2.length) {
|
if (dsitems1.length !== dsitems2.length) {
|
||||||
return false
|
return false
|
||||||
@@ -79,36 +74,24 @@ export const equalSnapshots = (snap1, snap2) => {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Snapshot} snapshot
|
* @param {Snapshot} snapshot
|
||||||
* @param {DSEncoderV1 | DSEncoderV2} [encoder]
|
|
||||||
* @return {Uint8Array}
|
* @return {Uint8Array}
|
||||||
*/
|
*/
|
||||||
export const encodeSnapshotV2 = (snapshot, encoder = new DSEncoderV2()) => {
|
export const encodeSnapshot = snapshot => {
|
||||||
|
const encoder = encoding.createEncoder()
|
||||||
writeDeleteSet(encoder, snapshot.ds)
|
writeDeleteSet(encoder, snapshot.ds)
|
||||||
writeStateVector(encoder, snapshot.sv)
|
writeStateVector(encoder, snapshot.sv)
|
||||||
return encoder.toUint8Array()
|
return encoding.toUint8Array(encoder)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Snapshot} snapshot
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
export const encodeSnapshot = snapshot => encodeSnapshotV2(snapshot, new DSEncoderV1())
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Uint8Array} buf
|
* @param {Uint8Array} buf
|
||||||
* @param {DSDecoderV1 | DSDecoderV2} [decoder]
|
|
||||||
* @return {Snapshot}
|
* @return {Snapshot}
|
||||||
*/
|
*/
|
||||||
export const decodeSnapshotV2 = (buf, decoder = new DSDecoderV2(decoding.createDecoder(buf))) => {
|
export const decodeSnapshot = buf => {
|
||||||
|
const decoder = decoding.createDecoder(buf)
|
||||||
return new Snapshot(readDeleteSet(decoder), readStateVector(decoder))
|
return new Snapshot(readDeleteSet(decoder), readStateVector(decoder))
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} buf
|
|
||||||
* @return {Snapshot}
|
|
||||||
*/
|
|
||||||
export const decodeSnapshot = buf => decodeSnapshotV2(buf, new DSDecoderV1(decoding.createDecoder(buf)))
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {DeleteSet} ds
|
* @param {DeleteSet} ds
|
||||||
* @param {Map<number,number>} sm
|
* @param {Map<number,number>} sm
|
||||||
@@ -131,9 +114,9 @@ export const snapshot = doc => createSnapshot(createDeleteSetFromStructStore(doc
|
|||||||
* @protected
|
* @protected
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const isVisible = (item, snapshot) => snapshot === undefined
|
export const isVisible = (item, snapshot) => snapshot === undefined ? !item.deleted : (
|
||||||
? !item.deleted
|
snapshot.sv.has(item.id.client) && (snapshot.sv.get(item.id.client) || 0) > item.id.clock && !isDeleted(snapshot.ds, item.id)
|
||||||
: snapshot.sv.has(item.id.client) && (snapshot.sv.get(item.id.client) || 0) > item.id.clock && !isDeleted(snapshot.ds, item.id)
|
)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
@@ -149,88 +132,7 @@ export const splitSnapshotAffectedStructs = (transaction, snapshot) => {
|
|||||||
getItemCleanStart(transaction, createID(client, clock))
|
getItemCleanStart(transaction, createID(client, clock))
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
iterateDeletedStructs(transaction, snapshot.ds, _item => {})
|
iterateDeletedStructs(transaction, snapshot.ds, item => {})
|
||||||
meta.add(snapshot)
|
meta.add(snapshot)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @example
|
|
||||||
* const ydoc = new Y.Doc({ gc: false })
|
|
||||||
* ydoc.getText().insert(0, 'world!')
|
|
||||||
* const snapshot = Y.snapshot(ydoc)
|
|
||||||
* ydoc.getText().insert(0, 'hello ')
|
|
||||||
* const restored = Y.createDocFromSnapshot(ydoc, snapshot)
|
|
||||||
* assert(restored.getText().toString() === 'world!')
|
|
||||||
*
|
|
||||||
* @param {Doc} originDoc
|
|
||||||
* @param {Snapshot} snapshot
|
|
||||||
* @param {Doc} [newDoc] Optionally, you may define the Yjs document that receives the data from originDoc
|
|
||||||
* @return {Doc}
|
|
||||||
*/
|
|
||||||
export const createDocFromSnapshot = (originDoc, snapshot, newDoc = new Doc()) => {
|
|
||||||
if (originDoc.gc) {
|
|
||||||
// we should not try to restore a GC-ed document, because some of the restored items might have their content deleted
|
|
||||||
throw new Error('Garbage-collection must be disabled in `originDoc`!')
|
|
||||||
}
|
|
||||||
const { sv, ds } = snapshot
|
|
||||||
|
|
||||||
const encoder = new UpdateEncoderV2()
|
|
||||||
originDoc.transact(transaction => {
|
|
||||||
let size = 0
|
|
||||||
sv.forEach(clock => {
|
|
||||||
if (clock > 0) {
|
|
||||||
size++
|
|
||||||
}
|
|
||||||
})
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, size)
|
|
||||||
// splitting the structs before writing them to the encoder
|
|
||||||
for (const [client, clock] of sv) {
|
|
||||||
if (clock === 0) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (clock < getState(originDoc.store, client)) {
|
|
||||||
getItemCleanStart(transaction, createID(client, clock))
|
|
||||||
}
|
|
||||||
const structs = originDoc.store.clients.get(client) || []
|
|
||||||
const lastStructIndex = findIndexSS(structs, clock - 1)
|
|
||||||
// write # encoded structs
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, lastStructIndex + 1)
|
|
||||||
encoder.writeClient(client)
|
|
||||||
// first clock written is 0
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, 0)
|
|
||||||
for (let i = 0; i <= lastStructIndex; i++) {
|
|
||||||
structs[i].write(encoder, 0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
writeDeleteSet(encoder, ds)
|
|
||||||
})
|
|
||||||
|
|
||||||
applyUpdateV2(newDoc, encoder.toUint8Array(), 'snapshot')
|
|
||||||
return newDoc
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Snapshot} snapshot
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder]
|
|
||||||
*/
|
|
||||||
export const snapshotContainsUpdateV2 = (snapshot, update, YDecoder = UpdateDecoderV2) => {
|
|
||||||
const structs = []
|
|
||||||
const updateDecoder = new YDecoder(decoding.createDecoder(update))
|
|
||||||
const lazyDecoder = new LazyStructReader(updateDecoder, false)
|
|
||||||
for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
|
|
||||||
structs.push(curr)
|
|
||||||
if ((snapshot.sv.get(curr.id.client) || 0) < curr.id.clock + curr.length) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const mergedDS = mergeDeleteSets([snapshot.ds, readDeleteSet(updateDecoder)])
|
|
||||||
return equalDeleteSets(snapshot.ds, mergedDS)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Snapshot} snapshot
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
*/
|
|
||||||
export const snapshotContainsUpdate = (snapshot, update) => snapshotContainsUpdateV2(snapshot, update, UpdateDecoderV1)
|
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
GC,
|
GC,
|
||||||
splitItem,
|
splitItem,
|
||||||
Transaction, ID, Item, DSDecoderV2 // eslint-disable-line
|
AbstractStruct, Transaction, ID, Item // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as math from 'lib0/math'
|
import * as math from 'lib0/math.js'
|
||||||
import * as error from 'lib0/error'
|
import * as error from 'lib0/error.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js' // eslint-disable-line
|
||||||
|
|
||||||
export class StructStore {
|
export class StructStore {
|
||||||
constructor () {
|
constructor () {
|
||||||
@@ -14,13 +16,24 @@ export class StructStore {
|
|||||||
*/
|
*/
|
||||||
this.clients = new Map()
|
this.clients = new Map()
|
||||||
/**
|
/**
|
||||||
* @type {null | { missing: Map<number, number>, update: Uint8Array }}
|
* Store incompleted struct reads here
|
||||||
|
* `i` denotes to the next read operation
|
||||||
|
* We could shift the array of refs instead, but shift is incredible
|
||||||
|
* slow in Chrome for arrays with more than 100k elements
|
||||||
|
* @see tryResumePendingStructRefs
|
||||||
|
* @type {Map<number,{i:number,refs:Array<GC|Item>}>}
|
||||||
*/
|
*/
|
||||||
this.pendingStructs = null
|
this.pendingClientsStructRefs = new Map()
|
||||||
/**
|
/**
|
||||||
* @type {null | Uint8Array}
|
* Stack of pending structs waiting for struct dependencies
|
||||||
|
* Maximum length of stack is structReaders.size
|
||||||
|
* @type {Array<GC|Item>}
|
||||||
*/
|
*/
|
||||||
this.pendingDs = null
|
this.pendingStack = []
|
||||||
|
/**
|
||||||
|
* @type {Array<decoding.Decoder>}
|
||||||
|
*/
|
||||||
|
this.pendingDeleteReaders = []
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -66,13 +79,13 @@ export const getState = (store, client) => {
|
|||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const integrityCheck = store => {
|
export const integretyCheck = store => {
|
||||||
store.clients.forEach(structs => {
|
store.clients.forEach(structs => {
|
||||||
for (let i = 1; i < structs.length; i++) {
|
for (let i = 1; i < structs.length; i++) {
|
||||||
const l = structs[i - 1]
|
const l = structs[i - 1]
|
||||||
const r = structs[i]
|
const r = structs[i]
|
||||||
if (l.id.clock + l.length !== r.id.clock) {
|
if (l.id.clock + l.length !== r.id.clock) {
|
||||||
throw new Error('StructStore failed integrity check')
|
throw new Error('StructStore failed integrety check')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
getState,
|
getState,
|
||||||
writeStructsFromTransaction,
|
writeStructsFromTransaction,
|
||||||
@@ -10,15 +11,15 @@ import {
|
|||||||
Item,
|
Item,
|
||||||
generateNewClientId,
|
generateNewClientId,
|
||||||
createID,
|
createID,
|
||||||
cleanupYTextAfterTransaction,
|
GC, StructStore, ID, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line
|
||||||
UpdateEncoderV1, UpdateEncoderV2, GC, StructStore, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as map from 'lib0/map'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as math from 'lib0/math'
|
import * as map from 'lib0/map.js'
|
||||||
import * as set from 'lib0/set'
|
import * as math from 'lib0/math.js'
|
||||||
import * as logging from 'lib0/logging'
|
import * as set from 'lib0/set.js'
|
||||||
import { callAll } from 'lib0/function'
|
import * as logging from 'lib0/logging.js'
|
||||||
|
import { callAll } from 'lib0/function.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A transaction is created for every change on the Yjs model. It is possible
|
* A transaction is created for every change on the Yjs model. It is possible
|
||||||
@@ -28,8 +29,7 @@ import { callAll } from 'lib0/function'
|
|||||||
* possible. Here is an example to illustrate the advantages of bundling:
|
* possible. Here is an example to illustrate the advantages of bundling:
|
||||||
*
|
*
|
||||||
* @example
|
* @example
|
||||||
* const ydoc = new Y.Doc()
|
* const map = y.define('map', YMap)
|
||||||
* const map = ydoc.getMap('map')
|
|
||||||
* // Log content when change is triggered
|
* // Log content when change is triggered
|
||||||
* map.observe(() => {
|
* map.observe(() => {
|
||||||
* console.log('change triggered')
|
* console.log('change triggered')
|
||||||
@@ -38,7 +38,7 @@ import { callAll } from 'lib0/function'
|
|||||||
* map.set('a', 0) // => "change triggered"
|
* map.set('a', 0) // => "change triggered"
|
||||||
* map.set('b', 0) // => "change triggered"
|
* map.set('b', 0) // => "change triggered"
|
||||||
* // When put in a transaction, it will trigger the log after the transaction:
|
* // When put in a transaction, it will trigger the log after the transaction:
|
||||||
* ydoc.transact(() => {
|
* y.transact(() => {
|
||||||
* map.set('a', 1)
|
* map.set('a', 1)
|
||||||
* map.set('b', 1)
|
* map.set('b', 1)
|
||||||
* }) // => "change triggered"
|
* }) // => "change triggered"
|
||||||
@@ -76,13 +76,13 @@ export class Transaction {
|
|||||||
* All types that were directly modified (property added or child
|
* All types that were directly modified (property added or child
|
||||||
* inserted/deleted). New types are not included in this Set.
|
* inserted/deleted). New types are not included in this Set.
|
||||||
* Maps from type to parentSubs (`item.parentSub = null` for YArray)
|
* Maps from type to parentSubs (`item.parentSub = null` for YArray)
|
||||||
* @type {Map<AbstractType<YEvent<any>>,Set<String|null>>}
|
* @type {Map<AbstractType<YEvent>,Set<String|null>>}
|
||||||
*/
|
*/
|
||||||
this.changed = new Map()
|
this.changed = new Map()
|
||||||
/**
|
/**
|
||||||
* Stores the events for the types that observe also child elements.
|
* Stores the events for the types that observe also child elements.
|
||||||
* It is mainly used by `observeDeep`.
|
* It is mainly used by `observeDeep`.
|
||||||
* @type {Map<AbstractType<YEvent<any>>,Array<YEvent<any>>>}
|
* @type {Map<AbstractType<YEvent>,Array<YEvent>>}
|
||||||
*/
|
*/
|
||||||
this.changedParentTypes = new Map()
|
this.changedParentTypes = new Map()
|
||||||
/**
|
/**
|
||||||
@@ -103,38 +103,21 @@ export class Transaction {
|
|||||||
* @type {boolean}
|
* @type {boolean}
|
||||||
*/
|
*/
|
||||||
this.local = local
|
this.local = local
|
||||||
/**
|
|
||||||
* @type {Set<Doc>}
|
|
||||||
*/
|
|
||||||
this.subdocsAdded = new Set()
|
|
||||||
/**
|
|
||||||
* @type {Set<Doc>}
|
|
||||||
*/
|
|
||||||
this.subdocsRemoved = new Set()
|
|
||||||
/**
|
|
||||||
* @type {Set<Doc>}
|
|
||||||
*/
|
|
||||||
this.subdocsLoaded = new Set()
|
|
||||||
/**
|
|
||||||
* @type {boolean}
|
|
||||||
*/
|
|
||||||
this._needFormattingCleanup = false
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
* @return {boolean} Whether data was written.
|
|
||||||
*/
|
*/
|
||||||
export const writeUpdateMessageFromTransaction = (encoder, transaction) => {
|
export const computeUpdateMessageFromTransaction = transaction => {
|
||||||
if (transaction.deleteSet.clients.size === 0 && !map.any(transaction.afterState, (clock, client) => transaction.beforeState.get(client) !== clock)) {
|
if (transaction.deleteSet.clients.size === 0 && !map.any(transaction.afterState, (clock, client) => transaction.beforeState.get(client) !== clock)) {
|
||||||
return false
|
return null
|
||||||
}
|
}
|
||||||
|
const encoder = encoding.createEncoder()
|
||||||
sortAndMergeDeleteSet(transaction.deleteSet)
|
sortAndMergeDeleteSet(transaction.deleteSet)
|
||||||
writeStructsFromTransaction(encoder, transaction)
|
writeStructsFromTransaction(encoder, transaction)
|
||||||
writeDeleteSet(encoder, transaction.deleteSet)
|
writeDeleteSet(encoder, transaction.deleteSet)
|
||||||
return true
|
return encoder
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -153,7 +136,7 @@ export const nextID = transaction => {
|
|||||||
* did not change, it was just added and we should not fire events for `type`.
|
* did not change, it was just added and we should not fire events for `type`.
|
||||||
*
|
*
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
* @param {AbstractType<YEvent<any>>} type
|
* @param {AbstractType<YEvent>} type
|
||||||
* @param {string|null} parentSub
|
* @param {string|null} parentSub
|
||||||
*/
|
*/
|
||||||
export const addChangedTypeToTransaction = (transaction, type, parentSub) => {
|
export const addChangedTypeToTransaction = (transaction, type, parentSub) => {
|
||||||
@@ -166,29 +149,18 @@ export const addChangedTypeToTransaction = (transaction, type, parentSub) => {
|
|||||||
/**
|
/**
|
||||||
* @param {Array<AbstractStruct>} structs
|
* @param {Array<AbstractStruct>} structs
|
||||||
* @param {number} pos
|
* @param {number} pos
|
||||||
* @return {number} # of merged structs
|
|
||||||
*/
|
*/
|
||||||
const tryToMergeWithLefts = (structs, pos) => {
|
const tryToMergeWithLeft = (structs, pos) => {
|
||||||
let right = structs[pos]
|
const left = structs[pos - 1]
|
||||||
let left = structs[pos - 1]
|
const right = structs[pos]
|
||||||
let i = pos
|
if (left.deleted === right.deleted && left.constructor === right.constructor) {
|
||||||
for (; i > 0; right = left, left = structs[--i - 1]) {
|
if (left.mergeWith(right)) {
|
||||||
if (left.deleted === right.deleted && left.constructor === right.constructor) {
|
structs.splice(pos, 1)
|
||||||
if (left.mergeWith(right)) {
|
if (right instanceof Item && right.parentSub !== null && /** @type {AbstractType<any>} */ (right.parent)._map.get(right.parentSub) === right) {
|
||||||
if (right instanceof Item && right.parentSub !== null && /** @type {AbstractType<any>} */ (right.parent)._map.get(right.parentSub) === right) {
|
/** @type {AbstractType<any>} */ (right.parent)._map.set(right.parentSub, /** @type {Item} */ (left))
|
||||||
/** @type {AbstractType<any>} */ (right.parent)._map.set(right.parentSub, /** @type {Item} */ (left))
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
break
|
|
||||||
}
|
}
|
||||||
const merged = pos - i
|
|
||||||
if (merged) {
|
|
||||||
// remove all merged structs from the array
|
|
||||||
structs.splice(pos + 1 - merged, merged)
|
|
||||||
}
|
|
||||||
return merged
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -197,7 +169,7 @@ const tryToMergeWithLefts = (structs, pos) => {
|
|||||||
* @param {function(Item):boolean} gcFilter
|
* @param {function(Item):boolean} gcFilter
|
||||||
*/
|
*/
|
||||||
const tryGcDeleteSet = (ds, store, gcFilter) => {
|
const tryGcDeleteSet = (ds, store, gcFilter) => {
|
||||||
for (const [client, deleteItems] of ds.clients.entries()) {
|
for (const [client, deleteItems] of ds.clients) {
|
||||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||||
for (let di = deleteItems.length - 1; di >= 0; di--) {
|
for (let di = deleteItems.length - 1; di >= 0; di--) {
|
||||||
const deleteItem = deleteItems[di]
|
const deleteItem = deleteItems[di]
|
||||||
@@ -225,8 +197,8 @@ const tryGcDeleteSet = (ds, store, gcFilter) => {
|
|||||||
*/
|
*/
|
||||||
const tryMergeDeleteSet = (ds, store) => {
|
const tryMergeDeleteSet = (ds, store) => {
|
||||||
// try to merge deleted / gc'd items
|
// try to merge deleted / gc'd items
|
||||||
// merge from right to left for better efficiency and so we don't miss any merge targets
|
// merge from right to left for better efficiecy and so we don't miss any merge targets
|
||||||
ds.clients.forEach((deleteItems, client) => {
|
for (const [client, deleteItems] of ds.clients) {
|
||||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||||
for (let di = deleteItems.length - 1; di >= 0; di--) {
|
for (let di = deleteItems.length - 1; di >= 0; di--) {
|
||||||
const deleteItem = deleteItems[di]
|
const deleteItem = deleteItems[di]
|
||||||
@@ -235,12 +207,12 @@ const tryMergeDeleteSet = (ds, store) => {
|
|||||||
for (
|
for (
|
||||||
let si = mostRightIndexToCheck, struct = structs[si];
|
let si = mostRightIndexToCheck, struct = structs[si];
|
||||||
si > 0 && struct.id.clock >= deleteItem.clock;
|
si > 0 && struct.id.clock >= deleteItem.clock;
|
||||||
struct = structs[si]
|
struct = structs[--si]
|
||||||
) {
|
) {
|
||||||
si -= 1 + tryToMergeWithLefts(structs, si)
|
tryToMergeWithLeft(structs, si)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -267,6 +239,7 @@ const cleanupTransactions = (transactionCleanups, i) => {
|
|||||||
try {
|
try {
|
||||||
sortAndMergeDeleteSet(ds)
|
sortAndMergeDeleteSet(ds)
|
||||||
transaction.afterState = getStateVector(transaction.doc.store)
|
transaction.afterState = getStateVector(transaction.doc.store)
|
||||||
|
doc._transaction = null
|
||||||
doc.emit('beforeObserverCalls', [transaction, doc])
|
doc.emit('beforeObserverCalls', [transaction, doc])
|
||||||
/**
|
/**
|
||||||
* An array of event callbacks.
|
* An array of event callbacks.
|
||||||
@@ -286,34 +259,28 @@ const cleanupTransactions = (transactionCleanups, i) => {
|
|||||||
)
|
)
|
||||||
fs.push(() => {
|
fs.push(() => {
|
||||||
// deep observe events
|
// deep observe events
|
||||||
transaction.changedParentTypes.forEach((events, type) => {
|
transaction.changedParentTypes.forEach((events, type) =>
|
||||||
// We need to think about the possibility that the user transforms the
|
fs.push(() => {
|
||||||
// Y.Doc in the event.
|
// We need to think about the possibility that the user transforms the
|
||||||
if (type._dEH.l.length > 0 && (type._item === null || !type._item.deleted)) {
|
// Y.Doc in the event.
|
||||||
events = events
|
if (type._item === null || !type._item.deleted) {
|
||||||
.filter(event =>
|
events = events
|
||||||
event.target._item === null || !event.target._item.deleted
|
.filter(event =>
|
||||||
)
|
event.target._item === null || !event.target._item.deleted
|
||||||
events
|
)
|
||||||
.forEach(event => {
|
events
|
||||||
event.currentTarget = type
|
.forEach(event => {
|
||||||
// path is relative to the current target
|
event.currentTarget = type
|
||||||
event._path = null
|
})
|
||||||
})
|
// We don't need to check for events.length
|
||||||
// sort events by path length so that top-level events are fired first.
|
// because we know it has at least one element
|
||||||
events
|
callEventHandlerListeners(type._dEH, events, transaction)
|
||||||
.sort((event1, event2) => event1.path.length - event2.path.length)
|
}
|
||||||
// We don't need to check for events.length
|
})
|
||||||
// because we know it has at least one element
|
)
|
||||||
callEventHandlerListeners(type._dEH, events, transaction)
|
fs.push(() => doc.emit('afterTransaction', [transaction, doc]))
|
||||||
}
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
fs.push(() => doc.emit('afterTransaction', [transaction, doc]))
|
|
||||||
callAll(fs, [])
|
callAll(fs, [])
|
||||||
if (transaction._needFormattingCleanup) {
|
|
||||||
cleanupYTextAfterTransaction(transaction)
|
|
||||||
}
|
|
||||||
} finally {
|
} finally {
|
||||||
// Replace deleted items with ItemDeleted / GC.
|
// Replace deleted items with ItemDeleted / GC.
|
||||||
// This is where content is actually remove from the Yjs Doc.
|
// This is where content is actually remove from the Yjs Doc.
|
||||||
@@ -323,70 +290,45 @@ const cleanupTransactions = (transactionCleanups, i) => {
|
|||||||
tryMergeDeleteSet(ds, store)
|
tryMergeDeleteSet(ds, store)
|
||||||
|
|
||||||
// on all affected store.clients props, try to merge
|
// on all affected store.clients props, try to merge
|
||||||
transaction.afterState.forEach((clock, client) => {
|
for (const [client, clock] of transaction.afterState) {
|
||||||
const beforeClock = transaction.beforeState.get(client) || 0
|
const beforeClock = transaction.beforeState.get(client) || 0
|
||||||
if (beforeClock !== clock) {
|
if (beforeClock !== clock) {
|
||||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||||
// we iterate from right to left so we can safely remove entries
|
// we iterate from right to left so we can safely remove entries
|
||||||
const firstChangePos = math.max(findIndexSS(structs, beforeClock), 1)
|
const firstChangePos = math.max(findIndexSS(structs, beforeClock), 1)
|
||||||
for (let i = structs.length - 1; i >= firstChangePos;) {
|
for (let i = structs.length - 1; i >= firstChangePos; i--) {
|
||||||
i -= 1 + tryToMergeWithLefts(structs, i)
|
tryToMergeWithLeft(structs, i)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
// try to merge mergeStructs
|
// try to merge mergeStructs
|
||||||
// @todo: it makes more sense to transform mergeStructs to a DS, sort it, and merge from right to left
|
// @todo: it makes more sense to transform mergeStructs to a DS, sort it, and merge from right to left
|
||||||
// but at the moment DS does not handle duplicates
|
// but at the moment DS does not handle duplicates
|
||||||
for (let i = mergeStructs.length - 1; i >= 0; i--) {
|
for (let i = 0; i < mergeStructs.length; i++) {
|
||||||
const { client, clock } = mergeStructs[i].id
|
const { client, clock } = mergeStructs[i].id
|
||||||
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||||
const replacedStructPos = findIndexSS(structs, clock)
|
const replacedStructPos = findIndexSS(structs, clock)
|
||||||
if (replacedStructPos + 1 < structs.length) {
|
if (replacedStructPos + 1 < structs.length) {
|
||||||
if (tryToMergeWithLefts(structs, replacedStructPos + 1) > 1) {
|
tryToMergeWithLeft(structs, replacedStructPos + 1)
|
||||||
continue // no need to perform next check, both are already merged
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
if (replacedStructPos > 0) {
|
if (replacedStructPos > 0) {
|
||||||
tryToMergeWithLefts(structs, replacedStructPos)
|
tryToMergeWithLeft(structs, replacedStructPos)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!transaction.local && transaction.afterState.get(doc.clientID) !== transaction.beforeState.get(doc.clientID)) {
|
if (!transaction.local && transaction.afterState.get(doc.clientID) !== transaction.beforeState.get(doc.clientID)) {
|
||||||
logging.print(logging.ORANGE, logging.BOLD, '[yjs] ', logging.UNBOLD, logging.RED, 'Changed the client-id because another client seems to be using it.')
|
|
||||||
doc.clientID = generateNewClientId()
|
doc.clientID = generateNewClientId()
|
||||||
|
logging.print(logging.ORANGE, logging.BOLD, '[yjs] ', logging.UNBOLD, logging.RED, 'Changed the client-id because another client seems to be using it.')
|
||||||
}
|
}
|
||||||
// @todo Merge all the transactions into one and provide send the data as a single update message
|
// @todo Merge all the transactions into one and provide send the data as a single update message
|
||||||
doc.emit('afterTransactionCleanup', [transaction, doc])
|
doc.emit('afterTransactionCleanup', [transaction, doc])
|
||||||
if (doc._observers.has('update')) {
|
if (doc._observers.has('update')) {
|
||||||
const encoder = new UpdateEncoderV1()
|
const updateMessage = computeUpdateMessageFromTransaction(transaction)
|
||||||
const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
|
if (updateMessage !== null) {
|
||||||
if (hasContent) {
|
doc.emit('update', [encoding.toUint8Array(updateMessage), transaction.origin, doc])
|
||||||
doc.emit('update', [encoder.toUint8Array(), transaction.origin, doc, transaction])
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (doc._observers.has('updateV2')) {
|
|
||||||
const encoder = new UpdateEncoderV2()
|
|
||||||
const hasContent = writeUpdateMessageFromTransaction(encoder, transaction)
|
|
||||||
if (hasContent) {
|
|
||||||
doc.emit('updateV2', [encoder.toUint8Array(), transaction.origin, doc, transaction])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const { subdocsAdded, subdocsLoaded, subdocsRemoved } = transaction
|
|
||||||
if (subdocsAdded.size > 0 || subdocsRemoved.size > 0 || subdocsLoaded.size > 0) {
|
|
||||||
subdocsAdded.forEach(subdoc => {
|
|
||||||
subdoc.clientID = doc.clientID
|
|
||||||
if (subdoc.collectionid == null) {
|
|
||||||
subdoc.collectionid = doc.collectionid
|
|
||||||
}
|
|
||||||
doc.subdocs.add(subdoc)
|
|
||||||
})
|
|
||||||
subdocsRemoved.forEach(subdoc => doc.subdocs.delete(subdoc))
|
|
||||||
doc.emit('subdocs', [{ loaded: subdocsLoaded, added: subdocsAdded, removed: subdocsRemoved }, doc, transaction])
|
|
||||||
subdocsRemoved.forEach(subdoc => subdoc.destroy())
|
|
||||||
}
|
|
||||||
|
|
||||||
if (transactionCleanups.length <= i + 1) {
|
if (transactionCleanups.length <= i + 1) {
|
||||||
doc._transactionCleanups = []
|
doc._transactionCleanups = []
|
||||||
doc.emit('afterAllTransactions', [doc, transactionCleanups])
|
|
||||||
} else {
|
} else {
|
||||||
cleanupTransactions(transactionCleanups, i + 1)
|
cleanupTransactions(transactionCleanups, i + 1)
|
||||||
}
|
}
|
||||||
@@ -397,48 +339,34 @@ const cleanupTransactions = (transactionCleanups, i) => {
|
|||||||
/**
|
/**
|
||||||
* Implements the functionality of `y.transact(()=>{..})`
|
* Implements the functionality of `y.transact(()=>{..})`
|
||||||
*
|
*
|
||||||
* @template T
|
|
||||||
* @param {Doc} doc
|
* @param {Doc} doc
|
||||||
* @param {function(Transaction):T} f
|
* @param {function(Transaction):void} f
|
||||||
* @param {any} [origin=true]
|
* @param {any} [origin=true]
|
||||||
* @return {T}
|
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const transact = (doc, f, origin = null, local = true) => {
|
export const transact = (doc, f, origin = null, local = true) => {
|
||||||
const transactionCleanups = doc._transactionCleanups
|
const transactionCleanups = doc._transactionCleanups
|
||||||
let initialCall = false
|
let initialCall = false
|
||||||
/**
|
|
||||||
* @type {any}
|
|
||||||
*/
|
|
||||||
let result = null
|
|
||||||
if (doc._transaction === null) {
|
if (doc._transaction === null) {
|
||||||
initialCall = true
|
initialCall = true
|
||||||
doc._transaction = new Transaction(doc, origin, local)
|
doc._transaction = new Transaction(doc, origin, local)
|
||||||
transactionCleanups.push(doc._transaction)
|
transactionCleanups.push(doc._transaction)
|
||||||
if (transactionCleanups.length === 1) {
|
|
||||||
doc.emit('beforeAllTransactions', [doc])
|
|
||||||
}
|
|
||||||
doc.emit('beforeTransaction', [doc._transaction, doc])
|
doc.emit('beforeTransaction', [doc._transaction, doc])
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
result = f(doc._transaction)
|
f(doc._transaction)
|
||||||
} finally {
|
} finally {
|
||||||
if (initialCall) {
|
if (initialCall && transactionCleanups[0] === doc._transaction) {
|
||||||
const finishCleanup = doc._transaction === transactionCleanups[0]
|
// The first transaction ended, now process observer calls.
|
||||||
doc._transaction = null
|
// Observer call may create new transactions for which we need to call the observers and do cleanup.
|
||||||
if (finishCleanup) {
|
// We don't want to nest these calls, so we execute these calls one after
|
||||||
// The first transaction ended, now process observer calls.
|
// another.
|
||||||
// Observer call may create new transactions for which we need to call the observers and do cleanup.
|
// Also we need to ensure that all cleanups are called, even if the
|
||||||
// We don't want to nest these calls, so we execute these calls one after
|
// observes throw errors.
|
||||||
// another.
|
// This file is full of hacky try {} finally {} blocks to ensure that an
|
||||||
// Also we need to ensure that all cleanups are called, even if the
|
// event can throw errors and also that the cleanup is called.
|
||||||
// observes throw errors.
|
cleanupTransactions(transactionCleanups, 0)
|
||||||
// This file is full of hacky try {} finally {} blocks to ensure that an
|
|
||||||
// event can throw errors and also that the cleanup is called.
|
|
||||||
cleanupTransactions(transactionCleanups, 0)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return result
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,62 +5,50 @@ import {
|
|||||||
transact,
|
transact,
|
||||||
createID,
|
createID,
|
||||||
redoItem,
|
redoItem,
|
||||||
|
iterateStructs,
|
||||||
isParentOf,
|
isParentOf,
|
||||||
followRedone,
|
followRedone,
|
||||||
getItemCleanStart,
|
getItemCleanStart,
|
||||||
isDeleted,
|
getState,
|
||||||
addToDeleteSet,
|
ID, Transaction, Doc, Item, GC, DeleteSet, AbstractType // eslint-disable-line
|
||||||
YEvent, Transaction, Doc, Item, GC, DeleteSet, AbstractType // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as time from 'lib0/time'
|
import * as time from 'lib0/time.js'
|
||||||
import * as array from 'lib0/array'
|
import { Observable } from 'lib0/observable.js'
|
||||||
import * as logging from 'lib0/logging'
|
|
||||||
import { ObservableV2 } from 'lib0/observable'
|
|
||||||
|
|
||||||
export class StackItem {
|
class StackItem {
|
||||||
/**
|
/**
|
||||||
* @param {DeleteSet} deletions
|
* @param {DeleteSet} ds
|
||||||
* @param {DeleteSet} insertions
|
* @param {Map<number,number>} beforeState
|
||||||
|
* @param {Map<number,number>} afterState
|
||||||
*/
|
*/
|
||||||
constructor (deletions, insertions) {
|
constructor (ds, beforeState, afterState) {
|
||||||
this.insertions = insertions
|
this.ds = ds
|
||||||
this.deletions = deletions
|
this.beforeState = beforeState
|
||||||
|
this.afterState = afterState
|
||||||
/**
|
/**
|
||||||
* Use this to save and restore metadata like selection range
|
* Use this to save and restore metadata like selection range
|
||||||
*/
|
*/
|
||||||
this.meta = new Map()
|
this.meta = new Map()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/**
|
|
||||||
* @param {Transaction} tr
|
|
||||||
* @param {UndoManager} um
|
|
||||||
* @param {StackItem} stackItem
|
|
||||||
*/
|
|
||||||
const clearUndoManagerStackItem = (tr, um, stackItem) => {
|
|
||||||
iterateDeletedStructs(tr, stackItem.deletions, item => {
|
|
||||||
if (item instanceof Item && um.scope.some(type => type === tr.doc || isParentOf(/** @type {AbstractType<any>} */ (type), item))) {
|
|
||||||
keepItem(item, false)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UndoManager} undoManager
|
* @param {UndoManager} undoManager
|
||||||
* @param {Array<StackItem>} stack
|
* @param {Array<StackItem>} stack
|
||||||
* @param {'undo'|'redo'} eventType
|
* @param {string} eventType
|
||||||
* @return {StackItem?}
|
* @return {StackItem?}
|
||||||
*/
|
*/
|
||||||
const popStackItem = (undoManager, stack, eventType) => {
|
const popStackItem = (undoManager, stack, eventType) => {
|
||||||
/**
|
/**
|
||||||
* Keep a reference to the transaction so we can fire the event with the changedParentTypes
|
* Whether a change happened
|
||||||
* @type {any}
|
* @type {StackItem?}
|
||||||
*/
|
*/
|
||||||
let _tr = null
|
let result = null
|
||||||
const doc = undoManager.doc
|
const doc = undoManager.doc
|
||||||
const scope = undoManager.scope
|
const scope = undoManager.scope
|
||||||
transact(doc, transaction => {
|
transact(doc, transaction => {
|
||||||
while (stack.length > 0 && undoManager.currStackItem === null) {
|
while (stack.length > 0 && result === null) {
|
||||||
const store = doc.store
|
const store = doc.store
|
||||||
const stackItem = /** @type {StackItem} */ (stack.pop())
|
const stackItem = /** @type {StackItem} */ (stack.pop())
|
||||||
/**
|
/**
|
||||||
@@ -72,32 +60,54 @@ const popStackItem = (undoManager, stack, eventType) => {
|
|||||||
*/
|
*/
|
||||||
const itemsToDelete = []
|
const itemsToDelete = []
|
||||||
let performedChange = false
|
let performedChange = false
|
||||||
iterateDeletedStructs(transaction, stackItem.insertions, struct => {
|
stackItem.afterState.forEach((endClock, client) => {
|
||||||
if (struct instanceof Item) {
|
const startClock = stackItem.beforeState.get(client) || 0
|
||||||
if (struct.redone !== null) {
|
const len = endClock - startClock
|
||||||
let { item, diff } = followRedone(store, struct.id)
|
// @todo iterateStructs should not need the structs parameter
|
||||||
if (diff > 0) {
|
const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
|
||||||
item = getItemCleanStart(transaction, createID(item.id.client, item.id.clock + diff))
|
if (startClock !== endClock) {
|
||||||
|
// make sure structs don't overlap with the range of created operations [stackItem.start, stackItem.start + stackItem.end)
|
||||||
|
// this must be executed before deleted structs are iterated.
|
||||||
|
getItemCleanStart(transaction, createID(client, startClock))
|
||||||
|
if (endClock < getState(doc.store, client)) {
|
||||||
|
getItemCleanStart(transaction, createID(client, endClock))
|
||||||
|
}
|
||||||
|
iterateStructs(transaction, structs, startClock, len, struct => {
|
||||||
|
if (struct instanceof Item) {
|
||||||
|
if (struct.redone !== null) {
|
||||||
|
let { item, diff } = followRedone(store, struct.id)
|
||||||
|
if (diff > 0) {
|
||||||
|
item = getItemCleanStart(transaction, createID(item.id.client, item.id.clock + diff))
|
||||||
|
}
|
||||||
|
if (item.length > len) {
|
||||||
|
getItemCleanStart(transaction, createID(item.id.client, endClock))
|
||||||
|
}
|
||||||
|
struct = item
|
||||||
|
}
|
||||||
|
if (!struct.deleted && scope.some(type => isParentOf(type, /** @type {Item} */ (struct)))) {
|
||||||
|
itemsToDelete.push(struct)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
struct = item
|
})
|
||||||
}
|
|
||||||
if (!struct.deleted && scope.some(type => type === transaction.doc || isParentOf(/** @type {AbstractType<any>} */ (type), /** @type {Item} */ (struct)))) {
|
|
||||||
itemsToDelete.push(struct)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
iterateDeletedStructs(transaction, stackItem.deletions, struct => {
|
iterateDeletedStructs(transaction, stackItem.ds, struct => {
|
||||||
|
const id = struct.id
|
||||||
|
const clock = id.clock
|
||||||
|
const client = id.client
|
||||||
|
const startClock = stackItem.beforeState.get(client) || 0
|
||||||
|
const endClock = stackItem.afterState.get(client) || 0
|
||||||
if (
|
if (
|
||||||
struct instanceof Item &&
|
struct instanceof Item &&
|
||||||
scope.some(type => type === transaction.doc || isParentOf(/** @type {AbstractType<any>} */ (type), struct)) &&
|
scope.some(type => isParentOf(type, struct)) &&
|
||||||
// Never redo structs in stackItem.insertions because they were created and deleted in the same capture interval.
|
// Never redo structs in [stackItem.start, stackItem.start + stackItem.end) because they were created and deleted in the same capture interval.
|
||||||
!isDeleted(stackItem.insertions, struct.id)
|
!(clock >= startClock && clock < endClock)
|
||||||
) {
|
) {
|
||||||
itemsToRedo.add(struct)
|
itemsToRedo.add(struct)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
itemsToRedo.forEach(struct => {
|
itemsToRedo.forEach(struct => {
|
||||||
performedChange = redoItem(transaction, struct, itemsToRedo, stackItem.insertions, undoManager.ignoreRemoteMapChanges, undoManager) !== null || performedChange
|
performedChange = redoItem(transaction, struct, itemsToRedo) !== null || performedChange
|
||||||
})
|
})
|
||||||
// We want to delete in reverse order so that children are deleted before
|
// We want to delete in reverse order so that children are deleted before
|
||||||
// parents, so we have more information available when items are filtered.
|
// parents, so we have more information available when items are filtered.
|
||||||
@@ -108,44 +118,23 @@ const popStackItem = (undoManager, stack, eventType) => {
|
|||||||
performedChange = true
|
performedChange = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
undoManager.currStackItem = performedChange ? stackItem : null
|
result = stackItem
|
||||||
}
|
if (result != null) {
|
||||||
transaction.changed.forEach((subProps, type) => {
|
undoManager.emit('stack-item-popped', [{ stackItem: result, type: eventType }, undoManager])
|
||||||
// destroy search marker if necessary
|
|
||||||
if (subProps.has(null) && type._searchMarker) {
|
|
||||||
type._searchMarker.length = 0
|
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
_tr = transaction
|
|
||||||
}, undoManager)
|
}, undoManager)
|
||||||
const res = undoManager.currStackItem
|
return result
|
||||||
if (res != null) {
|
|
||||||
const changedParentTypes = _tr.changedParentTypes
|
|
||||||
undoManager.emit('stack-item-popped', [{ stackItem: res, type: eventType, changedParentTypes, origin: undoManager }, undoManager])
|
|
||||||
undoManager.currStackItem = null
|
|
||||||
}
|
|
||||||
return res
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @typedef {Object} UndoManagerOptions
|
* @typedef {Object} UndoManagerOptions
|
||||||
* @property {number} [UndoManagerOptions.captureTimeout=500]
|
* @property {number} [UndoManagerOptions.captureTimeout=500]
|
||||||
* @property {function(Transaction):boolean} [UndoManagerOptions.captureTransaction] Do not capture changes of a Transaction if result false.
|
|
||||||
* @property {function(Item):boolean} [UndoManagerOptions.deleteFilter=()=>true] Sometimes
|
* @property {function(Item):boolean} [UndoManagerOptions.deleteFilter=()=>true] Sometimes
|
||||||
* it is necessary to filter what an Undo/Redo operation can delete. If this
|
* it is necessary to filter whan an Undo/Redo operation can delete. If this
|
||||||
* filter returns false, the type/item won't be deleted even it is in the
|
* filter returns false, the type/item won't be deleted even it is in the
|
||||||
* undo/redo scope.
|
* undo/redo scope.
|
||||||
* @property {Set<any>} [UndoManagerOptions.trackedOrigins=new Set([null])]
|
* @property {Set<any>} [UndoManagerOptions.trackedOrigins=new Set([null])]
|
||||||
* @property {boolean} [ignoreRemoteMapChanges] Experimental. By default, the UndoManager will never overwrite remote changes. Enable this property to enable overwriting remote changes on key-value changes (Y.Map, properties on Y.Xml, etc..).
|
|
||||||
* @property {Doc} [doc] The document that this UndoManager operates on. Only needed if typeScope is empty.
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {Object} StackItemEvent
|
|
||||||
* @property {StackItem} StackItemEvent.stackItem
|
|
||||||
* @property {any} StackItemEvent.origin
|
|
||||||
* @property {'undo'|'redo'} StackItemEvent.type
|
|
||||||
* @property {Map<AbstractType<YEvent<any>>,Array<YEvent<any>>>} StackItemEvent.changedParentTypes
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -155,32 +144,22 @@ const popStackItem = (undoManager, stack, eventType) => {
|
|||||||
* Fires 'stack-item-popped' event when a stack item was popped from either the
|
* Fires 'stack-item-popped' event when a stack item was popped from either the
|
||||||
* undo- or the redo-stack. You may restore the saved stack information from `event.stackItem.meta`.
|
* undo- or the redo-stack. You may restore the saved stack information from `event.stackItem.meta`.
|
||||||
*
|
*
|
||||||
* @extends {ObservableV2<{'stack-item-added':function(StackItemEvent, UndoManager):void, 'stack-item-popped': function(StackItemEvent, UndoManager):void, 'stack-cleared': function({ undoStackCleared: boolean, redoStackCleared: boolean }):void, 'stack-item-updated': function(StackItemEvent, UndoManager):void }>}
|
* @extends {Observable<'stack-item-added'|'stack-item-popped'>}
|
||||||
*/
|
*/
|
||||||
export class UndoManager extends ObservableV2 {
|
export class UndoManager extends Observable {
|
||||||
/**
|
/**
|
||||||
* @param {Doc|AbstractType<any>|Array<AbstractType<any>>} typeScope Accepts either a single type, or an array of types
|
* @param {AbstractType<any>|Array<AbstractType<any>>} typeScope Accepts either a single type, or an array of types
|
||||||
* @param {UndoManagerOptions} options
|
* @param {UndoManagerOptions} options
|
||||||
*/
|
*/
|
||||||
constructor (typeScope, {
|
constructor (typeScope, { captureTimeout, deleteFilter = () => true, trackedOrigins = new Set([null]) } = {}) {
|
||||||
captureTimeout = 500,
|
if (captureTimeout == null) {
|
||||||
captureTransaction = _tr => true,
|
captureTimeout = 500
|
||||||
deleteFilter = () => true,
|
}
|
||||||
trackedOrigins = new Set([null]),
|
|
||||||
ignoreRemoteMapChanges = false,
|
|
||||||
doc = /** @type {Doc} */ (array.isArray(typeScope) ? typeScope[0].doc : typeScope instanceof Doc ? typeScope : typeScope.doc)
|
|
||||||
} = {}) {
|
|
||||||
super()
|
super()
|
||||||
/**
|
this.scope = typeScope instanceof Array ? typeScope : [typeScope]
|
||||||
* @type {Array<AbstractType<any> | Doc>}
|
|
||||||
*/
|
|
||||||
this.scope = []
|
|
||||||
this.doc = doc
|
|
||||||
this.addToScope(typeScope)
|
|
||||||
this.deleteFilter = deleteFilter
|
this.deleteFilter = deleteFilter
|
||||||
trackedOrigins.add(this)
|
trackedOrigins.add(this)
|
||||||
this.trackedOrigins = trackedOrigins
|
this.trackedOrigins = trackedOrigins
|
||||||
this.captureTransaction = captureTransaction
|
|
||||||
/**
|
/**
|
||||||
* @type {Array<StackItem>}
|
* @type {Array<StackItem>}
|
||||||
*/
|
*/
|
||||||
@@ -196,25 +175,11 @@ export class UndoManager extends ObservableV2 {
|
|||||||
*/
|
*/
|
||||||
this.undoing = false
|
this.undoing = false
|
||||||
this.redoing = false
|
this.redoing = false
|
||||||
/**
|
this.doc = /** @type {Doc} */ (this.scope[0].doc)
|
||||||
* The currently popped stack item if UndoManager.undoing or UndoManager.redoing
|
|
||||||
*
|
|
||||||
* @type {StackItem|null}
|
|
||||||
*/
|
|
||||||
this.currStackItem = null
|
|
||||||
this.lastChange = 0
|
this.lastChange = 0
|
||||||
this.ignoreRemoteMapChanges = ignoreRemoteMapChanges
|
this.doc.on('afterTransaction', /** @param {Transaction} transaction */ transaction => {
|
||||||
this.captureTimeout = captureTimeout
|
|
||||||
/**
|
|
||||||
* @param {Transaction} transaction
|
|
||||||
*/
|
|
||||||
this.afterTransactionHandler = transaction => {
|
|
||||||
// Only track certain transactions
|
// Only track certain transactions
|
||||||
if (
|
if (!this.scope.some(type => transaction.changedParentTypes.has(type)) || (!this.trackedOrigins.has(transaction.origin) && (!transaction.origin || !this.trackedOrigins.has(transaction.origin.constructor)))) {
|
||||||
!this.captureTransaction(transaction) ||
|
|
||||||
!this.scope.some(type => transaction.changedParentTypes.has(/** @type {AbstractType<any>} */ (type)) || type === this.doc) ||
|
|
||||||
(!this.trackedOrigins.has(transaction.origin) && (!transaction.origin || !this.trackedOrigins.has(transaction.origin.constructor)))
|
|
||||||
) {
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
const undoing = this.undoing
|
const undoing = this.undoing
|
||||||
@@ -224,96 +189,50 @@ export class UndoManager extends ObservableV2 {
|
|||||||
this.stopCapturing() // next undo should not be appended to last stack item
|
this.stopCapturing() // next undo should not be appended to last stack item
|
||||||
} else if (!redoing) {
|
} else if (!redoing) {
|
||||||
// neither undoing nor redoing: delete redoStack
|
// neither undoing nor redoing: delete redoStack
|
||||||
this.clear(false, true)
|
this.redoStack = []
|
||||||
}
|
}
|
||||||
const insertions = new DeleteSet()
|
const beforeState = transaction.beforeState
|
||||||
transaction.afterState.forEach((endClock, client) => {
|
const afterState = transaction.afterState
|
||||||
const startClock = transaction.beforeState.get(client) || 0
|
|
||||||
const len = endClock - startClock
|
|
||||||
if (len > 0) {
|
|
||||||
addToDeleteSet(insertions, client, startClock, len)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
const now = time.getUnixTime()
|
const now = time.getUnixTime()
|
||||||
let didAdd = false
|
if (now - this.lastChange < captureTimeout && stack.length > 0 && !undoing && !redoing) {
|
||||||
if (this.lastChange > 0 && now - this.lastChange < this.captureTimeout && stack.length > 0 && !undoing && !redoing) {
|
|
||||||
// append change to last stack op
|
// append change to last stack op
|
||||||
const lastOp = stack[stack.length - 1]
|
const lastOp = stack[stack.length - 1]
|
||||||
lastOp.deletions = mergeDeleteSets([lastOp.deletions, transaction.deleteSet])
|
lastOp.ds = mergeDeleteSets([lastOp.ds, transaction.deleteSet])
|
||||||
lastOp.insertions = mergeDeleteSets([lastOp.insertions, insertions])
|
lastOp.afterState = afterState
|
||||||
} else {
|
} else {
|
||||||
// create a new stack op
|
// create a new stack op
|
||||||
stack.push(new StackItem(transaction.deleteSet, insertions))
|
stack.push(new StackItem(transaction.deleteSet, beforeState, afterState))
|
||||||
didAdd = true
|
|
||||||
}
|
}
|
||||||
if (!undoing && !redoing) {
|
if (!undoing && !redoing) {
|
||||||
this.lastChange = now
|
this.lastChange = now
|
||||||
}
|
}
|
||||||
// make sure that deleted structs are not gc'd
|
// make sure that deleted structs are not gc'd
|
||||||
iterateDeletedStructs(transaction, transaction.deleteSet, /** @param {Item|GC} item */ item => {
|
iterateDeletedStructs(transaction, transaction.deleteSet, /** @param {Item|GC} item */ item => {
|
||||||
if (item instanceof Item && this.scope.some(type => type === transaction.doc || isParentOf(/** @type {AbstractType<any>} */ (type), item))) {
|
if (item instanceof Item && this.scope.some(type => isParentOf(type, item))) {
|
||||||
keepItem(item, true)
|
keepItem(item, true)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
this.emit('stack-item-added', [{ stackItem: stack[stack.length - 1], origin: transaction.origin, type: undoing ? 'redo' : 'undo' }, this])
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
clear () {
|
||||||
|
this.doc.transact(transaction => {
|
||||||
/**
|
/**
|
||||||
* @type {[StackItemEvent, UndoManager]}
|
* @param {StackItem} stackItem
|
||||||
*/
|
*/
|
||||||
const changeEvent = [{ stackItem: stack[stack.length - 1], origin: transaction.origin, type: undoing ? 'redo' : 'undo', changedParentTypes: transaction.changedParentTypes }, this]
|
const clearItem = stackItem => {
|
||||||
if (didAdd) {
|
iterateDeletedStructs(transaction, stackItem.ds, item => {
|
||||||
this.emit('stack-item-added', changeEvent)
|
if (item instanceof Item && this.scope.some(type => isParentOf(type, item))) {
|
||||||
} else {
|
keepItem(item, false)
|
||||||
this.emit('stack-item-updated', changeEvent)
|
}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
this.undoStack.forEach(clearItem)
|
||||||
this.doc.on('afterTransaction', this.afterTransactionHandler)
|
this.redoStack.forEach(clearItem)
|
||||||
this.doc.on('destroy', () => {
|
|
||||||
this.destroy()
|
|
||||||
})
|
})
|
||||||
}
|
this.undoStack = []
|
||||||
|
this.redoStack = []
|
||||||
/**
|
|
||||||
* @param {Array<AbstractType<any> | Doc> | AbstractType<any> | Doc} ytypes
|
|
||||||
*/
|
|
||||||
addToScope (ytypes) {
|
|
||||||
const tmpSet = new Set(this.scope)
|
|
||||||
ytypes = array.isArray(ytypes) ? ytypes : [ytypes]
|
|
||||||
ytypes.forEach(ytype => {
|
|
||||||
if (!tmpSet.has(ytype)) {
|
|
||||||
tmpSet.add(ytype)
|
|
||||||
if (ytype instanceof AbstractType ? ytype.doc !== this.doc : ytype !== this.doc) logging.warn('[yjs#509] Not same Y.Doc') // use MultiDocUndoManager instead. also see https://github.com/yjs/yjs/issues/509
|
|
||||||
this.scope.push(ytype)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {any} origin
|
|
||||||
*/
|
|
||||||
addTrackedOrigin (origin) {
|
|
||||||
this.trackedOrigins.add(origin)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {any} origin
|
|
||||||
*/
|
|
||||||
removeTrackedOrigin (origin) {
|
|
||||||
this.trackedOrigins.delete(origin)
|
|
||||||
}
|
|
||||||
|
|
||||||
clear (clearUndoStack = true, clearRedoStack = true) {
|
|
||||||
if ((clearUndoStack && this.canUndo()) || (clearRedoStack && this.canRedo())) {
|
|
||||||
this.doc.transact(tr => {
|
|
||||||
if (clearUndoStack) {
|
|
||||||
this.undoStack.forEach(item => clearUndoManagerStackItem(tr, this, item))
|
|
||||||
this.undoStack = []
|
|
||||||
}
|
|
||||||
if (clearRedoStack) {
|
|
||||||
this.redoStack.forEach(item => clearUndoManagerStackItem(tr, this, item))
|
|
||||||
this.redoStack = []
|
|
||||||
}
|
|
||||||
this.emit('stack-cleared', [{ undoStackCleared: clearUndoStack, redoStackCleared: clearRedoStack }])
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -371,28 +290,4 @@ export class UndoManager extends ObservableV2 {
|
|||||||
}
|
}
|
||||||
return res
|
return res
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Are undo steps available?
|
|
||||||
*
|
|
||||||
* @return {boolean} `true` if undo is possible
|
|
||||||
*/
|
|
||||||
canUndo () {
|
|
||||||
return this.undoStack.length > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Are redo steps available?
|
|
||||||
*
|
|
||||||
* @return {boolean} `true` if redo is possible
|
|
||||||
*/
|
|
||||||
canRedo () {
|
|
||||||
return this.redoStack.length > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
destroy () {
|
|
||||||
this.trackedOrigins.delete(this)
|
|
||||||
this.doc.off('afterTransaction', this.afterTransactionHandler)
|
|
||||||
super.destroy()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,281 +0,0 @@
|
|||||||
import * as buffer from 'lib0/buffer'
|
|
||||||
import * as decoding from 'lib0/decoding'
|
|
||||||
import {
|
|
||||||
ID, createID
|
|
||||||
} from '../internals.js'
|
|
||||||
|
|
||||||
export class DSDecoderV1 {
|
|
||||||
/**
|
|
||||||
* @param {decoding.Decoder} decoder
|
|
||||||
*/
|
|
||||||
constructor (decoder) {
|
|
||||||
this.restDecoder = decoder
|
|
||||||
}
|
|
||||||
|
|
||||||
resetDsCurVal () {
|
|
||||||
// nop
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
readDsClock () {
|
|
||||||
return decoding.readVarUint(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
readDsLen () {
|
|
||||||
return decoding.readVarUint(this.restDecoder)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class UpdateDecoderV1 extends DSDecoderV1 {
|
|
||||||
/**
|
|
||||||
* @return {ID}
|
|
||||||
*/
|
|
||||||
readLeftID () {
|
|
||||||
return createID(decoding.readVarUint(this.restDecoder), decoding.readVarUint(this.restDecoder))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {ID}
|
|
||||||
*/
|
|
||||||
readRightID () {
|
|
||||||
return createID(decoding.readVarUint(this.restDecoder), decoding.readVarUint(this.restDecoder))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Read the next client id.
|
|
||||||
* Use this in favor of readID whenever possible to reduce the number of objects created.
|
|
||||||
*/
|
|
||||||
readClient () {
|
|
||||||
return decoding.readVarUint(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
readInfo () {
|
|
||||||
return decoding.readUint8(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {string}
|
|
||||||
*/
|
|
||||||
readString () {
|
|
||||||
return decoding.readVarString(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {boolean} isKey
|
|
||||||
*/
|
|
||||||
readParentInfo () {
|
|
||||||
return decoding.readVarUint(this.restDecoder) === 1
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
readTypeRef () {
|
|
||||||
return decoding.readVarUint(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write len of a struct - well suited for Opt RLE encoder.
|
|
||||||
*
|
|
||||||
* @return {number} len
|
|
||||||
*/
|
|
||||||
readLen () {
|
|
||||||
return decoding.readVarUint(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {any}
|
|
||||||
*/
|
|
||||||
readAny () {
|
|
||||||
return decoding.readAny(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
readBuf () {
|
|
||||||
return buffer.copyUint8Array(decoding.readVarUint8Array(this.restDecoder))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Legacy implementation uses JSON parse. We use any-decoding in v2.
|
|
||||||
*
|
|
||||||
* @return {any}
|
|
||||||
*/
|
|
||||||
readJSON () {
|
|
||||||
return JSON.parse(decoding.readVarString(this.restDecoder))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {string}
|
|
||||||
*/
|
|
||||||
readKey () {
|
|
||||||
return decoding.readVarString(this.restDecoder)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class DSDecoderV2 {
|
|
||||||
/**
|
|
||||||
* @param {decoding.Decoder} decoder
|
|
||||||
*/
|
|
||||||
constructor (decoder) {
|
|
||||||
/**
|
|
||||||
* @private
|
|
||||||
*/
|
|
||||||
this.dsCurrVal = 0
|
|
||||||
this.restDecoder = decoder
|
|
||||||
}
|
|
||||||
|
|
||||||
resetDsCurVal () {
|
|
||||||
this.dsCurrVal = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
readDsClock () {
|
|
||||||
this.dsCurrVal += decoding.readVarUint(this.restDecoder)
|
|
||||||
return this.dsCurrVal
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
readDsLen () {
|
|
||||||
const diff = decoding.readVarUint(this.restDecoder) + 1
|
|
||||||
this.dsCurrVal += diff
|
|
||||||
return diff
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class UpdateDecoderV2 extends DSDecoderV2 {
|
|
||||||
/**
|
|
||||||
* @param {decoding.Decoder} decoder
|
|
||||||
*/
|
|
||||||
constructor (decoder) {
|
|
||||||
super(decoder)
|
|
||||||
/**
|
|
||||||
* List of cached keys. If the keys[id] does not exist, we read a new key
|
|
||||||
* from stringEncoder and push it to keys.
|
|
||||||
*
|
|
||||||
* @type {Array<string>}
|
|
||||||
*/
|
|
||||||
this.keys = []
|
|
||||||
decoding.readVarUint(decoder) // read feature flag - currently unused
|
|
||||||
this.keyClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
|
||||||
this.clientDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder))
|
|
||||||
this.leftClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
|
||||||
this.rightClockDecoder = new decoding.IntDiffOptRleDecoder(decoding.readVarUint8Array(decoder))
|
|
||||||
this.infoDecoder = new decoding.RleDecoder(decoding.readVarUint8Array(decoder), decoding.readUint8)
|
|
||||||
this.stringDecoder = new decoding.StringDecoder(decoding.readVarUint8Array(decoder))
|
|
||||||
this.parentInfoDecoder = new decoding.RleDecoder(decoding.readVarUint8Array(decoder), decoding.readUint8)
|
|
||||||
this.typeRefDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder))
|
|
||||||
this.lenDecoder = new decoding.UintOptRleDecoder(decoding.readVarUint8Array(decoder))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {ID}
|
|
||||||
*/
|
|
||||||
readLeftID () {
|
|
||||||
return new ID(this.clientDecoder.read(), this.leftClockDecoder.read())
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {ID}
|
|
||||||
*/
|
|
||||||
readRightID () {
|
|
||||||
return new ID(this.clientDecoder.read(), this.rightClockDecoder.read())
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Read the next client id.
|
|
||||||
* Use this in favor of readID whenever possible to reduce the number of objects created.
|
|
||||||
*/
|
|
||||||
readClient () {
|
|
||||||
return this.clientDecoder.read()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
readInfo () {
|
|
||||||
return /** @type {number} */ (this.infoDecoder.read())
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {string}
|
|
||||||
*/
|
|
||||||
readString () {
|
|
||||||
return this.stringDecoder.read()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {boolean}
|
|
||||||
*/
|
|
||||||
readParentInfo () {
|
|
||||||
return this.parentInfoDecoder.read() === 1
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {number} An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
readTypeRef () {
|
|
||||||
return this.typeRefDecoder.read()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write len of a struct - well suited for Opt RLE encoder.
|
|
||||||
*
|
|
||||||
* @return {number}
|
|
||||||
*/
|
|
||||||
readLen () {
|
|
||||||
return this.lenDecoder.read()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {any}
|
|
||||||
*/
|
|
||||||
readAny () {
|
|
||||||
return decoding.readAny(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
readBuf () {
|
|
||||||
return decoding.readVarUint8Array(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This is mainly here for legacy purposes.
|
|
||||||
*
|
|
||||||
* Initial we incoded objects using JSON. Now we use the much faster lib0/any-encoder. This method mainly exists for legacy purposes for the v1 encoder.
|
|
||||||
*
|
|
||||||
* @return {any}
|
|
||||||
*/
|
|
||||||
readJSON () {
|
|
||||||
return decoding.readAny(this.restDecoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {string}
|
|
||||||
*/
|
|
||||||
readKey () {
|
|
||||||
const keyClock = this.keyClockDecoder.read()
|
|
||||||
if (keyClock < this.keys.length) {
|
|
||||||
return this.keys[keyClock]
|
|
||||||
} else {
|
|
||||||
const key = this.stringDecoder.read()
|
|
||||||
this.keys.push(key)
|
|
||||||
return key
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,320 +0,0 @@
|
|||||||
import * as error from 'lib0/error'
|
|
||||||
import * as encoding from 'lib0/encoding'
|
|
||||||
|
|
||||||
import {
|
|
||||||
ID // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
|
||||||
|
|
||||||
export class DSEncoderV1 {
|
|
||||||
constructor () {
|
|
||||||
this.restEncoder = encoding.createEncoder()
|
|
||||||
}
|
|
||||||
|
|
||||||
toUint8Array () {
|
|
||||||
return encoding.toUint8Array(this.restEncoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
resetDsCurVal () {
|
|
||||||
// nop
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} clock
|
|
||||||
*/
|
|
||||||
writeDsClock (clock) {
|
|
||||||
encoding.writeVarUint(this.restEncoder, clock)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} len
|
|
||||||
*/
|
|
||||||
writeDsLen (len) {
|
|
||||||
encoding.writeVarUint(this.restEncoder, len)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class UpdateEncoderV1 extends DSEncoderV1 {
|
|
||||||
/**
|
|
||||||
* @param {ID} id
|
|
||||||
*/
|
|
||||||
writeLeftID (id) {
|
|
||||||
encoding.writeVarUint(this.restEncoder, id.client)
|
|
||||||
encoding.writeVarUint(this.restEncoder, id.clock)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ID} id
|
|
||||||
*/
|
|
||||||
writeRightID (id) {
|
|
||||||
encoding.writeVarUint(this.restEncoder, id.client)
|
|
||||||
encoding.writeVarUint(this.restEncoder, id.clock)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Use writeClient and writeClock instead of writeID if possible.
|
|
||||||
* @param {number} client
|
|
||||||
*/
|
|
||||||
writeClient (client) {
|
|
||||||
encoding.writeVarUint(this.restEncoder, client)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
writeInfo (info) {
|
|
||||||
encoding.writeUint8(this.restEncoder, info)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {string} s
|
|
||||||
*/
|
|
||||||
writeString (s) {
|
|
||||||
encoding.writeVarString(this.restEncoder, s)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {boolean} isYKey
|
|
||||||
*/
|
|
||||||
writeParentInfo (isYKey) {
|
|
||||||
encoding.writeVarUint(this.restEncoder, isYKey ? 1 : 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
writeTypeRef (info) {
|
|
||||||
encoding.writeVarUint(this.restEncoder, info)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write len of a struct - well suited for Opt RLE encoder.
|
|
||||||
*
|
|
||||||
* @param {number} len
|
|
||||||
*/
|
|
||||||
writeLen (len) {
|
|
||||||
encoding.writeVarUint(this.restEncoder, len)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {any} any
|
|
||||||
*/
|
|
||||||
writeAny (any) {
|
|
||||||
encoding.writeAny(this.restEncoder, any)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} buf
|
|
||||||
*/
|
|
||||||
writeBuf (buf) {
|
|
||||||
encoding.writeVarUint8Array(this.restEncoder, buf)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {any} embed
|
|
||||||
*/
|
|
||||||
writeJSON (embed) {
|
|
||||||
encoding.writeVarString(this.restEncoder, JSON.stringify(embed))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {string} key
|
|
||||||
*/
|
|
||||||
writeKey (key) {
|
|
||||||
encoding.writeVarString(this.restEncoder, key)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class DSEncoderV2 {
|
|
||||||
constructor () {
|
|
||||||
this.restEncoder = encoding.createEncoder() // encodes all the rest / non-optimized
|
|
||||||
this.dsCurrVal = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
toUint8Array () {
|
|
||||||
return encoding.toUint8Array(this.restEncoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
resetDsCurVal () {
|
|
||||||
this.dsCurrVal = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} clock
|
|
||||||
*/
|
|
||||||
writeDsClock (clock) {
|
|
||||||
const diff = clock - this.dsCurrVal
|
|
||||||
this.dsCurrVal = clock
|
|
||||||
encoding.writeVarUint(this.restEncoder, diff)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} len
|
|
||||||
*/
|
|
||||||
writeDsLen (len) {
|
|
||||||
if (len === 0) {
|
|
||||||
error.unexpectedCase()
|
|
||||||
}
|
|
||||||
encoding.writeVarUint(this.restEncoder, len - 1)
|
|
||||||
this.dsCurrVal += len
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class UpdateEncoderV2 extends DSEncoderV2 {
|
|
||||||
constructor () {
|
|
||||||
super()
|
|
||||||
/**
|
|
||||||
* @type {Map<string,number>}
|
|
||||||
*/
|
|
||||||
this.keyMap = new Map()
|
|
||||||
/**
|
|
||||||
* Refers to the next unique key-identifier to me used.
|
|
||||||
* See writeKey method for more information.
|
|
||||||
*
|
|
||||||
* @type {number}
|
|
||||||
*/
|
|
||||||
this.keyClock = 0
|
|
||||||
this.keyClockEncoder = new encoding.IntDiffOptRleEncoder()
|
|
||||||
this.clientEncoder = new encoding.UintOptRleEncoder()
|
|
||||||
this.leftClockEncoder = new encoding.IntDiffOptRleEncoder()
|
|
||||||
this.rightClockEncoder = new encoding.IntDiffOptRleEncoder()
|
|
||||||
this.infoEncoder = new encoding.RleEncoder(encoding.writeUint8)
|
|
||||||
this.stringEncoder = new encoding.StringEncoder()
|
|
||||||
this.parentInfoEncoder = new encoding.RleEncoder(encoding.writeUint8)
|
|
||||||
this.typeRefEncoder = new encoding.UintOptRleEncoder()
|
|
||||||
this.lenEncoder = new encoding.UintOptRleEncoder()
|
|
||||||
}
|
|
||||||
|
|
||||||
toUint8Array () {
|
|
||||||
const encoder = encoding.createEncoder()
|
|
||||||
encoding.writeVarUint(encoder, 0) // this is a feature flag that we might use in the future
|
|
||||||
encoding.writeVarUint8Array(encoder, this.keyClockEncoder.toUint8Array())
|
|
||||||
encoding.writeVarUint8Array(encoder, this.clientEncoder.toUint8Array())
|
|
||||||
encoding.writeVarUint8Array(encoder, this.leftClockEncoder.toUint8Array())
|
|
||||||
encoding.writeVarUint8Array(encoder, this.rightClockEncoder.toUint8Array())
|
|
||||||
encoding.writeVarUint8Array(encoder, encoding.toUint8Array(this.infoEncoder))
|
|
||||||
encoding.writeVarUint8Array(encoder, this.stringEncoder.toUint8Array())
|
|
||||||
encoding.writeVarUint8Array(encoder, encoding.toUint8Array(this.parentInfoEncoder))
|
|
||||||
encoding.writeVarUint8Array(encoder, this.typeRefEncoder.toUint8Array())
|
|
||||||
encoding.writeVarUint8Array(encoder, this.lenEncoder.toUint8Array())
|
|
||||||
// @note The rest encoder is appended! (note the missing var)
|
|
||||||
encoding.writeUint8Array(encoder, encoding.toUint8Array(this.restEncoder))
|
|
||||||
return encoding.toUint8Array(encoder)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ID} id
|
|
||||||
*/
|
|
||||||
writeLeftID (id) {
|
|
||||||
this.clientEncoder.write(id.client)
|
|
||||||
this.leftClockEncoder.write(id.clock)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ID} id
|
|
||||||
*/
|
|
||||||
writeRightID (id) {
|
|
||||||
this.clientEncoder.write(id.client)
|
|
||||||
this.rightClockEncoder.write(id.clock)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} client
|
|
||||||
*/
|
|
||||||
writeClient (client) {
|
|
||||||
this.clientEncoder.write(client)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
writeInfo (info) {
|
|
||||||
this.infoEncoder.write(info)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {string} s
|
|
||||||
*/
|
|
||||||
writeString (s) {
|
|
||||||
this.stringEncoder.write(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {boolean} isYKey
|
|
||||||
*/
|
|
||||||
writeParentInfo (isYKey) {
|
|
||||||
this.parentInfoEncoder.write(isYKey ? 1 : 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {number} info An unsigned 8-bit integer
|
|
||||||
*/
|
|
||||||
writeTypeRef (info) {
|
|
||||||
this.typeRefEncoder.write(info)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write len of a struct - well suited for Opt RLE encoder.
|
|
||||||
*
|
|
||||||
* @param {number} len
|
|
||||||
*/
|
|
||||||
writeLen (len) {
|
|
||||||
this.lenEncoder.write(len)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {any} any
|
|
||||||
*/
|
|
||||||
writeAny (any) {
|
|
||||||
encoding.writeAny(this.restEncoder, any)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} buf
|
|
||||||
*/
|
|
||||||
writeBuf (buf) {
|
|
||||||
encoding.writeVarUint8Array(this.restEncoder, buf)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This is mainly here for legacy purposes.
|
|
||||||
*
|
|
||||||
* Initial we incoded objects using JSON. Now we use the much faster lib0/any-encoder. This method mainly exists for legacy purposes for the v1 encoder.
|
|
||||||
*
|
|
||||||
* @param {any} embed
|
|
||||||
*/
|
|
||||||
writeJSON (embed) {
|
|
||||||
encoding.writeAny(this.restEncoder, embed)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Property keys are often reused. For example, in y-prosemirror the key `bold` might
|
|
||||||
* occur very often. For a 3d application, the key `position` might occur very often.
|
|
||||||
*
|
|
||||||
* We cache these keys in a Map and refer to them via a unique number.
|
|
||||||
*
|
|
||||||
* @param {string} key
|
|
||||||
*/
|
|
||||||
writeKey (key) {
|
|
||||||
const clock = this.keyMap.get(key)
|
|
||||||
if (clock === undefined) {
|
|
||||||
/**
|
|
||||||
* @todo uncomment to introduce this feature finally
|
|
||||||
*
|
|
||||||
* Background. The ContentFormat object was always encoded using writeKey, but the decoder used to use readString.
|
|
||||||
* Furthermore, I forgot to set the keyclock. So everything was working fine.
|
|
||||||
*
|
|
||||||
* However, this feature here is basically useless as it is not being used (it actually only consumes extra memory).
|
|
||||||
*
|
|
||||||
* I don't know yet how to reintroduce this feature..
|
|
||||||
*
|
|
||||||
* Older clients won't be able to read updates when we reintroduce this feature. So this should probably be done using a flag.
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
// this.keyMap.set(key, this.keyClock)
|
|
||||||
this.keyClockEncoder.write(this.keyClock++)
|
|
||||||
this.stringEncoder.write(key)
|
|
||||||
} else {
|
|
||||||
this.keyClockEncoder.write(clock)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,27 +1,24 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
isDeleted,
|
isDeleted,
|
||||||
Item, AbstractType, Transaction, AbstractStruct // eslint-disable-line
|
Item, AbstractType, Transaction, AbstractStruct // eslint-disable-line
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as set from 'lib0/set'
|
import * as set from 'lib0/set.js'
|
||||||
import * as array from 'lib0/array'
|
import * as array from 'lib0/array.js'
|
||||||
import * as error from 'lib0/error'
|
|
||||||
|
|
||||||
const errorComputeChanges = 'You must not compute changes after the event-handler fired.'
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @template {AbstractType<any>} T
|
|
||||||
* YEvent describes the changes on a YType.
|
* YEvent describes the changes on a YType.
|
||||||
*/
|
*/
|
||||||
export class YEvent {
|
export class YEvent {
|
||||||
/**
|
/**
|
||||||
* @param {T} target The changed type.
|
* @param {AbstractType<any>} target The changed type.
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
*/
|
*/
|
||||||
constructor (target, transaction) {
|
constructor (target, transaction) {
|
||||||
/**
|
/**
|
||||||
* The type on which this event was created on.
|
* The type on which this event was created on.
|
||||||
* @type {T}
|
* @type {AbstractType<any>}
|
||||||
*/
|
*/
|
||||||
this.target = target
|
this.target = target
|
||||||
/**
|
/**
|
||||||
@@ -38,25 +35,11 @@ export class YEvent {
|
|||||||
* @type {Object|null}
|
* @type {Object|null}
|
||||||
*/
|
*/
|
||||||
this._changes = null
|
this._changes = null
|
||||||
/**
|
|
||||||
* @type {null | Map<string, { action: 'add' | 'update' | 'delete', oldValue: any, newValue: any }>}
|
|
||||||
*/
|
|
||||||
this._keys = null
|
|
||||||
/**
|
|
||||||
* @type {null | Array<{ insert?: string | Array<any> | object | AbstractType<any>, retain?: number, delete?: number, attributes?: Object<string, any> }>}
|
|
||||||
*/
|
|
||||||
this._delta = null
|
|
||||||
/**
|
|
||||||
* @type {Array<string|number>|null}
|
|
||||||
*/
|
|
||||||
this._path = null
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Computes the path from `y` to the changed type.
|
* Computes the path from `y` to the changed type.
|
||||||
*
|
*
|
||||||
* @todo v14 should standardize on path: Array<{parent, index}> because that is easier to work with.
|
|
||||||
*
|
|
||||||
* The following property holds:
|
* The following property holds:
|
||||||
* @example
|
* @example
|
||||||
* let type = y
|
* let type = y
|
||||||
@@ -66,7 +49,8 @@ export class YEvent {
|
|||||||
* type === event.target // => true
|
* type === event.target // => true
|
||||||
*/
|
*/
|
||||||
get path () {
|
get path () {
|
||||||
return this._path || (this._path = getPathTo(this.currentTarget, this.target))
|
// @ts-ignore _item is defined because target is integrated
|
||||||
|
return getPathTo(this.currentTarget, this.target)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -81,74 +65,6 @@ export class YEvent {
|
|||||||
return isDeleted(this.transaction.deleteSet, struct.id)
|
return isDeleted(this.transaction.deleteSet, struct.id)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @type {Map<string, { action: 'add' | 'update' | 'delete', oldValue: any, newValue: any }>}
|
|
||||||
*/
|
|
||||||
get keys () {
|
|
||||||
if (this._keys === null) {
|
|
||||||
if (this.transaction.doc._transactionCleanups.length === 0) {
|
|
||||||
throw error.create(errorComputeChanges)
|
|
||||||
}
|
|
||||||
const keys = new Map()
|
|
||||||
const target = this.target
|
|
||||||
const changed = /** @type Set<string|null> */ (this.transaction.changed.get(target))
|
|
||||||
changed.forEach(key => {
|
|
||||||
if (key !== null) {
|
|
||||||
const item = /** @type {Item} */ (target._map.get(key))
|
|
||||||
/**
|
|
||||||
* @type {'delete' | 'add' | 'update'}
|
|
||||||
*/
|
|
||||||
let action
|
|
||||||
let oldValue
|
|
||||||
if (this.adds(item)) {
|
|
||||||
let prev = item.left
|
|
||||||
while (prev !== null && this.adds(prev)) {
|
|
||||||
prev = prev.left
|
|
||||||
}
|
|
||||||
if (this.deletes(item)) {
|
|
||||||
if (prev !== null && this.deletes(prev)) {
|
|
||||||
action = 'delete'
|
|
||||||
oldValue = array.last(prev.content.getContent())
|
|
||||||
} else {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (prev !== null && this.deletes(prev)) {
|
|
||||||
action = 'update'
|
|
||||||
oldValue = array.last(prev.content.getContent())
|
|
||||||
} else {
|
|
||||||
action = 'add'
|
|
||||||
oldValue = undefined
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (this.deletes(item)) {
|
|
||||||
action = 'delete'
|
|
||||||
oldValue = array.last(/** @type {Item} */ item.content.getContent())
|
|
||||||
} else {
|
|
||||||
return // nop
|
|
||||||
}
|
|
||||||
}
|
|
||||||
keys.set(key, { action, oldValue })
|
|
||||||
}
|
|
||||||
})
|
|
||||||
this._keys = keys
|
|
||||||
}
|
|
||||||
return this._keys
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This is a computed property. Note that this can only be safely computed during the
|
|
||||||
* event call. Computing this property after other changes happened might result in
|
|
||||||
* unexpected behavior (incorrect computation of deltas). A safe way to collect changes
|
|
||||||
* is to store the `changes` or the `delta` object. Avoid storing the `transaction` object.
|
|
||||||
*
|
|
||||||
* @type {Array<{insert?: string | Array<any> | object | AbstractType<any>, retain?: number, delete?: number, attributes?: Object<string, any>}>}
|
|
||||||
*/
|
|
||||||
get delta () {
|
|
||||||
return this.changes.delta
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if a struct is added by this event.
|
* Check if a struct is added by this event.
|
||||||
*
|
*
|
||||||
@@ -162,19 +78,11 @@ export class YEvent {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This is a computed property. Note that this can only be safely computed during the
|
* @return {{added:Set<Item>,deleted:Set<Item>,delta:Array<{insert:Array<any>}|{delete:number}|{retain:number}>}}
|
||||||
* event call. Computing this property after other changes happened might result in
|
|
||||||
* unexpected behavior (incorrect computation of deltas). A safe way to collect changes
|
|
||||||
* is to store the `changes` or the `delta` object. Avoid storing the `transaction` object.
|
|
||||||
*
|
|
||||||
* @type {{added:Set<Item>,deleted:Set<Item>,keys:Map<string,{action:'add'|'update'|'delete',oldValue:any}>,delta:Array<{insert?:Array<any>|string, delete?:number, retain?:number}>}}
|
|
||||||
*/
|
*/
|
||||||
get changes () {
|
get changes () {
|
||||||
let changes = this._changes
|
let changes = this._changes
|
||||||
if (changes === null) {
|
if (changes === null) {
|
||||||
if (this.transaction.doc._transactionCleanups.length === 0) {
|
|
||||||
throw error.create(errorComputeChanges)
|
|
||||||
}
|
|
||||||
const target = this.target
|
const target = this.target
|
||||||
const added = set.create()
|
const added = set.create()
|
||||||
const deleted = set.create()
|
const deleted = set.create()
|
||||||
@@ -182,11 +90,12 @@ export class YEvent {
|
|||||||
* @type {Array<{insert:Array<any>}|{delete:number}|{retain:number}>}
|
* @type {Array<{insert:Array<any>}|{delete:number}|{retain:number}>}
|
||||||
*/
|
*/
|
||||||
const delta = []
|
const delta = []
|
||||||
|
/**
|
||||||
|
* @type {Map<string,{ action: 'add' | 'update' | 'delete', oldValue: any}>}
|
||||||
|
*/
|
||||||
|
const keys = new Map()
|
||||||
changes = {
|
changes = {
|
||||||
added,
|
added, deleted, delta, keys
|
||||||
deleted,
|
|
||||||
delta,
|
|
||||||
keys: this.keys
|
|
||||||
}
|
}
|
||||||
const changed = /** @type Set<string|null> */ (this.transaction.changed.get(target))
|
const changed = /** @type Set<string|null> */ (this.transaction.changed.get(target))
|
||||||
if (changed.has(null)) {
|
if (changed.has(null)) {
|
||||||
@@ -230,6 +139,46 @@ export class YEvent {
|
|||||||
packOp()
|
packOp()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
changed.forEach(key => {
|
||||||
|
if (key !== null) {
|
||||||
|
const item = /** @type {Item} */ (target._map.get(key))
|
||||||
|
/**
|
||||||
|
* @type {'delete' | 'add' | 'update'}
|
||||||
|
*/
|
||||||
|
let action
|
||||||
|
let oldValue
|
||||||
|
if (this.adds(item)) {
|
||||||
|
let prev = item.left
|
||||||
|
while (prev !== null && this.adds(prev)) {
|
||||||
|
prev = prev.left
|
||||||
|
}
|
||||||
|
if (this.deletes(item)) {
|
||||||
|
if (prev !== null && this.deletes(prev)) {
|
||||||
|
action = 'delete'
|
||||||
|
oldValue = array.last(prev.content.getContent())
|
||||||
|
} else {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (prev !== null && this.deletes(prev)) {
|
||||||
|
action = 'update'
|
||||||
|
oldValue = array.last(prev.content.getContent())
|
||||||
|
} else {
|
||||||
|
action = 'add'
|
||||||
|
oldValue = undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (this.deletes(item)) {
|
||||||
|
action = 'delete'
|
||||||
|
oldValue = array.last(/** @type {Item} */ item.content.getContent())
|
||||||
|
} else {
|
||||||
|
return // nop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
keys.set(key, { action, oldValue })
|
||||||
|
}
|
||||||
|
})
|
||||||
this._changes = changes
|
this._changes = changes
|
||||||
}
|
}
|
||||||
return /** @type {any} */ (changes)
|
return /** @type {any} */ (changes)
|
||||||
@@ -264,8 +213,8 @@ const getPathTo = (parent, child) => {
|
|||||||
let i = 0
|
let i = 0
|
||||||
let c = /** @type {AbstractType<any>} */ (child._item.parent)._start
|
let c = /** @type {AbstractType<any>} */ (child._item.parent)._start
|
||||||
while (c !== child._item && c !== null) {
|
while (c !== child._item && c !== null) {
|
||||||
if (!c.deleted && c.countable) {
|
if (!c.deleted) {
|
||||||
i += c.length
|
i++
|
||||||
}
|
}
|
||||||
c = c.right
|
c = c.right
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* @module encoding
|
* @module encoding
|
||||||
*/
|
*
|
||||||
/*
|
|
||||||
* We use the first five bits in the info flag for determining the type of the struct.
|
* We use the first five bits in the info flag for determining the type of the struct.
|
||||||
*
|
*
|
||||||
* 0: GC
|
* 0: GC
|
||||||
@@ -16,6 +16,8 @@
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
findIndexSS,
|
findIndexSS,
|
||||||
|
writeID,
|
||||||
|
readID,
|
||||||
getState,
|
getState,
|
||||||
createID,
|
createID,
|
||||||
getStateVector,
|
getStateVector,
|
||||||
@@ -23,31 +25,16 @@ import {
|
|||||||
writeDeleteSet,
|
writeDeleteSet,
|
||||||
createDeleteSetFromStructStore,
|
createDeleteSetFromStructStore,
|
||||||
transact,
|
transact,
|
||||||
readItemContent,
|
readItem,
|
||||||
UpdateDecoderV1,
|
Doc, Transaction, GC, Item, StructStore, ID // eslint-disable-line
|
||||||
UpdateDecoderV2,
|
|
||||||
UpdateEncoderV1,
|
|
||||||
UpdateEncoderV2,
|
|
||||||
DSEncoderV2,
|
|
||||||
DSDecoderV1,
|
|
||||||
DSEncoderV1,
|
|
||||||
mergeUpdates,
|
|
||||||
mergeUpdatesV2,
|
|
||||||
Skip,
|
|
||||||
diffUpdateV2,
|
|
||||||
convertUpdateFormatV2ToV1,
|
|
||||||
DSDecoderV2, Doc, Transaction, GC, Item, StructStore // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
} from '../internals.js'
|
||||||
|
|
||||||
import * as encoding from 'lib0/encoding'
|
import * as encoding from 'lib0/encoding.js'
|
||||||
import * as decoding from 'lib0/decoding'
|
import * as decoding from 'lib0/decoding.js'
|
||||||
import * as binary from 'lib0/binary'
|
import * as binary from 'lib0/binary.js'
|
||||||
import * as map from 'lib0/map'
|
|
||||||
import * as math from 'lib0/math'
|
|
||||||
import * as array from 'lib0/array'
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {Array<GC|Item>} structs All structs by `client`
|
* @param {Array<GC|Item>} structs All structs by `client`
|
||||||
* @param {number} client
|
* @param {number} client
|
||||||
* @param {number} clock write structs starting with `ID(client,clock)`
|
* @param {number} clock write structs starting with `ID(client,clock)`
|
||||||
@@ -56,12 +43,10 @@ import * as array from 'lib0/array'
|
|||||||
*/
|
*/
|
||||||
const writeStructs = (encoder, structs, client, clock) => {
|
const writeStructs = (encoder, structs, client, clock) => {
|
||||||
// write first id
|
// write first id
|
||||||
clock = math.max(clock, structs[0].id.clock) // make sure the first id exists
|
|
||||||
const startNewStructs = findIndexSS(structs, clock)
|
const startNewStructs = findIndexSS(structs, clock)
|
||||||
// write # encoded structs
|
// write # encoded structs
|
||||||
encoding.writeVarUint(encoder.restEncoder, structs.length - startNewStructs)
|
encoding.writeVarUint(encoder, structs.length - startNewStructs)
|
||||||
encoder.writeClient(client)
|
writeID(encoder, createID(client, clock))
|
||||||
encoding.writeVarUint(encoder.restEncoder, clock)
|
|
||||||
const firstStruct = structs[startNewStructs]
|
const firstStruct = structs[startNewStructs]
|
||||||
// write first struct with an offset
|
// write first struct with an offset
|
||||||
firstStruct.write(encoder, clock - firstStruct.id.clock)
|
firstStruct.write(encoder, clock - firstStruct.id.clock)
|
||||||
@@ -71,7 +56,7 @@ const writeStructs = (encoder, structs, client, clock) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {StructStore} store
|
* @param {StructStore} store
|
||||||
* @param {Map<number,number>} _sm
|
* @param {Map<number,number>} _sm
|
||||||
*
|
*
|
||||||
@@ -87,112 +72,47 @@ export const writeClientsStructs = (encoder, store, _sm) => {
|
|||||||
sm.set(client, clock)
|
sm.set(client, clock)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
getStateVector(store).forEach((_clock, client) => {
|
getStateVector(store).forEach((clock, client) => {
|
||||||
if (!_sm.has(client)) {
|
if (!_sm.has(client)) {
|
||||||
sm.set(client, 0)
|
sm.set(client, 0)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
// write # states that were updated
|
// write # states that were updated
|
||||||
encoding.writeVarUint(encoder.restEncoder, sm.size)
|
encoding.writeVarUint(encoder, sm.size)
|
||||||
// Write items with higher client ids first
|
// Write items with higher client ids first
|
||||||
// This heavily improves the conflict algorithm.
|
// This heavily improves the conflict algorithm.
|
||||||
array.from(sm.entries()).sort((a, b) => b[0] - a[0]).forEach(([client, clock]) => {
|
Array.from(sm.entries()).sort((a, b) => b[0] - a[0]).forEach(([client, clock]) => {
|
||||||
writeStructs(encoder, /** @type {Array<GC|Item>} */ (store.clients.get(client)), client, clock)
|
// @ts-ignore
|
||||||
|
writeStructs(encoder, store.clients.get(client), client, clock)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder The decoder object to read data from.
|
* @param {decoding.Decoder} decoder The decoder object to read data from.
|
||||||
|
* @param {Map<number,Array<GC|Item>>} clientRefs
|
||||||
* @param {Doc} doc
|
* @param {Doc} doc
|
||||||
* @return {Map<number, { i: number, refs: Array<Item | GC> }>}
|
* @return {Map<number,Array<GC|Item>>}
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readClientsStructRefs = (decoder, doc) => {
|
export const readClientsStructRefs = (decoder, clientRefs, doc) => {
|
||||||
/**
|
const numOfStateUpdates = decoding.readVarUint(decoder)
|
||||||
* @type {Map<number, { i: number, refs: Array<Item | GC> }>}
|
|
||||||
*/
|
|
||||||
const clientRefs = map.create()
|
|
||||||
const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder)
|
|
||||||
for (let i = 0; i < numOfStateUpdates; i++) {
|
for (let i = 0; i < numOfStateUpdates; i++) {
|
||||||
const numberOfStructs = decoding.readVarUint(decoder.restDecoder)
|
const numberOfStructs = decoding.readVarUint(decoder)
|
||||||
/**
|
/**
|
||||||
* @type {Array<GC|Item>}
|
* @type {Array<GC|Item>}
|
||||||
*/
|
*/
|
||||||
const refs = new Array(numberOfStructs)
|
const refs = []
|
||||||
const client = decoder.readClient()
|
let { client, clock } = readID(decoder)
|
||||||
let clock = decoding.readVarUint(decoder.restDecoder)
|
let info, struct
|
||||||
// const start = performance.now()
|
clientRefs.set(client, refs)
|
||||||
clientRefs.set(client, { i: 0, refs })
|
|
||||||
for (let i = 0; i < numberOfStructs; i++) {
|
for (let i = 0; i < numberOfStructs; i++) {
|
||||||
const info = decoder.readInfo()
|
info = decoding.readUint8(decoder)
|
||||||
switch (binary.BITS5 & info) {
|
struct = (binary.BITS5 & info) === 0 ? new GC(createID(client, clock), decoding.readVarUint(decoder)) : readItem(decoder, createID(client, clock), info, doc)
|
||||||
case 0: { // GC
|
refs.push(struct)
|
||||||
const len = decoder.readLen()
|
clock += struct.length
|
||||||
refs[i] = new GC(createID(client, clock), len)
|
|
||||||
clock += len
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case 10: { // Skip Struct (nothing to apply)
|
|
||||||
// @todo we could reduce the amount of checks by adding Skip struct to clientRefs so we know that something is missing.
|
|
||||||
const len = decoding.readVarUint(decoder.restDecoder)
|
|
||||||
refs[i] = new Skip(createID(client, clock), len)
|
|
||||||
clock += len
|
|
||||||
break
|
|
||||||
}
|
|
||||||
default: { // Item with content
|
|
||||||
/**
|
|
||||||
* The optimized implementation doesn't use any variables because inlining variables is faster.
|
|
||||||
* Below a non-optimized version is shown that implements the basic algorithm with
|
|
||||||
* a few comments
|
|
||||||
*/
|
|
||||||
const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
|
|
||||||
// If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
|
|
||||||
// and we read the next string as parentYKey.
|
|
||||||
// It indicates how we store/retrieve parent from `y.share`
|
|
||||||
// @type {string|null}
|
|
||||||
const struct = new Item(
|
|
||||||
createID(client, clock),
|
|
||||||
null, // left
|
|
||||||
(info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin
|
|
||||||
null, // right
|
|
||||||
(info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin
|
|
||||||
cantCopyParentInfo ? (decoder.readParentInfo() ? doc.get(decoder.readString()) : decoder.readLeftID()) : null, // parent
|
|
||||||
cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
|
|
||||||
readItemContent(decoder, info) // item content
|
|
||||||
)
|
|
||||||
/* A non-optimized implementation of the above algorithm:
|
|
||||||
|
|
||||||
// The item that was originally to the left of this item.
|
|
||||||
const origin = (info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null
|
|
||||||
// The item that was originally to the right of this item.
|
|
||||||
const rightOrigin = (info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null
|
|
||||||
const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
|
|
||||||
const hasParentYKey = cantCopyParentInfo ? decoder.readParentInfo() : false
|
|
||||||
// If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
|
|
||||||
// and we read the next string as parentYKey.
|
|
||||||
// It indicates how we store/retrieve parent from `y.share`
|
|
||||||
// @type {string|null}
|
|
||||||
const parentYKey = cantCopyParentInfo && hasParentYKey ? decoder.readString() : null
|
|
||||||
|
|
||||||
const struct = new Item(
|
|
||||||
createID(client, clock),
|
|
||||||
null, // left
|
|
||||||
origin, // origin
|
|
||||||
null, // right
|
|
||||||
rightOrigin, // right origin
|
|
||||||
cantCopyParentInfo && !hasParentYKey ? decoder.readLeftID() : (parentYKey !== null ? doc.get(parentYKey) : null), // parent
|
|
||||||
cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
|
|
||||||
readItemContent(decoder, info) // item content
|
|
||||||
)
|
|
||||||
*/
|
|
||||||
refs[i] = struct
|
|
||||||
clock += struct.length
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
// console.log('time to read: ', performance.now() - start) // @todo remove
|
|
||||||
}
|
}
|
||||||
return clientRefs
|
return clientRefs
|
||||||
}
|
}
|
||||||
@@ -211,155 +131,98 @@ export const readClientsStructRefs = (decoder, doc) => {
|
|||||||
* then we start emptying the stack.
|
* then we start emptying the stack.
|
||||||
*
|
*
|
||||||
* It is not possible to have circles: i.e. struct1 (from client1) depends on struct2 (from client2)
|
* It is not possible to have circles: i.e. struct1 (from client1) depends on struct2 (from client2)
|
||||||
* depends on struct3 (from client1). Therefore the max stack size is equal to `structReaders.length`.
|
* depends on struct3 (from client1). Therefore the max stack size is eqaul to `structReaders.length`.
|
||||||
*
|
*
|
||||||
* This method is implemented in a way so that we can resume computation if this update
|
* This method is implemented in a way so that we can resume computation if this update
|
||||||
* causally depends on another update.
|
* causally depends on another update.
|
||||||
*
|
*
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
* @param {StructStore} store
|
* @param {StructStore} store
|
||||||
* @param {Map<number, { i: number, refs: (GC | Item)[] }>} clientsStructRefs
|
|
||||||
* @return { null | { update: Uint8Array, missing: Map<number,number> } }
|
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
const integrateStructs = (transaction, store, clientsStructRefs) => {
|
const resumeStructIntegration = (transaction, store) => {
|
||||||
/**
|
const stack = store.pendingStack
|
||||||
* @type {Array<Item | GC>}
|
const clientsStructRefs = store.pendingClientsStructRefs
|
||||||
*/
|
|
||||||
const stack = []
|
|
||||||
// sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user.
|
// sort them so that we take the higher id first, in case of conflicts the lower id will probably not conflict with the id from the higher user.
|
||||||
let clientsStructRefsIds = array.from(clientsStructRefs.keys()).sort((a, b) => a - b)
|
const clientsStructRefsIds = Array.from(clientsStructRefs.keys()).sort((a, b) => a - b)
|
||||||
if (clientsStructRefsIds.length === 0) {
|
let curStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
|
||||||
return null
|
|
||||||
}
|
|
||||||
const getNextStructTarget = () => {
|
|
||||||
if (clientsStructRefsIds.length === 0) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
let nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
|
|
||||||
while (nextStructsTarget.refs.length === nextStructsTarget.i) {
|
|
||||||
clientsStructRefsIds.pop()
|
|
||||||
if (clientsStructRefsIds.length > 0) {
|
|
||||||
nextStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
|
|
||||||
} else {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nextStructsTarget
|
|
||||||
}
|
|
||||||
let curStructsTarget = getNextStructTarget()
|
|
||||||
if (curStructsTarget === null) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @type {StructStore}
|
|
||||||
*/
|
|
||||||
const restStructs = new StructStore()
|
|
||||||
const missingSV = new Map()
|
|
||||||
/**
|
|
||||||
* @param {number} client
|
|
||||||
* @param {number} clock
|
|
||||||
*/
|
|
||||||
const updateMissingSv = (client, clock) => {
|
|
||||||
const mclock = missingSV.get(client)
|
|
||||||
if (mclock == null || mclock > clock) {
|
|
||||||
missingSV.set(client, clock)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* @type {GC|Item}
|
|
||||||
*/
|
|
||||||
let stackHead = /** @type {any} */ (curStructsTarget).refs[/** @type {any} */ (curStructsTarget).i++]
|
|
||||||
// caching the state because it is used very often
|
|
||||||
const state = new Map()
|
|
||||||
|
|
||||||
const addStackToRestSS = () => {
|
|
||||||
for (const item of stack) {
|
|
||||||
const client = item.id.client
|
|
||||||
const inapplicableItems = clientsStructRefs.get(client)
|
|
||||||
if (inapplicableItems) {
|
|
||||||
// decrement because we weren't able to apply previous operation
|
|
||||||
inapplicableItems.i--
|
|
||||||
restStructs.clients.set(client, inapplicableItems.refs.slice(inapplicableItems.i))
|
|
||||||
clientsStructRefs.delete(client)
|
|
||||||
inapplicableItems.i = 0
|
|
||||||
inapplicableItems.refs = []
|
|
||||||
} else {
|
|
||||||
// item was the last item on clientsStructRefs and the field was already cleared. Add item to restStructs and continue
|
|
||||||
restStructs.clients.set(client, [item])
|
|
||||||
}
|
|
||||||
// remove client from clientsStructRefsIds to prevent users from applying the same update again
|
|
||||||
clientsStructRefsIds = clientsStructRefsIds.filter(c => c !== client)
|
|
||||||
}
|
|
||||||
stack.length = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// iterate over all struct readers until we are done
|
// iterate over all struct readers until we are done
|
||||||
while (true) {
|
while (stack.length !== 0 || clientsStructRefsIds.length > 0) {
|
||||||
if (stackHead.constructor !== Skip) {
|
if (stack.length === 0) {
|
||||||
const localClock = map.setIfUndefined(state, stackHead.id.client, () => getState(store, stackHead.id.client))
|
// take any first struct from clientsStructRefs and put it on the stack
|
||||||
const offset = localClock - stackHead.id.clock
|
if (curStructsTarget.i < curStructsTarget.refs.length) {
|
||||||
if (offset < 0) {
|
stack.push(curStructsTarget.refs[curStructsTarget.i++])
|
||||||
// update from the same client is missing
|
|
||||||
stack.push(stackHead)
|
|
||||||
updateMissingSv(stackHead.id.client, stackHead.id.clock - 1)
|
|
||||||
// hid a dead wall, add all items from stack to restSS
|
|
||||||
addStackToRestSS()
|
|
||||||
} else {
|
} else {
|
||||||
const missing = stackHead.getMissing(transaction, store)
|
clientsStructRefsIds.pop()
|
||||||
if (missing !== null) {
|
if (clientsStructRefsIds.length > 0) {
|
||||||
stack.push(stackHead)
|
curStructsTarget = /** @type {{i:number,refs:Array<GC|Item>}} */ (clientsStructRefs.get(clientsStructRefsIds[clientsStructRefsIds.length - 1]))
|
||||||
// get the struct reader that has the missing struct
|
}
|
||||||
/**
|
continue
|
||||||
* @type {{ refs: Array<GC|Item>, i: number }}
|
}
|
||||||
*/
|
}
|
||||||
const structRefs = clientsStructRefs.get(/** @type {number} */ (missing)) || { refs: [], i: 0 }
|
const ref = stack[stack.length - 1]
|
||||||
if (structRefs.refs.length === structRefs.i) {
|
const refID = ref.id
|
||||||
// This update message causally depends on another update message that doesn't exist yet
|
const client = refID.client
|
||||||
updateMissingSv(/** @type {number} */ (missing), getState(store, missing))
|
const refClock = refID.clock
|
||||||
addStackToRestSS()
|
const localClock = getState(store, client)
|
||||||
} else {
|
const offset = refClock < localClock ? localClock - refClock : 0
|
||||||
stackHead = structRefs.refs[structRefs.i++]
|
if (refClock + offset !== localClock) {
|
||||||
continue
|
// A previous message from this client is missing
|
||||||
}
|
// check if there is a pending structRef with a smaller clock and switch them
|
||||||
} else if (offset === 0 || offset < stackHead.length) {
|
const structRefs = clientsStructRefs.get(client) || { refs: [], i: 0 }
|
||||||
// all fine, apply the stackhead
|
if (structRefs.refs.length !== structRefs.i) {
|
||||||
stackHead.integrate(transaction, offset)
|
const r = structRefs.refs[structRefs.i]
|
||||||
state.set(stackHead.id.client, stackHead.id.clock + stackHead.length)
|
if (r.id.clock < refClock) {
|
||||||
|
// put ref with smaller clock on stack instead and continue
|
||||||
|
structRefs.refs[structRefs.i] = ref
|
||||||
|
stack[stack.length - 1] = r
|
||||||
|
// sort the set because this approach might bring the list out of order
|
||||||
|
structRefs.refs = structRefs.refs.slice(structRefs.i).sort((r1, r2) => r1.id.clock - r2.id.clock)
|
||||||
|
structRefs.i = 0
|
||||||
|
continue
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// wait until missing struct is available
|
||||||
|
return
|
||||||
}
|
}
|
||||||
// iterate to next stackHead
|
const missing = ref.getMissing(transaction, store)
|
||||||
if (stack.length > 0) {
|
if (missing !== null) {
|
||||||
stackHead = /** @type {GC|Item} */ (stack.pop())
|
// get the struct reader that has the missing struct
|
||||||
} else if (curStructsTarget !== null && curStructsTarget.i < curStructsTarget.refs.length) {
|
const structRefs = clientsStructRefs.get(missing) || { refs: [], i: 0 }
|
||||||
stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++])
|
if (structRefs.refs.length === structRefs.i) {
|
||||||
} else {
|
// This update message causally depends on another update message.
|
||||||
curStructsTarget = getNextStructTarget()
|
return
|
||||||
if (curStructsTarget === null) {
|
|
||||||
// we are done!
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
stackHead = /** @type {GC|Item} */ (curStructsTarget.refs[curStructsTarget.i++])
|
|
||||||
}
|
}
|
||||||
|
stack.push(structRefs.refs[structRefs.i++])
|
||||||
|
} else {
|
||||||
|
if (offset < ref.length) {
|
||||||
|
ref.integrate(transaction, offset)
|
||||||
|
}
|
||||||
|
stack.pop()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (restStructs.clients.size > 0) {
|
store.pendingClientsStructRefs.clear()
|
||||||
const encoder = new UpdateEncoderV2()
|
|
||||||
writeClientsStructs(encoder, restStructs, new Map())
|
|
||||||
// write empty deleteset
|
|
||||||
// writeDeleteSet(encoder, new DeleteSet())
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, 0) // => no need for an extra function call, just write 0 deletes
|
|
||||||
return { missing: missingSV, update: encoder.toUint8Array() }
|
|
||||||
}
|
|
||||||
return null
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {Transaction} transaction
|
||||||
|
* @param {StructStore} store
|
||||||
|
*
|
||||||
|
* @private
|
||||||
|
* @function
|
||||||
|
*/
|
||||||
|
export const tryResumePendingDeleteReaders = (transaction, store) => {
|
||||||
|
const pendingReaders = store.pendingDeleteReaders
|
||||||
|
store.pendingDeleteReaders = []
|
||||||
|
for (let i = 0; i < pendingReaders.length; i++) {
|
||||||
|
readAndApplyDeleteSet(pendingReaders[i], transaction, store)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {Transaction} transaction
|
* @param {Transaction} transaction
|
||||||
*
|
*
|
||||||
* @private
|
* @private
|
||||||
@@ -368,116 +231,83 @@ const integrateStructs = (transaction, store, clientsStructRefs) => {
|
|||||||
export const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState)
|
export const writeStructsFromTransaction = (encoder, transaction) => writeClientsStructs(encoder, transaction.doc.store, transaction.beforeState)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Read and apply a document update.
|
* @param {StructStore} store
|
||||||
*
|
* @param {Map<number, Array<GC|Item>>} clientsStructsRefs
|
||||||
* This function has the same effect as `applyUpdate` but accepts a decoder.
|
|
||||||
*
|
|
||||||
* @param {decoding.Decoder} decoder
|
|
||||||
* @param {Doc} ydoc
|
|
||||||
* @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
|
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} [structDecoder]
|
|
||||||
*
|
*
|
||||||
|
* @private
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readUpdateV2 = (decoder, ydoc, transactionOrigin, structDecoder = new UpdateDecoderV2(decoder)) =>
|
const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => {
|
||||||
transact(ydoc, transaction => {
|
const pendingClientsStructRefs = store.pendingClientsStructRefs
|
||||||
// force that transaction.local is set to non-local
|
for (const [client, structRefs] of clientsStructsRefs) {
|
||||||
transaction.local = false
|
const pendingStructRefs = pendingClientsStructRefs.get(client)
|
||||||
let retry = false
|
if (pendingStructRefs === undefined) {
|
||||||
const doc = transaction.doc
|
pendingClientsStructRefs.set(client, { refs: structRefs, i: 0 })
|
||||||
const store = doc.store
|
|
||||||
// let start = performance.now()
|
|
||||||
const ss = readClientsStructRefs(structDecoder, doc)
|
|
||||||
// console.log('time to read structs: ', performance.now() - start) // @todo remove
|
|
||||||
// start = performance.now()
|
|
||||||
// console.log('time to merge: ', performance.now() - start) // @todo remove
|
|
||||||
// start = performance.now()
|
|
||||||
const restStructs = integrateStructs(transaction, store, ss)
|
|
||||||
const pending = store.pendingStructs
|
|
||||||
if (pending) {
|
|
||||||
// check if we can apply something
|
|
||||||
for (const [client, clock] of pending.missing) {
|
|
||||||
if (clock < getState(store, client)) {
|
|
||||||
retry = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (restStructs) {
|
|
||||||
// merge restStructs into store.pending
|
|
||||||
for (const [client, clock] of restStructs.missing) {
|
|
||||||
const mclock = pending.missing.get(client)
|
|
||||||
if (mclock == null || mclock > clock) {
|
|
||||||
pending.missing.set(client, clock)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pending.update = mergeUpdatesV2([pending.update, restStructs.update])
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
store.pendingStructs = restStructs
|
// merge into existing structRefs
|
||||||
}
|
const merged = pendingStructRefs.i > 0 ? pendingStructRefs.refs.slice(pendingStructRefs.i) : pendingStructRefs.refs
|
||||||
// console.log('time to integrate: ', performance.now() - start) // @todo remove
|
for (let i = 0; i < structRefs.length; i++) {
|
||||||
// start = performance.now()
|
merged.push(structRefs[i])
|
||||||
const dsRest = readAndApplyDeleteSet(structDecoder, transaction, store)
|
|
||||||
if (store.pendingDs) {
|
|
||||||
// @todo we could make a lower-bound state-vector check as we do above
|
|
||||||
const pendingDSUpdate = new UpdateDecoderV2(decoding.createDecoder(store.pendingDs))
|
|
||||||
decoding.readVarUint(pendingDSUpdate.restDecoder) // read 0 structs, because we only encode deletes in pendingdsupdate
|
|
||||||
const dsRest2 = readAndApplyDeleteSet(pendingDSUpdate, transaction, store)
|
|
||||||
if (dsRest && dsRest2) {
|
|
||||||
// case 1: ds1 != null && ds2 != null
|
|
||||||
store.pendingDs = mergeUpdatesV2([dsRest, dsRest2])
|
|
||||||
} else {
|
|
||||||
// case 2: ds1 != null
|
|
||||||
// case 3: ds2 != null
|
|
||||||
// case 4: ds1 == null && ds2 == null
|
|
||||||
store.pendingDs = dsRest || dsRest2
|
|
||||||
}
|
}
|
||||||
} else {
|
pendingStructRefs.i = 0
|
||||||
// Either dsRest == null && pendingDs == null OR dsRest != null
|
pendingStructRefs.refs = merged.sort((r1, r2) => r1.id.clock - r2.id.clock)
|
||||||
store.pendingDs = dsRest
|
|
||||||
}
|
}
|
||||||
// console.log('time to cleanup: ', performance.now() - start) // @todo remove
|
}
|
||||||
// start = performance.now()
|
|
||||||
|
|
||||||
// console.log('time to resume delete readers: ', performance.now() - start) // @todo remove
|
|
||||||
// start = performance.now()
|
|
||||||
if (retry) {
|
|
||||||
const update = /** @type {{update: Uint8Array}} */ (store.pendingStructs).update
|
|
||||||
store.pendingStructs = null
|
|
||||||
applyUpdateV2(transaction.doc, update)
|
|
||||||
}
|
|
||||||
}, transactionOrigin, false)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Read and apply a document update.
|
|
||||||
*
|
|
||||||
* This function has the same effect as `applyUpdate` but accepts a decoder.
|
|
||||||
*
|
|
||||||
* @param {decoding.Decoder} decoder
|
|
||||||
* @param {Doc} ydoc
|
|
||||||
* @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
|
|
||||||
*
|
|
||||||
* @function
|
|
||||||
*/
|
|
||||||
export const readUpdate = (decoder, ydoc, transactionOrigin) => readUpdateV2(decoder, ydoc, transactionOrigin, new UpdateDecoderV1(decoder))
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
|
|
||||||
*
|
|
||||||
* This function has the same effect as `readUpdate` but accepts an Uint8Array instead of a Decoder.
|
|
||||||
*
|
|
||||||
* @param {Doc} ydoc
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
|
|
||||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
|
|
||||||
*
|
|
||||||
* @function
|
|
||||||
*/
|
|
||||||
export const applyUpdateV2 = (ydoc, update, transactionOrigin, YDecoder = UpdateDecoderV2) => {
|
|
||||||
const decoder = decoding.createDecoder(update)
|
|
||||||
readUpdateV2(decoder, ydoc, transactionOrigin, new YDecoder(decoder))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {Map<number,{refs:Array<GC|Item>,i:number}>} pendingClientsStructRefs
|
||||||
|
*/
|
||||||
|
const cleanupPendingStructs = pendingClientsStructRefs => {
|
||||||
|
// cleanup pendingClientsStructs if not fully finished
|
||||||
|
for (const [client, refs] of pendingClientsStructRefs) {
|
||||||
|
if (refs.i === refs.refs.length) {
|
||||||
|
pendingClientsStructRefs.delete(client)
|
||||||
|
} else {
|
||||||
|
refs.refs.splice(0, refs.i)
|
||||||
|
refs.i = 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read the next Item in a Decoder and fill this Item with the read data.
|
||||||
|
*
|
||||||
|
* This is called when data is received from a remote peer.
|
||||||
|
*
|
||||||
|
* @param {decoding.Decoder} decoder The decoder object to read data from.
|
||||||
|
* @param {Transaction} transaction
|
||||||
|
* @param {StructStore} store
|
||||||
|
*
|
||||||
|
* @private
|
||||||
|
* @function
|
||||||
|
*/
|
||||||
|
export const readStructs = (decoder, transaction, store) => {
|
||||||
|
const clientsStructRefs = new Map()
|
||||||
|
readClientsStructRefs(decoder, clientsStructRefs, transaction.doc)
|
||||||
|
mergeReadStructsIntoPendingReads(store, clientsStructRefs)
|
||||||
|
resumeStructIntegration(transaction, store)
|
||||||
|
cleanupPendingStructs(store.pendingClientsStructRefs)
|
||||||
|
tryResumePendingDeleteReaders(transaction, store)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read and apply a document update.
|
||||||
|
*
|
||||||
|
* This function has the same effect as `applyUpdate` but accepts an decoder.
|
||||||
|
*
|
||||||
|
* @param {decoding.Decoder} decoder
|
||||||
|
* @param {Doc} ydoc
|
||||||
|
* @param {any} [transactionOrigin] This will be stored on `transaction.origin` and `.on('update', (update, origin))`
|
||||||
|
*
|
||||||
|
* @function
|
||||||
|
*/
|
||||||
|
export const readUpdate = (decoder, ydoc, transactionOrigin) =>
|
||||||
|
transact(ydoc, transaction => {
|
||||||
|
readStructs(decoder, transaction, ydoc.store)
|
||||||
|
readAndApplyDeleteSet(decoder, transaction, ydoc.store)
|
||||||
|
}, transactionOrigin, false)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
|
* Apply a document update created by, for example, `y.on('update', update => ..)` or `update = encodeStateAsUpdate()`.
|
||||||
*
|
*
|
||||||
@@ -489,13 +319,14 @@ export const applyUpdateV2 = (ydoc, update, transactionOrigin, YDecoder = Update
|
|||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const applyUpdate = (ydoc, update, transactionOrigin) => applyUpdateV2(ydoc, update, transactionOrigin, UpdateDecoderV1)
|
export const applyUpdate = (ydoc, update, transactionOrigin) =>
|
||||||
|
readUpdate(decoding.createDecoder(update), ydoc, transactionOrigin)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Write all the document as a single update message. If you specify the state of the remote client (`targetStateVector`) it will
|
* Write all the document as a single update message. If you specify the state of the remote client (`targetStateVector`) it will
|
||||||
* only write the operations that are missing.
|
* only write the operations that are missing.
|
||||||
*
|
*
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {Doc} doc
|
* @param {Doc} doc
|
||||||
* @param {Map<number,number>} [targetStateVector] The state of the target that receives the update. Leave empty to write all known structs
|
* @param {Map<number,number>} [targetStateVector] The state of the target that receives the update. Leave empty to write all known structs
|
||||||
*
|
*
|
||||||
@@ -514,60 +345,31 @@ export const writeStateAsUpdate = (encoder, doc, targetStateVector = new Map())
|
|||||||
*
|
*
|
||||||
* @param {Doc} doc
|
* @param {Doc} doc
|
||||||
* @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
|
* @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} [encoder]
|
|
||||||
* @return {Uint8Array}
|
* @return {Uint8Array}
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const encodeStateAsUpdateV2 = (doc, encodedTargetStateVector = new Uint8Array([0]), encoder = new UpdateEncoderV2()) => {
|
export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => {
|
||||||
const targetStateVector = decodeStateVector(encodedTargetStateVector)
|
const encoder = encoding.createEncoder()
|
||||||
|
const targetStateVector = encodedTargetStateVector == null ? new Map() : decodeStateVector(encodedTargetStateVector)
|
||||||
writeStateAsUpdate(encoder, doc, targetStateVector)
|
writeStateAsUpdate(encoder, doc, targetStateVector)
|
||||||
const updates = [encoder.toUint8Array()]
|
return encoding.toUint8Array(encoder)
|
||||||
// also add the pending updates (if there are any)
|
|
||||||
if (doc.store.pendingDs) {
|
|
||||||
updates.push(doc.store.pendingDs)
|
|
||||||
}
|
|
||||||
if (doc.store.pendingStructs) {
|
|
||||||
updates.push(diffUpdateV2(doc.store.pendingStructs.update, encodedTargetStateVector))
|
|
||||||
}
|
|
||||||
if (updates.length > 1) {
|
|
||||||
if (encoder.constructor === UpdateEncoderV1) {
|
|
||||||
return mergeUpdates(updates.map((update, i) => i === 0 ? update : convertUpdateFormatV2ToV1(update)))
|
|
||||||
} else if (encoder.constructor === UpdateEncoderV2) {
|
|
||||||
return mergeUpdatesV2(updates)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return updates[0]
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Write all the document as a single update message that can be applied on the remote document. If you specify the state of the remote client (`targetState`) it will
|
|
||||||
* only write the operations that are missing.
|
|
||||||
*
|
|
||||||
* Use `writeStateAsUpdate` instead if you are working with lib0/encoding.js#Encoder
|
|
||||||
*
|
|
||||||
* @param {Doc} doc
|
|
||||||
* @param {Uint8Array} [encodedTargetStateVector] The state of the target that receives the update. Leave empty to write all known structs
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*
|
|
||||||
* @function
|
|
||||||
*/
|
|
||||||
export const encodeStateAsUpdate = (doc, encodedTargetStateVector) => encodeStateAsUpdateV2(doc, encodedTargetStateVector, new UpdateEncoderV1())
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Read state vector from Decoder and return as Map
|
* Read state vector from Decoder and return as Map
|
||||||
*
|
*
|
||||||
* @param {DSDecoderV1 | DSDecoderV2} decoder
|
* @param {decoding.Decoder} decoder
|
||||||
* @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
|
* @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const readStateVector = decoder => {
|
export const readStateVector = decoder => {
|
||||||
const ss = new Map()
|
const ss = new Map()
|
||||||
const ssLength = decoding.readVarUint(decoder.restDecoder)
|
const ssLength = decoding.readVarUint(decoder)
|
||||||
for (let i = 0; i < ssLength; i++) {
|
for (let i = 0; i < ssLength; i++) {
|
||||||
const client = decoding.readVarUint(decoder.restDecoder)
|
const client = decoding.readVarUint(decoder)
|
||||||
const clock = decoding.readVarUint(decoder.restDecoder)
|
const clock = decoding.readVarUint(decoder)
|
||||||
ss.set(client, clock)
|
ss.set(client, clock)
|
||||||
}
|
}
|
||||||
return ss
|
return ss
|
||||||
@@ -581,34 +383,28 @@ export const readStateVector = decoder => {
|
|||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
// export const decodeStateVectorV2 = decodedState => readStateVector(new DSDecoderV2(decoding.createDecoder(decodedState)))
|
export const decodeStateVector = decodedState => readStateVector(decoding.createDecoder(decodedState))
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Read decodedState and return State as Map.
|
* Write State Vector to `lib0/encoding.js#Encoder`.
|
||||||
*
|
*
|
||||||
* @param {Uint8Array} decodedState
|
* @param {encoding.Encoder} encoder
|
||||||
* @return {Map<number,number>} Maps `client` to the number next expected `clock` from that client.
|
|
||||||
*
|
|
||||||
* @function
|
|
||||||
*/
|
|
||||||
export const decodeStateVector = decodedState => readStateVector(new DSDecoderV1(decoding.createDecoder(decodedState)))
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {DSEncoderV1 | DSEncoderV2} encoder
|
|
||||||
* @param {Map<number,number>} sv
|
* @param {Map<number,number>} sv
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const writeStateVector = (encoder, sv) => {
|
export const writeStateVector = (encoder, sv) => {
|
||||||
encoding.writeVarUint(encoder.restEncoder, sv.size)
|
encoding.writeVarUint(encoder, sv.size)
|
||||||
array.from(sv.entries()).sort((a, b) => b[0] - a[0]).forEach(([client, clock]) => {
|
sv.forEach((clock, client) => {
|
||||||
encoding.writeVarUint(encoder.restEncoder, client) // @todo use a special client decoder that is based on mapping
|
encoding.writeVarUint(encoder, client)
|
||||||
encoding.writeVarUint(encoder.restEncoder, clock)
|
encoding.writeVarUint(encoder, clock)
|
||||||
})
|
})
|
||||||
return encoder
|
return encoder
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {DSEncoderV1 | DSEncoderV2} encoder
|
* Write State Vector to `lib0/encoding.js#Encoder`.
|
||||||
|
*
|
||||||
|
* @param {encoding.Encoder} encoder
|
||||||
* @param {Doc} doc
|
* @param {Doc} doc
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
@@ -618,27 +414,13 @@ export const writeDocumentStateVector = (encoder, doc) => writeStateVector(encod
|
|||||||
/**
|
/**
|
||||||
* Encode State as Uint8Array.
|
* Encode State as Uint8Array.
|
||||||
*
|
*
|
||||||
* @param {Doc|Map<number,number>} doc
|
* @param {Doc} doc
|
||||||
* @param {DSEncoderV1 | DSEncoderV2} [encoder]
|
|
||||||
* @return {Uint8Array}
|
* @return {Uint8Array}
|
||||||
*
|
*
|
||||||
* @function
|
* @function
|
||||||
*/
|
*/
|
||||||
export const encodeStateVectorV2 = (doc, encoder = new DSEncoderV2()) => {
|
export const encodeStateVector = doc => {
|
||||||
if (doc instanceof Map) {
|
const encoder = encoding.createEncoder()
|
||||||
writeStateVector(encoder, doc)
|
writeDocumentStateVector(encoder, doc)
|
||||||
} else {
|
return encoding.toUint8Array(encoder)
|
||||||
writeDocumentStateVector(encoder, doc)
|
|
||||||
}
|
|
||||||
return encoder.toUint8Array()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Encode State as Uint8Array.
|
|
||||||
*
|
|
||||||
* @param {Doc|Map<number,number>} doc
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*
|
|
||||||
* @function
|
|
||||||
*/
|
|
||||||
export const encodeStateVector = doc => encodeStateVectorV2(doc, new DSEncoderV1())
|
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
|
||||||
import { AbstractType, Item } from '../internals.js' // eslint-disable-line
|
import { AbstractType, Item } from '../internals.js' // eslint-disable-line
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -1,21 +0,0 @@
|
|||||||
import {
|
|
||||||
AbstractType // eslint-disable-line
|
|
||||||
} from '../internals.js'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Convenient helper to log type information.
|
|
||||||
*
|
|
||||||
* Do not use in productive systems as the output can be immense!
|
|
||||||
*
|
|
||||||
* @param {AbstractType<any>} type
|
|
||||||
*/
|
|
||||||
export const logType = type => {
|
|
||||||
const res = []
|
|
||||||
let n = type._start
|
|
||||||
while (n) {
|
|
||||||
res.push(n)
|
|
||||||
n = n.right
|
|
||||||
}
|
|
||||||
console.log('Children: ', res)
|
|
||||||
console.log('Children content: ', res.filter(m => !m.deleted).map(m => m.content))
|
|
||||||
}
|
|
||||||
@@ -1,722 +0,0 @@
|
|||||||
import * as binary from 'lib0/binary'
|
|
||||||
import * as decoding from 'lib0/decoding'
|
|
||||||
import * as encoding from 'lib0/encoding'
|
|
||||||
import * as error from 'lib0/error'
|
|
||||||
import * as f from 'lib0/function'
|
|
||||||
import * as logging from 'lib0/logging'
|
|
||||||
import * as map from 'lib0/map'
|
|
||||||
import * as math from 'lib0/math'
|
|
||||||
import * as string from 'lib0/string'
|
|
||||||
|
|
||||||
import {
|
|
||||||
ContentAny,
|
|
||||||
ContentBinary,
|
|
||||||
ContentDeleted,
|
|
||||||
ContentDoc,
|
|
||||||
ContentEmbed,
|
|
||||||
ContentFormat,
|
|
||||||
ContentJSON,
|
|
||||||
ContentString,
|
|
||||||
ContentType,
|
|
||||||
createID,
|
|
||||||
decodeStateVector,
|
|
||||||
DSEncoderV1,
|
|
||||||
DSEncoderV2,
|
|
||||||
GC,
|
|
||||||
Item,
|
|
||||||
mergeDeleteSets,
|
|
||||||
readDeleteSet,
|
|
||||||
readItemContent,
|
|
||||||
Skip,
|
|
||||||
UpdateDecoderV1,
|
|
||||||
UpdateDecoderV2,
|
|
||||||
UpdateEncoderV1,
|
|
||||||
UpdateEncoderV2,
|
|
||||||
writeDeleteSet,
|
|
||||||
YXmlElement,
|
|
||||||
YXmlHook
|
|
||||||
} from '../internals.js'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
|
||||||
*/
|
|
||||||
function * lazyStructReaderGenerator (decoder) {
|
|
||||||
const numOfStateUpdates = decoding.readVarUint(decoder.restDecoder)
|
|
||||||
for (let i = 0; i < numOfStateUpdates; i++) {
|
|
||||||
const numberOfStructs = decoding.readVarUint(decoder.restDecoder)
|
|
||||||
const client = decoder.readClient()
|
|
||||||
let clock = decoding.readVarUint(decoder.restDecoder)
|
|
||||||
for (let i = 0; i < numberOfStructs; i++) {
|
|
||||||
const info = decoder.readInfo()
|
|
||||||
// @todo use switch instead of ifs
|
|
||||||
if (info === 10) {
|
|
||||||
const len = decoding.readVarUint(decoder.restDecoder)
|
|
||||||
yield new Skip(createID(client, clock), len)
|
|
||||||
clock += len
|
|
||||||
} else if ((binary.BITS5 & info) !== 0) {
|
|
||||||
const cantCopyParentInfo = (info & (binary.BIT7 | binary.BIT8)) === 0
|
|
||||||
// If parent = null and neither left nor right are defined, then we know that `parent` is child of `y`
|
|
||||||
// and we read the next string as parentYKey.
|
|
||||||
// It indicates how we store/retrieve parent from `y.share`
|
|
||||||
// @type {string|null}
|
|
||||||
const struct = new Item(
|
|
||||||
createID(client, clock),
|
|
||||||
null, // left
|
|
||||||
(info & binary.BIT8) === binary.BIT8 ? decoder.readLeftID() : null, // origin
|
|
||||||
null, // right
|
|
||||||
(info & binary.BIT7) === binary.BIT7 ? decoder.readRightID() : null, // right origin
|
|
||||||
// @ts-ignore Force writing a string here.
|
|
||||||
cantCopyParentInfo ? (decoder.readParentInfo() ? decoder.readString() : decoder.readLeftID()) : null, // parent
|
|
||||||
cantCopyParentInfo && (info & binary.BIT6) === binary.BIT6 ? decoder.readString() : null, // parentSub
|
|
||||||
readItemContent(decoder, info) // item content
|
|
||||||
)
|
|
||||||
yield struct
|
|
||||||
clock += struct.length
|
|
||||||
} else {
|
|
||||||
const len = decoder.readLen()
|
|
||||||
yield new GC(createID(client, clock), len)
|
|
||||||
clock += len
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class LazyStructReader {
|
|
||||||
/**
|
|
||||||
* @param {UpdateDecoderV1 | UpdateDecoderV2} decoder
|
|
||||||
* @param {boolean} filterSkips
|
|
||||||
*/
|
|
||||||
constructor (decoder, filterSkips) {
|
|
||||||
this.gen = lazyStructReaderGenerator(decoder)
|
|
||||||
/**
|
|
||||||
* @type {null | Item | Skip | GC}
|
|
||||||
*/
|
|
||||||
this.curr = null
|
|
||||||
this.done = false
|
|
||||||
this.filterSkips = filterSkips
|
|
||||||
this.next()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return {Item | GC | Skip |null}
|
|
||||||
*/
|
|
||||||
next () {
|
|
||||||
// ignore "Skip" structs
|
|
||||||
do {
|
|
||||||
this.curr = this.gen.next().value || null
|
|
||||||
} while (this.filterSkips && this.curr !== null && this.curr.constructor === Skip)
|
|
||||||
return this.curr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
export const logUpdate = update => logUpdateV2(update, UpdateDecoderV1)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder]
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
export const logUpdateV2 = (update, YDecoder = UpdateDecoderV2) => {
|
|
||||||
const structs = []
|
|
||||||
const updateDecoder = new YDecoder(decoding.createDecoder(update))
|
|
||||||
const lazyDecoder = new LazyStructReader(updateDecoder, false)
|
|
||||||
for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
|
|
||||||
structs.push(curr)
|
|
||||||
}
|
|
||||||
logging.print('Structs: ', structs)
|
|
||||||
const ds = readDeleteSet(updateDecoder)
|
|
||||||
logging.print('DeleteSet: ', ds)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
export const decodeUpdate = (update) => decodeUpdateV2(update, UpdateDecoderV1)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} [YDecoder]
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
export const decodeUpdateV2 = (update, YDecoder = UpdateDecoderV2) => {
|
|
||||||
const structs = []
|
|
||||||
const updateDecoder = new YDecoder(decoding.createDecoder(update))
|
|
||||||
const lazyDecoder = new LazyStructReader(updateDecoder, false)
|
|
||||||
for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
|
|
||||||
structs.push(curr)
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
structs,
|
|
||||||
ds: readDeleteSet(updateDecoder)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class LazyStructWriter {
|
|
||||||
/**
|
|
||||||
* @param {UpdateEncoderV1 | UpdateEncoderV2} encoder
|
|
||||||
*/
|
|
||||||
constructor (encoder) {
|
|
||||||
this.currClient = 0
|
|
||||||
this.startClock = 0
|
|
||||||
this.written = 0
|
|
||||||
this.encoder = encoder
|
|
||||||
/**
|
|
||||||
* We want to write operations lazily, but also we need to know beforehand how many operations we want to write for each client.
|
|
||||||
*
|
|
||||||
* This kind of meta-information (#clients, #structs-per-client-written) is written to the restEncoder.
|
|
||||||
*
|
|
||||||
* We fragment the restEncoder and store a slice of it per-client until we know how many clients there are.
|
|
||||||
* When we flush (toUint8Array) we write the restEncoder using the fragments and the meta-information.
|
|
||||||
*
|
|
||||||
* @type {Array<{ written: number, restEncoder: Uint8Array }>}
|
|
||||||
*/
|
|
||||||
this.clientStructs = []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Array<Uint8Array>} updates
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
export const mergeUpdates = updates => mergeUpdatesV2(updates, UpdateDecoderV1, UpdateEncoderV1)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {typeof DSEncoderV1 | typeof DSEncoderV2} YEncoder
|
|
||||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
export const encodeStateVectorFromUpdateV2 = (update, YEncoder = DSEncoderV2, YDecoder = UpdateDecoderV2) => {
|
|
||||||
const encoder = new YEncoder()
|
|
||||||
const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), false)
|
|
||||||
let curr = updateDecoder.curr
|
|
||||||
if (curr !== null) {
|
|
||||||
let size = 0
|
|
||||||
let currClient = curr.id.client
|
|
||||||
let stopCounting = curr.id.clock !== 0 // must start at 0
|
|
||||||
let currClock = stopCounting ? 0 : curr.id.clock + curr.length
|
|
||||||
for (; curr !== null; curr = updateDecoder.next()) {
|
|
||||||
if (currClient !== curr.id.client) {
|
|
||||||
if (currClock !== 0) {
|
|
||||||
size++
|
|
||||||
// We found a new client
|
|
||||||
// write what we have to the encoder
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, currClient)
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, currClock)
|
|
||||||
}
|
|
||||||
currClient = curr.id.client
|
|
||||||
currClock = 0
|
|
||||||
stopCounting = curr.id.clock !== 0
|
|
||||||
}
|
|
||||||
// we ignore skips
|
|
||||||
if (curr.constructor === Skip) {
|
|
||||||
stopCounting = true
|
|
||||||
}
|
|
||||||
if (!stopCounting) {
|
|
||||||
currClock = curr.id.clock + curr.length
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// write what we have
|
|
||||||
if (currClock !== 0) {
|
|
||||||
size++
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, currClient)
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, currClock)
|
|
||||||
}
|
|
||||||
// prepend the size of the state vector
|
|
||||||
const enc = encoding.createEncoder()
|
|
||||||
encoding.writeVarUint(enc, size)
|
|
||||||
encoding.writeBinaryEncoder(enc, encoder.restEncoder)
|
|
||||||
encoder.restEncoder = enc
|
|
||||||
return encoder.toUint8Array()
|
|
||||||
} else {
|
|
||||||
encoding.writeVarUint(encoder.restEncoder, 0)
|
|
||||||
return encoder.toUint8Array()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
export const encodeStateVectorFromUpdate = update => encodeStateVectorFromUpdateV2(update, DSEncoderV1, UpdateDecoderV1)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} YDecoder
|
|
||||||
* @return {{ from: Map<number,number>, to: Map<number,number> }}
|
|
||||||
*/
|
|
||||||
export const parseUpdateMetaV2 = (update, YDecoder = UpdateDecoderV2) => {
|
|
||||||
/**
|
|
||||||
* @type {Map<number, number>}
|
|
||||||
*/
|
|
||||||
const from = new Map()
|
|
||||||
/**
|
|
||||||
* @type {Map<number, number>}
|
|
||||||
*/
|
|
||||||
const to = new Map()
|
|
||||||
const updateDecoder = new LazyStructReader(new YDecoder(decoding.createDecoder(update)), false)
|
|
||||||
let curr = updateDecoder.curr
|
|
||||||
if (curr !== null) {
|
|
||||||
let currClient = curr.id.client
|
|
||||||
let currClock = curr.id.clock
|
|
||||||
// write the beginning to `from`
|
|
||||||
from.set(currClient, currClock)
|
|
||||||
for (; curr !== null; curr = updateDecoder.next()) {
|
|
||||||
if (currClient !== curr.id.client) {
|
|
||||||
// We found a new client
|
|
||||||
// write the end to `to`
|
|
||||||
to.set(currClient, currClock)
|
|
||||||
// write the beginning to `from`
|
|
||||||
from.set(curr.id.client, curr.id.clock)
|
|
||||||
// update currClient
|
|
||||||
currClient = curr.id.client
|
|
||||||
}
|
|
||||||
currClock = curr.id.clock + curr.length
|
|
||||||
}
|
|
||||||
// write the end to `to`
|
|
||||||
to.set(currClient, currClock)
|
|
||||||
}
|
|
||||||
return { from, to }
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @return {{ from: Map<number,number>, to: Map<number,number> }}
|
|
||||||
*/
|
|
||||||
export const parseUpdateMeta = update => parseUpdateMetaV2(update, UpdateDecoderV1)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This method is intended to slice any kind of struct and retrieve the right part.
|
|
||||||
* It does not handle side-effects, so it should only be used by the lazy-encoder.
|
|
||||||
*
|
|
||||||
* @param {Item | GC | Skip} left
|
|
||||||
* @param {number} diff
|
|
||||||
* @return {Item | GC}
|
|
||||||
*/
|
|
||||||
const sliceStruct = (left, diff) => {
|
|
||||||
if (left.constructor === GC) {
|
|
||||||
const { client, clock } = left.id
|
|
||||||
return new GC(createID(client, clock + diff), left.length - diff)
|
|
||||||
} else if (left.constructor === Skip) {
|
|
||||||
const { client, clock } = left.id
|
|
||||||
return new Skip(createID(client, clock + diff), left.length - diff)
|
|
||||||
} else {
|
|
||||||
const leftItem = /** @type {Item} */ (left)
|
|
||||||
const { client, clock } = leftItem.id
|
|
||||||
return new Item(
|
|
||||||
createID(client, clock + diff),
|
|
||||||
null,
|
|
||||||
createID(client, clock + diff - 1),
|
|
||||||
null,
|
|
||||||
leftItem.rightOrigin,
|
|
||||||
leftItem.parent,
|
|
||||||
leftItem.parentSub,
|
|
||||||
leftItem.content.splice(diff)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
* This function works similarly to `readUpdateV2`.
|
|
||||||
*
|
|
||||||
* @param {Array<Uint8Array>} updates
|
|
||||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
|
|
||||||
* @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
|
|
||||||
* @return {Uint8Array}
|
|
||||||
*/
|
|
||||||
export const mergeUpdatesV2 = (updates, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
|
|
||||||
if (updates.length === 1) {
|
|
||||||
return updates[0]
|
|
||||||
}
|
|
||||||
const updateDecoders = updates.map(update => new YDecoder(decoding.createDecoder(update)))
|
|
||||||
let lazyStructDecoders = updateDecoders.map(decoder => new LazyStructReader(decoder, true))
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @todo we don't need offset because we always slice before
|
|
||||||
* @type {null | { struct: Item | GC | Skip, offset: number }}
|
|
||||||
*/
|
|
||||||
let currWrite = null
|
|
||||||
|
|
||||||
const updateEncoder = new YEncoder()
|
|
||||||
// write structs lazily
|
|
||||||
const lazyStructEncoder = new LazyStructWriter(updateEncoder)
|
|
||||||
|
|
||||||
// Note: We need to ensure that all lazyStructDecoders are fully consumed
|
|
||||||
// Note: Should merge document updates whenever possible - even from different updates
|
|
||||||
// Note: Should handle that some operations cannot be applied yet ()
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
// Write higher clients first ⇒ sort by clientID & clock and remove decoders without content
|
|
||||||
lazyStructDecoders = lazyStructDecoders.filter(dec => dec.curr !== null)
|
|
||||||
lazyStructDecoders.sort(
|
|
||||||
/** @type {function(any,any):number} */ (dec1, dec2) => {
|
|
||||||
if (dec1.curr.id.client === dec2.curr.id.client) {
|
|
||||||
const clockDiff = dec1.curr.id.clock - dec2.curr.id.clock
|
|
||||||
if (clockDiff === 0) {
|
|
||||||
// @todo remove references to skip since the structDecoders must filter Skips.
|
|
||||||
return dec1.curr.constructor === dec2.curr.constructor
|
|
||||||
? 0
|
|
||||||
: dec1.curr.constructor === Skip ? 1 : -1 // we are filtering skips anyway.
|
|
||||||
} else {
|
|
||||||
return clockDiff
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return dec2.curr.id.client - dec1.curr.id.client
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
if (lazyStructDecoders.length === 0) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
const currDecoder = lazyStructDecoders[0]
|
|
||||||
// write from currDecoder until the next operation is from another client or if filler-struct
|
|
||||||
// then we need to reorder the decoders and find the next operation to write
|
|
||||||
const firstClient = /** @type {Item | GC} */ (currDecoder.curr).id.client
|
|
||||||
|
|
||||||
if (currWrite !== null) {
|
|
||||||
let curr = /** @type {Item | GC | null} */ (currDecoder.curr)
|
|
||||||
let iterated = false
|
|
||||||
|
|
||||||
// iterate until we find something that we haven't written already
|
|
||||||
// remember: first the high client-ids are written
|
|
||||||
while (curr !== null && curr.id.clock + curr.length <= currWrite.struct.id.clock + currWrite.struct.length && curr.id.client >= currWrite.struct.id.client) {
|
|
||||||
curr = currDecoder.next()
|
|
||||||
iterated = true
|
|
||||||
}
|
|
||||||
if (
|
|
||||||
curr === null || // current decoder is empty
|
|
||||||
curr.id.client !== firstClient || // check whether there is another decoder that has has updates from `firstClient`
|
|
||||||
(iterated && curr.id.clock > currWrite.struct.id.clock + currWrite.struct.length) // the above while loop was used and we are potentially missing updates
|
|
||||||
) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (firstClient !== currWrite.struct.id.client) {
|
|
||||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
|
||||||
currWrite = { struct: curr, offset: 0 }
|
|
||||||
currDecoder.next()
|
|
||||||
} else {
|
|
||||||
if (currWrite.struct.id.clock + currWrite.struct.length < curr.id.clock) {
|
|
||||||
// @todo write currStruct & set currStruct = Skip(clock = currStruct.id.clock + currStruct.length, length = curr.id.clock - self.clock)
|
|
||||||
if (currWrite.struct.constructor === Skip) {
|
|
||||||
// extend existing skip
|
|
||||||
currWrite.struct.length = curr.id.clock + curr.length - currWrite.struct.id.clock
|
|
||||||
} else {
|
|
||||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
|
||||||
const diff = curr.id.clock - currWrite.struct.id.clock - currWrite.struct.length
|
|
||||||
/**
|
|
||||||
* @type {Skip}
|
|
||||||
*/
|
|
||||||
const struct = new Skip(createID(firstClient, currWrite.struct.id.clock + currWrite.struct.length), diff)
|
|
||||||
currWrite = { struct, offset: 0 }
|
|
||||||
}
|
|
||||||
} else { // if (currWrite.struct.id.clock + currWrite.struct.length >= curr.id.clock) {
|
|
||||||
const diff = currWrite.struct.id.clock + currWrite.struct.length - curr.id.clock
|
|
||||||
if (diff > 0) {
|
|
||||||
if (currWrite.struct.constructor === Skip) {
|
|
||||||
// prefer to slice Skip because the other struct might contain more information
|
|
||||||
currWrite.struct.length -= diff
|
|
||||||
} else {
|
|
||||||
curr = sliceStruct(curr, diff)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (!currWrite.struct.mergeWith(/** @type {any} */ (curr))) {
|
|
||||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
|
||||||
currWrite = { struct: curr, offset: 0 }
|
|
||||||
currDecoder.next()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
currWrite = { struct: /** @type {Item | GC} */ (currDecoder.curr), offset: 0 }
|
|
||||||
currDecoder.next()
|
|
||||||
}
|
|
||||||
for (
|
|
||||||
let next = currDecoder.curr;
|
|
||||||
next !== null && next.id.client === firstClient && next.id.clock === currWrite.struct.id.clock + currWrite.struct.length && next.constructor !== Skip;
|
|
||||||
next = currDecoder.next()
|
|
||||||
) {
|
|
||||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
|
||||||
currWrite = { struct: next, offset: 0 }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (currWrite !== null) {
|
|
||||||
writeStructToLazyStructWriter(lazyStructEncoder, currWrite.struct, currWrite.offset)
|
|
||||||
currWrite = null
|
|
||||||
}
|
|
||||||
finishLazyStructWriting(lazyStructEncoder)
|
|
||||||
|
|
||||||
const dss = updateDecoders.map(decoder => readDeleteSet(decoder))
|
|
||||||
const ds = mergeDeleteSets(dss)
|
|
||||||
writeDeleteSet(updateEncoder, ds)
|
|
||||||
return updateEncoder.toUint8Array()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {Uint8Array} sv
|
|
||||||
* @param {typeof UpdateDecoderV1 | typeof UpdateDecoderV2} [YDecoder]
|
|
||||||
* @param {typeof UpdateEncoderV1 | typeof UpdateEncoderV2} [YEncoder]
|
|
||||||
*/
|
|
||||||
export const diffUpdateV2 = (update, sv, YDecoder = UpdateDecoderV2, YEncoder = UpdateEncoderV2) => {
|
|
||||||
const state = decodeStateVector(sv)
|
|
||||||
const encoder = new YEncoder()
|
|
||||||
const lazyStructWriter = new LazyStructWriter(encoder)
|
|
||||||
const decoder = new YDecoder(decoding.createDecoder(update))
|
|
||||||
const reader = new LazyStructReader(decoder, false)
|
|
||||||
while (reader.curr) {
|
|
||||||
const curr = reader.curr
|
|
||||||
const currClient = curr.id.client
|
|
||||||
const svClock = state.get(currClient) || 0
|
|
||||||
if (reader.curr.constructor === Skip) {
|
|
||||||
// the first written struct shouldn't be a skip
|
|
||||||
reader.next()
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (curr.id.clock + curr.length > svClock) {
|
|
||||||
writeStructToLazyStructWriter(lazyStructWriter, curr, math.max(svClock - curr.id.clock, 0))
|
|
||||||
reader.next()
|
|
||||||
while (reader.curr && reader.curr.id.client === currClient) {
|
|
||||||
writeStructToLazyStructWriter(lazyStructWriter, reader.curr, 0)
|
|
||||||
reader.next()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// read until something new comes up
|
|
||||||
while (reader.curr && reader.curr.id.client === currClient && reader.curr.id.clock + reader.curr.length <= svClock) {
|
|
||||||
reader.next()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
finishLazyStructWriting(lazyStructWriter)
|
|
||||||
// write ds
|
|
||||||
const ds = readDeleteSet(decoder)
|
|
||||||
writeDeleteSet(encoder, ds)
|
|
||||||
return encoder.toUint8Array()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {Uint8Array} sv
|
|
||||||
*/
|
|
||||||
export const diffUpdate = (update, sv) => diffUpdateV2(update, sv, UpdateDecoderV1, UpdateEncoderV1)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {LazyStructWriter} lazyWriter
|
|
||||||
*/
|
|
||||||
const flushLazyStructWriter = lazyWriter => {
|
|
||||||
if (lazyWriter.written > 0) {
|
|
||||||
lazyWriter.clientStructs.push({ written: lazyWriter.written, restEncoder: encoding.toUint8Array(lazyWriter.encoder.restEncoder) })
|
|
||||||
lazyWriter.encoder.restEncoder = encoding.createEncoder()
|
|
||||||
lazyWriter.written = 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {LazyStructWriter} lazyWriter
|
|
||||||
* @param {Item | GC} struct
|
|
||||||
* @param {number} offset
|
|
||||||
*/
|
|
||||||
const writeStructToLazyStructWriter = (lazyWriter, struct, offset) => {
|
|
||||||
// flush curr if we start another client
|
|
||||||
if (lazyWriter.written > 0 && lazyWriter.currClient !== struct.id.client) {
|
|
||||||
flushLazyStructWriter(lazyWriter)
|
|
||||||
}
|
|
||||||
if (lazyWriter.written === 0) {
|
|
||||||
lazyWriter.currClient = struct.id.client
|
|
||||||
// write next client
|
|
||||||
lazyWriter.encoder.writeClient(struct.id.client)
|
|
||||||
// write startClock
|
|
||||||
encoding.writeVarUint(lazyWriter.encoder.restEncoder, struct.id.clock + offset)
|
|
||||||
}
|
|
||||||
struct.write(lazyWriter.encoder, offset)
|
|
||||||
lazyWriter.written++
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Call this function when we collected all parts and want to
|
|
||||||
* put all the parts together. After calling this method,
|
|
||||||
* you can continue using the UpdateEncoder.
|
|
||||||
*
|
|
||||||
* @param {LazyStructWriter} lazyWriter
|
|
||||||
*/
|
|
||||||
const finishLazyStructWriting = (lazyWriter) => {
|
|
||||||
flushLazyStructWriter(lazyWriter)
|
|
||||||
|
|
||||||
// this is a fresh encoder because we called flushCurr
|
|
||||||
const restEncoder = lazyWriter.encoder.restEncoder
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Now we put all the fragments together.
|
|
||||||
* This works similarly to `writeClientsStructs`
|
|
||||||
*/
|
|
||||||
|
|
||||||
// write # states that were updated - i.e. the clients
|
|
||||||
encoding.writeVarUint(restEncoder, lazyWriter.clientStructs.length)
|
|
||||||
|
|
||||||
for (let i = 0; i < lazyWriter.clientStructs.length; i++) {
|
|
||||||
const partStructs = lazyWriter.clientStructs[i]
|
|
||||||
/**
|
|
||||||
* Works similarly to `writeStructs`
|
|
||||||
*/
|
|
||||||
// write # encoded structs
|
|
||||||
encoding.writeVarUint(restEncoder, partStructs.written)
|
|
||||||
// write the rest of the fragment
|
|
||||||
encoding.writeUint8Array(restEncoder, partStructs.restEncoder)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {function(Item|GC|Skip):Item|GC|Skip} blockTransformer
|
|
||||||
* @param {typeof UpdateDecoderV2 | typeof UpdateDecoderV1} YDecoder
|
|
||||||
* @param {typeof UpdateEncoderV2 | typeof UpdateEncoderV1 } YEncoder
|
|
||||||
*/
|
|
||||||
export const convertUpdateFormat = (update, blockTransformer, YDecoder, YEncoder) => {
|
|
||||||
const updateDecoder = new YDecoder(decoding.createDecoder(update))
|
|
||||||
const lazyDecoder = new LazyStructReader(updateDecoder, false)
|
|
||||||
const updateEncoder = new YEncoder()
|
|
||||||
const lazyWriter = new LazyStructWriter(updateEncoder)
|
|
||||||
for (let curr = lazyDecoder.curr; curr !== null; curr = lazyDecoder.next()) {
|
|
||||||
writeStructToLazyStructWriter(lazyWriter, blockTransformer(curr), 0)
|
|
||||||
}
|
|
||||||
finishLazyStructWriting(lazyWriter)
|
|
||||||
const ds = readDeleteSet(updateDecoder)
|
|
||||||
writeDeleteSet(updateEncoder, ds)
|
|
||||||
return updateEncoder.toUint8Array()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {Object} ObfuscatorOptions
|
|
||||||
* @property {boolean} [ObfuscatorOptions.formatting=true]
|
|
||||||
* @property {boolean} [ObfuscatorOptions.subdocs=true]
|
|
||||||
* @property {boolean} [ObfuscatorOptions.yxml=true] Whether to obfuscate nodeName / hookName
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {ObfuscatorOptions} obfuscator
|
|
||||||
*/
|
|
||||||
const createObfuscator = ({ formatting = true, subdocs = true, yxml = true } = {}) => {
|
|
||||||
let i = 0
|
|
||||||
const mapKeyCache = map.create()
|
|
||||||
const nodeNameCache = map.create()
|
|
||||||
const formattingKeyCache = map.create()
|
|
||||||
const formattingValueCache = map.create()
|
|
||||||
formattingValueCache.set(null, null) // end of a formatting range should always be the end of a formatting range
|
|
||||||
/**
|
|
||||||
* @param {Item|GC|Skip} block
|
|
||||||
* @return {Item|GC|Skip}
|
|
||||||
*/
|
|
||||||
return block => {
|
|
||||||
switch (block.constructor) {
|
|
||||||
case GC:
|
|
||||||
case Skip:
|
|
||||||
return block
|
|
||||||
case Item: {
|
|
||||||
const item = /** @type {Item} */ (block)
|
|
||||||
const content = item.content
|
|
||||||
switch (content.constructor) {
|
|
||||||
case ContentDeleted:
|
|
||||||
break
|
|
||||||
case ContentType: {
|
|
||||||
if (yxml) {
|
|
||||||
const type = /** @type {ContentType} */ (content).type
|
|
||||||
if (type instanceof YXmlElement) {
|
|
||||||
type.nodeName = map.setIfUndefined(nodeNameCache, type.nodeName, () => 'node-' + i)
|
|
||||||
}
|
|
||||||
if (type instanceof YXmlHook) {
|
|
||||||
type.hookName = map.setIfUndefined(nodeNameCache, type.hookName, () => 'hook-' + i)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case ContentAny: {
|
|
||||||
const c = /** @type {ContentAny} */ (content)
|
|
||||||
c.arr = c.arr.map(() => i)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case ContentBinary: {
|
|
||||||
const c = /** @type {ContentBinary} */ (content)
|
|
||||||
c.content = new Uint8Array([i])
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case ContentDoc: {
|
|
||||||
const c = /** @type {ContentDoc} */ (content)
|
|
||||||
if (subdocs) {
|
|
||||||
c.opts = {}
|
|
||||||
c.doc.guid = i + ''
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case ContentEmbed: {
|
|
||||||
const c = /** @type {ContentEmbed} */ (content)
|
|
||||||
c.embed = {}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case ContentFormat: {
|
|
||||||
const c = /** @type {ContentFormat} */ (content)
|
|
||||||
if (formatting) {
|
|
||||||
c.key = map.setIfUndefined(formattingKeyCache, c.key, () => i + '')
|
|
||||||
c.value = map.setIfUndefined(formattingValueCache, c.value, () => ({ i }))
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case ContentJSON: {
|
|
||||||
const c = /** @type {ContentJSON} */ (content)
|
|
||||||
c.arr = c.arr.map(() => i)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case ContentString: {
|
|
||||||
const c = /** @type {ContentString} */ (content)
|
|
||||||
c.str = string.repeat((i % 10) + '', c.str.length)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
// unknown content type
|
|
||||||
error.unexpectedCase()
|
|
||||||
}
|
|
||||||
if (item.parentSub) {
|
|
||||||
item.parentSub = map.setIfUndefined(mapKeyCache, item.parentSub, () => i + '')
|
|
||||||
}
|
|
||||||
i++
|
|
||||||
return block
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
// unknown block-type
|
|
||||||
error.unexpectedCase()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This function obfuscates the content of a Yjs update. This is useful to share
|
|
||||||
* buggy Yjs documents while significantly limiting the possibility that a
|
|
||||||
* developer can on the user. Note that it might still be possible to deduce
|
|
||||||
* some information by analyzing the "structure" of the document or by analyzing
|
|
||||||
* the typing behavior using the CRDT-related metadata that is still kept fully
|
|
||||||
* intact.
|
|
||||||
*
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {ObfuscatorOptions} [opts]
|
|
||||||
*/
|
|
||||||
export const obfuscateUpdate = (update, opts) => convertUpdateFormat(update, createObfuscator(opts), UpdateDecoderV1, UpdateEncoderV1)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
* @param {ObfuscatorOptions} [opts]
|
|
||||||
*/
|
|
||||||
export const obfuscateUpdateV2 = (update, opts) => convertUpdateFormat(update, createObfuscator(opts), UpdateDecoderV2, UpdateEncoderV2)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
*/
|
|
||||||
export const convertUpdateFormatV1ToV2 = update => convertUpdateFormat(update, f.id, UpdateDecoderV1, UpdateEncoderV2)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Uint8Array} update
|
|
||||||
*/
|
|
||||||
export const convertUpdateFormatV2ToV1 = update => convertUpdateFormat(update, f.id, UpdateDecoderV2, UpdateEncoderV1)
|
|
||||||
File diff suppressed because one or more lines are too long
19
tests/consistency.tests.js
Normal file
19
tests/consistency.tests.js
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
|
||||||
|
import * as Y from '../src/index.js'
|
||||||
|
import * as t from 'lib0/testing.js'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Client id should be changed when an instance receives updates from another client using the same client id.
|
||||||
|
*
|
||||||
|
* @param {t.TestCase} tc
|
||||||
|
*/
|
||||||
|
export const testClientIdDuplicateChange = tc => {
|
||||||
|
const doc1 = new Y.Doc()
|
||||||
|
doc1.clientID = 0
|
||||||
|
const doc2 = new Y.Doc()
|
||||||
|
doc2.clientID = 0
|
||||||
|
t.assert(doc2.clientID === doc1.clientID)
|
||||||
|
doc1.getArray('a').insert(0, [1, 2])
|
||||||
|
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc1))
|
||||||
|
t.assert(doc2.clientID !== doc1.clientID)
|
||||||
|
}
|
||||||
@@ -1,329 +0,0 @@
|
|||||||
import * as Y from '../src/index.js'
|
|
||||||
import * as t from 'lib0/testing'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testAfterTransactionRecursion = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const yxml = ydoc.getXmlFragment('')
|
|
||||||
ydoc.on('afterTransaction', tr => {
|
|
||||||
if (tr.origin === 'test') {
|
|
||||||
yxml.toJSON()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
ydoc.transact(_tr => {
|
|
||||||
for (let i = 0; i < 15000; i++) {
|
|
||||||
yxml.push([new Y.XmlText('a')])
|
|
||||||
}
|
|
||||||
}, 'test')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testOriginInTransaction = _tc => {
|
|
||||||
const doc = new Y.Doc()
|
|
||||||
const ytext = doc.getText()
|
|
||||||
/**
|
|
||||||
* @type {Array<string>}
|
|
||||||
*/
|
|
||||||
const origins = []
|
|
||||||
doc.on('afterTransaction', (tr) => {
|
|
||||||
origins.push(tr.origin)
|
|
||||||
if (origins.length <= 1) {
|
|
||||||
ytext.toDelta(Y.snapshot(doc)) // adding a snapshot forces toDelta to create a cleanup transaction
|
|
||||||
doc.transact(() => {
|
|
||||||
ytext.insert(0, 'a')
|
|
||||||
}, 'nested')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
doc.transact(() => {
|
|
||||||
ytext.insert(0, '0')
|
|
||||||
}, 'first')
|
|
||||||
t.compareArrays(origins, ['first', 'cleanup', 'nested'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Client id should be changed when an instance receives updates from another client using the same client id.
|
|
||||||
*
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testClientIdDuplicateChange = _tc => {
|
|
||||||
const doc1 = new Y.Doc()
|
|
||||||
doc1.clientID = 0
|
|
||||||
const doc2 = new Y.Doc()
|
|
||||||
doc2.clientID = 0
|
|
||||||
t.assert(doc2.clientID === doc1.clientID)
|
|
||||||
doc1.getArray('a').insert(0, [1, 2])
|
|
||||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc1))
|
|
||||||
t.assert(doc2.clientID !== doc1.clientID)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testGetTypeEmptyId = _tc => {
|
|
||||||
const doc1 = new Y.Doc()
|
|
||||||
doc1.getText('').insert(0, 'h')
|
|
||||||
doc1.getText().insert(1, 'i')
|
|
||||||
const doc2 = new Y.Doc()
|
|
||||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc1))
|
|
||||||
t.assert(doc2.getText().toString() === 'hi')
|
|
||||||
t.assert(doc2.getText('').toString() === 'hi')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testToJSON = _tc => {
|
|
||||||
const doc = new Y.Doc()
|
|
||||||
t.compare(doc.toJSON(), {}, 'doc.toJSON yields empty object')
|
|
||||||
|
|
||||||
const arr = doc.getArray('array')
|
|
||||||
arr.push(['test1'])
|
|
||||||
|
|
||||||
const map = doc.getMap('map')
|
|
||||||
map.set('k1', 'v1')
|
|
||||||
const map2 = new Y.Map()
|
|
||||||
map.set('k2', map2)
|
|
||||||
map2.set('m2k1', 'm2v1')
|
|
||||||
|
|
||||||
t.compare(doc.toJSON(), {
|
|
||||||
array: ['test1'],
|
|
||||||
map: {
|
|
||||||
k1: 'v1',
|
|
||||||
k2: {
|
|
||||||
m2k1: 'm2v1'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}, 'doc.toJSON has array and recursive map')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testSubdoc = _tc => {
|
|
||||||
const doc = new Y.Doc()
|
|
||||||
doc.load() // doesn't do anything
|
|
||||||
{
|
|
||||||
/**
|
|
||||||
* @type {Array<any>|null}
|
|
||||||
*/
|
|
||||||
let event = /** @type {any} */ (null)
|
|
||||||
doc.on('subdocs', subdocs => {
|
|
||||||
event = [Array.from(subdocs.added).map(x => x.guid), Array.from(subdocs.removed).map(x => x.guid), Array.from(subdocs.loaded).map(x => x.guid)]
|
|
||||||
})
|
|
||||||
const subdocs = doc.getMap('mysubdocs')
|
|
||||||
const docA = new Y.Doc({ guid: 'a' })
|
|
||||||
docA.load()
|
|
||||||
subdocs.set('a', docA)
|
|
||||||
t.compare(event, [['a'], [], ['a']])
|
|
||||||
|
|
||||||
event = null
|
|
||||||
subdocs.get('a').load()
|
|
||||||
t.assert(event === null)
|
|
||||||
|
|
||||||
event = null
|
|
||||||
subdocs.get('a').destroy()
|
|
||||||
t.compare(event, [['a'], ['a'], []])
|
|
||||||
subdocs.get('a').load()
|
|
||||||
t.compare(event, [[], [], ['a']])
|
|
||||||
|
|
||||||
subdocs.set('b', new Y.Doc({ guid: 'a', shouldLoad: false }))
|
|
||||||
t.compare(event, [['a'], [], []])
|
|
||||||
subdocs.get('b').load()
|
|
||||||
t.compare(event, [[], [], ['a']])
|
|
||||||
|
|
||||||
const docC = new Y.Doc({ guid: 'c' })
|
|
||||||
docC.load()
|
|
||||||
subdocs.set('c', docC)
|
|
||||||
t.compare(event, [['c'], [], ['c']])
|
|
||||||
|
|
||||||
t.compare(Array.from(doc.getSubdocGuids()), ['a', 'c'])
|
|
||||||
}
|
|
||||||
|
|
||||||
const doc2 = new Y.Doc()
|
|
||||||
{
|
|
||||||
t.compare(Array.from(doc2.getSubdocs()), [])
|
|
||||||
/**
|
|
||||||
* @type {Array<any>|null}
|
|
||||||
*/
|
|
||||||
let event = /** @type {any} */ (null)
|
|
||||||
doc2.on('subdocs', subdocs => {
|
|
||||||
event = [Array.from(subdocs.added).map(d => d.guid), Array.from(subdocs.removed).map(d => d.guid), Array.from(subdocs.loaded).map(d => d.guid)]
|
|
||||||
})
|
|
||||||
Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc))
|
|
||||||
t.compare(event, [['a', 'a', 'c'], [], []])
|
|
||||||
|
|
||||||
doc2.getMap('mysubdocs').get('a').load()
|
|
||||||
t.compare(event, [[], [], ['a']])
|
|
||||||
|
|
||||||
t.compare(Array.from(doc2.getSubdocGuids()), ['a', 'c'])
|
|
||||||
|
|
||||||
doc2.getMap('mysubdocs').delete('a')
|
|
||||||
t.compare(event, [[], ['a'], []])
|
|
||||||
t.compare(Array.from(doc2.getSubdocGuids()), ['a', 'c'])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testSubdocLoadEdgeCases = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const yarray = ydoc.getArray()
|
|
||||||
const subdoc1 = new Y.Doc()
|
|
||||||
/**
|
|
||||||
* @type {any}
|
|
||||||
*/
|
|
||||||
let lastEvent = null
|
|
||||||
ydoc.on('subdocs', event => {
|
|
||||||
lastEvent = event
|
|
||||||
})
|
|
||||||
yarray.insert(0, [subdoc1])
|
|
||||||
t.assert(subdoc1.shouldLoad)
|
|
||||||
t.assert(subdoc1.autoLoad === false)
|
|
||||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc1))
|
|
||||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc1))
|
|
||||||
// destroy and check whether lastEvent adds it again to added (it shouldn't)
|
|
||||||
subdoc1.destroy()
|
|
||||||
const subdoc2 = yarray.get(0)
|
|
||||||
t.assert(subdoc1 !== subdoc2)
|
|
||||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc2))
|
|
||||||
t.assert(lastEvent !== null && !lastEvent.loaded.has(subdoc2))
|
|
||||||
// load
|
|
||||||
subdoc2.load()
|
|
||||||
t.assert(lastEvent !== null && !lastEvent.added.has(subdoc2))
|
|
||||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc2))
|
|
||||||
// apply from remote
|
|
||||||
const ydoc2 = new Y.Doc()
|
|
||||||
ydoc2.on('subdocs', event => {
|
|
||||||
lastEvent = event
|
|
||||||
})
|
|
||||||
Y.applyUpdate(ydoc2, Y.encodeStateAsUpdate(ydoc))
|
|
||||||
const subdoc3 = ydoc2.getArray().get(0)
|
|
||||||
t.assert(subdoc3.shouldLoad === false)
|
|
||||||
t.assert(subdoc3.autoLoad === false)
|
|
||||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc3))
|
|
||||||
t.assert(lastEvent !== null && !lastEvent.loaded.has(subdoc3))
|
|
||||||
// load
|
|
||||||
subdoc3.load()
|
|
||||||
t.assert(subdoc3.shouldLoad)
|
|
||||||
t.assert(lastEvent !== null && !lastEvent.added.has(subdoc3))
|
|
||||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc3))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testSubdocLoadEdgeCasesAutoload = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const yarray = ydoc.getArray()
|
|
||||||
const subdoc1 = new Y.Doc({ autoLoad: true })
|
|
||||||
/**
|
|
||||||
* @type {any}
|
|
||||||
*/
|
|
||||||
let lastEvent = null
|
|
||||||
ydoc.on('subdocs', event => {
|
|
||||||
lastEvent = event
|
|
||||||
})
|
|
||||||
yarray.insert(0, [subdoc1])
|
|
||||||
t.assert(subdoc1.shouldLoad)
|
|
||||||
t.assert(subdoc1.autoLoad)
|
|
||||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc1))
|
|
||||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc1))
|
|
||||||
// destroy and check whether lastEvent adds it again to added (it shouldn't)
|
|
||||||
subdoc1.destroy()
|
|
||||||
const subdoc2 = yarray.get(0)
|
|
||||||
t.assert(subdoc1 !== subdoc2)
|
|
||||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc2))
|
|
||||||
t.assert(lastEvent !== null && !lastEvent.loaded.has(subdoc2))
|
|
||||||
// load
|
|
||||||
subdoc2.load()
|
|
||||||
t.assert(lastEvent !== null && !lastEvent.added.has(subdoc2))
|
|
||||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc2))
|
|
||||||
// apply from remote
|
|
||||||
const ydoc2 = new Y.Doc()
|
|
||||||
ydoc2.on('subdocs', event => {
|
|
||||||
lastEvent = event
|
|
||||||
})
|
|
||||||
Y.applyUpdate(ydoc2, Y.encodeStateAsUpdate(ydoc))
|
|
||||||
const subdoc3 = ydoc2.getArray().get(0)
|
|
||||||
t.assert(subdoc1.shouldLoad)
|
|
||||||
t.assert(subdoc1.autoLoad)
|
|
||||||
t.assert(lastEvent !== null && lastEvent.added.has(subdoc3))
|
|
||||||
t.assert(lastEvent !== null && lastEvent.loaded.has(subdoc3))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testSubdocsUndo = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const elems = ydoc.getXmlFragment()
|
|
||||||
const undoManager = new Y.UndoManager(elems)
|
|
||||||
const subdoc = new Y.Doc()
|
|
||||||
// @ts-ignore
|
|
||||||
elems.insert(0, [subdoc])
|
|
||||||
undoManager.undo()
|
|
||||||
undoManager.redo()
|
|
||||||
t.assert(elems.length === 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testLoadDocsEvent = async _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
t.assert(ydoc.isLoaded === false)
|
|
||||||
let loadedEvent = false
|
|
||||||
ydoc.on('load', () => {
|
|
||||||
loadedEvent = true
|
|
||||||
})
|
|
||||||
ydoc.emit('load', [ydoc])
|
|
||||||
await ydoc.whenLoaded
|
|
||||||
t.assert(loadedEvent)
|
|
||||||
t.assert(ydoc.isLoaded)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testSyncDocsEvent = async _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
t.assert(ydoc.isLoaded === false)
|
|
||||||
t.assert(ydoc.isSynced === false)
|
|
||||||
let loadedEvent = false
|
|
||||||
ydoc.once('load', () => {
|
|
||||||
loadedEvent = true
|
|
||||||
})
|
|
||||||
let syncedEvent = false
|
|
||||||
ydoc.once('sync', /** @param {any} isSynced */ (isSynced) => {
|
|
||||||
syncedEvent = true
|
|
||||||
t.assert(isSynced)
|
|
||||||
})
|
|
||||||
ydoc.emit('sync', [true, ydoc])
|
|
||||||
await ydoc.whenLoaded
|
|
||||||
const oldWhenSynced = ydoc.whenSynced
|
|
||||||
await ydoc.whenSynced
|
|
||||||
t.assert(loadedEvent)
|
|
||||||
t.assert(syncedEvent)
|
|
||||||
t.assert(ydoc.isLoaded)
|
|
||||||
t.assert(ydoc.isSynced)
|
|
||||||
let loadedEvent2 = false
|
|
||||||
ydoc.on('load', () => {
|
|
||||||
loadedEvent2 = true
|
|
||||||
})
|
|
||||||
let syncedEvent2 = false
|
|
||||||
ydoc.on('sync', (isSynced) => {
|
|
||||||
syncedEvent2 = true
|
|
||||||
t.assert(isSynced === false)
|
|
||||||
})
|
|
||||||
ydoc.emit('sync', [false, ydoc])
|
|
||||||
t.assert(!loadedEvent2)
|
|
||||||
t.assert(syncedEvent2)
|
|
||||||
t.assert(ydoc.isLoaded)
|
|
||||||
t.assert(!ydoc.isSynced)
|
|
||||||
t.assert(ydoc.whenSynced !== oldWhenSynced)
|
|
||||||
}
|
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
import * as t from 'lib0/testing'
|
import * as t from 'lib0/testing.js'
|
||||||
import * as promise from 'lib0/promise'
|
|
||||||
|
|
||||||
import {
|
import {
|
||||||
contentRefs,
|
contentRefs,
|
||||||
@@ -10,21 +9,14 @@ import {
|
|||||||
readContentEmbed,
|
readContentEmbed,
|
||||||
readContentType,
|
readContentType,
|
||||||
readContentFormat,
|
readContentFormat,
|
||||||
readContentAny,
|
readContentAny
|
||||||
readContentDoc,
|
|
||||||
Doc,
|
|
||||||
PermanentUserData,
|
|
||||||
encodeStateAsUpdate,
|
|
||||||
applyUpdate
|
|
||||||
} from '../src/internals.js'
|
} from '../src/internals.js'
|
||||||
|
|
||||||
import * as Y from '../src/index.js'
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testStructReferences = tc => {
|
export const testStructReferences = tc => {
|
||||||
t.assert(contentRefs.length === 11)
|
t.assert(contentRefs.length === 9)
|
||||||
t.assert(contentRefs[1] === readContentDeleted)
|
t.assert(contentRefs[1] === readContentDeleted)
|
||||||
t.assert(contentRefs[2] === readContentJSON) // TODO: deprecate content json?
|
t.assert(contentRefs[2] === readContentJSON) // TODO: deprecate content json?
|
||||||
t.assert(contentRefs[3] === readContentBinary)
|
t.assert(contentRefs[3] === readContentBinary)
|
||||||
@@ -33,76 +25,4 @@ export const testStructReferences = tc => {
|
|||||||
t.assert(contentRefs[6] === readContentFormat)
|
t.assert(contentRefs[6] === readContentFormat)
|
||||||
t.assert(contentRefs[7] === readContentType)
|
t.assert(contentRefs[7] === readContentType)
|
||||||
t.assert(contentRefs[8] === readContentAny)
|
t.assert(contentRefs[8] === readContentAny)
|
||||||
t.assert(contentRefs[9] === readContentDoc)
|
|
||||||
// contentRefs[10] is reserved for Skip structs
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* There is some custom encoding/decoding happening in PermanentUserData.
|
|
||||||
* This is why it landed here.
|
|
||||||
*
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testPermanentUserData = async tc => {
|
|
||||||
const ydoc1 = new Doc()
|
|
||||||
const ydoc2 = new Doc()
|
|
||||||
const pd1 = new PermanentUserData(ydoc1)
|
|
||||||
const pd2 = new PermanentUserData(ydoc2)
|
|
||||||
pd1.setUserMapping(ydoc1, ydoc1.clientID, 'user a')
|
|
||||||
pd2.setUserMapping(ydoc2, ydoc2.clientID, 'user b')
|
|
||||||
ydoc1.getText().insert(0, 'xhi')
|
|
||||||
ydoc1.getText().delete(0, 1)
|
|
||||||
ydoc2.getText().insert(0, 'hxxi')
|
|
||||||
ydoc2.getText().delete(1, 2)
|
|
||||||
await promise.wait(10)
|
|
||||||
applyUpdate(ydoc2, encodeStateAsUpdate(ydoc1))
|
|
||||||
applyUpdate(ydoc1, encodeStateAsUpdate(ydoc2))
|
|
||||||
|
|
||||||
// now sync a third doc with same name as doc1 and then create PermanentUserData
|
|
||||||
const ydoc3 = new Doc()
|
|
||||||
applyUpdate(ydoc3, encodeStateAsUpdate(ydoc1))
|
|
||||||
const pd3 = new PermanentUserData(ydoc3)
|
|
||||||
pd3.setUserMapping(ydoc3, ydoc3.clientID, 'user a')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Reported here: https://github.com/yjs/yjs/issues/308
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testDiffStateVectorOfUpdateIsEmpty = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
/**
|
|
||||||
* @type {any}
|
|
||||||
*/
|
|
||||||
let sv = null
|
|
||||||
ydoc.getText().insert(0, 'a')
|
|
||||||
ydoc.on('update', update => {
|
|
||||||
sv = Y.encodeStateVectorFromUpdate(update)
|
|
||||||
})
|
|
||||||
// should produce an update with an empty state vector (because previous ops are missing)
|
|
||||||
ydoc.getText().insert(0, 'a')
|
|
||||||
t.assert(sv !== null && sv.byteLength === 1 && sv[0] === 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Reported here: https://github.com/yjs/yjs/issues/308
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testDiffStateVectorOfUpdateIgnoresSkips = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
/**
|
|
||||||
* @type {Array<Uint8Array>}
|
|
||||||
*/
|
|
||||||
const updates = []
|
|
||||||
ydoc.on('update', update => {
|
|
||||||
updates.push(update)
|
|
||||||
})
|
|
||||||
ydoc.getText().insert(0, 'a')
|
|
||||||
ydoc.getText().insert(0, 'b')
|
|
||||||
ydoc.getText().insert(0, 'c')
|
|
||||||
const update13 = Y.mergeUpdates([updates[0], updates[2]])
|
|
||||||
const sv = Y.encodeStateVectorFromUpdate(update13)
|
|
||||||
const state = Y.decodeStateVector(sv)
|
|
||||||
t.assert(state.get(ydoc.clientID) === 1)
|
|
||||||
t.assert(state.size === 1)
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,42 +1,24 @@
|
|||||||
/* eslint-env node */
|
|
||||||
|
|
||||||
import * as map from './y-map.tests.js'
|
|
||||||
import * as array from './y-array.tests.js'
|
import * as array from './y-array.tests.js'
|
||||||
|
import * as map from './y-map.tests.js'
|
||||||
import * as text from './y-text.tests.js'
|
import * as text from './y-text.tests.js'
|
||||||
import * as xml from './y-xml.tests.js'
|
import * as xml from './y-xml.tests.js'
|
||||||
import * as encoding from './encoding.tests.js'
|
import * as encoding from './encoding.tests.js'
|
||||||
import * as undoredo from './undo-redo.tests.js'
|
import * as undoredo from './undo-redo.tests.js'
|
||||||
import * as compatibility from './compatibility.tests.js'
|
import * as consistency from './consistency.tests.js'
|
||||||
import * as doc from './doc.tests.js'
|
|
||||||
import * as snapshot from './snapshot.tests.js'
|
|
||||||
import * as updates from './updates.tests.js'
|
|
||||||
import * as relativePositions from './relativePositions.tests.js'
|
|
||||||
|
|
||||||
import { runTests } from 'lib0/testing'
|
import { runTests } from 'lib0/testing.js'
|
||||||
import { isBrowser, isNode } from 'lib0/environment'
|
import { isBrowser, isNode } from 'lib0/environment.js'
|
||||||
import * as log from 'lib0/logging'
|
import * as log from 'lib0/logging.js'
|
||||||
import { environment } from 'lib0'
|
|
||||||
|
|
||||||
if (isBrowser) {
|
if (isBrowser) {
|
||||||
log.createVConsole(document.body)
|
log.createVConsole(document.body)
|
||||||
}
|
}
|
||||||
|
runTests({
|
||||||
/**
|
map, array, text, xml, consistency, encoding, undoredo
|
||||||
* @type {any}
|
}).then(success => {
|
||||||
*/
|
|
||||||
const tests = {
|
|
||||||
doc, map, array, text, xml, encoding, undoredo, compatibility, snapshot, updates, relativePositions
|
|
||||||
}
|
|
||||||
|
|
||||||
const run = async () => {
|
|
||||||
if (environment.isNode) {
|
|
||||||
// tests.nodejs = await import('./node.tests.js')
|
|
||||||
}
|
|
||||||
|
|
||||||
const success = await runTests(tests)
|
|
||||||
/* istanbul ignore next */
|
/* istanbul ignore next */
|
||||||
if (isNode) {
|
if (isNode) {
|
||||||
process.exit(success ? 0 : 1)
|
process.exit(success ? 0 : 1)
|
||||||
}
|
}
|
||||||
}
|
})
|
||||||
run()
|
|
||||||
|
|||||||
@@ -1,145 +0,0 @@
|
|||||||
import * as Y from '../src/index.js'
|
|
||||||
import * as t from 'lib0/testing'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {Y.Text} ytext
|
|
||||||
*/
|
|
||||||
const checkRelativePositions = ytext => {
|
|
||||||
// test if all positions are encoded and restored correctly
|
|
||||||
for (let i = 0; i < ytext.length; i++) {
|
|
||||||
// for all types of associations..
|
|
||||||
for (let assoc = -1; assoc < 2; assoc++) {
|
|
||||||
const rpos = Y.createRelativePositionFromTypeIndex(ytext, i, assoc)
|
|
||||||
const encodedRpos = Y.encodeRelativePosition(rpos)
|
|
||||||
const decodedRpos = Y.decodeRelativePosition(encodedRpos)
|
|
||||||
const absPos = /** @type {Y.AbsolutePosition} */ (Y.createAbsolutePositionFromRelativePosition(decodedRpos, /** @type {Y.Doc} */ (ytext.doc)))
|
|
||||||
t.assert(absPos.index === i)
|
|
||||||
t.assert(absPos.assoc === assoc)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionCase1 = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText()
|
|
||||||
ytext.insert(0, '1')
|
|
||||||
ytext.insert(0, 'abc')
|
|
||||||
ytext.insert(0, 'z')
|
|
||||||
ytext.insert(0, 'y')
|
|
||||||
ytext.insert(0, 'x')
|
|
||||||
checkRelativePositions(ytext)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionCase2 = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText()
|
|
||||||
ytext.insert(0, 'abc')
|
|
||||||
checkRelativePositions(ytext)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionCase3 = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText()
|
|
||||||
ytext.insert(0, 'abc')
|
|
||||||
ytext.insert(0, '1')
|
|
||||||
ytext.insert(0, 'xyz')
|
|
||||||
checkRelativePositions(ytext)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionCase4 = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText()
|
|
||||||
ytext.insert(0, '1')
|
|
||||||
checkRelativePositions(ytext)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionCase5 = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText()
|
|
||||||
ytext.insert(0, '2')
|
|
||||||
ytext.insert(0, '1')
|
|
||||||
checkRelativePositions(ytext)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionCase6 = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText()
|
|
||||||
checkRelativePositions(ytext)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Testing https://github.com/yjs/yjs/issues/657
|
|
||||||
*
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionCase7 = tc => {
|
|
||||||
const docA = new Y.Doc()
|
|
||||||
const textA = docA.getText('text')
|
|
||||||
textA.insert(0, 'abcde')
|
|
||||||
// Create a relative position at index 2 in 'textA'
|
|
||||||
const relativePosition = Y.createRelativePositionFromTypeIndex(textA, 2)
|
|
||||||
// Verify that the absolutes positions on 'docA' are the same
|
|
||||||
const absolutePositionWithFollow =
|
|
||||||
Y.createAbsolutePositionFromRelativePosition(relativePosition, docA, true)
|
|
||||||
const absolutePositionWithoutFollow =
|
|
||||||
Y.createAbsolutePositionFromRelativePosition(relativePosition, docA, false)
|
|
||||||
t.assert(absolutePositionWithFollow?.index === 2)
|
|
||||||
t.assert(absolutePositionWithoutFollow?.index === 2)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionAssociationDifference = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText()
|
|
||||||
ytext.insert(0, '2')
|
|
||||||
ytext.insert(0, '1')
|
|
||||||
const rposRight = Y.createRelativePositionFromTypeIndex(ytext, 1, 0)
|
|
||||||
const rposLeft = Y.createRelativePositionFromTypeIndex(ytext, 1, -1)
|
|
||||||
ytext.insert(1, 'x')
|
|
||||||
const posRight = Y.createAbsolutePositionFromRelativePosition(rposRight, ydoc)
|
|
||||||
const posLeft = Y.createAbsolutePositionFromRelativePosition(rposLeft, ydoc)
|
|
||||||
t.assert(posRight != null && posRight.index === 2)
|
|
||||||
t.assert(posLeft != null && posLeft.index === 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testRelativePositionWithUndo = tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText()
|
|
||||||
ytext.insert(0, 'hello world')
|
|
||||||
const rpos = Y.createRelativePositionFromTypeIndex(ytext, 1)
|
|
||||||
const um = new Y.UndoManager(ytext)
|
|
||||||
ytext.delete(0, 6)
|
|
||||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydoc)?.index === 0)
|
|
||||||
um.undo()
|
|
||||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydoc)?.index === 1)
|
|
||||||
const posWithoutFollow = Y.createAbsolutePositionFromRelativePosition(rpos, ydoc, false)
|
|
||||||
console.log({ posWithoutFollow })
|
|
||||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydoc, false)?.index === 6)
|
|
||||||
const ydocClone = new Y.Doc()
|
|
||||||
Y.applyUpdate(ydocClone, Y.encodeStateAsUpdate(ydoc))
|
|
||||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydocClone)?.index === 6)
|
|
||||||
t.assert(Y.createAbsolutePositionFromRelativePosition(rpos, ydocClone, false)?.index === 6)
|
|
||||||
}
|
|
||||||
@@ -1,223 +0,0 @@
|
|||||||
import * as Y from '../src/index.js'
|
|
||||||
import * as t from 'lib0/testing'
|
|
||||||
import { init } from './testHelper.js'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testBasic = _tc => {
|
|
||||||
const ydoc = new Y.Doc({ gc: false })
|
|
||||||
ydoc.getText().insert(0, 'world!')
|
|
||||||
const snapshot = Y.snapshot(ydoc)
|
|
||||||
ydoc.getText().insert(0, 'hello ')
|
|
||||||
const restored = Y.createDocFromSnapshot(ydoc, snapshot)
|
|
||||||
t.assert(restored.getText().toString() === 'world!')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testBasicXmlAttributes = _tc => {
|
|
||||||
const ydoc = new Y.Doc({ gc: false })
|
|
||||||
const yxml = ydoc.getMap().set('el', new Y.XmlElement('div'))
|
|
||||||
const snapshot1 = Y.snapshot(ydoc)
|
|
||||||
yxml.setAttribute('a', '1')
|
|
||||||
const snapshot2 = Y.snapshot(ydoc)
|
|
||||||
yxml.setAttribute('a', '2')
|
|
||||||
t.compare(yxml.getAttributes(), { a: '2' })
|
|
||||||
t.compare(yxml.getAttributes(snapshot2), { a: '1' })
|
|
||||||
t.compare(yxml.getAttributes(snapshot1), {})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testBasicRestoreSnapshot = _tc => {
|
|
||||||
const doc = new Y.Doc({ gc: false })
|
|
||||||
doc.getArray('array').insert(0, ['hello'])
|
|
||||||
const snap = Y.snapshot(doc)
|
|
||||||
doc.getArray('array').insert(1, ['world'])
|
|
||||||
|
|
||||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
|
||||||
|
|
||||||
t.compare(docRestored.getArray('array').toArray(), ['hello'])
|
|
||||||
t.compare(doc.getArray('array').toArray(), ['hello', 'world'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testEmptyRestoreSnapshot = _tc => {
|
|
||||||
const doc = new Y.Doc({ gc: false })
|
|
||||||
const snap = Y.snapshot(doc)
|
|
||||||
snap.sv.set(9999, 0)
|
|
||||||
doc.getArray().insert(0, ['world'])
|
|
||||||
|
|
||||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
|
||||||
|
|
||||||
t.compare(docRestored.getArray().toArray(), [])
|
|
||||||
t.compare(doc.getArray().toArray(), ['world'])
|
|
||||||
|
|
||||||
// now this snapshot reflects the latest state. It should still work.
|
|
||||||
const snap2 = Y.snapshot(doc)
|
|
||||||
const docRestored2 = Y.createDocFromSnapshot(doc, snap2)
|
|
||||||
t.compare(docRestored2.getArray().toArray(), ['world'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testRestoreSnapshotWithSubType = _tc => {
|
|
||||||
const doc = new Y.Doc({ gc: false })
|
|
||||||
doc.getArray('array').insert(0, [new Y.Map()])
|
|
||||||
const subMap = doc.getArray('array').get(0)
|
|
||||||
subMap.set('key1', 'value1')
|
|
||||||
|
|
||||||
const snap = Y.snapshot(doc)
|
|
||||||
subMap.set('key2', 'value2')
|
|
||||||
|
|
||||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
|
||||||
|
|
||||||
t.compare(docRestored.getArray('array').toJSON(), [{
|
|
||||||
key1: 'value1'
|
|
||||||
}])
|
|
||||||
t.compare(doc.getArray('array').toJSON(), [{
|
|
||||||
key1: 'value1',
|
|
||||||
key2: 'value2'
|
|
||||||
}])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testRestoreDeletedItem1 = _tc => {
|
|
||||||
const doc = new Y.Doc({ gc: false })
|
|
||||||
doc.getArray('array').insert(0, ['item1', 'item2'])
|
|
||||||
|
|
||||||
const snap = Y.snapshot(doc)
|
|
||||||
doc.getArray('array').delete(0)
|
|
||||||
|
|
||||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
|
||||||
|
|
||||||
t.compare(docRestored.getArray('array').toArray(), ['item1', 'item2'])
|
|
||||||
t.compare(doc.getArray('array').toArray(), ['item2'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testRestoreLeftItem = _tc => {
|
|
||||||
const doc = new Y.Doc({ gc: false })
|
|
||||||
doc.getArray('array').insert(0, ['item1'])
|
|
||||||
doc.getMap('map').set('test', 1)
|
|
||||||
doc.getArray('array').insert(0, ['item0'])
|
|
||||||
|
|
||||||
const snap = Y.snapshot(doc)
|
|
||||||
doc.getArray('array').delete(1)
|
|
||||||
|
|
||||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
|
||||||
|
|
||||||
t.compare(docRestored.getArray('array').toArray(), ['item0', 'item1'])
|
|
||||||
t.compare(doc.getArray('array').toArray(), ['item0'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testDeletedItemsBase = _tc => {
|
|
||||||
const doc = new Y.Doc({ gc: false })
|
|
||||||
doc.getArray('array').insert(0, ['item1'])
|
|
||||||
doc.getArray('array').delete(0)
|
|
||||||
const snap = Y.snapshot(doc)
|
|
||||||
doc.getArray('array').insert(0, ['item0'])
|
|
||||||
|
|
||||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
|
||||||
|
|
||||||
t.compare(docRestored.getArray('array').toArray(), [])
|
|
||||||
t.compare(doc.getArray('array').toArray(), ['item0'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testDeletedItems2 = _tc => {
|
|
||||||
const doc = new Y.Doc({ gc: false })
|
|
||||||
doc.getArray('array').insert(0, ['item1', 'item2', 'item3'])
|
|
||||||
doc.getArray('array').delete(1)
|
|
||||||
const snap = Y.snapshot(doc)
|
|
||||||
doc.getArray('array').insert(0, ['item0'])
|
|
||||||
|
|
||||||
const docRestored = Y.createDocFromSnapshot(doc, snap)
|
|
||||||
|
|
||||||
t.compare(docRestored.getArray('array').toArray(), ['item1', 'item3'])
|
|
||||||
t.compare(doc.getArray('array').toArray(), ['item0', 'item1', 'item3'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testDependentChanges = tc => {
|
|
||||||
const { array0, array1, testConnector } = init(tc, { users: 2 })
|
|
||||||
|
|
||||||
if (!array0.doc) {
|
|
||||||
throw new Error('no document 0')
|
|
||||||
}
|
|
||||||
if (!array1.doc) {
|
|
||||||
throw new Error('no document 1')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @type {Y.Doc}
|
|
||||||
*/
|
|
||||||
const doc0 = array0.doc
|
|
||||||
/**
|
|
||||||
* @type {Y.Doc}
|
|
||||||
*/
|
|
||||||
const doc1 = array1.doc
|
|
||||||
|
|
||||||
doc0.gc = false
|
|
||||||
doc1.gc = false
|
|
||||||
|
|
||||||
array0.insert(0, ['user1item1'])
|
|
||||||
testConnector.syncAll()
|
|
||||||
array1.insert(1, ['user2item1'])
|
|
||||||
testConnector.syncAll()
|
|
||||||
|
|
||||||
const snap = Y.snapshot(array0.doc)
|
|
||||||
|
|
||||||
array0.insert(2, ['user1item2'])
|
|
||||||
testConnector.syncAll()
|
|
||||||
array1.insert(3, ['user2item2'])
|
|
||||||
testConnector.syncAll()
|
|
||||||
|
|
||||||
const docRestored0 = Y.createDocFromSnapshot(array0.doc, snap)
|
|
||||||
t.compare(docRestored0.getArray('array').toArray(), ['user1item1', 'user2item1'])
|
|
||||||
|
|
||||||
const docRestored1 = Y.createDocFromSnapshot(array1.doc, snap)
|
|
||||||
t.compare(docRestored1.getArray('array').toArray(), ['user1item1', 'user2item1'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testContainsUpdate = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
/**
|
|
||||||
* @type {Array<Uint8Array>}
|
|
||||||
*/
|
|
||||||
const updates = []
|
|
||||||
ydoc.on('update', update => {
|
|
||||||
updates.push(update)
|
|
||||||
})
|
|
||||||
const yarr = ydoc.getArray()
|
|
||||||
const snapshot1 = Y.snapshot(ydoc)
|
|
||||||
yarr.insert(0, [1])
|
|
||||||
const snapshot2 = Y.snapshot(ydoc)
|
|
||||||
yarr.delete(0, 1)
|
|
||||||
const snapshotFinal = Y.snapshot(ydoc)
|
|
||||||
t.assert(!Y.snapshotContainsUpdate(snapshot1, updates[0]))
|
|
||||||
t.assert(!Y.snapshotContainsUpdate(snapshot2, updates[1]))
|
|
||||||
t.assert(Y.snapshotContainsUpdate(snapshot2, updates[0]))
|
|
||||||
t.assert(Y.snapshotContainsUpdate(snapshotFinal, updates[0]))
|
|
||||||
t.assert(Y.snapshotContainsUpdate(snapshotFinal, updates[1]))
|
|
||||||
}
|
|
||||||
@@ -1,17 +1,19 @@
|
|||||||
import * as t from 'lib0/testing'
|
|
||||||
import * as prng from 'lib0/prng'
|
|
||||||
import * as encoding from 'lib0/encoding'
|
|
||||||
import * as decoding from 'lib0/decoding'
|
|
||||||
import * as syncProtocol from 'y-protocols/sync'
|
|
||||||
import * as object from 'lib0/object'
|
|
||||||
import * as map from 'lib0/map'
|
|
||||||
import * as Y from '../src/index.js'
|
import * as Y from '../src/index.js'
|
||||||
export * from '../src/index.js'
|
|
||||||
|
|
||||||
if (typeof window !== 'undefined') {
|
import {
|
||||||
// @ts-ignore
|
createDeleteSetFromStructStore,
|
||||||
window.Y = Y // eslint-disable-line
|
getStateVector,
|
||||||
}
|
Item,
|
||||||
|
DeleteItem, DeleteSet, StructStore, Doc // eslint-disable-line
|
||||||
|
} from '../src/internals.js'
|
||||||
|
|
||||||
|
import * as t from 'lib0/testing.js'
|
||||||
|
import * as prng from 'lib0/prng.js'
|
||||||
|
import * as encoding from 'lib0/encoding.js'
|
||||||
|
import * as decoding from 'lib0/decoding.js'
|
||||||
|
import * as syncProtocol from 'y-protocols/sync.js'
|
||||||
|
import * as object from 'lib0/object.js'
|
||||||
|
export * from '../src/internals.js'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {TestYInstance} y // publish message created by `y` to all other online clients
|
* @param {TestYInstance} y // publish message created by `y` to all other online clients
|
||||||
@@ -27,40 +29,7 @@ const broadcastMessage = (y, m) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export let useV2 = false
|
export class TestYInstance extends Doc {
|
||||||
|
|
||||||
export const encV1 = {
|
|
||||||
encodeStateAsUpdate: Y.encodeStateAsUpdate,
|
|
||||||
mergeUpdates: Y.mergeUpdates,
|
|
||||||
applyUpdate: Y.applyUpdate,
|
|
||||||
logUpdate: Y.logUpdate,
|
|
||||||
updateEventName: /** @type {'update'} */ ('update'),
|
|
||||||
diffUpdate: Y.diffUpdate
|
|
||||||
}
|
|
||||||
|
|
||||||
export const encV2 = {
|
|
||||||
encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
|
|
||||||
mergeUpdates: Y.mergeUpdatesV2,
|
|
||||||
applyUpdate: Y.applyUpdateV2,
|
|
||||||
logUpdate: Y.logUpdateV2,
|
|
||||||
updateEventName: /** @type {'updateV2'} */ ('updateV2'),
|
|
||||||
diffUpdate: Y.diffUpdateV2
|
|
||||||
}
|
|
||||||
|
|
||||||
export let enc = encV1
|
|
||||||
|
|
||||||
const useV1Encoding = () => {
|
|
||||||
useV2 = false
|
|
||||||
enc = encV1
|
|
||||||
}
|
|
||||||
|
|
||||||
const useV2Encoding = () => {
|
|
||||||
console.error('sync protocol doesnt support v2 protocol yet, fallback to v1 encoding') // @Todo
|
|
||||||
useV2 = false
|
|
||||||
enc = encV1
|
|
||||||
}
|
|
||||||
|
|
||||||
export class TestYInstance extends Y.Doc {
|
|
||||||
/**
|
/**
|
||||||
* @param {TestConnector} testConnector
|
* @param {TestConnector} testConnector
|
||||||
* @param {number} clientID
|
* @param {number} clientID
|
||||||
@@ -77,20 +46,13 @@ export class TestYInstance extends Y.Doc {
|
|||||||
*/
|
*/
|
||||||
this.receiving = new Map()
|
this.receiving = new Map()
|
||||||
testConnector.allConns.add(this)
|
testConnector.allConns.add(this)
|
||||||
/**
|
|
||||||
* The list of received updates.
|
|
||||||
* We are going to merge them later using Y.mergeUpdates and check if the resulting document is correct.
|
|
||||||
* @type {Array<Uint8Array>}
|
|
||||||
*/
|
|
||||||
this.updates = []
|
|
||||||
// set up observe on local model
|
// set up observe on local model
|
||||||
this.on(enc.updateEventName, /** @param {Uint8Array} update @param {any} origin */ (update, origin) => {
|
this.on('update', /** @param {Uint8Array} update @param {any} origin */ (update, origin) => {
|
||||||
if (origin !== testConnector) {
|
if (origin !== testConnector) {
|
||||||
const encoder = encoding.createEncoder()
|
const encoder = encoding.createEncoder()
|
||||||
syncProtocol.writeUpdate(encoder, update)
|
syncProtocol.writeUpdate(encoder, update)
|
||||||
broadcastMessage(this, encoding.toUint8Array(encoder))
|
broadcastMessage(this, encoding.toUint8Array(encoder))
|
||||||
}
|
}
|
||||||
this.updates.push(update)
|
|
||||||
})
|
})
|
||||||
this.connect()
|
this.connect()
|
||||||
}
|
}
|
||||||
@@ -133,7 +95,12 @@ export class TestYInstance extends Y.Doc {
|
|||||||
* @param {TestYInstance} remoteClient
|
* @param {TestYInstance} remoteClient
|
||||||
*/
|
*/
|
||||||
_receive (message, remoteClient) {
|
_receive (message, remoteClient) {
|
||||||
map.setIfUndefined(this.receiving, remoteClient, () => /** @type {Array<Uint8Array>} */ ([])).push(message)
|
let messages = this.receiving.get(remoteClient)
|
||||||
|
if (messages === undefined) {
|
||||||
|
messages = []
|
||||||
|
this.receiving.set(remoteClient, messages)
|
||||||
|
}
|
||||||
|
messages.push(message)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -273,27 +240,19 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => {
|
|||||||
users: []
|
users: []
|
||||||
}
|
}
|
||||||
const gen = tc.prng
|
const gen = tc.prng
|
||||||
// choose an encoding approach at random
|
|
||||||
if (prng.bool(gen)) {
|
|
||||||
useV2Encoding()
|
|
||||||
} else {
|
|
||||||
useV1Encoding()
|
|
||||||
}
|
|
||||||
|
|
||||||
const testConnector = new TestConnector(gen)
|
const testConnector = new TestConnector(gen)
|
||||||
result.testConnector = testConnector
|
result.testConnector = testConnector
|
||||||
for (let i = 0; i < users; i++) {
|
for (let i = 0; i < users; i++) {
|
||||||
const y = testConnector.createY(i)
|
const y = testConnector.createY(i)
|
||||||
y.clientID = i
|
y.clientID = i
|
||||||
result.users.push(y)
|
result.users.push(y)
|
||||||
result['array' + i] = y.getArray('array')
|
result['array' + i] = y.get('array', Y.Array)
|
||||||
result['map' + i] = y.getMap('map')
|
result['map' + i] = y.get('map', Y.Map)
|
||||||
result['xml' + i] = y.get('xml', Y.XmlElement)
|
result['xml' + i] = y.get('xml', Y.XmlElement)
|
||||||
result['text' + i] = y.getText('text')
|
result['text' + i] = y.get('text', Y.Text)
|
||||||
}
|
}
|
||||||
testConnector.syncAll()
|
testConnector.syncAll()
|
||||||
result.testObjects = result.users.map(initTestObject || (() => null))
|
result.testObjects = result.users.map(initTestObject || (() => null))
|
||||||
useV1Encoding()
|
|
||||||
return /** @type {any} */ (result)
|
return /** @type {any} */ (result)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -308,22 +267,15 @@ export const init = (tc, { users = 5 } = {}, initTestObject) => {
|
|||||||
*/
|
*/
|
||||||
export const compare = users => {
|
export const compare = users => {
|
||||||
users.forEach(u => u.connect())
|
users.forEach(u => u.connect())
|
||||||
while (users[0].tc.flushAllMessages()) {} // eslint-disable-line
|
while (users[0].tc.flushAllMessages()) {}
|
||||||
// For each document, merge all received document updates with Y.mergeUpdates and create a new document which will be added to the list of "users"
|
|
||||||
// This ensures that mergeUpdates works correctly
|
|
||||||
const mergedDocs = users.map(user => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
enc.applyUpdate(ydoc, enc.mergeUpdates(user.updates))
|
|
||||||
return ydoc
|
|
||||||
})
|
|
||||||
users.push(.../** @type {any} */(mergedDocs))
|
|
||||||
const userArrayValues = users.map(u => u.getArray('array').toJSON())
|
const userArrayValues = users.map(u => u.getArray('array').toJSON())
|
||||||
const userMapValues = users.map(u => u.getMap('map').toJSON())
|
const userMapValues = users.map(u => u.getMap('map').toJSON())
|
||||||
const userXmlValues = users.map(u => u.get('xml', Y.XmlElement).toString())
|
const userXmlValues = users.map(u => u.get('xml', Y.XmlElement).toString())
|
||||||
const userTextValues = users.map(u => u.getText('text').toDelta())
|
const userTextValues = users.map(u => u.getText('text').toDelta())
|
||||||
for (const u of users) {
|
for (const u of users) {
|
||||||
t.assert(u.store.pendingDs === null)
|
t.assert(u.store.pendingDeleteReaders.length === 0)
|
||||||
t.assert(u.store.pendingStructs === null)
|
t.assert(u.store.pendingStack.length === 0)
|
||||||
|
t.assert(u.store.pendingClientsStructRefs.size === 0)
|
||||||
}
|
}
|
||||||
// Test Array iterator
|
// Test Array iterator
|
||||||
t.compare(users[0].getArray('array').toArray(), Array.from(users[0].getArray('array')))
|
t.compare(users[0].getArray('array').toArray(), Array.from(users[0].getArray('array')))
|
||||||
@@ -346,32 +298,24 @@ export const compare = users => {
|
|||||||
t.compare(userMapValues[i], userMapValues[i + 1])
|
t.compare(userMapValues[i], userMapValues[i + 1])
|
||||||
t.compare(userXmlValues[i], userXmlValues[i + 1])
|
t.compare(userXmlValues[i], userXmlValues[i + 1])
|
||||||
t.compare(userTextValues[i].map(/** @param {any} a */ a => typeof a.insert === 'string' ? a.insert : ' ').join('').length, users[i].getText('text').length)
|
t.compare(userTextValues[i].map(/** @param {any} a */ a => typeof a.insert === 'string' ? a.insert : ' ').join('').length, users[i].getText('text').length)
|
||||||
t.compare(userTextValues[i], userTextValues[i + 1], '', (_constructor, a, b) => {
|
t.compare(userTextValues[i], userTextValues[i + 1])
|
||||||
if (a instanceof Y.AbstractType) {
|
t.compare(getStateVector(users[i].store), getStateVector(users[i + 1].store))
|
||||||
t.compare(a.toJSON(), b.toJSON())
|
compareDS(createDeleteSetFromStructStore(users[i].store), createDeleteSetFromStructStore(users[i + 1].store))
|
||||||
} else if (a !== b) {
|
|
||||||
t.fail('Deltas dont match')
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
t.compare(Y.encodeStateVector(users[i]), Y.encodeStateVector(users[i + 1]))
|
|
||||||
Y.equalDeleteSets(Y.createDeleteSetFromStructStore(users[i].store), Y.createDeleteSetFromStructStore(users[i + 1].store))
|
|
||||||
compareStructStores(users[i].store, users[i + 1].store)
|
compareStructStores(users[i].store, users[i + 1].store)
|
||||||
t.compare(Y.encodeSnapshot(Y.snapshot(users[i])), Y.encodeSnapshot(Y.snapshot(users[i + 1])))
|
|
||||||
}
|
}
|
||||||
users.map(u => u.destroy())
|
users.map(u => u.destroy())
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {Y.Item?} a
|
* @param {Item?} a
|
||||||
* @param {Y.Item?} b
|
* @param {Item?} b
|
||||||
* @return {boolean}
|
* @return {boolean}
|
||||||
*/
|
*/
|
||||||
export const compareItemIDs = (a, b) => a === b || (a !== null && b != null && Y.compareIDs(a.id, b.id))
|
export const compareItemIDs = (a, b) => a === b || (a !== null && b != null && Y.compareIDs(a.id, b.id))
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {import('../src/internals.js').StructStore} ss1
|
* @param {StructStore} ss1
|
||||||
* @param {import('../src/internals.js').StructStore} ss2
|
* @param {StructStore} ss2
|
||||||
*/
|
*/
|
||||||
export const compareStructStores = (ss1, ss2) => {
|
export const compareStructStores = (ss1, ss2) => {
|
||||||
t.assert(ss1.clients.size === ss2.clients.size)
|
t.assert(ss1.clients.size === ss2.clients.size)
|
||||||
@@ -391,9 +335,9 @@ export const compareStructStores = (ss1, ss2) => {
|
|||||||
) {
|
) {
|
||||||
t.fail('Structs dont match')
|
t.fail('Structs dont match')
|
||||||
}
|
}
|
||||||
if (s1 instanceof Y.Item) {
|
if (s1 instanceof Item) {
|
||||||
if (
|
if (
|
||||||
!(s2 instanceof Y.Item) ||
|
!(s2 instanceof Item) ||
|
||||||
!((s1.left === null && s2.left === null) || (s1.left !== null && s2.left !== null && Y.compareIDs(s1.left.lastId, s2.left.lastId))) ||
|
!((s1.left === null && s2.left === null) || (s1.left !== null && s2.left !== null && Y.compareIDs(s1.left.lastId, s2.left.lastId))) ||
|
||||||
!compareItemIDs(s1.right, s2.right) ||
|
!compareItemIDs(s1.right, s2.right) ||
|
||||||
!Y.compareIDs(s1.origin, s2.origin) ||
|
!Y.compareIDs(s1.origin, s2.origin) ||
|
||||||
@@ -412,6 +356,25 @@ export const compareStructStores = (ss1, ss2) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {DeleteSet} ds1
|
||||||
|
* @param {DeleteSet} ds2
|
||||||
|
*/
|
||||||
|
export const compareDS = (ds1, ds2) => {
|
||||||
|
t.assert(ds1.clients.size === ds2.clients.size)
|
||||||
|
for (const [client, deleteItems1] of ds1.clients) {
|
||||||
|
const deleteItems2 = /** @type {Array<DeleteItem>} */ (ds2.clients.get(client))
|
||||||
|
t.assert(deleteItems2 !== undefined && deleteItems1.length === deleteItems2.length)
|
||||||
|
for (let i = 0; i < deleteItems1.length; i++) {
|
||||||
|
const di1 = deleteItems1[i]
|
||||||
|
const di2 = deleteItems2[i]
|
||||||
|
if (di1.clock !== di2.clock || di1.len !== di2.len) {
|
||||||
|
t.fail('DeleteSets dont match')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @template T
|
* @template T
|
||||||
* @callback InitTestObjectCallback
|
* @callback InitTestObjectCallback
|
||||||
@@ -431,21 +394,21 @@ export const applyRandomTests = (tc, mods, iterations, initTestObject) => {
|
|||||||
const result = init(tc, { users: 5 }, initTestObject)
|
const result = init(tc, { users: 5 }, initTestObject)
|
||||||
const { testConnector, users } = result
|
const { testConnector, users } = result
|
||||||
for (let i = 0; i < iterations; i++) {
|
for (let i = 0; i < iterations; i++) {
|
||||||
if (prng.int32(gen, 0, 100) <= 2) {
|
if (prng.int31(gen, 0, 100) <= 2) {
|
||||||
// 2% chance to disconnect/reconnect a random user
|
// 2% chance to disconnect/reconnect a random user
|
||||||
if (prng.bool(gen)) {
|
if (prng.bool(gen)) {
|
||||||
testConnector.disconnectRandom()
|
testConnector.disconnectRandom()
|
||||||
} else {
|
} else {
|
||||||
testConnector.reconnectRandom()
|
testConnector.reconnectRandom()
|
||||||
}
|
}
|
||||||
} else if (prng.int32(gen, 0, 100) <= 1) {
|
} else if (prng.int31(gen, 0, 100) <= 1) {
|
||||||
// 1% chance to flush all
|
// 1% chance to flush all
|
||||||
testConnector.flushAllMessages()
|
testConnector.flushAllMessages()
|
||||||
} else if (prng.int32(gen, 0, 100) <= 50) {
|
} else if (prng.int31(gen, 0, 100) <= 50) {
|
||||||
// 50% chance to flush a random message
|
// 50% chance to flush a random message
|
||||||
testConnector.flushRandomMessage()
|
testConnector.flushRandomMessage()
|
||||||
}
|
}
|
||||||
const user = prng.int32(gen, 0, users.length - 1)
|
const user = prng.int31(gen, 0, users.length - 1)
|
||||||
const test = prng.oneOf(gen, mods)
|
const test = prng.oneOf(gen, mods)
|
||||||
test(users[user], gen, result.testObjects[user])
|
test(users[user], gen, result.testObjects[user])
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,67 +1,18 @@
|
|||||||
import { init } from './testHelper.js' // eslint-disable-line
|
import { init, compare, applyRandomTests, Doc } from './testHelper.js' // eslint-disable-line
|
||||||
|
|
||||||
|
import {
|
||||||
|
UndoManager
|
||||||
|
} from '../src/internals.js'
|
||||||
|
|
||||||
import * as Y from '../src/index.js'
|
import * as Y from '../src/index.js'
|
||||||
import * as t from 'lib0/testing'
|
import * as t from 'lib0/testing.js'
|
||||||
|
|
||||||
export const testInconsistentFormat = () => {
|
|
||||||
/**
|
|
||||||
* @param {Y.Doc} ydoc
|
|
||||||
*/
|
|
||||||
const testYjsMerge = ydoc => {
|
|
||||||
const content = /** @type {Y.XmlText} */ (ydoc.get('text', Y.XmlText))
|
|
||||||
content.format(0, 6, { bold: null })
|
|
||||||
content.format(6, 4, { type: 'text' })
|
|
||||||
t.compare(content.toDelta(), [
|
|
||||||
{
|
|
||||||
attributes: { type: 'text' },
|
|
||||||
insert: 'Merge Test'
|
|
||||||
},
|
|
||||||
{
|
|
||||||
attributes: { type: 'text', italic: true },
|
|
||||||
insert: ' After'
|
|
||||||
}
|
|
||||||
])
|
|
||||||
}
|
|
||||||
const initializeYDoc = () => {
|
|
||||||
const yDoc = new Y.Doc({ gc: false })
|
|
||||||
|
|
||||||
const content = /** @type {Y.XmlText} */ (yDoc.get('text', Y.XmlText))
|
|
||||||
content.insert(0, ' After', { type: 'text', italic: true })
|
|
||||||
content.insert(0, 'Test', { type: 'text' })
|
|
||||||
content.insert(0, 'Merge ', { type: 'text', bold: true })
|
|
||||||
return yDoc
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const yDoc = initializeYDoc()
|
|
||||||
testYjsMerge(yDoc)
|
|
||||||
}
|
|
||||||
{
|
|
||||||
const initialYDoc = initializeYDoc()
|
|
||||||
const yDoc = new Y.Doc({ gc: false })
|
|
||||||
Y.applyUpdate(yDoc, Y.encodeStateAsUpdate(initialYDoc))
|
|
||||||
testYjsMerge(yDoc)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testInfiniteCaptureTimeout = tc => {
|
|
||||||
const { array0 } = init(tc, { users: 3 })
|
|
||||||
const undoManager = new Y.UndoManager(array0, { captureTimeout: Number.MAX_VALUE })
|
|
||||||
array0.push([1, 2, 3])
|
|
||||||
undoManager.stopCapturing()
|
|
||||||
array0.push([4, 5, 6])
|
|
||||||
undoManager.undo()
|
|
||||||
t.compare(array0.toArray(), [1, 2, 3])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testUndoText = tc => {
|
export const testUndoText = tc => {
|
||||||
const { testConnector, text0, text1 } = init(tc, { users: 3 })
|
const { testConnector, text0, text1 } = init(tc, { users: 3 })
|
||||||
const undoManager = new Y.UndoManager(text0)
|
const undoManager = new UndoManager(text0)
|
||||||
|
|
||||||
// items that are added & deleted in the same transaction won't be undo
|
// items that are added & deleted in the same transaction won't be undo
|
||||||
text0.insert(0, 'test')
|
text0.insert(0, 'test')
|
||||||
@@ -102,62 +53,13 @@ export const testUndoText = tc => {
|
|||||||
t.compare(text0.toDelta(), [{ insert: 'b' }, { insert: 'cxy', attributes: { bold: true } }, { insert: 'z' }])
|
t.compare(text0.toDelta(), [{ insert: 'b' }, { insert: 'cxy', attributes: { bold: true } }, { insert: 'z' }])
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
 * Test case to fix #241
 * @param {t.TestCase} _tc
 */
export const testEmptyTypeScope = _tc => {
  // The manager starts with an empty scope; the type is added afterwards.
  const ydoc = new Y.Doc()
  const undoManager = new Y.UndoManager([], { doc: ydoc })
  const yarray = ydoc.getArray()
  undoManager.addToScope(yarray)
  yarray.insert(0, [1])
  undoManager.undo()
  t.assert(yarray.length === 0)
}
|
|
||||||
|
|
||||||
/**
 * Test case to fix #241
 * @param {t.TestCase} _tc
 */
export const testGlobalScope = _tc => {
  // Scoping the UndoManager to the whole document tracks every shared type.
  const ydoc = new Y.Doc()
  const undoManager = new Y.UndoManager(ydoc)
  const items = ydoc.getArray()
  items.insert(0, [1])
  undoManager.undo()
  t.assert(items.length === 0)
}
|
|
||||||
|
|
||||||
/**
 * Test case to fix #241
 * @param {t.TestCase} _tc
 */
export const testDoubleUndo = _tc => {
  const doc = new Y.Doc()
  const ytext = doc.getText()
  ytext.insert(0, '1221')

  const undoManager = new Y.UndoManager(ytext)

  ytext.insert(2, '3')
  ytext.insert(3, '3')

  undoManager.undo()
  undoManager.undo()

  // Inserting again after the double undo must not corrupt the text.
  ytext.insert(2, '3')

  t.compareStrings(ytext.toString(), '12321')
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testUndoMap = tc => {
|
export const testUndoMap = tc => {
|
||||||
const { testConnector, map0, map1 } = init(tc, { users: 2 })
|
const { testConnector, map0, map1 } = init(tc, { users: 2 })
|
||||||
map0.set('a', 0)
|
map0.set('a', 0)
|
||||||
const undoManager = new Y.UndoManager(map0)
|
const undoManager = new UndoManager(map0)
|
||||||
map0.set('a', 1)
|
map0.set('a', 1)
|
||||||
undoManager.undo()
|
undoManager.undo()
|
||||||
t.assert(map0.get('a') === 0)
|
t.assert(map0.get('a') === 0)
|
||||||
@@ -196,7 +98,7 @@ export const testUndoMap = tc => {
|
|||||||
*/
|
*/
|
||||||
export const testUndoArray = tc => {
|
export const testUndoArray = tc => {
|
||||||
const { testConnector, array0, array1 } = init(tc, { users: 3 })
|
const { testConnector, array0, array1 } = init(tc, { users: 3 })
|
||||||
const undoManager = new Y.UndoManager(array0)
|
const undoManager = new UndoManager(array0)
|
||||||
array0.insert(0, [1, 2, 3])
|
array0.insert(0, [1, 2, 3])
|
||||||
array1.insert(0, [4, 5, 6])
|
array1.insert(0, [4, 5, 6])
|
||||||
testConnector.syncAll()
|
testConnector.syncAll()
|
||||||
@@ -247,7 +149,7 @@ export const testUndoArray = tc => {
|
|||||||
*/
|
*/
|
||||||
export const testUndoXml = tc => {
|
export const testUndoXml = tc => {
|
||||||
const { xml0 } = init(tc, { users: 3 })
|
const { xml0 } = init(tc, { users: 3 })
|
||||||
const undoManager = new Y.UndoManager(xml0)
|
const undoManager = new UndoManager(xml0)
|
||||||
const child = new Y.XmlElement('p')
|
const child = new Y.XmlElement('p')
|
||||||
xml0.insert(0, [child])
|
xml0.insert(0, [child])
|
||||||
const textchild = new Y.XmlText('content')
|
const textchild = new Y.XmlText('content')
|
||||||
@@ -272,17 +174,15 @@ export const testUndoXml = tc => {
|
|||||||
*/
|
*/
|
||||||
export const testUndoEvents = tc => {
|
export const testUndoEvents = tc => {
|
||||||
const { text0 } = init(tc, { users: 3 })
|
const { text0 } = init(tc, { users: 3 })
|
||||||
const undoManager = new Y.UndoManager(text0)
|
const undoManager = new UndoManager(text0)
|
||||||
let counter = 0
|
let counter = 0
|
||||||
let receivedMetadata = -1
|
let receivedMetadata = -1
|
||||||
undoManager.on('stack-item-added', /** @param {any} event */ event => {
|
undoManager.on('stack-item-added', /** @param {any} event */ event => {
|
||||||
t.assert(event.type != null)
|
t.assert(event.type != null)
|
||||||
t.assert(event.changedParentTypes != null && event.changedParentTypes.has(text0))
|
|
||||||
event.stackItem.meta.set('test', counter++)
|
event.stackItem.meta.set('test', counter++)
|
||||||
})
|
})
|
||||||
undoManager.on('stack-item-popped', /** @param {any} event */ event => {
|
undoManager.on('stack-item-popped', /** @param {any} event */ event => {
|
||||||
t.assert(event.type != null)
|
t.assert(event.type != null)
|
||||||
t.assert(event.changedParentTypes != null && event.changedParentTypes.has(text0))
|
|
||||||
receivedMetadata = event.stackItem.meta.get('test')
|
receivedMetadata = event.stackItem.meta.get('test')
|
||||||
})
|
})
|
||||||
text0.insert(0, 'abc')
|
text0.insert(0, 'abc')
|
||||||
@@ -298,7 +198,7 @@ export const testUndoEvents = tc => {
|
|||||||
export const testTrackClass = tc => {
|
export const testTrackClass = tc => {
|
||||||
const { users, text0 } = init(tc, { users: 3 })
|
const { users, text0 } = init(tc, { users: 3 })
|
||||||
// only track origins that are numbers
|
// only track origins that are numbers
|
||||||
const undoManager = new Y.UndoManager(text0, { trackedOrigins: new Set([Number]) })
|
const undoManager = new UndoManager(text0, { trackedOrigins: new Set([Number]) })
|
||||||
users[0].transact(() => {
|
users[0].transact(() => {
|
||||||
text0.insert(0, 'abc')
|
text0.insert(0, 'abc')
|
||||||
}, 42)
|
}, 42)
|
||||||
@@ -316,8 +216,8 @@ export const testTypeScope = tc => {
|
|||||||
const text0 = new Y.Text()
|
const text0 = new Y.Text()
|
||||||
const text1 = new Y.Text()
|
const text1 = new Y.Text()
|
||||||
array0.insert(0, [text0, text1])
|
array0.insert(0, [text0, text1])
|
||||||
const undoManager = new Y.UndoManager(text0)
|
const undoManager = new UndoManager(text0)
|
||||||
const undoManagerBoth = new Y.UndoManager([text0, text1])
|
const undoManagerBoth = new UndoManager([text0, text1])
|
||||||
text1.insert(0, 'abc')
|
text1.insert(0, 'abc')
|
||||||
t.assert(undoManager.undoStack.length === 0)
|
t.assert(undoManager.undoStack.length === 0)
|
||||||
t.assert(undoManagerBoth.undoStack.length === 1)
|
t.assert(undoManagerBoth.undoStack.length === 1)
|
||||||
@@ -328,35 +228,15 @@ export const testTypeScope = tc => {
|
|||||||
t.assert(text1.toString() === '')
|
t.assert(text1.toString() === '')
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
 * Undoing changes inside an embedded Y.Text: the first undo reverts the
 * nested edit, the second removes the embed itself.
 *
 * @param {t.TestCase} tc
 */
export const testUndoInEmbed = tc => {
  const { text0 } = init(tc, { users: 3 })
  const undoManager = new Y.UndoManager(text0)
  const embedded = new Y.Text('initial text')
  undoManager.stopCapturing()
  text0.insertEmbed(0, embedded, { bold: true })
  t.assert(embedded.toString() === 'initial text')
  undoManager.stopCapturing()
  // Rewrite the embedded text in a separate capture group.
  embedded.delete(0, embedded.length)
  embedded.insert(0, 'other text')
  t.assert(embedded.toString() === 'other text')
  undoManager.undo()
  t.assert(embedded.toString() === 'initial text')
  undoManager.undo()
  t.assert(text0.length === 0)
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testUndoDeleteFilter = tc => {
|
export const testUndoDeleteFilter = tc => {
|
||||||
/**
|
/**
|
||||||
* @type {Y.Array<any>}
|
* @type {Array<Y.Map<any>>}
|
||||||
*/
|
*/
|
||||||
const array0 = /** @type {any} */ (init(tc, { users: 3 }).array0)
|
const array0 = /** @type {any} */ (init(tc, { users: 3 }).array0)
|
||||||
const undoManager = new Y.UndoManager(array0, { deleteFilter: item => !(item instanceof Y.Item) || (item.content instanceof Y.ContentType && item.content.type._map.size === 0) })
|
const undoManager = new UndoManager(array0, { deleteFilter: item => !(item instanceof Y.Item) || (item.content instanceof Y.ContentType && item.content.type._map.size === 0) })
|
||||||
const map0 = new Y.Map()
|
const map0 = new Y.Map()
|
||||||
map0.set('hi', 1)
|
map0.set('hi', 1)
|
||||||
const map1 = new Y.Map()
|
const map1 = new Y.Map()
|
||||||
@@ -366,395 +246,3 @@ export const testUndoDeleteFilter = tc => {
|
|||||||
array0.get(0)
|
array0.get(0)
|
||||||
t.assert(Array.from(array0.get(0).keys()).length === 1)
|
t.assert(Array.from(array0.get(0).keys()).length === 1)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
 * This issue has been reported in https://discuss.yjs.dev/t/undomanager-with-external-updates/454/6
 * @param {t.TestCase} _tc
 */
export const testUndoUntilChangePerformed = _tc => {
  // Two docs kept in sync in both directions; each UndoManager only tracks
  // transactions originating from its own client id.
  const docA = new Y.Doc()
  const docB = new Y.Doc()
  docA.on('update', update => Y.applyUpdate(docB, update))
  docB.on('update', update => Y.applyUpdate(docA, update))

  const arrA = docA.getArray('array')
  const arrB = docB.getArray('array')
  const mapHello = new Y.Map()
  mapHello.set('hello', 'world')
  arrA.push([mapHello])
  const mapKey = new Y.Map()
  mapKey.set('key', 'value')
  arrA.push([mapKey])

  const undoA = new Y.UndoManager([arrA], { trackedOrigins: new Set([docA.clientID]) })
  const undoB = new Y.UndoManager([docB.get('array')], { trackedOrigins: new Set([docB.clientID]) })

  Y.transact(docA, () => mapKey.set('key', 'value modified'), docA.clientID)
  undoA.stopCapturing()
  Y.transact(docA, () => mapHello.set('hello', 'world modified'), docA.clientID)
  Y.transact(docB, () => arrB.delete(0), docB.clientID)
  undoB.undo()
  undoA.undo()
  // Undo must keep walking the stack until a change is actually performed.
  t.compareStrings(mapKey.get('key'), 'value')
}
|
|
||||||
|
|
||||||
/**
 * This issue has been reported in https://github.com/yjs/yjs/issues/317
 * @param {t.TestCase} _tc
 */
export const testUndoNestedUndoIssue = _tc => {
  const doc = new Y.Doc({ gc: false })
  const design = doc.getMap()
  const undoManager = new Y.UndoManager(design, { captureTimeout: 0 })

  /**
   * @type {Y.Map<any>}
   */
  const text = new Y.Map()

  // Replaces the current block in a single transaction (its own undo item,
  // since captureTimeout is 0). The first call also attaches `text` to the
  // root map inside the same transaction.
  const setBlock = (str, attachRoot) => {
    const blocks = new Y.Array()
    const block = new Y.Map()
    doc.transact(() => {
      block.set('text', str)
      blocks.push([block])
      text.set('blocks', block)
      if (attachRoot) {
        design.set('text', text)
      }
    })
  }

  setBlock('Type Something', true)
  setBlock('Something', false)
  setBlock('Something Else', false)

  // Walk the whole undo stack down and back up again.
  t.compare(design.toJSON(), { text: { blocks: { text: 'Something Else' } } })
  undoManager.undo()
  t.compare(design.toJSON(), { text: { blocks: { text: 'Something' } } })
  undoManager.undo()
  t.compare(design.toJSON(), { text: { blocks: { text: 'Type Something' } } })
  undoManager.undo()
  t.compare(design.toJSON(), { })
  undoManager.redo()
  t.compare(design.toJSON(), { text: { blocks: { text: 'Type Something' } } })
  undoManager.redo()
  t.compare(design.toJSON(), { text: { blocks: { text: 'Something' } } })
  undoManager.redo()
  t.compare(design.toJSON(), { text: { blocks: { text: 'Something Else' } } })
}
|
|
||||||
|
|
||||||
/**
 * This issue has been reported in https://github.com/yjs/yjs/issues/355
 *
 * @param {t.TestCase} _tc
 */
export const testConsecutiveRedoBug = _tc => {
  const doc = new Y.Doc()
  const yRoot = doc.getMap()
  const undoMgr = new Y.UndoManager(yRoot)

  let yPoint = new Y.Map()
  yPoint.set('x', 0)
  yPoint.set('y', 0)
  yRoot.set('a', yPoint)
  undoMgr.stopCapturing()

  // Each move is closed with stopCapturing so it forms its own undo group.
  const moveTo = (x, y) => {
    yPoint.set('x', x)
    yPoint.set('y', y)
    undoMgr.stopCapturing()
  }
  moveTo(100, 100)
  moveTo(200, 200)
  moveTo(300, 300)

  t.compare(yPoint.toJSON(), { x: 300, y: 300 })

  undoMgr.undo() // x=200, y=200
  t.compare(yPoint.toJSON(), { x: 200, y: 200 })
  undoMgr.undo() // x=100, y=100
  t.compare(yPoint.toJSON(), { x: 100, y: 100 })
  undoMgr.undo() // x=0, y=0
  t.compare(yPoint.toJSON(), { x: 0, y: 0 })
  undoMgr.undo() // nil
  t.compare(yRoot.get('a'), undefined)

  undoMgr.redo() // x=0, y=0
  // The redo recreated the map; re-fetch the live instance.
  yPoint = yRoot.get('a')

  t.compare(yPoint.toJSON(), { x: 0, y: 0 })
  undoMgr.redo() // x=100, y=100
  t.compare(yPoint.toJSON(), { x: 100, y: 100 })
  undoMgr.redo() // x=200, y=200
  t.compare(yPoint.toJSON(), { x: 200, y: 200 })
  undoMgr.redo() // expected x=300, y=300, actually nil
  t.compare(yPoint.toJSON(), { x: 300, y: 300 })
}
|
|
||||||
|
|
||||||
/**
 * This issue has been reported in https://github.com/yjs/yjs/issues/304
 *
 * @param {t.TestCase} _tc
 */
export const testUndoXmlBug = _tc => {
  const origin = 'origin'
  const doc = new Y.Doc()
  const fragment = doc.getXmlFragment('t')
  const undoManager = new Y.UndoManager(fragment, {
    captureTimeout: 0,
    trackedOrigins: new Set([origin])
  })

  // create element
  doc.transact(() => {
    const node = new Y.XmlElement('test-node')
    node.setAttribute('a', '100')
    node.setAttribute('b', '0')
    fragment.insert(fragment.length, [node])
  }, origin)

  // change one attribute
  doc.transact(() => {
    fragment.get(0).setAttribute('a', '200')
  }, origin)

  // change both attributes
  doc.transact(() => {
    const node = fragment.get(0)
    node.setAttribute('a', '180')
    node.setAttribute('b', '50')
  }, origin)

  // Walk the full undo stack and back again; the final attribute state
  // must be restored exactly.
  for (let i = 0; i < 3; i++) {
    undoManager.undo()
  }
  for (let i = 0; i < 3; i++) {
    undoManager.redo()
  }
  t.compare(fragment.toString(), '<test-node a="180" b="50"></test-node>')
}
|
|
||||||
|
|
||||||
/**
 * This issue has been reported in https://github.com/yjs/yjs/issues/343
 *
 * @param {t.TestCase} _tc
 */
export const testUndoBlockBug = _tc => {
  const doc = new Y.Doc({ gc: false })
  const design = doc.getMap()

  const undoManager = new Y.UndoManager(design, { captureTimeout: 0 })

  const text = new Y.Map()

  // Replaces the current block in one transaction (one undo item each,
  // since captureTimeout is 0). The first call also attaches `text` to the
  // root map within the same transaction.
  const setBlock = (str, attachRoot) => {
    const blocks = new Y.Array()
    const block = new Y.Map()
    doc.transact(() => {
      block.set('text', str)
      blocks.push([block])
      text.set('blocks', block)
      if (attachRoot) {
        design.set('text', text)
      }
    })
  }

  setBlock('1', true)
  setBlock('2', false)
  setBlock('3', false)
  setBlock('4', false)

  // {"text":{"blocks":{"text":"4"}}}
  undoManager.undo() // {"text":{"blocks":{"3"}}}
  undoManager.undo() // {"text":{"blocks":{"text":"2"}}}
  undoManager.undo() // {"text":{"blocks":{"text":"1"}}}
  undoManager.undo() // {}
  undoManager.redo() // {"text":{"blocks":{"text":"1"}}}
  undoManager.redo() // {"text":{"blocks":{"text":"2"}}}
  undoManager.redo() // {"text":{"blocks":{"text":"3"}}}
  undoManager.redo() // {"text":{}}
  t.compare(design.toJSON(), { text: { blocks: { text: '4' } } })
}
|
|
||||||
|
|
||||||
/**
 * Undo text formatting delete should not corrupt peer state.
 *
 * @see https://github.com/yjs/yjs/issues/392
 * @param {t.TestCase} _tc
 */
export const testUndoDeleteTextFormat = _tc => {
  const doc = new Y.Doc()
  const text = doc.getText()
  text.insert(0, 'Attack ships on fire off the shoulder of Orion.')
  const doc2 = new Y.Doc()
  const text2 = doc2.getText()
  // Pushes doc's full state to the peer document.
  const syncToPeer = () => Y.applyUpdate(doc2, Y.encodeStateAsUpdate(doc))
  syncToPeer()
  const undoManager = new Y.UndoManager(text)

  text.format(13, 7, { bold: true })
  undoManager.stopCapturing()
  syncToPeer()

  text.format(16, 4, { bold: null })
  undoManager.stopCapturing()
  syncToPeer()

  undoManager.undo()
  syncToPeer()

  // Both the local and the synced peer doc must see the restored formatting.
  const expect = [
    { insert: 'Attack ships ' },
    {
      insert: 'on fire',
      attributes: { bold: true }
    },
    { insert: ' off the shoulder of Orion.' }
  ]
  t.compare(text.toDelta(), expect)
  t.compare(text2.toDelta(), expect)
}
|
|
||||||
|
|
||||||
/**
 * With `ignoreRemoteMapChanges` enabled, undoing a local map change skips
 * remote changes: here undo restores the remote value 2, not the older
 * local one.
 *
 * @param {t.TestCase} _tc
 */
export const testBehaviorOfIgnoreremotemapchangesProperty = _tc => {
  const docLocal = new Y.Doc()
  const docRemote = new Y.Doc()
  docLocal.on('update', update => Y.applyUpdate(docRemote, update, docLocal))
  docRemote.on('update', update => Y.applyUpdate(docLocal, update, docRemote))
  const mapLocal = docLocal.getMap()
  const mapRemote = docRemote.getMap()
  const undoManager = new Y.UndoManager(mapLocal, { ignoreRemoteMapChanges: true })
  mapLocal.set('x', 1)
  mapRemote.set('x', 2)
  mapLocal.set('x', 3)
  mapRemote.set('x', 4)
  undoManager.undo()
  t.assert(mapLocal.get('x') === 2)
  t.assert(mapRemote.get('x') === 2)
}
|
|
||||||
|
|
||||||
/**
 * Special deletion case.
 *
 * @see https://github.com/yjs/yjs/issues/447
 * @param {t.TestCase} _tc
 */
export const testSpecialDeletionCase = _tc => {
  const origin = 'undoable'
  const doc = new Y.Doc()
  const fragment = doc.getXmlFragment()
  const undoManager = new Y.UndoManager(fragment, { trackedOrigins: new Set([origin]) })
  // Created outside the tracked origin, so this insertion itself is not undoable.
  doc.transact(() => {
    const node = new Y.XmlElement('test')
    node.setAttribute('a', '1')
    node.setAttribute('b', '2')
    fragment.insert(0, [node])
  })
  t.compareStrings(fragment.toString(), '<test a="1" b="2"></test>')
  doc.transact(() => {
    // change attribute "b" and delete test-node
    const node = fragment.get(0)
    node.setAttribute('b', '3')
    fragment.delete(0)
  }, origin)
  t.compareStrings(fragment.toString(), '')
  undoManager.undo()
  // The node comes back with its pre-transaction attributes.
  t.compareStrings(fragment.toString(), '<test a="1" b="2"></test>')
}
|
|
||||||
|
|
||||||
/**
 * Deleted entries in a map should be restored on undo.
 *
 * @see https://github.com/yjs/yjs/issues/500
 * @param {t.TestCase} tc
 */
export const testUndoDeleteInMap = (tc) => {
  const { map0 } = init(tc, { users: 3 })
  const undoManager = new Y.UndoManager(map0, { captureTimeout: 0 })
  // With captureTimeout 0 every set/delete becomes its own stack item.
  for (const value of ['a', 'b', 'c']) {
    map0.set('a', value)
    map0.delete('a')
  }
  map0.set('a', 'd')
  t.compare(map0.toJSON(), { a: 'd' })
  // Undo steps back through the full set/delete history one item at a time.
  for (const expected of [{}, { a: 'c' }, {}, { a: 'b' }, {}, { a: 'a' }]) {
    undoManager.undo()
    t.compare(map0.toJSON(), expected)
  }
}
|
|
||||||
|
|
||||||
/**
 * It should expose the StackItem being processed if undoing
 *
 * @param {t.TestCase} _tc
 */
export const testUndoDoingStackItem = async (_tc) => {
  const doc = new Y.Doc()
  const text = doc.getText('text')
  const undoManager = new Y.UndoManager([text])
  // Tag every stack item so the observer below can read it back.
  undoManager.on('stack-item-added', /** @param {any} event */ event => {
    event.stackItem.meta.set('str', '42')
  })
  let metaUndo = /** @type {any} */ (null)
  let metaRedo = /** @type {any} */ (null)
  text.observe((event) => {
    const /** @type {Y.UndoManager} */ origin = event.transaction.origin
    if (origin !== undoManager) {
      return
    }
    if (origin.undoing) {
      metaUndo = origin.currStackItem?.meta.get('str')
    } else if (origin.redoing) {
      metaRedo = origin.currStackItem?.meta.get('str')
    }
  })
  text.insert(0, 'abc')
  undoManager.undo()
  undoManager.redo()
  t.compare(metaUndo, '42', 'currStackItem is accessible while undoing')
  t.compare(metaRedo, '42', 'currStackItem is accessible while redoing')
  t.compare(undoManager.currStackItem, null, 'currStackItem is null after observe/transaction')
}
|
|
||||||
|
|||||||
@@ -1,357 +0,0 @@
|
|||||||
import * as t from 'lib0/testing'
|
|
||||||
import { init, compare } from './testHelper.js' // eslint-disable-line
|
|
||||||
import * as Y from '../src/index.js'
|
|
||||||
import { readClientsStructRefs, readDeleteSet, UpdateDecoderV2, UpdateEncoderV2, writeDeleteSet } from '../src/internals.js'
|
|
||||||
import * as encoding from 'lib0/encoding'
|
|
||||||
import * as decoding from 'lib0/decoding'
|
|
||||||
import * as object from 'lib0/object'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {Object} Enc
|
|
||||||
* @property {function(Array<Uint8Array>):Uint8Array} Enc.mergeUpdates
|
|
||||||
* @property {function(Y.Doc):Uint8Array} Enc.encodeStateAsUpdate
|
|
||||||
* @property {function(Y.Doc, Uint8Array):void} Enc.applyUpdate
|
|
||||||
* @property {function(Uint8Array):void} Enc.logUpdate
|
|
||||||
* @property {function(Uint8Array):{from:Map<number,number>,to:Map<number,number>}} Enc.parseUpdateMeta
|
|
||||||
* @property {function(Y.Doc):Uint8Array} Enc.encodeStateVector
|
|
||||||
* @property {function(Uint8Array):Uint8Array} Enc.encodeStateVectorFromUpdate
|
|
||||||
* @property {'update'|'updateV2'} Enc.updateEventName
|
|
||||||
* @property {string} Enc.description
|
|
||||||
* @property {function(Uint8Array, Uint8Array):Uint8Array} Enc.diffUpdate
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
 * Encoder table backed by the legacy (V1) binary update format.
 *
 * @type {Enc}
 */
const encV1 = {
  mergeUpdates: Y.mergeUpdates,
  encodeStateAsUpdate: Y.encodeStateAsUpdate,
  applyUpdate: Y.applyUpdate,
  logUpdate: Y.logUpdate,
  parseUpdateMeta: Y.parseUpdateMeta,
  encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdate,
  encodeStateVector: Y.encodeStateVector,
  updateEventName: 'update',
  description: 'V1',
  diffUpdate: Y.diffUpdate
}
|
|
||||||
|
|
||||||
/**
 * Encoder table backed by the V2 binary update format.
 * Note: state vectors are format-independent, so `encodeStateVector` is shared with V1.
 *
 * @type {Enc}
 */
const encV2 = {
  mergeUpdates: Y.mergeUpdatesV2,
  encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
  applyUpdate: Y.applyUpdateV2,
  logUpdate: Y.logUpdateV2,
  parseUpdateMeta: Y.parseUpdateMetaV2,
  encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2,
  encodeStateVector: Y.encodeStateVector,
  updateEventName: 'updateV2',
  description: 'V2',
  diffUpdate: Y.diffUpdateV2
}
|
|
||||||
|
|
||||||
/**
 * Encoder table (V2 format) that performs `mergeUpdates` and `diffUpdate`
 * by replaying updates through a fresh `Y.Doc` instead of operating on the
 * binary updates directly.
 *
 * @type {Enc}
 */
const encDoc = {
  mergeUpdates: (updates) => {
    // gc disabled so deleted content survives the round-trip through the doc
    const ydoc = new Y.Doc({ gc: false })
    updates.forEach(update => {
      Y.applyUpdateV2(ydoc, update)
    })
    return Y.encodeStateAsUpdateV2(ydoc)
  },
  encodeStateAsUpdate: Y.encodeStateAsUpdateV2,
  applyUpdate: Y.applyUpdateV2,
  logUpdate: Y.logUpdateV2,
  parseUpdateMeta: Y.parseUpdateMetaV2,
  encodeStateVectorFromUpdate: Y.encodeStateVectorFromUpdateV2,
  encodeStateVector: Y.encodeStateVector,
  updateEventName: 'updateV2',
  description: 'Merge via Y.Doc',
  /**
   * Diff by loading the update into a fresh doc and re-encoding the state
   * relative to the given state vector.
   *
   * @param {Uint8Array} update
   * @param {Uint8Array} sv
   */
  diffUpdate: (update, sv) => {
    const ydoc = new Y.Doc({ gc: false })
    Y.applyUpdateV2(ydoc, update)
    return Y.encodeStateAsUpdateV2(ydoc, sv)
  }
}
|
|
||||||
|
|
||||||
// Every merge/diff test below is executed once per encoding strategy.
const encoders = [encV1, encV2, encDoc]
|
|
||||||
|
|
||||||
/**
 * Encode each user's state, merge all updates with the given encoder, and
 * return a fresh doc built from the merged result.
 *
 * @param {Array<Y.Doc>} users
 * @param {Enc} enc
 */
const fromUpdates = (users, enc) => {
  const merged = enc.mergeUpdates(users.map(user => enc.encodeStateAsUpdate(user)))
  const ydoc = new Y.Doc()
  enc.applyUpdate(ydoc, merged)
  return ydoc
}
|
|
||||||
|
|
||||||
/**
 * @param {t.TestCase} tc
 */
export const testMergeUpdates = tc => {
  const { users, array0, array1 } = init(tc, { users: 3 })

  array0.insert(0, [1])
  array1.insert(0, [2])

  compare(users)
  // Merging all users' updates must reproduce the synced array for every encoder.
  for (const enc of encoders) {
    const merged = fromUpdates(users, enc)
    t.compareArrays(array0.toArray(), merged.getArray('array').toArray())
  }
}
|
|
||||||
|
|
||||||
/**
 * @param {t.TestCase} tc
 */
export const testKeyEncoding = tc => {
  const { users, text0, text1 } = init(tc, { users: 2 })

  text0.insert(0, 'a', { italic: true })
  text0.insert(0, 'b')
  text0.insert(0, 'c', { italic: true })

  // Transfer user0's full state to user1 using the V2 encoding.
  Y.applyUpdateV2(users[1], Y.encodeStateAsUpdateV2(users[0]))

  t.compare(text1.toDelta(), [{ insert: 'c', attributes: { italic: true } }, { insert: 'b' }, { insert: 'a', attributes: { italic: true } }])

  compare(users)
}
|
|
||||||
|
|
||||||
/**
 * Merge `updates` in several overlapping/partitioned combinations and assert
 * that every merged result reproduces `ydoc`'s array content, state vector,
 * and (for V2 encoders) its diff/meta behavior.
 *
 * @param {Y.Doc} ydoc
 * @param {Array<Uint8Array>} updates - expecting at least 4 updates
 * @param {Enc} enc
 * @param {boolean} hasDeletes
 */
const checkUpdateCases = (ydoc, updates, enc, hasDeletes) => {
  const cases = []
  // Case 1: Simple case, simply merge everything
  cases.push(enc.mergeUpdates(updates))

  // Case 2: Overlapping updates
  cases.push(enc.mergeUpdates([
    enc.mergeUpdates(updates.slice(2)),
    enc.mergeUpdates(updates.slice(0, 2))
  ]))

  // Case 3: Overlapping updates
  cases.push(enc.mergeUpdates([
    enc.mergeUpdates(updates.slice(2)),
    enc.mergeUpdates(updates.slice(1, 3)),
    updates[0]
  ]))

  // Case 4: Separated updates (containing skips)
  cases.push(enc.mergeUpdates([
    enc.mergeUpdates([updates[0], updates[2]]),
    enc.mergeUpdates([updates[1], updates[3]]),
    enc.mergeUpdates(updates.slice(4))
  ]))

  // Case 5: overlapping with many duplicates
  cases.push(enc.mergeUpdates(cases))

  // const targetState = enc.encodeStateAsUpdate(ydoc)
  // t.info('Target State: ')
  // enc.logUpdate(targetState)

  cases.forEach((mergedUpdates) => {
    // t.info('State Case $' + i + ':')
    // enc.logUpdate(updates)
    // Each merged result must rebuild the same content and state vector.
    const merged = new Y.Doc({ gc: false })
    enc.applyUpdate(merged, mergedUpdates)
    t.compareArrays(merged.getArray().toArray(), ydoc.getArray().toArray())
    t.compare(enc.encodeStateVector(merged), enc.encodeStateVectorFromUpdate(mergedUpdates))

    if (enc.updateEventName !== 'update') { // @todo should this also work on legacy updates?
      // For every split point j: diffing the merged update against the state
      // vector of the first j updates must equal merging the remaining updates.
      for (let j = 1; j < updates.length; j++) {
        const partMerged = enc.mergeUpdates(updates.slice(j))
        const partMeta = enc.parseUpdateMeta(partMerged)
        const targetSV = Y.encodeStateVectorFromUpdateV2(Y.mergeUpdatesV2(updates.slice(0, j)))
        const diffed = enc.diffUpdate(mergedUpdates, targetSV)
        const diffedMeta = enc.parseUpdateMeta(diffed)
        t.compare(partMeta, diffedMeta)
        {
          // We can't do the following
          //  - t.compare(diffed, mergedDeletes)
          // because diffed contains the set of all deletes.
          // So we add all deletes from `diffed` to `partDeletes` and compare then
          const decoder = decoding.createDecoder(diffed)
          const updateDecoder = new UpdateDecoderV2(decoder)
          readClientsStructRefs(updateDecoder, new Y.Doc())
          const ds = readDeleteSet(updateDecoder)
          const updateEncoder = new UpdateEncoderV2()
          encoding.writeVarUint(updateEncoder.restEncoder, 0) // 0 structs
          writeDeleteSet(updateEncoder, ds)
          const deletesUpdate = updateEncoder.toUint8Array()
          const mergedDeletes = Y.mergeUpdatesV2([deletesUpdate, partMerged])
          if (!hasDeletes || enc !== encDoc) {
            // deletes will almost definitely lead to different encoders because of the mergeStruct feature that is present in encDoc
            t.compare(diffed, mergedDeletes)
          }
        }
      }
    }

    // Meta of the fully merged update: every client range starts at clock 0
    // and ends exactly at the last struct's end clock.
    const meta = enc.parseUpdateMeta(mergedUpdates)
    meta.from.forEach((clock, client) => t.assert(clock === 0))
    meta.to.forEach((clock, client) => {
      const structs = /** @type {Array<Y.Item>} */ (merged.store.clients.get(client))
      const lastStruct = structs[structs.length - 1]
      t.assert(lastStruct.id.clock + lastStruct.length === clock)
    })
  })
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testMergeUpdates1 = _tc => {
|
|
||||||
encoders.forEach((enc) => {
|
|
||||||
t.info(`Using encoder: ${enc.description}`)
|
|
||||||
const ydoc = new Y.Doc({ gc: false })
|
|
||||||
const updates = /** @type {Array<Uint8Array>} */ ([])
|
|
||||||
ydoc.on(enc.updateEventName, update => { updates.push(update) })
|
|
||||||
|
|
||||||
const array = ydoc.getArray()
|
|
||||||
array.insert(0, [1])
|
|
||||||
array.insert(0, [2])
|
|
||||||
array.insert(0, [3])
|
|
||||||
array.insert(0, [4])
|
|
||||||
|
|
||||||
checkUpdateCases(ydoc, updates, enc, false)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testMergeUpdates2 = tc => {
|
|
||||||
encoders.forEach((enc, i) => {
|
|
||||||
t.info(`Using encoder: ${enc.description}`)
|
|
||||||
const ydoc = new Y.Doc({ gc: false })
|
|
||||||
const updates = /** @type {Array<Uint8Array>} */ ([])
|
|
||||||
ydoc.on(enc.updateEventName, update => { updates.push(update) })
|
|
||||||
|
|
||||||
const array = ydoc.getArray()
|
|
||||||
array.insert(0, [1, 2])
|
|
||||||
array.delete(1, 1)
|
|
||||||
array.insert(0, [3, 4])
|
|
||||||
array.delete(1, 2)
|
|
||||||
|
|
||||||
checkUpdateCases(ydoc, updates, enc, true)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testMergePendingUpdates = tc => {
|
|
||||||
const yDoc = new Y.Doc()
|
|
||||||
/**
|
|
||||||
* @type {Array<Uint8Array>}
|
|
||||||
*/
|
|
||||||
const serverUpdates = []
|
|
||||||
yDoc.on('update', (update, origin, c) => {
|
|
||||||
serverUpdates.splice(serverUpdates.length, 0, update)
|
|
||||||
})
|
|
||||||
const yText = yDoc.getText('textBlock')
|
|
||||||
yText.applyDelta([{ insert: 'r' }])
|
|
||||||
yText.applyDelta([{ insert: 'o' }])
|
|
||||||
yText.applyDelta([{ insert: 'n' }])
|
|
||||||
yText.applyDelta([{ insert: 'e' }])
|
|
||||||
yText.applyDelta([{ insert: 'n' }])
|
|
||||||
|
|
||||||
const yDoc1 = new Y.Doc()
|
|
||||||
Y.applyUpdate(yDoc1, serverUpdates[0])
|
|
||||||
const update1 = Y.encodeStateAsUpdate(yDoc1)
|
|
||||||
|
|
||||||
const yDoc2 = new Y.Doc()
|
|
||||||
Y.applyUpdate(yDoc2, update1)
|
|
||||||
Y.applyUpdate(yDoc2, serverUpdates[1])
|
|
||||||
const update2 = Y.encodeStateAsUpdate(yDoc2)
|
|
||||||
|
|
||||||
const yDoc3 = new Y.Doc()
|
|
||||||
Y.applyUpdate(yDoc3, update2)
|
|
||||||
Y.applyUpdate(yDoc3, serverUpdates[3])
|
|
||||||
const update3 = Y.encodeStateAsUpdate(yDoc3)
|
|
||||||
|
|
||||||
const yDoc4 = new Y.Doc()
|
|
||||||
Y.applyUpdate(yDoc4, update3)
|
|
||||||
Y.applyUpdate(yDoc4, serverUpdates[2])
|
|
||||||
const update4 = Y.encodeStateAsUpdate(yDoc4)
|
|
||||||
|
|
||||||
const yDoc5 = new Y.Doc()
|
|
||||||
Y.applyUpdate(yDoc5, update4)
|
|
||||||
Y.applyUpdate(yDoc5, serverUpdates[4])
|
|
||||||
// @ts-ignore
|
|
||||||
const _update5 = Y.encodeStateAsUpdate(yDoc5) // eslint-disable-line
|
|
||||||
|
|
||||||
const yText5 = yDoc5.getText('textBlock')
|
|
||||||
t.compareStrings(yText5.toString(), 'nenor')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testObfuscateUpdates = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = ydoc.getText('text')
|
|
||||||
const ymap = ydoc.getMap('map')
|
|
||||||
const yarray = ydoc.getArray('array')
|
|
||||||
// test ytext
|
|
||||||
ytext.applyDelta([{ insert: 'text', attributes: { bold: true } }, { insert: { href: 'supersecreturl' } }])
|
|
||||||
// test ymap
|
|
||||||
ymap.set('key', 'secret1')
|
|
||||||
ymap.set('key', 'secret2')
|
|
||||||
// test yarray with subtype & subdoc
|
|
||||||
const subtype = new Y.XmlElement('secretnodename')
|
|
||||||
const subdoc = new Y.Doc({ guid: 'secret' })
|
|
||||||
subtype.setAttribute('attr', 'val')
|
|
||||||
yarray.insert(0, ['teststring', 42, subtype, subdoc])
|
|
||||||
// obfuscate the content and put it into a new document
|
|
||||||
const obfuscatedUpdate = Y.obfuscateUpdate(Y.encodeStateAsUpdate(ydoc))
|
|
||||||
const odoc = new Y.Doc()
|
|
||||||
Y.applyUpdate(odoc, obfuscatedUpdate)
|
|
||||||
const otext = odoc.getText('text')
|
|
||||||
const omap = odoc.getMap('map')
|
|
||||||
const oarray = odoc.getArray('array')
|
|
||||||
// test ytext
|
|
||||||
const delta = otext.toDelta()
|
|
||||||
t.assert(delta.length === 2)
|
|
||||||
t.assert(delta[0].insert !== 'text' && delta[0].insert.length === 4)
|
|
||||||
t.assert(object.length(delta[0].attributes) === 1)
|
|
||||||
t.assert(!object.hasProperty(delta[0].attributes, 'bold'))
|
|
||||||
t.assert(object.length(delta[1]) === 1)
|
|
||||||
t.assert(object.hasProperty(delta[1], 'insert'))
|
|
||||||
// test ymap
|
|
||||||
t.assert(omap.size === 1)
|
|
||||||
t.assert(!omap.has('key'))
|
|
||||||
// test yarray with subtype & subdoc
|
|
||||||
const result = oarray.toArray()
|
|
||||||
t.assert(result.length === 4)
|
|
||||||
t.assert(result[0] !== 'teststring')
|
|
||||||
t.assert(result[1] !== 42)
|
|
||||||
const osubtype = /** @type {Y.XmlElement} */ (result[2])
|
|
||||||
const osubdoc = result[3]
|
|
||||||
// test subtype
|
|
||||||
t.assert(osubtype.nodeName !== subtype.nodeName)
|
|
||||||
t.assert(object.length(osubtype.getAttributes()) === 1)
|
|
||||||
t.assert(osubtype.getAttribute('attr') === undefined)
|
|
||||||
// test subdoc
|
|
||||||
t.assert(osubdoc.guid !== subdoc.guid)
|
|
||||||
}
|
|
||||||
@@ -1,144 +1,9 @@
|
|||||||
import { init, compare, applyRandomTests, Doc } from './testHelper.js' // eslint-disable-line
|
import { init, compare, applyRandomTests, Doc } from './testHelper.js' // eslint-disable-line
|
||||||
|
|
||||||
import * as Y from '../src/index.js'
|
import * as Y from '../src/index.js'
|
||||||
import * as t from 'lib0/testing'
|
import * as t from 'lib0/testing.js'
|
||||||
import * as prng from 'lib0/prng'
|
import * as prng from 'lib0/prng.js'
|
||||||
import * as math from 'lib0/math'
|
import * as math from 'lib0/math.js'
|
||||||
import * as env from 'lib0/environment'
|
|
||||||
|
|
||||||
const isDevMode = env.getVariable('node_env') === 'development'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testBasicUpdate = tc => {
|
|
||||||
const doc1 = new Y.Doc()
|
|
||||||
const doc2 = new Y.Doc()
|
|
||||||
doc1.getArray('array').insert(0, ['hi'])
|
|
||||||
const update = Y.encodeStateAsUpdate(doc1)
|
|
||||||
Y.applyUpdate(doc2, update)
|
|
||||||
t.compare(doc2.getArray('array').toArray(), ['hi'])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testFailsObjectManipulationInDevMode = tc => {
|
|
||||||
if (isDevMode) {
|
|
||||||
t.info('running in dev mode')
|
|
||||||
const doc = new Y.Doc()
|
|
||||||
const a = [1, 2, 3]
|
|
||||||
const b = { o: 1 }
|
|
||||||
doc.getArray('test').insert(0, [a])
|
|
||||||
doc.getMap('map').set('k', b)
|
|
||||||
t.fails(() => {
|
|
||||||
a[0] = 42
|
|
||||||
})
|
|
||||||
t.fails(() => {
|
|
||||||
b.o = 42
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
t.info('not in dev mode')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testSlice = tc => {
|
|
||||||
const doc1 = new Y.Doc()
|
|
||||||
const arr = doc1.getArray('array')
|
|
||||||
arr.insert(0, [1, 2, 3])
|
|
||||||
t.compareArrays(arr.slice(0), [1, 2, 3])
|
|
||||||
t.compareArrays(arr.slice(1), [2, 3])
|
|
||||||
t.compareArrays(arr.slice(0, -1), [1, 2])
|
|
||||||
arr.insert(0, [0])
|
|
||||||
t.compareArrays(arr.slice(0), [0, 1, 2, 3])
|
|
||||||
t.compareArrays(arr.slice(0, 2), [0, 1])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testArrayFrom = tc => {
|
|
||||||
const doc1 = new Y.Doc()
|
|
||||||
const db1 = doc1.getMap('root')
|
|
||||||
const nestedArray1 = Y.Array.from([0, 1, 2])
|
|
||||||
db1.set('array', nestedArray1)
|
|
||||||
t.compare(nestedArray1.toArray(), [0, 1, 2])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Debugging yjs#297 - a critical bug connected to the search-marker approach
|
|
||||||
*
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testLengthIssue = tc => {
|
|
||||||
const doc1 = new Y.Doc()
|
|
||||||
const arr = doc1.getArray('array')
|
|
||||||
arr.push([0, 1, 2, 3])
|
|
||||||
arr.delete(0)
|
|
||||||
arr.insert(0, [0])
|
|
||||||
t.assert(arr.length === arr.toArray().length)
|
|
||||||
doc1.transact(() => {
|
|
||||||
arr.delete(1)
|
|
||||||
t.assert(arr.length === arr.toArray().length)
|
|
||||||
arr.insert(1, [1])
|
|
||||||
t.assert(arr.length === arr.toArray().length)
|
|
||||||
arr.delete(2)
|
|
||||||
t.assert(arr.length === arr.toArray().length)
|
|
||||||
arr.insert(2, [2])
|
|
||||||
t.assert(arr.length === arr.toArray().length)
|
|
||||||
})
|
|
||||||
t.assert(arr.length === arr.toArray().length)
|
|
||||||
arr.delete(1)
|
|
||||||
t.assert(arr.length === arr.toArray().length)
|
|
||||||
arr.insert(1, [1])
|
|
||||||
t.assert(arr.length === arr.toArray().length)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Debugging yjs#314
|
|
||||||
*
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testLengthIssue2 = tc => {
|
|
||||||
const doc = new Y.Doc()
|
|
||||||
const next = doc.getArray()
|
|
||||||
doc.transact(() => {
|
|
||||||
next.insert(0, ['group2'])
|
|
||||||
})
|
|
||||||
doc.transact(() => {
|
|
||||||
next.insert(1, ['rectangle3'])
|
|
||||||
})
|
|
||||||
doc.transact(() => {
|
|
||||||
next.delete(0)
|
|
||||||
next.insert(0, ['rectangle3'])
|
|
||||||
})
|
|
||||||
next.delete(1)
|
|
||||||
doc.transact(() => {
|
|
||||||
next.insert(1, ['ellipse4'])
|
|
||||||
})
|
|
||||||
doc.transact(() => {
|
|
||||||
next.insert(2, ['ellipse3'])
|
|
||||||
})
|
|
||||||
doc.transact(() => {
|
|
||||||
next.insert(3, ['ellipse2'])
|
|
||||||
})
|
|
||||||
doc.transact(() => {
|
|
||||||
doc.transact(() => {
|
|
||||||
t.fails(() => {
|
|
||||||
next.insert(5, ['rectangle2'])
|
|
||||||
})
|
|
||||||
next.insert(4, ['rectangle2'])
|
|
||||||
})
|
|
||||||
doc.transact(() => {
|
|
||||||
// this should not throw an error message
|
|
||||||
next.delete(4)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
console.log(next.toArray())
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
@@ -172,7 +37,7 @@ export const testInsertThreeElementsTryRegetProperty = tc => {
|
|||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testConcurrentInsertWithThreeConflicts = tc => {
|
export const testConcurrentInsertWithThreeConflicts = tc => {
|
||||||
const { users, array0, array1, array2 } = init(tc, { users: 3 })
|
var { users, array0, array1, array2 } = init(tc, { users: 3 })
|
||||||
array0.insert(0, [0])
|
array0.insert(0, [0])
|
||||||
array1.insert(0, [1])
|
array1.insert(0, [1])
|
||||||
array2.insert(0, [2])
|
array2.insert(0, [2])
|
||||||
@@ -215,7 +80,7 @@ export const testInsertionsInLateSync = tc => {
|
|||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testDisconnectReallyPreventsSendingMessages = tc => {
|
export const testDisconnectReallyPreventsSendingMessages = tc => {
|
||||||
const { testConnector, users, array0, array1 } = init(tc, { users: 3 })
|
var { testConnector, users, array0, array1 } = init(tc, { users: 3 })
|
||||||
array0.insert(0, ['x', 'y'])
|
array0.insert(0, ['x', 'y'])
|
||||||
testConnector.flushAllMessages()
|
testConnector.flushAllMessages()
|
||||||
users[1].disconnect()
|
users[1].disconnect()
|
||||||
@@ -327,57 +192,6 @@ export const testInsertAndDeleteEventsForTypes = tc => {
|
|||||||
compare(users)
|
compare(users)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* This issue has been reported in https://discuss.yjs.dev/t/order-in-which-events-yielded-by-observedeep-should-be-applied/261/2
|
|
||||||
*
|
|
||||||
* Deep observers generate multiple events. When an array added at item at, say, position 0,
|
|
||||||
* and item 1 changed then the array-add event should fire first so that the change event
|
|
||||||
* path is correct. A array binding might lead to an inconsistent state otherwise.
|
|
||||||
*
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testObserveDeepEventOrder = tc => {
|
|
||||||
const { array0, users } = init(tc, { users: 2 })
|
|
||||||
/**
|
|
||||||
* @type {Array<any>}
|
|
||||||
*/
|
|
||||||
let events = []
|
|
||||||
array0.observeDeep(e => {
|
|
||||||
events = e
|
|
||||||
})
|
|
||||||
array0.insert(0, [new Y.Map()])
|
|
||||||
users[0].transact(() => {
|
|
||||||
array0.get(0).set('a', 'a')
|
|
||||||
array0.insert(0, [0])
|
|
||||||
})
|
|
||||||
for (let i = 1; i < events.length; i++) {
|
|
||||||
t.assert(events[i - 1].path.length <= events[i].path.length, 'path size increases, fire top-level events first')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Correct index when computing event.path in observeDeep - https://github.com/yjs/yjs/issues/457
|
|
||||||
*
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testObservedeepIndexes = _tc => {
|
|
||||||
const doc = new Y.Doc()
|
|
||||||
const map = doc.getMap()
|
|
||||||
// Create a field with the array as value
|
|
||||||
map.set('my-array', new Y.Array())
|
|
||||||
// Fill the array with some strings and our Map
|
|
||||||
map.get('my-array').push(['a', 'b', 'c', new Y.Map()])
|
|
||||||
/**
|
|
||||||
* @type {Array<any>}
|
|
||||||
*/
|
|
||||||
let eventPath = []
|
|
||||||
map.observeDeep((events) => { eventPath = events[0].path })
|
|
||||||
// set a value on the map inside of our array
|
|
||||||
map.get('my-array').get(3).set('hello', 'world')
|
|
||||||
console.log(eventPath)
|
|
||||||
t.compare(eventPath, ['my-array', 3])
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
@@ -519,57 +333,46 @@ const getUniqueNumber = () => _uniqueNumber++
|
|||||||
const arrayTransactions = [
|
const arrayTransactions = [
|
||||||
function insert (user, gen) {
|
function insert (user, gen) {
|
||||||
const yarray = user.getArray('array')
|
const yarray = user.getArray('array')
|
||||||
const uniqueNumber = getUniqueNumber()
|
var uniqueNumber = getUniqueNumber()
|
||||||
const content = []
|
var content = []
|
||||||
const len = prng.int32(gen, 1, 4)
|
var len = prng.int31(gen, 1, 4)
|
||||||
for (let i = 0; i < len; i++) {
|
for (var i = 0; i < len; i++) {
|
||||||
content.push(uniqueNumber)
|
content.push(uniqueNumber)
|
||||||
}
|
}
|
||||||
const pos = prng.int32(gen, 0, yarray.length)
|
var pos = prng.int31(gen, 0, yarray.length)
|
||||||
const oldContent = yarray.toArray()
|
|
||||||
yarray.insert(pos, content)
|
yarray.insert(pos, content)
|
||||||
oldContent.splice(pos, 0, ...content)
|
|
||||||
t.compareArrays(yarray.toArray(), oldContent) // we want to make sure that fastSearch markers insert at the correct position
|
|
||||||
},
|
},
|
||||||
function insertTypeArray (user, gen) {
|
function insertTypeArray (user, gen) {
|
||||||
const yarray = user.getArray('array')
|
const yarray = user.getArray('array')
|
||||||
const pos = prng.int32(gen, 0, yarray.length)
|
var pos = prng.int31(gen, 0, yarray.length)
|
||||||
yarray.insert(pos, [new Y.Array()])
|
yarray.insert(pos, [new Y.Array()])
|
||||||
const array2 = yarray.get(pos)
|
var array2 = yarray.get(pos)
|
||||||
array2.insert(0, [1, 2, 3, 4])
|
array2.insert(0, [1, 2, 3, 4])
|
||||||
},
|
},
|
||||||
function insertTypeMap (user, gen) {
|
function insertTypeMap (user, gen) {
|
||||||
const yarray = user.getArray('array')
|
const yarray = user.getArray('array')
|
||||||
const pos = prng.int32(gen, 0, yarray.length)
|
var pos = prng.int31(gen, 0, yarray.length)
|
||||||
yarray.insert(pos, [new Y.Map()])
|
yarray.insert(pos, [new Y.Map()])
|
||||||
const map = yarray.get(pos)
|
var map = yarray.get(pos)
|
||||||
map.set('someprop', 42)
|
map.set('someprop', 42)
|
||||||
map.set('someprop', 43)
|
map.set('someprop', 43)
|
||||||
map.set('someprop', 44)
|
map.set('someprop', 44)
|
||||||
},
|
},
|
||||||
function insertTypeNull (user, gen) {
|
|
||||||
const yarray = user.getArray('array')
|
|
||||||
const pos = prng.int32(gen, 0, yarray.length)
|
|
||||||
yarray.insert(pos, [null])
|
|
||||||
},
|
|
||||||
function _delete (user, gen) {
|
function _delete (user, gen) {
|
||||||
const yarray = user.getArray('array')
|
const yarray = user.getArray('array')
|
||||||
const length = yarray.length
|
var length = yarray.length
|
||||||
if (length > 0) {
|
if (length > 0) {
|
||||||
let somePos = prng.int32(gen, 0, length - 1)
|
var somePos = prng.int31(gen, 0, length - 1)
|
||||||
let delLength = prng.int32(gen, 1, math.min(2, length - somePos))
|
var delLength = prng.int31(gen, 1, math.min(2, length - somePos))
|
||||||
if (prng.bool(gen)) {
|
if (prng.bool(gen)) {
|
||||||
const type = yarray.get(somePos)
|
var type = yarray.get(somePos)
|
||||||
if (type instanceof Y.Array && type.length > 0) {
|
if (type.length > 0) {
|
||||||
somePos = prng.int32(gen, 0, type.length - 1)
|
somePos = prng.int31(gen, 0, type.length - 1)
|
||||||
delLength = prng.int32(gen, 0, math.min(2, type.length - somePos))
|
delLength = prng.int31(gen, 0, math.min(2, type.length - somePos))
|
||||||
type.delete(somePos, delLength)
|
type.delete(somePos, delLength)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const oldContent = yarray.toArray()
|
|
||||||
yarray.delete(somePos, delLength)
|
yarray.delete(somePos, delLength)
|
||||||
oldContent.splice(somePos, delLength)
|
|
||||||
t.compareArrays(yarray.toArray(), oldContent)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -578,8 +381,8 @@ const arrayTransactions = [
|
|||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testRepeatGeneratingYarrayTests6 = tc => {
|
export const testRepeatGeneratingYarrayTests4 = tc => {
|
||||||
applyRandomTests(tc, arrayTransactions, 6)
|
applyRandomTests(tc, arrayTransactions, 4)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -5,56 +5,8 @@ import {
|
|||||||
} from '../src/internals.js'
|
} from '../src/internals.js'
|
||||||
|
|
||||||
import * as Y from '../src/index.js'
|
import * as Y from '../src/index.js'
|
||||||
import * as t from 'lib0/testing'
|
import * as t from 'lib0/testing.js'
|
||||||
import * as prng from 'lib0/prng'
|
import * as prng from 'lib0/prng.js'
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testIterators = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
/**
|
|
||||||
* @type {Y.Map<number>}
|
|
||||||
*/
|
|
||||||
const ymap = ydoc.getMap()
|
|
||||||
// we are only checking if the type assumptions are correct
|
|
||||||
/**
|
|
||||||
* @type {Array<number>}
|
|
||||||
*/
|
|
||||||
const vals = Array.from(ymap.values())
|
|
||||||
/**
|
|
||||||
* @type {Array<[string,number]>}
|
|
||||||
*/
|
|
||||||
const entries = Array.from(ymap.entries())
|
|
||||||
/**
|
|
||||||
* @type {Array<string>}
|
|
||||||
*/
|
|
||||||
const keys = Array.from(ymap.keys())
|
|
||||||
console.log(vals, entries, keys)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Computing event changes after transaction should result in an error. See yjs#539
|
|
||||||
*
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testMapEventError = _tc => {
|
|
||||||
const doc = new Y.Doc()
|
|
||||||
const ymap = doc.getMap()
|
|
||||||
/**
|
|
||||||
* @type {any}
|
|
||||||
*/
|
|
||||||
let event = null
|
|
||||||
ymap.observe((e) => {
|
|
||||||
event = e
|
|
||||||
})
|
|
||||||
t.fails(() => {
|
|
||||||
t.info(event.keys)
|
|
||||||
})
|
|
||||||
t.fails(() => {
|
|
||||||
t.info(event.keys)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
@@ -90,7 +42,6 @@ export const testBasicMapTests = tc => {
|
|||||||
const { testConnector, users, map0, map1, map2 } = init(tc, { users: 3 })
|
const { testConnector, users, map0, map1, map2 } = init(tc, { users: 3 })
|
||||||
users[2].disconnect()
|
users[2].disconnect()
|
||||||
|
|
||||||
map0.set('null', null)
|
|
||||||
map0.set('number', 1)
|
map0.set('number', 1)
|
||||||
map0.set('string', 'hello Y')
|
map0.set('string', 'hello Y')
|
||||||
map0.set('object', { key: { key2: 'value' } })
|
map0.set('object', { key: { key2: 'value' } })
|
||||||
@@ -103,29 +54,26 @@ export const testBasicMapTests = tc => {
|
|||||||
array.insert(0, [0])
|
array.insert(0, [0])
|
||||||
array.insert(0, [-1])
|
array.insert(0, [-1])
|
||||||
|
|
||||||
t.assert(map0.get('null') === null, 'client 0 computed the change (null)')
|
|
||||||
t.assert(map0.get('number') === 1, 'client 0 computed the change (number)')
|
t.assert(map0.get('number') === 1, 'client 0 computed the change (number)')
|
||||||
t.assert(map0.get('string') === 'hello Y', 'client 0 computed the change (string)')
|
t.assert(map0.get('string') === 'hello Y', 'client 0 computed the change (string)')
|
||||||
t.assert(map0.get('boolean0') === false, 'client 0 computed the change (boolean)')
|
t.assert(map0.get('boolean0') === false, 'client 0 computed the change (boolean)')
|
||||||
t.assert(map0.get('boolean1') === true, 'client 0 computed the change (boolean)')
|
t.assert(map0.get('boolean1') === true, 'client 0 computed the change (boolean)')
|
||||||
t.compare(map0.get('object'), { key: { key2: 'value' } }, 'client 0 computed the change (object)')
|
t.compare(map0.get('object'), { key: { key2: 'value' } }, 'client 0 computed the change (object)')
|
||||||
t.assert(map0.get('y-map').get('y-array').get(0) === -1, 'client 0 computed the change (type)')
|
t.assert(map0.get('y-map').get('y-array').get(0) === -1, 'client 0 computed the change (type)')
|
||||||
t.assert(map0.size === 7, 'client 0 map has correct size')
|
t.assert(map0.size === 6, 'client 0 map has correct size')
|
||||||
|
|
||||||
users[2].connect()
|
users[2].connect()
|
||||||
testConnector.flushAllMessages()
|
testConnector.flushAllMessages()
|
||||||
|
|
||||||
t.assert(map1.get('null') === null, 'client 1 received the update (null)')
|
|
||||||
t.assert(map1.get('number') === 1, 'client 1 received the update (number)')
|
t.assert(map1.get('number') === 1, 'client 1 received the update (number)')
|
||||||
t.assert(map1.get('string') === 'hello Y', 'client 1 received the update (string)')
|
t.assert(map1.get('string') === 'hello Y', 'client 1 received the update (string)')
|
||||||
t.assert(map1.get('boolean0') === false, 'client 1 computed the change (boolean)')
|
t.assert(map1.get('boolean0') === false, 'client 1 computed the change (boolean)')
|
||||||
t.assert(map1.get('boolean1') === true, 'client 1 computed the change (boolean)')
|
t.assert(map1.get('boolean1') === true, 'client 1 computed the change (boolean)')
|
||||||
t.compare(map1.get('object'), { key: { key2: 'value' } }, 'client 1 received the update (object)')
|
t.compare(map1.get('object'), { key: { key2: 'value' } }, 'client 1 received the update (object)')
|
||||||
t.assert(map1.get('y-map').get('y-array').get(0) === -1, 'client 1 received the update (type)')
|
t.assert(map1.get('y-map').get('y-array').get(0) === -1, 'client 1 received the update (type)')
|
||||||
t.assert(map1.size === 7, 'client 1 map has correct size')
|
t.assert(map1.size === 6, 'client 1 map has correct size')
|
||||||
|
|
||||||
// compare disconnected user
|
// compare disconnected user
|
||||||
t.assert(map2.get('null') === null, 'client 2 received the update (null) - was disconnected')
|
|
||||||
t.assert(map2.get('number') === 1, 'client 2 received the update (number) - was disconnected')
|
t.assert(map2.get('number') === 1, 'client 2 received the update (number) - was disconnected')
|
||||||
t.assert(map2.get('string') === 'hello Y', 'client 2 received the update (string) - was disconnected')
|
t.assert(map2.get('string') === 'hello Y', 'client 2 received the update (string) - was disconnected')
|
||||||
t.assert(map2.get('boolean0') === false, 'client 2 computed the change (boolean)')
|
t.assert(map2.get('boolean0') === false, 'client 2 computed the change (boolean)')
|
||||||
@@ -190,7 +138,7 @@ export const testGetAndSetOfMapPropertySyncs = tc => {
|
|||||||
t.compare(map0.get('stuff'), 'stuffy')
|
t.compare(map0.get('stuff'), 'stuffy')
|
||||||
testConnector.flushAllMessages()
|
testConnector.flushAllMessages()
|
||||||
for (const user of users) {
|
for (const user of users) {
|
||||||
const u = user.getMap('map')
|
var u = user.getMap('map')
|
||||||
t.compare(u.get('stuff'), 'stuffy')
|
t.compare(u.get('stuff'), 'stuffy')
|
||||||
}
|
}
|
||||||
compare(users)
|
compare(users)
|
||||||
@@ -205,7 +153,7 @@ export const testGetAndSetOfMapPropertyWithConflict = tc => {
|
|||||||
map1.set('stuff', 'c1')
|
map1.set('stuff', 'c1')
|
||||||
testConnector.flushAllMessages()
|
testConnector.flushAllMessages()
|
||||||
for (const user of users) {
|
for (const user of users) {
|
||||||
const u = user.getMap('map')
|
var u = user.getMap('map')
|
||||||
t.compare(u.get('stuff'), 'c1')
|
t.compare(u.get('stuff'), 'c1')
|
||||||
}
|
}
|
||||||
compare(users)
|
compare(users)
|
||||||
@@ -235,55 +183,12 @@ export const testGetAndSetAndDeleteOfMapProperty = tc => {
|
|||||||
map1.delete('stuff')
|
map1.delete('stuff')
|
||||||
testConnector.flushAllMessages()
|
testConnector.flushAllMessages()
|
||||||
for (const user of users) {
|
for (const user of users) {
|
||||||
const u = user.getMap('map')
|
var u = user.getMap('map')
|
||||||
t.assert(u.get('stuff') === undefined)
|
t.assert(u.get('stuff') === undefined)
|
||||||
}
|
}
|
||||||
compare(users)
|
compare(users)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testSetAndClearOfMapProperties = tc => {
|
|
||||||
const { testConnector, users, map0 } = init(tc, { users: 1 })
|
|
||||||
map0.set('stuff', 'c0')
|
|
||||||
map0.set('otherstuff', 'c1')
|
|
||||||
map0.clear()
|
|
||||||
testConnector.flushAllMessages()
|
|
||||||
for (const user of users) {
|
|
||||||
const u = user.getMap('map')
|
|
||||||
t.assert(u.get('stuff') === undefined)
|
|
||||||
t.assert(u.get('otherstuff') === undefined)
|
|
||||||
t.assert(u.size === 0, `map size after clear is ${u.size}, expected 0`)
|
|
||||||
}
|
|
||||||
compare(users)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testSetAndClearOfMapPropertiesWithConflicts = tc => {
|
|
||||||
const { testConnector, users, map0, map1, map2, map3 } = init(tc, { users: 4 })
|
|
||||||
map0.set('stuff', 'c0')
|
|
||||||
map1.set('stuff', 'c1')
|
|
||||||
map1.set('stuff', 'c2')
|
|
||||||
map2.set('stuff', 'c3')
|
|
||||||
testConnector.flushAllMessages()
|
|
||||||
map0.set('otherstuff', 'c0')
|
|
||||||
map1.set('otherstuff', 'c1')
|
|
||||||
map2.set('otherstuff', 'c2')
|
|
||||||
map3.set('otherstuff', 'c3')
|
|
||||||
map3.clear()
|
|
||||||
testConnector.flushAllMessages()
|
|
||||||
for (const user of users) {
|
|
||||||
const u = user.getMap('map')
|
|
||||||
t.assert(u.get('stuff') === undefined)
|
|
||||||
t.assert(u.get('otherstuff') === undefined)
|
|
||||||
t.assert(u.size === 0, `map size after clear is ${u.size}, expected 0`)
|
|
||||||
}
|
|
||||||
compare(users)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
@@ -295,7 +200,7 @@ export const testGetAndSetOfMapPropertyWithThreeConflicts = tc => {
|
|||||||
map2.set('stuff', 'c3')
|
map2.set('stuff', 'c3')
|
||||||
testConnector.flushAllMessages()
|
testConnector.flushAllMessages()
|
||||||
for (const user of users) {
|
for (const user of users) {
|
||||||
const u = user.getMap('map')
|
var u = user.getMap('map')
|
||||||
t.compare(u.get('stuff'), 'c3')
|
t.compare(u.get('stuff'), 'c3')
|
||||||
}
|
}
|
||||||
compare(users)
|
compare(users)
|
||||||
@@ -318,7 +223,7 @@ export const testGetAndSetAndDeleteOfMapPropertyWithThreeConflicts = tc => {
|
|||||||
map3.delete('stuff')
|
map3.delete('stuff')
|
||||||
testConnector.flushAllMessages()
|
testConnector.flushAllMessages()
|
||||||
for (const user of users) {
|
for (const user of users) {
|
||||||
const u = user.getMap('map')
|
var u = user.getMap('map')
|
||||||
t.assert(u.get('stuff') === undefined)
|
t.assert(u.get('stuff') === undefined)
|
||||||
}
|
}
|
||||||
compare(users)
|
compare(users)
|
||||||
@@ -369,11 +274,11 @@ export const testObserversUsingObservedeep = tc => {
|
|||||||
/**
|
/**
|
||||||
* @type {Array<Array<string|number>>}
|
* @type {Array<Array<string|number>>}
|
||||||
*/
|
*/
|
||||||
const paths = []
|
const pathes = []
|
||||||
let calls = 0
|
let calls = 0
|
||||||
map0.observeDeep(events => {
|
map0.observeDeep(events => {
|
||||||
events.forEach(event => {
|
events.forEach(event => {
|
||||||
paths.push(event.path)
|
pathes.push(event.path)
|
||||||
})
|
})
|
||||||
calls++
|
calls++
|
||||||
})
|
})
|
||||||
@@ -381,35 +286,7 @@ export const testObserversUsingObservedeep = tc => {
|
|||||||
map0.get('map').set('array', new Y.Array())
|
map0.get('map').set('array', new Y.Array())
|
||||||
map0.get('map').get('array').insert(0, ['content'])
|
map0.get('map').get('array').insert(0, ['content'])
|
||||||
t.assert(calls === 3)
|
t.assert(calls === 3)
|
||||||
t.compare(paths, [[], ['map'], ['map', 'array']])
|
t.compare(pathes, [[], ['map'], ['map', 'array']])
|
||||||
compare(users)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testPathsOfSiblingEvents = tc => {
|
|
||||||
const { users, map0 } = init(tc, { users: 2 })
|
|
||||||
/**
|
|
||||||
* @type {Array<Array<string|number>>}
|
|
||||||
*/
|
|
||||||
const paths = []
|
|
||||||
let calls = 0
|
|
||||||
const doc = users[0]
|
|
||||||
map0.set('map', new Y.Map())
|
|
||||||
map0.get('map').set('text1', new Y.Text('initial'))
|
|
||||||
map0.observeDeep(events => {
|
|
||||||
events.forEach(event => {
|
|
||||||
paths.push(event.path)
|
|
||||||
})
|
|
||||||
calls++
|
|
||||||
})
|
|
||||||
doc.transact(() => {
|
|
||||||
map0.get('map').get('text1').insert(0, 'post-')
|
|
||||||
map0.get('map').set('text2', new Y.Text('new'))
|
|
||||||
})
|
|
||||||
t.assert(calls === 1)
|
|
||||||
t.compare(paths, [['map'], ['map', 'text1']])
|
|
||||||
compare(users)
|
compare(users)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -419,7 +296,7 @@ export const testPathsOfSiblingEvents = tc => {
|
|||||||
* @param {Object<string,any>} should
|
* @param {Object<string,any>} should
|
||||||
*/
|
*/
|
||||||
const compareEvent = (is, should) => {
|
const compareEvent = (is, should) => {
|
||||||
for (const key in should) {
|
for (var key in should) {
|
||||||
t.compare(should[key], is[key])
|
t.compare(should[key], is[key])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -458,30 +335,6 @@ export const testThrowsAddAndUpdateAndDeleteEvents = tc => {
|
|||||||
compare(users)
|
compare(users)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testThrowsDeleteEventsOnClear = tc => {
|
|
||||||
const { users, map0 } = init(tc, { users: 2 })
|
|
||||||
/**
|
|
||||||
* @type {Object<string,any>}
|
|
||||||
*/
|
|
||||||
let event = {}
|
|
||||||
map0.observe(e => {
|
|
||||||
event = e // just put it on event, should be thrown synchronously anyway
|
|
||||||
})
|
|
||||||
// set values
|
|
||||||
map0.set('stuff', 4)
|
|
||||||
map0.set('otherstuff', new Y.Array())
|
|
||||||
// clear
|
|
||||||
map0.clear()
|
|
||||||
compareEvent(event, {
|
|
||||||
keysChanged: new Set(['stuff', 'otherstuff']),
|
|
||||||
target: map0
|
|
||||||
})
|
|
||||||
compare(users)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
@@ -531,9 +384,9 @@ export const testChangeEvent = tc => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} _tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
export const testYmapEventExceptionsShouldCompleteTransaction = _tc => {
|
export const testYmapEventExceptionsShouldCompleteTransaction = tc => {
|
||||||
const doc = new Y.Doc()
|
const doc = new Y.Doc()
|
||||||
const map = doc.getMap('map')
|
const map = doc.getMap('map')
|
||||||
|
|
||||||
@@ -621,12 +474,12 @@ export const testYmapEventHasCorrectValueWhenSettingAPrimitiveFromOtherUser = tc
|
|||||||
const mapTransactions = [
|
const mapTransactions = [
|
||||||
function set (user, gen) {
|
function set (user, gen) {
|
||||||
const key = prng.oneOf(gen, ['one', 'two'])
|
const key = prng.oneOf(gen, ['one', 'two'])
|
||||||
const value = prng.utf16String(gen)
|
var value = prng.utf16String(gen)
|
||||||
user.getMap('map').set(key, value)
|
user.getMap('map').set(key, value)
|
||||||
},
|
},
|
||||||
function setType (user, gen) {
|
function setType (user, gen) {
|
||||||
const key = prng.oneOf(gen, ['one', 'two'])
|
const key = prng.oneOf(gen, ['one', 'two'])
|
||||||
const type = prng.oneOf(gen, [new Y.Array(), new Y.Map()])
|
var type = prng.oneOf(gen, [new Y.Array(), new Y.Map()])
|
||||||
user.getMap('map').set(key, type)
|
user.getMap('map').set(key, type)
|
||||||
if (type instanceof Y.Array) {
|
if (type instanceof Y.Array) {
|
||||||
type.insert(0, [1, 2, 3, 4])
|
type.insert(0, [1, 2, 3, 4])
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,34 +1,7 @@
|
|||||||
import { init, compare } from './testHelper.js'
|
import { init, compare } from './testHelper.js'
|
||||||
import * as Y from '../src/index.js'
|
import * as Y from '../src/index.js'
|
||||||
|
|
||||||
import * as t from 'lib0/testing'
|
import * as t from 'lib0/testing.js'
|
||||||
|
|
||||||
export const testCustomTypings = () => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ymap = ydoc.getMap()
|
|
||||||
/**
|
|
||||||
* @type {Y.XmlElement<{ num: number, str: string, [k:string]: object|number|string }>}
|
|
||||||
*/
|
|
||||||
const yxml = ymap.set('yxml', new Y.XmlElement('test'))
|
|
||||||
/**
|
|
||||||
* @type {number|undefined}
|
|
||||||
*/
|
|
||||||
const num = yxml.getAttribute('num')
|
|
||||||
/**
|
|
||||||
* @type {string|undefined}
|
|
||||||
*/
|
|
||||||
const str = yxml.getAttribute('str')
|
|
||||||
/**
|
|
||||||
* @type {object|number|string|undefined}
|
|
||||||
*/
|
|
||||||
const dtrn = yxml.getAttribute('dtrn')
|
|
||||||
const attrs = yxml.getAttributes()
|
|
||||||
/**
|
|
||||||
* @type {object|number|string|undefined}
|
|
||||||
*/
|
|
||||||
const any = attrs.shouldBeAny
|
|
||||||
console.log({ num, str, dtrn, attrs, any })
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
@@ -42,23 +15,6 @@ export const testSetProperty = tc => {
|
|||||||
compare(users)
|
compare(users)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} tc
|
|
||||||
*/
|
|
||||||
export const testHasProperty = tc => {
|
|
||||||
const { testConnector, users, xml0, xml1 } = init(tc, { users: 2 })
|
|
||||||
xml0.setAttribute('height', '10')
|
|
||||||
t.assert(xml0.hasAttribute('height'), 'Simple set+has works')
|
|
||||||
testConnector.flushAllMessages()
|
|
||||||
t.assert(xml1.hasAttribute('height'), 'Simple set+has works (remote)')
|
|
||||||
|
|
||||||
xml0.removeAttribute('height')
|
|
||||||
t.assert(!xml0.hasAttribute('height'), 'Simple set+remove+has works')
|
|
||||||
testConnector.flushAllMessages()
|
|
||||||
t.assert(!xml1.hasAttribute('height'), 'Simple set+remove+has works (remote)')
|
|
||||||
compare(users)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @param {t.TestCase} tc
|
* @param {t.TestCase} tc
|
||||||
*/
|
*/
|
||||||
@@ -117,107 +73,3 @@ export const testTreewalker = tc => {
|
|||||||
t.assert(xml0.querySelector('p') === paragraph1, 'querySelector found paragraph1')
|
t.assert(xml0.querySelector('p') === paragraph1, 'querySelector found paragraph1')
|
||||||
compare(users)
|
compare(users)
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testYtextAttributes = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const ytext = /** @type {Y.XmlText} */ (ydoc.get('', Y.XmlText))
|
|
||||||
ytext.observe(event => {
|
|
||||||
t.compare(event.changes.keys.get('test'), { action: 'add', oldValue: undefined })
|
|
||||||
})
|
|
||||||
ytext.setAttribute('test', 42)
|
|
||||||
t.compare(ytext.getAttribute('test'), 42)
|
|
||||||
t.compare(ytext.getAttributes(), { test: 42 })
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testSiblings = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const yxml = ydoc.getXmlFragment()
|
|
||||||
const first = new Y.XmlText()
|
|
||||||
const second = new Y.XmlElement('p')
|
|
||||||
yxml.insert(0, [first, second])
|
|
||||||
t.assert(first.nextSibling === second)
|
|
||||||
t.assert(second.prevSibling === first)
|
|
||||||
t.assert(first.parent === yxml)
|
|
||||||
t.assert(yxml.parent === null)
|
|
||||||
t.assert(yxml.firstChild === first)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testInsertafter = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const yxml = ydoc.getXmlFragment()
|
|
||||||
const first = new Y.XmlText()
|
|
||||||
const second = new Y.XmlElement('p')
|
|
||||||
const third = new Y.XmlElement('p')
|
|
||||||
|
|
||||||
const deepsecond1 = new Y.XmlElement('span')
|
|
||||||
const deepsecond2 = new Y.XmlText()
|
|
||||||
second.insertAfter(null, [deepsecond1])
|
|
||||||
second.insertAfter(deepsecond1, [deepsecond2])
|
|
||||||
|
|
||||||
yxml.insertAfter(null, [first, second])
|
|
||||||
yxml.insertAfter(second, [third])
|
|
||||||
|
|
||||||
t.assert(yxml.length === 3)
|
|
||||||
t.assert(second.get(0) === deepsecond1)
|
|
||||||
t.assert(second.get(1) === deepsecond2)
|
|
||||||
|
|
||||||
t.compareArrays(yxml.toArray(), [first, second, third])
|
|
||||||
|
|
||||||
t.fails(() => {
|
|
||||||
const el = new Y.XmlElement('p')
|
|
||||||
el.insertAfter(deepsecond1, [new Y.XmlText()])
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testClone = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const yxml = ydoc.getXmlFragment()
|
|
||||||
const first = new Y.XmlText('text')
|
|
||||||
const second = new Y.XmlElement('p')
|
|
||||||
const third = new Y.XmlElement('p')
|
|
||||||
yxml.push([first, second, third])
|
|
||||||
t.compareArrays(yxml.toArray(), [first, second, third])
|
|
||||||
const cloneYxml = yxml.clone()
|
|
||||||
ydoc.getArray('copyarr').insert(0, [cloneYxml])
|
|
||||||
t.assert(cloneYxml.length === 3)
|
|
||||||
t.compare(cloneYxml.toJSON(), yxml.toJSON())
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testFormattingBug = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const yxml = /** @type {Y.XmlText} */ (ydoc.get('', Y.XmlText))
|
|
||||||
const delta = [
|
|
||||||
{ insert: 'A', attributes: { em: {}, strong: {} } },
|
|
||||||
{ insert: 'B', attributes: { em: {} } },
|
|
||||||
{ insert: 'C', attributes: { em: {}, strong: {} } }
|
|
||||||
]
|
|
||||||
yxml.applyDelta(delta)
|
|
||||||
t.compare(yxml.toDelta(), delta)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {t.TestCase} _tc
|
|
||||||
*/
|
|
||||||
export const testElement = _tc => {
|
|
||||||
const ydoc = new Y.Doc()
|
|
||||||
const yxmlel = ydoc.getXmlElement()
|
|
||||||
const text1 = new Y.XmlText('text1')
|
|
||||||
const text2 = new Y.XmlText('text2')
|
|
||||||
yxmlel.insert(0, [text1, text2])
|
|
||||||
t.compareArrays(yxmlel.toArray(), [text1, text2])
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,21 +1,64 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"target": "ES2021",
|
/* Basic Options */
|
||||||
"lib": ["ES2021", "dom"],
|
"target": "es2018",
|
||||||
"module": "node16",
|
"lib": ["es2018", "dom"], /* Specify library files to be included in the compilation. */
|
||||||
"allowJs": true,
|
"allowJs": true, /* Allow javascript files to be compiled. */
|
||||||
"checkJs": true,
|
"checkJs": true, /* Report errors in .js files. */
|
||||||
"declaration": true,
|
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||||
"declarationMap": true,
|
"declaration": true, /* Generates corresponding '.d.ts' file. */
|
||||||
"outDir": "./dist",
|
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
|
||||||
"baseUrl": "./",
|
// "sourceMap": true, /* Generates corresponding '.map' file. */
|
||||||
|
// "outFile": "./dist/yjs.js", /* Concatenate and emit output to single file. */
|
||||||
|
"outDir": "./dist", /* Redirect output structure to the directory. */
|
||||||
|
"rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||||
|
// "composite": true, /* Enable project compilation */
|
||||||
|
// "removeComments": true, /* Do not emit comments to output. */
|
||||||
|
// "noEmit": true, /* Do not emit outputs. */
|
||||||
|
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
|
||||||
|
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
|
||||||
|
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
|
||||||
|
|
||||||
|
/* Strict Type-Checking Options */
|
||||||
|
"strict": true, /* Enable all strict type-checking options. */
|
||||||
|
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||||
"emitDeclarationOnly": true,
|
"emitDeclarationOnly": true,
|
||||||
"strict": true,
|
// "strictNullChecks": true, /* Enable strict null checks. */
|
||||||
"noImplicitAny": true,
|
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
|
||||||
"moduleResolution": "nodenext",
|
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
|
||||||
|
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
|
||||||
|
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
|
||||||
|
|
||||||
|
/* Additional Checks */
|
||||||
|
// "noUnusedLocals": true, /* Report errors on unused locals. */
|
||||||
|
// "noUnusedParameters": true, /* Report errors on unused parameters. */
|
||||||
|
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
|
||||||
|
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
|
||||||
|
|
||||||
|
/* Module Resolution Options */
|
||||||
|
"moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
|
||||||
|
"baseUrl": "./", /* Base directory to resolve non-absolute module names. */
|
||||||
"paths": {
|
"paths": {
|
||||||
"yjs": ["./src/index.js"]
|
"yjs": ["./src/index.js"]
|
||||||
}
|
}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||||
|
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||||
|
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||||
|
// "types": [], /* Type declaration files to be included in compilation. */
|
||||||
|
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||||
|
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||||
|
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||||
|
|
||||||
|
/* Source Map Options */
|
||||||
|
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
|
||||||
|
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||||
|
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
|
||||||
|
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
|
||||||
|
|
||||||
|
/* Experimental Options */
|
||||||
|
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
|
||||||
|
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
|
||||||
|
// "maxNodeModuleJsDepth": 0,
|
||||||
|
// "types": ["./src/utils/typedefs.js"]
|
||||||
},
|
},
|
||||||
"include": ["./src/**/*.js", "./tests/**/*.js"]
|
"include": ["./src/**/*.js", "./tests/**/*.js"]
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user