remove bare for .. of iterations - fixes #220

Kevin Jahns 2020-07-12 20:04:56 +02:00
parent 5414ac7f6e
commit bb45816f05
10 changed files with 38 additions and 40 deletions
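The same rewrite is applied across all ten files. As an illustrative sketch only (not code from the commit; the attrs map and the findKey helper are invented names): where the loop body always runs to completion, a bare for (const [key, value] of map) is replaced with map.forEach((value, key) => { ... }), and the callback's (value, key) order is the reverse of the [key, value] tuples the iterator yields. Where the loop needs an early return or break, forEach cannot exit early, so the for..of is kept and the .entries() call is spelled out explicitly, presumably so that no bare Map iteration remains (see #220 for the motivation).

// Illustrative sketch, not taken from the Yjs sources: the two variants of the rewrite.
const attrs = new Map([['bold', true], ['italic', null]])

// Before: bare iteration over the Map via the iterator protocol.
//   for (const [key, value] of attrs) { ... }
// After: Map.prototype.forEach, whose callback receives (value, key).
const json = {}
attrs.forEach((value, key) => {
  if (value !== null) {
    json[key] = value
  }
})

// When the loop has to exit early, forEach cannot break out of the iteration,
// so for..of stays but iterates map.entries() explicitly.
const findKey = (map, needle) => {
  for (const [key, value] of map.entries()) {
    if (value === needle) {
      return key
    }
  }
  return null
}

console.log(json)                  // { bold: true }
console.log(findKey(attrs, true))  // 'bold'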

View File

@ -564,11 +564,11 @@ export const typeMapGetAll = (parent) => {
    * @type {Object<string,any>}
    */
   const res = {}
-  for (const [key, value] of parent._map) {
+  parent._map.forEach((value, key) => {
     if (!value.deleted) {
       res[key] = value.content.getContent()[value.length - 1]
     }
-  }
+  })
   return res
 }

View File

@ -74,9 +74,9 @@ export class YMap extends AbstractType {
    */
   _integrate (y, item) {
     super._integrate(y, item)
-    for (const [key, value] of /** @type {Map<string, any>} */ (this._prelimContent)) {
+    ;/** @type {Map<string, any>} */ (this._prelimContent).forEach((value, key) => {
       this.set(key, value)
-    }
+    })
     this._prelimContent = null
   }
@ -104,12 +104,12 @@ export class YMap extends AbstractType {
      * @type {Object<string,T>}
      */
     const map = {}
-    for (const [key, item] of this._map) {
+    this._map.forEach((item, key) => {
       if (!item.deleted) {
         const v = item.content.getContent()[item.length - 1]
         map[key] = v instanceof AbstractType ? v.toJSON() : v
       }
-    }
+    })
     return map
   }
@ -159,11 +159,11 @@ export class YMap extends AbstractType {
      * @type {Object<string,T>}
      */
     const map = {}
-    for (const [key, item] of this._map) {
+    this._map.forEach((item, key) => {
       if (!item.deleted) {
         f(item.content.getContent()[item.length - 1], key, this)
       }
-    }
+    })
     return map
   }

View File

@ -149,10 +149,10 @@ const insertNegatedAttributes = (transaction, parent, currPos, negatedAttributes
   }
   const doc = transaction.doc
   const ownClientId = doc.clientID
-  for (const [key, val] of negatedAttributes) {
+  negatedAttributes.forEach((val, key) => {
     left = new Item(createID(ownClientId, getState(doc.store, ownClientId)), left, left && left.lastId, right, right && right.id, parent, null, new ContentFormat(key, val))
     left.integrate(transaction, 0)
-  }
+  })
   currPos.left = left
   currPos.right = right
 }
@ -244,11 +244,11 @@ const insertAttributes = (transaction, parent, currPos, currentAttributes, attri
  * @function
  **/
 const insertText = (transaction, parent, currPos, currentAttributes, text, attributes) => {
-  for (const [key] of currentAttributes) {
+  currentAttributes.forEach((val, key) => {
     if (attributes[key] === undefined) {
       attributes[key] = null
     }
-  }
+  })
   const doc = transaction.doc
   const ownClientId = doc.clientID
   minimizeAttributeChanges(currPos, currentAttributes, attributes)
@ -567,11 +567,11 @@ export class YTextEvent extends YEvent {
                 op = { insert }
                 if (currentAttributes.size > 0) {
                   op.attributes = {}
-                  for (const [key, value] of currentAttributes) {
+                  currentAttributes.forEach((value, key) => {
                     if (value !== null) {
                       op.attributes[key] = value
                     }
-                  }
+                  })
                 }
                 insert = ''
                 break
@ -771,7 +771,7 @@ export class YText extends AbstractType {
     if (!transaction.local) {
       // check if another formatting item was inserted
       let foundFormattingItem = false
-      for (const [client, afterClock] of transaction.afterState) {
+      for (const [client, afterClock] of transaction.afterState.entries()) {
         const clock = transaction.beforeState.get(client) || 0
         if (afterClock === clock) {
           continue
@ -908,10 +908,10 @@ export class YText extends AbstractType {
         */
        const attributes = {}
        let addAttributes = false
-        for (const [key, value] of currentAttributes) {
+        currentAttributes.forEach((value, key) => {
          addAttributes = true
          attributes[key] = value
-        }
+        })
        /**
         * @type {Object<string,any>}
         */
@ -965,9 +965,9 @@ export class YText extends AbstractType {
               if (currentAttributes.size > 0) {
                 const attrs = /** @type {Object<string,any>} */ ({})
                 op.attributes = attrs
-                for (const [key, value] of currentAttributes) {
+                currentAttributes.forEach((value, key) => {
                   attrs[key] = value
-                }
+                })
               }
               ops.push(op)
               break

View File

@ -180,9 +180,9 @@ export class Doc extends Observable {
      */
     const doc = {}
-    for (const [k, v] of this.share.entries()) {
-      doc[k] = v.toJSON()
-    }
+    this.share.forEach((value, key) => {
+      doc[key] = value.toJSON()
+    })
     return doc
   }

View File

@ -81,7 +81,7 @@ export const readID = decoder =>
  */
 export const findRootTypeKey = type => {
   // @ts-ignore _y must be defined, otherwise unexpected case
-  for (const [key, value] of type.doc.share) {
+  for (const [key, value] of type.doc.share.entries()) {
     if (value === type) {
       return key
     }

View File

@ -132,7 +132,7 @@ export class PermanentUserData {
    * @return {string | null}
    */
   getUserByDeletedId (id) {
-    for (const [userDescription, ds] of this.dss) {
+    for (const [userDescription, ds] of this.dss.entries()) {
       if (isDeleted(ds, id)) {
         return userDescription
       }

View File

@ -51,12 +51,12 @@ export const equalSnapshots = (snap1, snap2) => {
   if (sv1.size !== sv2.size || ds1.size !== ds2.size) {
     return false
   }
-  for (const [key, value] of sv1) {
+  for (const [key, value] of sv1.entries()) {
     if (sv2.get(key) !== value) {
       return false
     }
   }
-  for (const [client, dsitems1] of ds1) {
+  for (const [client, dsitems1] of ds1.entries()) {
     const dsitems2 = ds2.get(client) || []
     if (dsitems1.length !== dsitems2.length) {
       return false

View File

@ -11,7 +11,7 @@ import {
   Item,
   generateNewClientId,
   createID,
-  AbstractUpdateEncoder, GC, StructStore, UpdateEncoderV1, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line
+  AbstractUpdateEncoder, GC, StructStore, UpdateEncoderV2, DefaultUpdateEncoder, AbstractType, AbstractStruct, YEvent, Doc // eslint-disable-line
 } from '../internals.js'
 
 import * as map from 'lib0/map.js'
@ -19,8 +19,6 @@ import * as math from 'lib0/math.js'
 import * as set from 'lib0/set.js'
 import * as logging from 'lib0/logging.js'
 import { callAll } from 'lib0/function.js'
-import { DefaultUpdateEncoder } from './encoding.js'
-import { UpdateEncoderV2 } from './UpdateEncoder.js'
 
 /**
  * A transaction is created for every change on the Yjs model. It is possible
@ -171,7 +169,7 @@ const tryToMergeWithLeft = (structs, pos) => {
  * @param {function(Item):boolean} gcFilter
  */
 const tryGcDeleteSet = (ds, store, gcFilter) => {
-  for (const [client, deleteItems] of ds.clients) {
+  for (const [client, deleteItems] of ds.clients.entries()) {
     const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
     for (let di = deleteItems.length - 1; di >= 0; di--) {
       const deleteItem = deleteItems[di]
@ -200,7 +198,7 @@ const tryGcDeleteSet = (ds, store, gcFilter) => {
 const tryMergeDeleteSet = (ds, store) => {
   // try to merge deleted / gc'd items
   // merge from right to left for better efficiecy and so we don't miss any merge targets
-  for (const [client, deleteItems] of ds.clients) {
+  ds.clients.forEach((deleteItems, client) => {
     const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
     for (let di = deleteItems.length - 1; di >= 0; di--) {
       const deleteItem = deleteItems[di]
@ -214,7 +212,7 @@ const tryMergeDeleteSet = (ds, store) => {
         tryToMergeWithLeft(structs, si)
       }
     }
-  }
+  })
 }
 
 /**
@ -292,7 +290,7 @@ const cleanupTransactions = (transactionCleanups, i) => {
       tryMergeDeleteSet(ds, store)
 
       // on all affected store.clients props, try to merge
-      for (const [client, clock] of transaction.afterState) {
+      transaction.afterState.forEach((clock, client) => {
         const beforeClock = transaction.beforeState.get(client) || 0
         if (beforeClock !== clock) {
           const structs = /** @type {Array<GC|Item>} */ (store.clients.get(client))
@ -302,7 +300,7 @@ const cleanupTransactions = (transactionCleanups, i) => {
             tryToMergeWithLeft(structs, i)
           }
         }
-      }
+      })
       // try to merge mergeStructs
       // @todo: it makes more sense to transform mergeStructs to a DS, sort it, and merge from right to left
       // but at the moment DS does not handle duplicates

View File

@ -290,7 +290,7 @@ export const writeStructsFromTransaction = (encoder, transaction) => writeClient
  */
 const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => {
   const pendingClientsStructRefs = store.pendingClientsStructRefs
-  for (const [client, structRefs] of clientsStructsRefs) {
+  clientsStructsRefs.forEach((structRefs, client) => {
     const pendingStructRefs = pendingClientsStructRefs.get(client)
     if (pendingStructRefs === undefined) {
       pendingClientsStructRefs.set(client, { refs: structRefs, i: 0 })
@ -303,7 +303,7 @@ const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => {
       pendingStructRefs.i = 0
       pendingStructRefs.refs = merged.sort((r1, r2) => r1.id.clock - r2.id.clock)
     }
-  }
+  })
 }
 
 /**
@ -311,14 +311,14 @@ const mergeReadStructsIntoPendingReads = (store, clientsStructsRefs) => {
  */
 const cleanupPendingStructs = pendingClientsStructRefs => {
   // cleanup pendingClientsStructs if not fully finished
-  for (const [client, refs] of pendingClientsStructRefs) {
+  pendingClientsStructRefs.forEach((refs, client) => {
     if (refs.i === refs.refs.length) {
       pendingClientsStructRefs.delete(client)
     } else {
       refs.refs.splice(0, refs.i)
       refs.i = 0
     }
-  }
+  })
 }
 
 /**

View File

@ -362,7 +362,7 @@ export const compareStructStores = (ss1, ss2) => {
  */
 export const compareDS = (ds1, ds2) => {
   t.assert(ds1.clients.size === ds2.clients.size)
-  for (const [client, deleteItems1] of ds1.clients) {
+  ds1.clients.forEach((deleteItems1, client) => {
     const deleteItems2 = /** @type {Array<DeleteItem>} */ (ds2.clients.get(client))
     t.assert(deleteItems2 !== undefined && deleteItems1.length === deleteItems2.length)
     for (let i = 0; i < deleteItems1.length; i++) {
@ -372,7 +372,7 @@ export const compareDS = (ds1, ds2) => {
         t.fail('DeleteSets dont match')
       }
     }
-  }
+  })
 }
 
 /**