changing the connector adapter to serve a maximum of 30 operations
This commit is contained in:
parent 00458bab58
commit 592a0969d3
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -50,5 +50,10 @@ window.onload = function(){
       }
     }
   });
-  yatta.val("textfield","stuff", "mutable");
+  connector.whenSynced(function(){
+    if(yatta.val("textfield") == null){
+      yatta.val("textfield","stuff", "mutable");
+    }
+  })
+
 };
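
The hunk above changes the example page so the default value for "textfield" is only written after the connector reports the initial sync, and only when no remote peer has delivered a value yet; writing it unconditionally could overwrite already-synced state. A minimal CoffeeScript sketch of the same pattern, assuming `connector.whenSynced cb` fires once the first sync finished and that `yatta.val name` without a value argument is a read:

setDefault = (yatta, connector, name, value) ->
  connector.whenSynced ->
    # only write the default when no peer has set a value yet,
    # so a freshly joining client does not clobber synced state
    if not yatta.val(name)?
      yatta.val name, value, "mutable"

# hypothetical usage: setDefault yatta, connector, "textfield", "stuff"
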
@@ -26,21 +26,25 @@ adaptConnector = (connector, engine, HB, execution_listener)->
       state_vector[s.user] = s.state
     state_vector
 
-  sendStateVector = ()->
+  getStateVector = ()->
     encode_state_vector HB.getOperationCounter()
 
-  sendHb = (v)->
+  getHB = (v)->
     state_vector = parse_state_vector v
+    hb = HB._encode state_vector
+    for o in hb
+      o.fromHB = "true" # execute immediately
     json =
-      hb: HB._encode(state_vector)
+      hb: hb
       state_vector: encode_state_vector HB.getOperationCounter()
     json
 
-  applyHb = (res)->
-    HB.renewStateVector parse_state_vector res.state_vector
-    engine.applyOpsCheckDouble res.hb
+  applyHB = (hb)->
+    engine.applyOp hb
 
-  connector.whenSyncing sendStateVector, sendHb, applyHb
+  connector.getStateVector = getStateVector
+  connector.getHB = getHB
+  connector.applyHB = applyHB
 
   connector.whenReceiving (sender, op)->
     if op.uid.creator isnt HB.getUserId()
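
With this hunk the adapter no longer pushes sync messages through `connector.whenSyncing`; instead it exposes `getStateVector`, `getHB`, and `applyHB` on the connector, and every operation taken from the history buffer is tagged with `fromHB = "true"` so the receiving engine executes it immediately. A sketch of how a connector built on that surface might run an initial sync between two peers, purely for illustration (the `syncWith` helper and the in-memory peers are assumptions, not part of this commit):

# peerA asks peerB for everything it is missing, then applies the answer
syncWith = (peerA, peerB) ->
  state_vector = peerA.getStateVector()   # what peerA already has
  answer = peerB.getHB state_vector       # { hb: [...], state_vector: ... }, ops tagged fromHB
  peerA.applyHB answer.hb                 # hand the missing operations to the local engine
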
@@ -67,10 +67,12 @@ class Engine
     for op_json in op_json_array
       # $parse_and_execute will return false if $o_json was parsed and executed, otherwise the parsed operadion
       o = @parseOperation op_json
+      if op_json.fromHB?
+        o.fromHB = op_json.fromHB
       # @HB.addOperation o
       if @HB.getOperation(o)?
         # nop
-      else if (not @HB.isExpectedOperation(o)) or (not o.execute())
+      else if ((not @HB.isExpectedOperation(o)) and (not o.fromHB?)) or (not o.execute())
         @unprocessed_ops.push o
         window?.unprocessed_types.push o.type # TODO: delete this
     @tryUnprocessed()
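
The new `fromHB` handling means an incoming operation is only buffered as unprocessed when it is out of order and did not come from a history buffer, or when executing it fails. The same rule factored into a standalone predicate, as a sketch (`shouldDefer`, its parameters, and the sample calls are assumptions, not code from this repository):

shouldDefer = (isExpected, fromHB, execute) ->
  # fromHB operations skip the "is this the next expected op_number?" check
  ((not isExpected) and (not fromHB)) or (not execute())

# shouldDefer(false, true,  -> true)   # false: HB operations run immediately
# shouldDefer(false, false, -> true)   # true:  out-of-order op gets buffered
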
@@ -86,7 +88,7 @@ class Engine
     for op in @unprocessed_ops
       if @HB.getOperation(op)?
         # nop
-      else if (not @HB.isExpectedOperation(op)) or (not op.execute())
+      else if (not @HB.isExpectedOperation(op) and (not op.fromHB?)) or (not op.execute())
         unprocessed.push op
     @unprocessed_ops = unprocessed
     if @unprocessed_ops.length is old_length
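
This is the same condition applied in the retry pass over `@unprocessed_ops`, which keeps re-running until a full pass makes no progress (the length stops shrinking). A standalone sketch of that fixed-point retry loop, with `tryApply` standing in for the real lookup-and-execute logic (an assumption, not the project's code):

retryUnprocessed = (ops, tryApply) ->
  loop
    old_length = ops.length
    ops = (op for op in ops when not tryApply op)   # keep only what still failed
    break if ops.length is old_length or ops.length is 0
  ops   # whatever could still not be applied
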
@@ -18,7 +18,7 @@ class HistoryBuffer
     @garbage = [] # Will be cleaned on next call of garbageCollector
     @trash = [] # Is deleted. Wait until it is not used anymore.
     @performGarbageCollection = true
-    @garbageCollectTimeout = 20000
+    @garbageCollectTimeout = 30000
     @reserved_identifier_counter = 0
     setTimeout @emptyGarbage, @garbageCollectTimeout
 
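
Beyond raising the garbage-collection interval from 20000 ms to 30000 ms, the surrounding lines hint at a two-phase cleanup: `@garbage` is swept on the next collector run, while `@trash` holds items until they are no longer referenced. A rough sketch of that pattern, assuming (this is not taken from the real HistoryBuffer) that the sweep re-arms its own timer:

class TwoPhaseCollector
  constructor: (@interval = 30000) ->
    @garbage = []   # marked: collected on the next sweep
    @trash = []     # collected: kept until nothing references it anymore
    setTimeout @emptyGarbage, @interval

  emptyGarbage: =>
    @trash = @trash.concat @garbage       # demote marked items
    @garbage = []
    setTimeout @emptyGarbage, @interval   # schedule the next sweep
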
@@ -173,7 +173,7 @@ class HistoryBuffer
       @buffer[o.uid.creator] = {}
     if @buffer[o.uid.creator][o.uid.op_number]?
       throw new Error "You must not overwrite operations!"
-    if (o.uid.op_number.constructor isnt String) and (not @isExpectedOperation(o)) # you already do this in the engine, so delete it here!
+    if (o.uid.op_number.constructor isnt String) and (not @isExpectedOperation(o)) and (not o.fromHB?) # you already do this in the engine, so delete it here!
       throw new Error "this operation was not expected!"
     @addToCounter(o)
     @buffer[o.uid.creator][o.uid.op_number] = o
File diff suppressed because one or more lines are too long