changing the connector adapter to serve a maximum of 30 operations
parent 00458bab58
commit 592a0969d3
File diff suppressed because one or more lines are too long
@@ -50,5 +50,10 @@ window.onload = function(){
     }
   }
 });
+connector.whenSynced(function(){
+  if(yatta.val("textfield") == null){
     yatta.val("textfield","stuff", "mutable");
+  }
+})
+
 };
@@ -26,21 +26,25 @@ adaptConnector = (connector, engine, HB, execution_listener)->
       state_vector[s.user] = s.state
     state_vector

-  sendStateVector = ()->
+  getStateVector = ()->
     encode_state_vector HB.getOperationCounter()

-  sendHb = (v)->
+  getHB = (v)->
     state_vector = parse_state_vector v
+    hb = HB._encode state_vector
+    for o in hb
+      o.fromHB = "true" # execute immediately
     json =
-      hb: HB._encode(state_vector)
+      hb: hb
       state_vector: encode_state_vector HB.getOperationCounter()
     json

-  applyHb = (res)->
-    HB.renewStateVector parse_state_vector res.state_vector
-    engine.applyOpsCheckDouble res.hb
+  applyHB = (hb)->
+    engine.applyOp hb

-  connector.whenSyncing sendStateVector, sendHb, applyHb
+  connector.getStateVector = getStateVector
+  connector.getHB = getHB
+  connector.applyHB = applyHB

   connector.whenReceiving (sender, op)->
     if op.uid.creator isnt HB.getUserId()
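For orientation, a minimal sketch of how a connector might drive the three hooks that adaptConnector now exposes. Only getStateVector, getHB, and applyHB come from the diff above; the syncWith helper and its two arguments are assumptions for illustration, and a real connector would ship these values over its transport instead of calling the peer directly.

# Hypothetical one-way sync between two already-adapted connectors.
syncWith = (local, remote) ->
  # Ask the peer which operations it already knows about.
  remote_state_vector = remote.getStateVector()
  # Encode everything the peer is missing; getHB tags each op with fromHB = "true".
  missing = local.getHB remote_state_vector
  # Hand the missing operations to the peer; applyHB feeds them to engine.applyOp.
  remote.applyHB missing.hb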
@@ -67,10 +67,12 @@ class Engine
     for op_json in op_json_array
       # $parse_and_execute will return false if $o_json was parsed and executed, otherwise the parsed operadion
       o = @parseOperation op_json
+      if op_json.fromHB?
+        o.fromHB = op_json.fromHB
       # @HB.addOperation o
       if @HB.getOperation(o)?
         # nop
-      else if (not @HB.isExpectedOperation(o)) or (not o.execute())
+      else if ((not @HB.isExpectedOperation(o)) and (not o.fromHB?)) or (not o.execute())
         @unprocessed_ops.push o
         window?.unprocessed_types.push o.type # TODO: delete this
     @tryUnprocessed()
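The fields referenced in these hunks (uid.creator, uid.op_number, type, fromHB) suggest the following rough shape for an operation arriving via a history-buffer sync; the concrete values below are invented for illustration only.

# Illustrative operation JSON as it might leave getHB (values are made up).
op_json =
  uid:
    creator: "user-a"   # origin of the operation
    op_number: 7        # per-user operation counter
  type: "TextInsert"    # operation type, as pushed to unprocessed_types
  fromHB: "true"        # added in this commit: execute immediately,
                        # bypassing the isExpectedOperation gate in applyOp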
@@ -86,7 +88,7 @@ class Engine
     for op in @unprocessed_ops
       if @HB.getOperation(op)?
         # nop
-      else if (not @HB.isExpectedOperation(op)) or (not op.execute())
+      else if (not @HB.isExpectedOperation(op) and (not op.fromHB?)) or (not op.execute())
         unprocessed.push op
     @unprocessed_ops = unprocessed
     if @unprocessed_ops.length is old_length
@@ -18,7 +18,7 @@ class HistoryBuffer
     @garbage = [] # Will be cleaned on next call of garbageCollector
     @trash = [] # Is deleted. Wait until it is not used anymore.
     @performGarbageCollection = true
-    @garbageCollectTimeout = 20000
+    @garbageCollectTimeout = 30000
     @reserved_identifier_counter = 0
     setTimeout @emptyGarbage, @garbageCollectTimeout

@@ -173,7 +173,7 @@ class HistoryBuffer
       @buffer[o.uid.creator] = {}
     if @buffer[o.uid.creator][o.uid.op_number]?
       throw new Error "You must not overwrite operations!"
-    if (o.uid.op_number.constructor isnt String) and (not @isExpectedOperation(o)) # you already do this in the engine, so delete it here!
+    if (o.uid.op_number.constructor isnt String) and (not @isExpectedOperation(o)) and (not o.fromHB?) # you already do this in the engine, so delete it here!
       throw new Error "this operation was not expected!"
     @addToCounter(o)
     @buffer[o.uid.creator][o.uid.op_number] = o
File diff suppressed because one or more lines are too long