92 changes: 46 additions & 46 deletions verified-node-replication/src/exec/context.rs
@@ -444,38 +444,38 @@ pub open spec fn inv(&self, v: u64, tid: nat, cell: PCell<PendingOperation<DT>>,
&&& (v == 1 ==> self.slots.value().is_Request() || self.slots.value().is_InProgress())

&&& (self.slots.value().is_Empty() ==> {
&&& self.update.is_None()
&&& self.batch_perms.is_None()
&&& self.update.is_none()
&&& self.batch_perms.is_none()
})

&&& (self.slots.value().is_Request() ==> {
&&& self.update.is_Some()
&&& self.update.get_Some_0().value().is_Init()
&&& self.update.get_Some_0().key() == self.slots.value().get_ReqId()
&&& self.update.get_Some_0().instance_id() == inst.id()

&&& self.batch_perms.is_Some()
&&& self.batch_perms.get_Some_0().mem_contents().is_init()
&&& self.batch_perms.get_Some_0()@.pcell == cell.id()
&&& self.batch_perms.get_Some_0().mem_contents().value().op == self.update.get_Some_0().value().get_Init_op()
&&& self.update matches Some(update)
&&& update.value().is_Init()
&&& update.key() == self.slots.value().get_ReqId()
&&& update.instance_id() == inst.id()

&&& self.batch_perms matches Some(batch_perms)
&&& batch_perms.mem_contents().is_init()
&&& batch_perms@.pcell == cell.id()
&&& batch_perms.mem_contents().value().op == update.value().get_Init_op()
})

&&& (self.slots.value().is_InProgress() ==> {
&&& self.update.is_None()
&&& self.batch_perms.is_None()
&&& self.update.is_none()
&&& self.batch_perms.is_none()
})

&&& (self.slots.value().is_Response() ==> {
&&& self.update.is_Some()
&&& self.update.get_Some_0().value().is_Done()
&&& self.update.get_Some_0().key() == self.slots.value().get_ReqId()
&&& self.update.get_Some_0().instance_id() == inst.id()

&&& self.batch_perms.is_Some()
&&& self.batch_perms.get_Some_0().mem_contents().is_init()
&&& self.batch_perms.get_Some_0()@.pcell == cell.id()
&&& self.batch_perms.get_Some_0().mem_contents().value().resp.is_Some()
&&& self.batch_perms.get_Some_0().mem_contents().value().resp.get_Some_0() == self.update.get_Some_0().value().get_Done_ret()
&&& self.update matches Some(update)
&&& update.value().is_Done()
&&& update.key() == self.slots.value().get_ReqId()
&&& update.instance_id() == inst.id()

&&& self.batch_perms matches Some(batch_perms)
&&& batch_perms.mem_contents().is_init()
&&& batch_perms@.pcell == cell.id()
&&& batch_perms.mem_contents().value().resp matches Some(resp)
&&& resp == update.value().get_Done_ret()
})
}
}
@@ -499,30 +499,30 @@ impl<DT: Dispatch> FCClientRequestResponseGhost<DT> {
fc_inst: FlatCombiner::Instance,
inst: UnboundedLog::Instance<DT>,
) -> bool {
&&& self.local_updates.is_Some()
&&& self.local_updates.get_Some_0().instance_id() == inst.id()
&&& self.local_updates.get_Some_0().value().is_Init()
&&& self.local_updates.get_Some_0().value().get_Init_op() == op
&&& self.batch_perms.is_Some()
&&& self.batch_perms.get_Some_0()@.pcell == self.cell_id
&&& self.local_updates matches Some(local_updates)
&&& local_updates.instance_id() == inst.id()
&&& local_updates.value().is_Init()
&&& local_updates.value().get_Init_op() == op
&&& self.batch_perms matches Some(batch_perms)
&&& batch_perms@.pcell == self.cell_id
&&& self.cell_id == batch_cell
&&& self.batch_perms.get_Some_0().mem_contents().is_uninit()
&&& batch_perms.mem_contents().is_uninit()
&&& self.fc_clients.instance_id() == fc_inst.id()
&&& self.fc_clients.key() == tid
&&& self.fc_clients.value().is_Idle()
}

pub open spec fn enqueue_op_post(&self, pre: FCClientRequestResponseGhost<DT>) -> bool
recommends
pre.local_updates.is_Some(),
pre.local_updates.is_some(),
{
&&& self.fc_clients.value().is_Waiting()
&&& self.fc_clients.value().get_Waiting_0() == pre.local_updates.get_Some_0().key()
&&& self.fc_clients.value().get_Waiting_0() == pre.local_updates.unwrap().key()
&&& self.fc_clients.instance_id() == pre.fc_clients.instance_id()
&&& self.fc_clients.key() == pre.fc_clients.key()
&&& self.cell_id == pre.cell_id
&&& self.batch_perms.is_None()
&&& self.local_updates.is_None()
&&& self.batch_perms.is_none()
&&& self.local_updates.is_none()
}

pub open spec fn dequeue_resp_pre(
@@ -534,8 +534,8 @@ impl<DT: Dispatch> FCClientRequestResponseGhost<DT> {
&&& self.fc_clients.key() == tid
&&& self.fc_clients.instance_id() == fc_inst.id()
&&& self.fc_clients.value().is_Waiting()
&&& self.batch_perms.is_None()
&&& self.local_updates.is_None()
&&& self.batch_perms.is_none()
&&& self.local_updates.is_none()
&&& self.cell_id == batch_cell
}

@@ -545,21 +545,21 @@ impl<DT: Dispatch> FCClientRequestResponseGhost<DT> {
ret: Option<DT::Response>,
inst: UnboundedLog::Instance<DT>,
) -> bool {
&&& ret.is_Some() ==> {
&&& ret matches Some(ret) ==> {
&&& self.cell_id == pre.cell_id
&&& self.batch_perms.is_Some()
&&& self.batch_perms.get_Some_0().mem_contents().is_uninit()
&&& self.batch_perms.get_Some_0()@.pcell == self.cell_id
&&& self.local_updates.is_Some()
&&& self.local_updates.get_Some_0().instance_id() == inst.id()
&&& self.local_updates.get_Some_0().value().is_Done()
&&& self.local_updates.get_Some_0().key() == pre.fc_clients.value().get_Waiting_0()
&&& self.local_updates.get_Some_0().value().get_Done_ret() == ret.get_Some_0()
&&& self.batch_perms matches Some(batch_perms)
&&& batch_perms.mem_contents().is_uninit()
&&& batch_perms@.pcell == self.cell_id
&&& self.local_updates matches Some(local_updates)
&&& local_updates.instance_id() == inst.id()
&&& local_updates.value().is_Done()
&&& local_updates.key() == pre.fc_clients.value().get_Waiting_0()
&&& local_updates.value().get_Done_ret() == ret
&&& self.fc_clients.instance_id() == pre.fc_clients.instance_id()
&&& self.fc_clients.key() == pre.fc_clients.key()
&&& self.fc_clients.value().is_Idle()
}
&&& ret.is_None() ==> {
&&& ret.is_none() ==> {
&&& self == pre
}
}
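The pattern in this file, and across the rest of the diff, is a mechanical migration: the auto-generated variant accessors (`is_Some()`, `get_Some_0()`, `is_None()`, `is_Ok()`) are replaced with the std-style lowercase methods (`is_some()`, `unwrap()`, `is_none()`, `is_ok()`), and where a payload is used by several conjuncts it is now bound once with `matches Some(x)`. The sketch below is illustration only (plain Rust, not Verus, and not part of this PR), showing the std-style `Option` API that the rewritten conjuncts mirror; in Verus spec code, `opt matches Some(v)` additionally makes `v` available to the following `&&&` clauses.

```rust
// Illustration only: std-style Option handling, analogous to the rewritten spec code.
fn classify(opt: Option<u64>) -> &'static str {
    // Old accessor style in the spec: opt.is_Some() && opt.get_Some_0() == 1
    // New std-style methods:
    if opt.is_some() && opt.unwrap() == 1 {
        return "one";
    }
    // Binding style: Verus writes `opt matches Some(v)` as a conjunct; plain Rust
    // gets the same effect with `if let`.
    if let Some(v) = opt {
        if v > 1 {
            return "greater than one";
        }
    }
    "none or zero"
}

fn main() {
    assert_eq!(classify(Some(1)), "one");
    assert_eq!(classify(Some(7)), "greater than one");
    assert_eq!(classify(None), "none or zero");
}
```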
14 changes: 7 additions & 7 deletions verified-node-replication/src/exec/log.rs
@@ -273,10 +273,10 @@ impl<DT: Dispatch> NrLog<DT> {
-log_size <= logical_log_idx <= 0,
slog_entries.len() == log_idx,
cell_ids.len() == log_idx,
forall|i| 0 <= i < log_idx ==> (#[trigger] slog_entries[i]).is_Some(),
forall|i| 0 <= i < log_idx ==> (#[trigger] slog_entries[i]).is_some(),
forall|i|
0 <= i < log_idx ==> #[trigger] cell_ids[i] == (
#[trigger] slog_entries[i]).get_Some_0().id(),
#[trigger] slog_entries[i]).unwrap().id(),
forall|i| -log_size <= i < logical_log_idx <==> #[trigger] contents.contains_key(i),
forall|i| #[trigger]
contents.contains_key(i) ==> stored_type_inv(
@@ -367,12 +367,12 @@ impl<DT: Dispatch> NrLog<DT> {
&&& cb_alive_bits[i].value() == false
&&& cb_alive_bits[i].instance_id() == cyclic_buffer_instance.id()
},
forall|i| 0 <= i < log_idx ==> slog_entries.spec_index(i).is_None(),
forall|i| 0 <= i < log_idx ==> slog_entries.spec_index(i).is_none(),
forall|i|
#![trigger slog_entries[i]]
log_idx <= i < log_size ==> {
&&& slog_entries[i].is_Some()
&&& slog_entries[i].get_Some_0().id() == cell_ids[i]
&&& slog_entries[i] matches Some(entry)
&&& entry.id() == cell_ids[i]
},
decreases log_size - log_idx
{
}
let mut log_entry = Option::None;
slog_entries.set_and_swap(log_idx, &mut log_entry);
assert(log_entry.is_Some());
assert(log_entry.is_some());
let log_entry = log_entry.unwrap();
let cb_inst = Tracked(cyclic_buffer_instance.clone());
let entry = BufferEntry {
@@ -1527,7 +1527,7 @@ impl<DT: Dispatch> NrLog<DT> {
} else {
// case: remote dispatch
proof {
assert(stored_entry.log_entry.get_Some_0().value().node_id != nid);
assert(stored_entry.log_entry.unwrap().value().node_id != nid);
if let Option::Some(e) = &stored_entry.log_entry {
assert(e.value().node_id != nid);
// assert(ghost_replica@.key == nid as nat);
10 changes: 5 additions & 5 deletions verified-node-replication/src/exec/replica.rs
@@ -237,7 +237,7 @@ pub open spec fn wf(&self) -> bool {
// v != 0 means lock is not taken, if it's not taken, the ghost state is Some
&&& (v == 0) <==> g.is_some()
//
&&& (g.is_some() ==> g.get_Some_0().inv(flat_combiner_instance@, responses.id(), collected_operations.id(), collected_operations_per_thread.id()))
&&& (g matches Some(g) ==> g.inv(flat_combiner_instance@, responses.id(), collected_operations.id(), collected_operations_per_thread.id()))
}

// invariant on num_threads with (flat_combiner_instance) specifically (self.num_threads.0) is (v: u64, g: Tracked<u64>) {
@@ -462,7 +462,7 @@ impl<DT: Dispatch> Replica<DT> {
self.wf(),
ensures
result.0 ==> result.1@.is_some(),
result.0 ==> result.1@.get_Some_0().inv(
result.0 ==> result.1@.unwrap().inv(
self.flat_combiner_instance@,
self.responses.id(),
self.collected_operations.id(),
@@ -783,7 +783,7 @@ impl<DT: Dispatch> Replica<DT> {
self.flat_combiner_instance.borrow().pre_combiner_collect_request(&g.slots, flat_combiner.borrow());

rids_match_add_rid(flat_combiner.view().value().get_Collecting_0(), request_ids,
0, flat_combiner.view().value().get_Collecting_0().len(), 0, request_ids.len(),g.update.get_Some_0().key());
0, flat_combiner.view().value().get_Collecting_0().len(), 0, request_ids.len(),g.update.unwrap().key());

update_req = g.update;
batch_perms = g.batch_perms;
@@ -998,7 +998,7 @@ impl<DT: Dispatch> Replica<DT> {
old(self).replica_token@ == self.replica_token@,
old(self).unbounded_log_instance@ == self.unbounded_log_instance@,
old(self).cyclic_buffer_instance@ == self.cyclic_buffer_instance@,
res.is_Some() ==> res.get_Some_0().wf(self),
res matches Some(res) ==> res.wf(self),
{
self.thread_tokens.pop()
}
@@ -1234,7 +1234,7 @@ impl<DT: Dispatch> Replica<DT> {
context.flat_combiner_instance@ == self.flat_combiner_instance@,
context.unbounded_log_instance@ == self.unbounded_log_instance@,
0 <= iter <= RESPONSE_CHECK_INTERVAL,
r.is_None() ==> context_ghost_new@.dequeue_resp_pre(
r.is_none() ==> context_ghost_new@.dequeue_resp_pre(
context.batch.0.id(),
tid as nat,
self.flat_combiner_instance@,
6 changes: 3 additions & 3 deletions verified-node-replication/src/exec/rwlock.rs
@@ -288,16 +288,16 @@ impl<T> RwLock<T> {
while !acquired
invariant
self.wf(),
acquired ==> token.is_Some() && token.get_Some_0().instance_id() == self.inst@.id()
&& token.get_Some_0().value() == 0,
acquired ==> (token matches Some(token) && token.instance_id() == self.inst@.id()
&& token.value() == 0),
{
let result =
atomic_with_ghost!(
&self.exc_locked.0 => compare_exchange(false, true);
returning res;
ghost g =>
{
if res.is_Ok() {
if res.is_ok() {
token = Option::Some(self.inst.borrow().exc_start(&mut g));
}
});
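For context, the `RwLock` hunk above specifies the exclusive-acquire loop: spin on `compare_exchange(false, true)` and stop once it returns `Ok`, at which point the ghost `exc_start` token is taken out of the atomic's ghost state. The sketch below is a minimal, unverified analogue in plain Rust over `AtomicBool` (the type name and the `release` method are assumptions for illustration, not code from this repository); it drops the `atomic_with_ghost!` machinery and the ghost token entirely.

```rust
use std::sync::atomic::{AtomicBool, Ordering};

// Minimal, unverified sketch of the exclusive-acquire loop shape shown above.
pub struct ExcLock {
    exc_locked: AtomicBool,
}

impl ExcLock {
    pub const fn new() -> Self {
        ExcLock { exc_locked: AtomicBool::new(false) }
    }

    /// Spin until the compare-exchange from `false` to `true` succeeds.
    pub fn acquire(&self) {
        let mut acquired = false;
        while !acquired {
            let result = self
                .exc_locked
                .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed);
            // The verified version checks `res.is_ok()` and, on success, also
            // extracts a ghost token witnessing exclusive ownership.
            acquired = result.is_ok();
        }
    }

    pub fn release(&self) {
        self.exc_locked.store(false, Ordering::Release);
    }
}

fn main() {
    let lock = ExcLock::new();
    lock.acquire();
    lock.release();
}
```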
16 changes: 8 additions & 8 deletions verified-node-replication/src/exec/utils.rs
@@ -25,9 +25,9 @@ pub open spec fn rids_match(
if bools_end == bools_start {
rids_end == rids_start
} else {
if bools[bools_end - 1].is_Some() {
if let Some(end) = bools[bools_end - 1] {
&&& rids_end > rids_start
&&& rids[rids_end - 1] == bools[bools_end - 1].get_Some_0()
&&& rids[rids_end - 1] == end
&&& rids_match(
bools,
rids,
@@ -62,7 +62,7 @@ pub proof fn rids_match_add_none(
if bools_end == bools_start {
assert(rids_match(bools_new, rids, bools_start, bools_end, rids_start, rids_end));
} else {
if bools[bools_end - 1].is_Some() {
if bools[bools_end - 1].is_some() {
rids_match_add_none(
bools,
rids,
@@ -113,7 +113,7 @@ pub proof fn rids_match_add_rid(
if bools_end == bools_start {
assert(rids_match(bools_new, rids_new, bools_start, bools_end, rids_start, rids_end));
} else {
if bools[bools_end - 1].is_Some() {
if bools[bools_end - 1].is_some() {
rids_match_add_rid(
bools,
rids,
@@ -154,9 +154,9 @@ pub proof fn rids_match_pop(
rids_match(bools, rids, bools_start, bools_end, rids_start, rids_end)
},
bools_end > bools_start ==> {
&&& bools[bools_start as int].is_Some() ==> {
&&& bools[bools_start as int] matches Some(start) ==> {
&&& rids_start < rids_end
&&& rids[rids_start as int] == bools[bools_start as int].get_Some_0()
&&& rids[rids_start as int] == start
&&& rids_match(
bools,
rids,
@@ -166,7 +166,7 @@
rids_end,
)
}
&&& bools[bools_start as int].is_None() ==> {
&&& bools[bools_start as int].is_none() ==> {
&&& rids_match(
bools,
rids,
{
if bools_end == bools_start {
} else {
if bools[bools_end - 1].is_Some() {
if bools[bools_end - 1].is_some() {
rids_match_pop(
bools,
rids,
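The `rids_match` spec touched above relates a sequence of optional request ids (`bools`) to a sequence of concrete request ids (`rids`) by walking both ranges from the back: each `Some(rid)` entry must consume and equal the next id at the back of `rids`, a `None` consumes nothing, and the two ranges must be exhausted together. The lemmas in this file appear to cover appending a `None`, appending a matched id, and popping one element off the front. A small executable analogue over full slices (plain Rust, a simplification and not the Verus spec itself) is shown below.

```rust
// Executable analogue of rids_match over whole slices (simplified illustration).
fn rids_match(bools: &[Option<u64>], rids: &[u64]) -> bool {
    let mut r = rids.len();
    for b in bools.iter().rev() {
        if let Some(id) = b {
            // A Some entry must match the next id at the back of `rids`.
            if r == 0 || rids[r - 1] != *id {
                return false;
            }
            r -= 1;
        }
        // A None entry consumes nothing.
    }
    // Both ranges must run out together.
    r == 0
}

fn main() {
    assert!(rids_match(&[Some(1), None, Some(2)], &[1, 2]));
    assert!(!rids_match(&[Some(1), Some(2)], &[2, 1]));
    assert!(!rids_match(&[None], &[3]));
}
```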
22 changes: 11 additions & 11 deletions verified-node-replication/src/lib.rs
@@ -263,7 +263,7 @@ pub trait NodeReplicatedT<DT: Dispatch + Sync>: Sized {
old(self).wf(),
ensures
self.wf(),
result.is_Some() ==> result.get_Some_0().wf(&self.replicas()[replica_id as int]),
result matches Some(r) ==> r.wf(&self.replicas()[replica_id as int]),
;

/// executes an update operation against the data structure.
@@ -281,13 +281,13 @@ pub trait NodeReplicatedT<DT: Dispatch + Sync>: Sized {
tkn.wf(&self.replicas().spec_index(tkn.replica_id_spec() as int)),
is_update_ticket(ticket@, op, self.unbounded_log_instance()),
ensures
result.is_Ok() ==> is_update_stub(
result.get_Ok_0().2@,
result matches Ok(ok) ==> is_update_stub(
ok.2@,
ticket@.key(),
result.get_Ok_0().0,
ok.0,
self.unbounded_log_instance(),
) && result.get_Ok_0().1.wf(&self.replicas().spec_index(tkn.replica_id_spec() as int)),
result.is_Err() ==> result.get_Err_0().1 == ticket && result.get_Err_0().0 == tkn,
) && ok.1.wf(&self.replicas().spec_index(tkn.replica_id_spec() as int)),
result matches Err(err) ==> err == (tkn, ticket),
;

/// executes a read-only operation against the data structure.
@@ -305,13 +305,13 @@ pub trait NodeReplicatedT<DT: Dispatch + Sync>: Sized {
tkn.wf(&self.replicas()[tkn.replica_id_spec() as int]),
is_readonly_ticket(ticket@, op, self.unbounded_log_instance()),
ensures
result.is_Ok() ==> is_readonly_stub(
result.get_Ok_0().2@,
result matches Ok(ok) ==> is_readonly_stub(
ok.2@,
ticket@.key(),
result.get_Ok_0().0,
ok.0,
self.unbounded_log_instance(),
) && result.get_Ok_0().1.wf(&self.replicas()[tkn.replica_id_spec() as int]),
result.is_Err() ==> result.get_Err_0().1 == ticket && result.get_Err_0().0 == tkn,
) && ok.1.wf(&self.replicas()[tkn.replica_id_spec() as int]),
result matches Err(err) ==> err == (tkn, ticket),
;
}

14 changes: 7 additions & 7 deletions verified-node-replication/src/spec/cyclicbuffer.rs
@@ -60,13 +60,13 @@ pub open spec fn stored_type_inv<DT: Dispatch>(
&&& st.cell_perms.mem_contents().is_init()
&&& st.cell_perms@.pcell == cell_id
&&& idx >= 0 ==> {
&&& st.log_entry.is_Some()
&&& st.log_entry.get_Some_0().key() == idx
&&& st.log_entry.get_Some_0().instance_id() == unbounded_log_instance.id()
&&& st.cell_perms.mem_contents().value().is_Some()
&&& st.cell_perms.mem_contents().value().get_Some_0().node_id as NodeId
== st.log_entry.get_Some_0().value().node_id
&&& st.cell_perms.mem_contents().value().get_Some_0().op == st.log_entry.get_Some_0().value().op
&&& st.log_entry matches Some(log_entry)
&&& log_entry.key() == idx
&&& log_entry.instance_id() == unbounded_log_instance.id()
&&& st.cell_perms.mem_contents().value() matches Some(cell_perms_value)
&&& cell_perms_value.node_id as NodeId
== log_entry.value().node_id
&&& cell_perms_value.op == log_entry.value().op
}
&&& idx < 0 ==> {
&&& true
Expand Down