
Commit

B+ tree is working!
wies committed Nov 1, 2024
1 parent 51b43be commit f2ddc42
Showing 3 changed files with 102 additions and 36 deletions.
11 changes: 6 additions & 5 deletions test/arrays/ordered_array.rav
@@ -118,21 +118,22 @@ interface OrderedArray[E: Library.OrderedType] : Array {
assert set_of_map(m1, 0, new_len) == set_of_map(ms, 0, idx) ++ {|k|} ++ set_of_map(ms, idx + 1, len + 1);
}

lemma map_delete_content_set(m: Map[Int, E], m1: Map[Int, E], len: Int, new_len: Int, idx: Int, k: E)
lemma map_delete_content_set(m: Map[Int, E], len: Int, idx: Int, k: E)
requires sorted_map_seg(m, 0, len)
requires idx == map_find(m, 0, len, k)
requires 0 <= idx <= len
requires k !in set_of_map(m, 0, len) ==> new_len == len && m1 == m
requires k in set_of_map(m, 0, len) ==> new_len == len - 1 && m1 == map_shift(m, idx + 1, idx, len - (idx + 1))
ensures set_of_map(m1, 0, new_len) == set_of_map(m, 0, len) -- {|k|}
//requires k !in set_of_map(m, 0, len) ==> new_len == len && m1 == m
//requires k in set_of_map(m, 0, len) ==> new_len == len - 1 && m1 == map_shift(m, idx + 1, idx, len - (idx + 1))
ensures set_of_map((k in set_of_map(m, 0, len) ? map_shift(m, idx + 1, idx, len - (idx + 1)) : m), 0,
(k in set_of_map(m, 0, len) ? len - 1 : len)) == set_of_map(m, 0, len) -- {|k|}
{
//map_find_content_set(m, 0, len, idx, k);
//set_of_map_split(m, 0, idx, len);

if (idx == len || m[idx] != k) {
return;
}

val m1: Map[Int, E] = map_shift(m, idx + 1, idx, len - (idx + 1));
// prove: set_of_map(m, 0, len) -- {k} == set_of_map(m1, 0, idx - 1) ++ set_of_map(m1, idx - 1, len - 1)
set_of_map_equal(m, m1, 0, 0, idx);
set_of_map_equal(m, m1, idx + 1, idx, len - (idx + 1));
97 changes: 81 additions & 16 deletions test/concurrent/templates/bplustree.rav
@@ -72,7 +72,8 @@ interface BPlusTree[O: Library.OrderedType] : NodeImpl {
&& (l > 0 ==> le(lb, kmap[0]) && lt(kmap[l-1], ub))
// Consistency of cmap
&& (forall i: Int :: {cmap[i]} 0 <= i <= l ==> n != cmap[i])
&& AN.injective(cmap, 0, l+1)
&& (cmap[0] != null ==> AN.injective(cmap, 0, l+1))
&& (cmap[0] == null ==> (forall i: Int :: {cmap[i]} 0 <= i < 2*b ==> cmap[i] == null))
// Internal nodes don't point to null
&& (cmap[0] != null ==> (forall i: Int :: {cmap[i]} 0 <= i <= l ==> cmap[i] != null))
}
@@ -99,10 +100,10 @@ interface BPlusTree[O: Library.OrderedType] : NodeImpl {
}

// Show that if query key k is in the keyset of node x then x must be a leaf.
/*lemma keyset_implies_leaf(n: Ref, k: K, i: Int,
implicit ghost cmap: , implicit ghost c: Set[K])
exists l: Int, lb: KOption, ub: KOption, ks: AK.T, chlds: AN.T, kmap: Map[Int, K], cmap: Map[Int, Ref] ::
node_cond(n, c, flow_int, l, lb, ub, ks, chlds, kmap, cmap)
lemma keyset_implies_leaf(n: Ref, k: K, i: Int, flow_int: Flow_K, c: Set[K],
implicit ghost l: Int, implicit ghost lb: KOption, implicit ghost ub: KOption,
implicit ghost ks: AK.T, implicit ghost chlds: AN.T, implicit ghost kmap: Map[Int, K], implicit ghost cmap: Map[Int, Ref])
requires node_cond(n, c, flow_int, l, lb, ub, ks, chlds, kmap, cmap)
// Heap resources
&& own(n, len, l, 1.0)
&& own(n, rangeLb, lb, 1.0)
@@ -111,23 +112,34 @@ interface BPlusTree[O: Library.OrderedType] : NodeImpl {
&& AK.sorted_array_with_content(ks, l, kmap)
&& own(n, ptrs, chlds, 1.0)
&& AN.arr(chlds, cmap)
requires int.inf[n][k] == 1 && (forall y: Node :: {int.out[y][k]} int.out[y][k] == 0)
requires (i <= 0 || O.le(x.keys[i-1], k)) && 0 <= i <= x.len
ensures node(x, Ix, C)
ensures x.ptrs[0] == null // x is a leaf
requires k in keyset(flow_int)
requires (i <= 0 || O.le(kmap[i-1], k)) && 0 <= i <= l
ensures node_cond(n, c, flow_int, l, lb, ub, ks, chlds, kmap, cmap)
// Heap resources
&& own(n, len, l, 1.0)
&& own(n, rangeLb, lb, 1.0)
&& own(n, rangeUb, ub, 1.0)
&& own(n, keys, ks, 1.0)
&& AK.sorted_array_with_content(ks, l, kmap)
&& own(n, ptrs, chlds, 1.0)
&& AN.arr(chlds, cmap)
ensures cmap[0] == null // x is a leaf
{
if (x.ptrs[0] != null) {
if (i < x.len) {
if (lt(k, x.keys[i])) {
pure assert Ix.out[x.ptrs[i]][k] == 1;
assert (flow_int.inf[n])[k] == 1;
assert (forall y: Ref :: {outset(flow_int, y)} k !in outset(flow_int, y));
assert (forall y: Ref :: {(flow_int.out[y])[k]} k !in outset(flow_int, y) ==> (flow_int.out[y])[k] == 0);
if (cmap[0] != null) {
if (i < l) {
if (O.lt(k, kmap[i])) {
assert (flow_int.out[cmap[i]])[k] == 1;
} else {
keyset_implies_leaf(x, Ix, k, i + 1);
keyset_implies_leaf(n, k, i + 1, flow_int, c);
}
} else {
pure assert Ix.out[x.ptrs[i]][k] == 1;
assert (flow_int.out[cmap[i]])[k] == 1;
}
}
}*/
}


/* Initialize a new root node */
@@ -210,5 +222,58 @@ interface BPlusTree[O: Library.OrderedType] : NodeImpl {
fold node(n, c, i_n);
}

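// Perform the decisive operation (search, insert, or delete) for key k on node n.
// succ reports whether the operation could be applied here (an insert gives up if the node is full);
// on success, the new contents c1 and the result res are exactly those prescribed by opSpec.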
proc decisiveOp(dop: Op, n: Ref, k: K, implicit ghost c: Set[K], implicit ghost i_n: Flow_K)
returns (succ: Bool, res: Bool, implicit ghost c1: Set[K])
requires k in keyset(i_n)
requires node(n, c, i_n)
ensures node(n, c1, i_n)
ensures succ ==> opSpec(dop, k, c) == ((c1, res))
ensures !succ ==> c == c1
{
unfold node(n, c, i_n);
var n_len : Int := n.len;
var n_ptrs : AN.T := n.ptrs;
ghost var cmap: Map[Int, Ref];
cmap :| AN.arr(n_ptrs, cmap);

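// opSpec yields a (new contents, result) pair; #0 projects its first component, the contents expected after the operation.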
c1 := (opSpec(dop, k, c))#0;

var idx: Int;
var new_len: Int;
var n_keys : AK.T := n.keys;
ghost var m1: Map[Int, K];
ghost var kmap: Map[Int, K];
kmap :| AK.sorted_array_with_content(n_keys, n_len, kmap);

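// Since k is in n's keyset, n must be a leaf (the lemma establishes cmap[0] == null).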
keyset_implies_leaf(n, k, 0, i_n, c);

if (dop == searchOp) {
res, idx := AK.arr_find(n_keys, n_len, k);
fold node(n, c, i_n);
return true, res, c;
} else if (dop == insertOp) {
if (n_len < 2*b - 1) {
idx, new_len, m1 := AK.arr_insert(n_keys, k, n_len);
AK.map_insert_content_set(kmap, m1, idx, k, n_len, new_len);

n.len := new_len;
fold node(n, c1, i_n);
return true, new_len != n_len, c1;
} else {
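// No room to insert (n_len >= 2*b - 1): give up and leave the node unchanged.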
fold node(n, c, i_n);
return false, false, c;
}
} else {
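// dop == deleteOp: remove k from the sorted key array and update the length.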
new_len, idx, m1 := AK.arr_delete(n_keys, k, n_len);

AK.map_delete_content_set(kmap, n_len, idx, k);

n.len := new_len;

ghost var c1: Set[K] := (opSpec(dop, k, c))#0;
fold node(n, c1, i_n);
return true, new_len != n_len, c1;
}
}

}
30 changes: 15 additions & 15 deletions test/concurrent/templates/give-up.rav
@@ -23,12 +23,12 @@ interface SearchStructureSpec {

type Op = data { case searchOp ; case insertOp ; case deleteOp }

pred opSpec(op: Op, k: K, c_in: Set[K]; c_out: Set[K], res: Bool) {
func opSpec(op: Op, k: K, c_in: Set[K]) returns (c_out_res: (Set[K], Bool)) {
op == searchOp() ?
c_in == c_out && res == (k in c_in) :
((c_in, (k in c_in))) :
(op == insertOp() ?
c_out == c_in ++ {| k |} && res == (k !in c_in) :
c_out == c_in -- {| k |} && res == (k in c_in))
((c_in ++ {| k |}, (k !in c_in))) :
((c_in -- {| k |}, (k in c_in))))
}
}

Expand Down Expand Up @@ -94,7 +94,7 @@ interface NodeImpl {
requires k in keyset(i_n)
requires node(n, c, i_n)
ensures node(n, c1, i_n)
ensures succ ==> opSpec(dop, k, c, c1, res)
ensures succ ==> opSpec(dop, k, c) == ((c1, res))
ensures !succ ==> c == c1

proc findNext(n: Ref, k: K, implicit ghost c: Set[K], implicit ghost i_n: Flow_K)
@@ -499,32 +499,32 @@ interface GiveUpTemplate[Node: NodeImpl] {

lemma keyset_theorem(r: Ref, dop: Op, k: K, c_n: Set[K], c_n1: Set[K], c: Set[K], res: Bool, k_n: Set[K])
returns (c1: Set[K])
requires opSpec(dop, k, c_n, c_n1, res)
requires opSpec(dop, k, c_n) == ((c_n1, res))
requires own(r, authKS, AuthKeyset_K.auth(Keyset_K.prodKS(keyspace, c)))
requires own(r, authKS, AuthKeyset_K.frag(Keyset_K.prodKS(k_n, c_n)))
// requires own(r, authFlow, AuthFlow_K.auth(g_i))
// requires own(r, authFlow, AuthFlow_K.frag(g_i))
requires c_n1 subseteq k_n
requires k in k_n
requires k in keyspace

ensures opSpec(dop, k, c, c1, res)
ensures opSpec(dop, k, c) == ((c1, res))
ensures own(r, authKS, AuthKeyset_K.auth(Keyset_K.prodKS(keyspace, c1)))
ensures own(r, authKS, AuthKeyset_K.frag(Keyset_K.prodKS(k_n, c_n1)))
{
unfold opSpec(dop, k, c_n, c_n1, res);
//unfold opSpec(dop, k, c_n, c_n1, res);

if (dop == searchOp() || !res) {
if (dop == searchOp || !res) {
c1 := c;
fold opSpec(dop, k, c, c1, res);
//fold opSpec(dop, k, c, c1, res);
} else {
if (dop == insertOp()) {
if (dop == insertOp) {
c1 := c ++ {| k |};
} else {
c1 := c -- {| k |};
}

{!
fold opSpec(dop, k, c, c1, res);
//fold opSpec(dop, k, c, c1, res);

/*assert AuthKeyset_K.fpuAllowed(
AuthKeyset_K.auth_frag(Keyset_K.prodKS(keyspace, c), Keyset_K.prodKS(k_n, c_n)),
@@ -549,7 +549,7 @@ interface GiveUpTemplate[Node: NodeImpl] {
returns (res: Bool, implicit ghost cc: Set[K])
requires k in keyspace
atomic requires css(r, c)
atomic ensures css(r, cc) && opSpec(dop, k, c, cc, res)
atomic ensures css(r, cc) && opSpec(dop, k, c) == ((cc, res))
{
ghost val phi: AtomicToken := bindAU();

@@ -604,8 +604,8 @@ interface GiveUpTemplate[Node: NodeImpl] {
unlockNode(n);

unfold globalRes(r, c1, g_i1);
unfold opSpec(dop, k, c_n, c_n1, res);
fold opSpec(dop, k, c_n, c_n1, res);
//unfold opSpec(dop, k, c_n, c_n1, res);
//fold opSpec(dop, k, c_n, c_n1, res);

cc := keyset_theorem(r, dop, k, c_n, c_n1, c1, res, keyset(i_n));

