diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index db59c4ee..fcab7c8d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,8 +30,8 @@ jobs: strategy: matrix: - os: [macos-latest, ubuntu-latest] - go-version: [1.17, 1.18, 1.19] + os: [ubuntu-latest] + go-version: ['1.20', 1.21] steps: - name: Install Go @@ -54,4 +54,4 @@ jobs: - name: Run tests run: | go version - go test -timeout 60m -race -v ./... + go test -timeout 180m -race -v ./... diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 1fe10561..88b4907a 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -32,7 +32,7 @@ jobs: run: go build ./... - name: Generate coverage report - run: go test -timeout 60m -race -coverprofile=coverage.txt -covermode=atomic + run: go test -timeout 180m -race -coverprofile=coverage.txt -covermode=atomic - name: Upload coverage report to Codecov uses: codecov/codecov-action@v3.1.4 diff --git a/array.go b/array.go index cf3217c8..d8f39487 100644 --- a/array.go +++ b/array.go @@ -3346,103 +3346,184 @@ func (a *Array) Storable(_ SlabStorage, _ Address, maxInlineSize uint64) (Storab } } -var emptyArrayIterator = &ArrayIterator{} +type ArrayIterator interface { + CanMutate() bool + Next() (Value, error) +} + +type emptyArrayIterator struct { + readOnly bool +} + +var _ ArrayIterator = &emptyArrayIterator{} + +var emptyMutableArrayIterator = &emptyArrayIterator{readOnly: false} +var emptyReadOnlyArrayIterator = &emptyArrayIterator{readOnly: true} + +func (i *emptyArrayIterator) CanMutate() bool { + return !i.readOnly +} + +func (*emptyArrayIterator) Next() (Value, error) { + return nil, nil +} -type ArrayIterator struct { +type mutableArrayIterator struct { + array *Array + nextIndex uint64 + lastIndex uint64 // noninclusive index +} + +var _ ArrayIterator = &mutableArrayIterator{} + +func (i *mutableArrayIterator) CanMutate() bool { + return true +} + +func (i *mutableArrayIterator) Next() (Value, error) { + if i.nextIndex == i.lastIndex { + // No more elements. + return nil, nil + } + + // Don't need to set up notification callback for v because + // Get() returns value with notification already. + v, err := i.array.Get(i.nextIndex) + if err != nil { + return nil, err + } + + i.nextIndex++ + + return v, nil +} + +type readOnlyArrayIterator struct { array *Array - id SlabID dataSlab *ArrayDataSlab - indexInArray int - indexInDataSlab int - remainingCount int - readOnly bool + indexInDataSlab uint64 + remainingCount uint64 // needed for range iteration } -func (i *ArrayIterator) CanMutate() bool { - return !i.readOnly +var _ ArrayIterator = &readOnlyArrayIterator{} + +func (i *readOnlyArrayIterator) CanMutate() bool { + return false } -func (i *ArrayIterator) Next() (Value, error) { +func (i *readOnlyArrayIterator) Next() (Value, error) { if i.remainingCount == 0 { return nil, nil } - if i.dataSlab == nil { - if i.id == SlabIDUndefined { + if i.indexInDataSlab >= uint64(len(i.dataSlab.elements)) { + // No more elements in current data slab. + + nextDataSlabID := i.dataSlab.next + + if nextDataSlabID == SlabIDUndefined { + // No more elements in array. return nil, nil } - slab, found, err := i.array.Storage.Retrieve(i.id) + // Load next data slab. + slab, found, err := i.array.Storage.Retrieve(nextDataSlabID) if err != nil { // Wrap err as external error (if needed) because err is returned by SlabStorage interface. 
- return nil, wrapErrorfAsExternalErrorIfNeeded(err, fmt.Sprintf("failed to retrieve slab %s", i.id)) + return nil, wrapErrorfAsExternalErrorIfNeeded(err, fmt.Sprintf("failed to retrieve slab %s", nextDataSlabID)) } if !found { - return nil, NewSlabNotFoundErrorf(i.id, "slab not found during array iteration") + return nil, NewSlabNotFoundErrorf(nextDataSlabID, "slab not found during array iteration") } i.dataSlab = slab.(*ArrayDataSlab) i.indexInDataSlab = 0 - } - var element Value - var err error - if i.indexInDataSlab < len(i.dataSlab.elements) { - element, err = i.dataSlab.elements[i.indexInDataSlab].StoredValue(i.array.Storage) - if err != nil { - // Wrap err as external error (if needed) because err is returned by Storable interface. - return nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get storable's stored value") + // Check current data slab isn't empty because i.remainingCount > 0. + if len(i.dataSlab.elements) == 0 { + return nil, NewSlabDataErrorf("data slab contains 0 elements, expect more") } - - if i.CanMutate() { - // Set up notification callback in child value so - // when child value is modified parent a is notified. - i.array.setCallbackWithChild(uint64(i.indexInArray), element, maxInlineArrayElementSize) - } - - i.indexInDataSlab++ - i.indexInArray++ } - if i.indexInDataSlab >= len(i.dataSlab.elements) { - i.id = i.dataSlab.next - i.dataSlab = nil + // At this point: + // - There are elements to iterate in array (i.remainingCount > 0), and + // - There are elements to iterate in i.dataSlab (i.indexInDataSlab < len(i.dataSlab.elements)) + + element, err := i.dataSlab.elements[i.indexInDataSlab].StoredValue(i.array.Storage) + if err != nil { + // Wrap err as external error (if needed) because err is returned by Storable interface. + return nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get storable's stored value") } + i.indexInDataSlab++ i.remainingCount-- return element, nil } -func (a *Array) Iterator() (*ArrayIterator, error) { +// Iterator returns mutable iterator for array elements. +// Mutable iterator handles: +// - indirect element mutation, such as modifying nested container +// - direct element mutation, such as overwriting existing element with new element +// Mutable iterator doesn't handle: +// - inserting new elements into the array +// - removing existing elements from the array +// NOTE: Use readonly iterator if mutation is not needed for better performance. +func (a *Array) Iterator() (ArrayIterator, error) { + if a.Count() == 0 { + return emptyMutableArrayIterator, nil + } + + return &mutableArrayIterator{ + array: a, + lastIndex: a.Count(), + }, nil +} + +// ReadOnlyIterator returns readonly iterator for array elements. +// If elements are mutated, those changes are not guaranteed to persist. +// NOTE: Use readonly iterator if mutation is not needed for better performance. +func (a *Array) ReadOnlyIterator() (ArrayIterator, error) { + if a.Count() == 0 { + return emptyReadOnlyArrayIterator, nil + } + slab, err := firstArrayDataSlab(a.Storage, a.root) if err != nil { // Don't need to wrap error as external error because err is already categorized by firstArrayDataSlab(). return nil, err } - return &ArrayIterator{ + return &readOnlyArrayIterator{ array: a, - id: slab.SlabID(), dataSlab: slab, - remainingCount: int(a.Count()), + remainingCount: a.Count(), }, nil } -// ReadOnlyIterator returns readonly iterator for array elements. -// If elements of child containers are mutated, those changes -// are not guaranteed to persist. 
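As a caller-side illustration of the ArrayIterator split introduced above (mutable Iterator/RangeIterator vs. ReadOnlyIterator), the following hypothetical helper is not part of this change; it is written as if it lived in this package, and only ReadOnlyIterator, Next, Count, and Value come from the code in this diff.

```go
// elementsOf is a hypothetical helper (illustration only) showing how the
// new ArrayIterator interface is consumed. Next returns (nil, nil) once all
// elements have been visited.
func elementsOf(a *Array) ([]Value, error) {
	// Read-only iterator: walks data slabs directly, so it is cheaper than
	// the mutable iterator, but mutations of returned values are not
	// guaranteed to persist.
	iter, err := a.ReadOnlyIterator()
	if err != nil {
		return nil, err
	}

	values := make([]Value, 0, a.Count())
	for {
		v, err := iter.Next()
		if err != nil {
			return nil, err
		}
		if v == nil {
			break // iteration finished
		}
		values = append(values, v)
	}
	return values, nil
}
```

When elements (or nested child containers) will be modified during the walk, a.Iterator() or a.RangeIterator(start, end) is the appropriate choice instead: the mutable iterator re-reads each element through Get by index, so it stays valid across the slab splits and merges that such mutations can trigger.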
-func (a *Array) ReadOnlyIterator() (*ArrayIterator, error) { - iterator, err := a.Iterator() - if err != nil { - // Don't need to wrap error as external error because err is already categorized by Iterator(). - return nil, err +func (a *Array) RangeIterator(startIndex uint64, endIndex uint64) (ArrayIterator, error) { + count := a.Count() + + if startIndex > count || endIndex > count { + return nil, NewSliceOutOfBoundsError(startIndex, endIndex, 0, count) } - iterator.readOnly = true - return iterator, nil + + if startIndex > endIndex { + return nil, NewInvalidSliceIndexError(startIndex, endIndex) + } + + if endIndex == startIndex { + return emptyMutableArrayIterator, nil + } + + return &mutableArrayIterator{ + array: a, + nextIndex: startIndex, + lastIndex: endIndex, + }, nil } -func (a *Array) RangeIterator(startIndex uint64, endIndex uint64) (*ArrayIterator, error) { +func (a *Array) ReadOnlyRangeIterator(startIndex uint64, endIndex uint64) (ArrayIterator, error) { count := a.Count() if startIndex > count || endIndex > count { @@ -3456,7 +3537,7 @@ func (a *Array) RangeIterator(startIndex uint64, endIndex uint64) (*ArrayIterato numberOfElements := endIndex - startIndex if numberOfElements == 0 { - return emptyArrayIterator, nil + return emptyReadOnlyArrayIterator, nil } var dataSlab *ArrayDataSlab @@ -3483,28 +3564,17 @@ func (a *Array) RangeIterator(startIndex uint64, endIndex uint64) (*ArrayIterato } } - return &ArrayIterator{ + return &readOnlyArrayIterator{ array: a, - id: dataSlab.SlabID(), dataSlab: dataSlab, - indexInArray: int(startIndex), - indexInDataSlab: int(index), - remainingCount: int(numberOfElements), + indexInDataSlab: index, + remainingCount: numberOfElements, }, nil } -func (a *Array) ReadOnlyRangeIterator(startIndex uint64, endIndex uint64) (*ArrayIterator, error) { - iterator, err := a.RangeIterator(startIndex, endIndex) - if err != nil { - return nil, err - } - iterator.readOnly = true - return iterator, nil -} - type ArrayIterationFunc func(element Value) (resume bool, err error) -func iterateArray(iterator *ArrayIterator, fn ArrayIterationFunc) error { +func iterateArray(iterator ArrayIterator, fn ArrayIterationFunc) error { for { value, err := iterator.Next() if err != nil { @@ -3621,18 +3691,23 @@ func getArraySlab(storage SlabStorage, id SlabID) (ArraySlab, error) { } func firstArrayDataSlab(storage SlabStorage, slab ArraySlab) (*ArrayDataSlab, error) { - if slab.IsData() { - return slab.(*ArrayDataSlab), nil - } - meta := slab.(*ArrayMetaDataSlab) - firstChildID := meta.childrenHeaders[0].slabID - firstChild, err := getArraySlab(storage, firstChildID) - if err != nil { - // Don't need to wrap error as external error because err is already categorized by getArraySlab(). - return nil, err + switch slab := slab.(type) { + case *ArrayDataSlab: + return slab, nil + + case *ArrayMetaDataSlab: + firstChildID := slab.childrenHeaders[0].slabID + firstChild, err := getArraySlab(storage, firstChildID) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by getArraySlab(). + return nil, err + } + // Don't need to wrap error as external error because err is already categorized by firstArrayDataSlab(). + return firstArrayDataSlab(storage, firstChild) + + default: + return nil, NewUnreachableError() } - // Don't need to wrap error as external error because err is already categorized by firstArrayDataSlab(). 
- return firstArrayDataSlab(storage, firstChild) } // getArrayDataSlabWithIndex returns data slab containing element at specified index diff --git a/array_test.go b/array_test.go index 1a30d718..7c64d61c 100644 --- a/array_test.go +++ b/array_test.go @@ -692,7 +692,7 @@ func TestArrayRemove(t *testing.T) { }) } -func TestArrayIterate(t *testing.T) { +func TestReadOnlyArrayIterate(t *testing.T) { t.Run("empty", func(t *testing.T) { typeInfo := testTypeInfo{42} @@ -906,12 +906,788 @@ func TestArrayIterate(t *testing.T) { require.Equal(t, count/2, i) }) +} + +func TestMutableArrayIterate(t *testing.T) { + + t.Run("empty", func(t *testing.T) { + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + i := uint64(0) + err = array.Iterate(func(v Value) (bool, error) { + i++ + return true, nil + }) + require.NoError(t, err) + require.Equal(t, uint64(0), i) + }) + + t.Run("mutate primitive values, root is data slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const arraySize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + v := Uint64Value(i) + err = array.Append(v) + require.NoError(t, err) + + expectedValues[i] = v + } + require.True(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + require.Equal(t, Uint64Value(i), v) + + // Mutate primitive array elements by overwritting existing elements of similar byte size. + newValue := Uint64Value(i * 2) + existingStorable, err := array.Set(uint64(i), newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, Uint64Value(i), existingValue) + + expectedValues[i] = newValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.True(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate primitive values, root is metadata slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const arraySize = 1024 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + v := Uint64Value(i) + err = array.Append(v) + require.NoError(t, err) + + expectedValues[i] = v + } + require.False(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + require.Equal(t, Uint64Value(i), v) + + // Mutate primitive array elements by overwritting existing elements with elements of similar size. 
+ newValue := Uint64Value(i * 2) + existingStorable, err := array.Set(uint64(i), newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, Uint64Value(i), existingValue) + + expectedValues[i] = newValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.False(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate primitive values, root is data slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const arraySize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + r := rune('a') + for i := uint64(0); i < arraySize; i++ { + v := NewStringValue(string(r)) + err = array.Append(v) + require.NoError(t, err) + + expectedValues[i] = v + r++ + } + require.True(t, array.root.IsData()) + + i := 0 + r = rune('a') + err = array.Iterate(func(v Value) (bool, error) { + require.Equal(t, NewStringValue(string(r)), v) + + // Mutate primitive array elements by overwritting existing elements with larger elements. + // Larger elements causes slabs to split. + newValue := NewStringValue(strings.Repeat(string(r), 25)) + existingStorable, err := array.Set(uint64(i), newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, NewStringValue(string(r)), existingValue) + + expectedValues[i] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.False(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate primitive values, root is metadata slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const arraySize = 200 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + r := rune('a') + for i := uint64(0); i < arraySize; i++ { + v := NewStringValue(string(r)) + err = array.Append(v) + require.NoError(t, err) + + expectedValues[i] = v + r++ + } + require.False(t, array.root.IsData()) + + i := 0 + r = rune('a') + err = array.Iterate(func(v Value) (bool, error) { + require.Equal(t, NewStringValue(string(r)), v) + + // Mutate primitive array elements by overwritting existing elements with larger elements. + // Larger elements causes slabs to split. 
+ newValue := NewStringValue(strings.Repeat(string(r), 25)) + existingStorable, err := array.Set(uint64(i), newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, NewStringValue(string(r)), existingValue) + + expectedValues[i] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.False(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate primitive values, root is metadata slab, merge slabs", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const arraySize = 80 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + r := rune('a') + for i := uint64(0); i < arraySize; i++ { + v := NewStringValue(strings.Repeat(string(r), 25)) + err = array.Append(v) + require.NoError(t, err) + + expectedValues[i] = v + r++ + } + require.False(t, array.root.IsData()) + + i := 0 + r = rune('a') + err = array.Iterate(func(v Value) (bool, error) { + require.Equal(t, NewStringValue(strings.Repeat(string(r), 25)), v) + + // Mutate primitive array elements by overwritting existing elements with smaller elements. + // Smaller elements causes slabs to merge. + newValue := NewStringValue(string(r)) + existingStorable, err := array.Set(uint64(i), newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, NewStringValue(strings.Repeat(string(r), 25)), existingValue) + + expectedValues[i] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.True(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate inlined container, root is data slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const arraySize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + v := Uint64Value(i) + err = childArray.Append(v) + require.NoError(t, err) + + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = arrayValue{v} + } + require.True(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + childArray, ok := v.(*Array) + require.True(t, ok) + require.Equal(t, uint64(1), childArray.Count()) + require.True(t, childArray.Inlined()) + + // Mutate array elements by inserting more elements to child arrays. 
+ newElement := Uint64Value(0) + err := childArray.Append(newElement) + require.NoError(t, err) + require.Equal(t, uint64(2), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) + require.True(t, ok) + + expectedChildArrayValues = append(expectedChildArrayValues, newElement) + expectedValues[i] = expectedChildArrayValues + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.True(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate inlined container, root is metadata slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const arraySize = 25 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + v := Uint64Value(i) + err = childArray.Append(v) + require.NoError(t, err) + + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = arrayValue{v} + } + require.False(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + childArray, ok := v.(*Array) + require.True(t, ok) + require.Equal(t, uint64(1), childArray.Count()) + require.True(t, childArray.Inlined()) + + // Mutate array elements by inserting more elements to child arrays. + newElement := Uint64Value(0) + err := childArray.Append(newElement) + require.NoError(t, err) + require.Equal(t, uint64(2), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) + require.True(t, ok) + + expectedChildArrayValues = append(expectedChildArrayValues, newElement) + expectedValues[i] = expectedChildArrayValues + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.False(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate inlined container, root is data slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const ( + arraySize = 15 + childArraySize = 1 + mutatedChildArraySize = 4 + ) + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + var expectedValue arrayValue + for j := i; j < i+childArraySize; j++ { + v := Uint64Value(j) + err = childArray.Append(v) + require.NoError(t, err) + + expectedValue = append(expectedValue, v) + } + + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = expectedValue + } + require.True(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + childArray, ok := v.(*Array) + require.True(t, ok) + require.Equal(t, uint64(childArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) + require.True(t, ok) + + // Mutate array elements by inserting 
more elements to child arrays. + for j := i; j < i+mutatedChildArraySize-childArraySize; j++ { + newElement := Uint64Value(j) + + err := childArray.Append(newElement) + require.NoError(t, err) + + expectedChildArrayValues = append(expectedChildArrayValues, newElement) + } + + require.Equal(t, uint64(mutatedChildArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedValues[i] = expectedChildArrayValues + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.False(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate inlined container, root is metadata slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const ( + arraySize = 25 + childArraySize = 1 + mutatedChildArraySize = 4 + ) + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + var expectedValue arrayValue + for j := i; j < i+childArraySize; j++ { + v := Uint64Value(j) + err = childArray.Append(v) + require.NoError(t, err) + + expectedValue = append(expectedValue, v) + } + + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = expectedValue + } + require.False(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + childArray, ok := v.(*Array) + require.True(t, ok) + require.Equal(t, uint64(childArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) + require.True(t, ok) + + // Mutate array elements by inserting more elements to child arrays. 
+ for j := i; j < i+mutatedChildArraySize-childArraySize; j++ { + newElement := Uint64Value(j) + + err := childArray.Append(newElement) + require.NoError(t, err) + + expectedChildArrayValues = append(expectedChildArrayValues, newElement) + } + + require.Equal(t, uint64(mutatedChildArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedValues[i] = expectedChildArrayValues + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.False(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("mutate inlined container, root is metadata slab, merge slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const ( + arraySize = 10 + childArraySize = 10 + mutatedChildArraySize = 1 + ) + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + var expectedValue arrayValue + for j := i; j < i+childArraySize; j++ { + v := Uint64Value(j) + err = childArray.Append(v) + require.NoError(t, err) + + expectedValue = append(expectedValue, v) + } + + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = expectedValue + } + + require.False(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + childArray, ok := v.(*Array) + require.True(t, ok) + require.Equal(t, uint64(childArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) + require.True(t, ok) + + for j := childArraySize - 1; j > mutatedChildArraySize-1; j-- { + existingStorble, err := childArray.Remove(uint64(j)) + require.NoError(t, err) + + existingValue, err := existingStorble.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, Uint64Value(i+j), existingValue) + } + + require.Equal(t, uint64(mutatedChildArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedValues[i] = expectedChildArrayValues[:mutatedChildArraySize] + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.True(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("uninline inlined container, root is data slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const ( + arraySize = 2 + childArraySize = 1 + mutatedChildArraySize = 50 + ) + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + var expectedValue arrayValue + for j := i; j < i+childArraySize; j++ { + v := Uint64Value(j) + err = childArray.Append(v) + require.NoError(t, err) + + expectedValue = append(expectedValue, v) + } + + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = expectedValue + } + + require.True(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) 
(bool, error) { + childArray, ok := v.(*Array) + require.True(t, ok) + require.Equal(t, uint64(childArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) + require.True(t, ok) + + for j := childArraySize; j < mutatedChildArraySize; j++ { + v := Uint64Value(i + j) + + err := childArray.Append(v) + require.NoError(t, err) + + expectedChildArrayValues = append(expectedChildArrayValues, v) + } + + require.Equal(t, uint64(mutatedChildArraySize), childArray.Count()) + require.False(t, childArray.Inlined()) + + expectedValues[i] = expectedChildArrayValues + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + + require.True(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("uninline inlined container, root is metadata slab, merge slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const ( + arraySize = 10 + childArraySize = 10 + mutatedChildArraySize = 50 + ) + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + var expectedValue arrayValue + + for j := i; j < i+childArraySize; j++ { + v := Uint64Value(j) + err = childArray.Append(v) + require.NoError(t, err) + + expectedValue = append(expectedValue, v) + } + + err = array.Append(childArray) + require.NoError(t, err) + + expectedValues[i] = expectedValue + } + + require.False(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + childArray, ok := v.(*Array) + require.True(t, ok) + require.Equal(t, uint64(childArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) + require.True(t, ok) + + for j := childArraySize; j < mutatedChildArraySize; j++ { + v := Uint64Value(i + j) + + err := childArray.Append(v) + require.NoError(t, err) + + expectedChildArrayValues = append(expectedChildArrayValues, v) + } + + require.Equal(t, uint64(mutatedChildArraySize), childArray.Count()) + require.False(t, childArray.Inlined()) + + expectedValues[i] = expectedChildArrayValues + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + require.True(t, array.root.IsData()) - t.Run("mutation", func(t *testing.T) { + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("inline uninlined container, root is data slab, no slab operation", func(t *testing.T) { SetThreshold(256) defer SetThreshold(1024) - const arraySize = 15 + const ( + arraySize = 2 + childArraySize = 50 + mutatedChildArraySize = 1 + ) typeInfo := testTypeInfo{42} storage := newTestPersistentStorage(t) @@ -925,48 +1701,196 @@ func TestArrayIterate(t *testing.T) { childArray, err := NewArray(storage, address, typeInfo) require.NoError(t, err) - v := Uint64Value(i) - err = childArray.Append(v) - require.NoError(t, err) + var expectedValue arrayValue + + for j := i; j < i+childArraySize; j++ { + v := Uint64Value(j) + err = childArray.Append(v) + require.NoError(t, err) + + expectedValue = append(expectedValue, v) + } err = array.Append(childArray) require.NoError(t, err) - expectedValues[i] = 
arrayValue{v} + expectedValues[i] = expectedValue } - require.True(t, array.root.IsData()) - sizeBeforeMutation := array.root.Header().size + require.True(t, array.root.IsData()) i := 0 - newElement := Uint64Value(0) err = array.Iterate(func(v Value) (bool, error) { childArray, ok := v.(*Array) require.True(t, ok) - require.Equal(t, uint64(1), childArray.Count()) + require.Equal(t, uint64(childArraySize), childArray.Count()) + require.False(t, childArray.Inlined()) + + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) + require.True(t, ok) + + for j := childArraySize - 1; j > mutatedChildArraySize-1; j-- { + existingStorable, err := childArray.Remove(uint64(j)) + require.NoError(t, err) + + value, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, Uint64Value(i+j), value) + } + + require.Equal(t, uint64(mutatedChildArraySize), childArray.Count()) require.True(t, childArray.Inlined()) - err := childArray.Append(newElement) + expectedValues[i] = expectedChildArrayValues[:1] + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, arraySize, i) + + require.True(t, array.root.IsData()) + + testArray(t, storage, typeInfo, address, array, expectedValues, false) + }) + + t.Run("inline uninlined container, root is data slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const ( + arraySize = 4 + childArraySize = 50 + mutatedChildArraySize = 25 + ) + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + expectedValues := make([]Value, arraySize) + for i := uint64(0); i < arraySize; i++ { + childArray, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + var expectedValue arrayValue + + for j := i; j < i+childArraySize; j++ { + v := Uint64Value(j) + err = childArray.Append(v) + require.NoError(t, err) + + expectedValue = append(expectedValue, v) + } + + err = array.Append(childArray) require.NoError(t, err) + expectedValues[i] = expectedValue + } + + require.True(t, array.root.IsData()) + + i := 0 + err = array.Iterate(func(v Value) (bool, error) { + childArray, ok := v.(*Array) + require.True(t, ok) + require.Equal(t, uint64(childArraySize), childArray.Count()) + require.False(t, childArray.Inlined()) + expectedChildArrayValues, ok := expectedValues[i].(arrayValue) require.True(t, ok) - expectedChildArrayValues = append(expectedChildArrayValues, newElement) - expectedValues[i] = expectedChildArrayValues + for j := childArraySize - 1; j >= mutatedChildArraySize; j-- { + existingStorable, err := childArray.Remove(uint64(j)) + require.NoError(t, err) - i++ + value, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, Uint64Value(i+j), value) + } - require.Equal(t, array.root.Header().size, sizeBeforeMutation+uint32(i)*newElement.ByteSize()) + require.Equal(t, uint64(mutatedChildArraySize), childArray.Count()) + require.True(t, childArray.Inlined()) + + expectedValues[i] = expectedChildArrayValues[:mutatedChildArraySize] + + i++ return true, nil }) require.NoError(t, err) require.Equal(t, arraySize, i) - require.True(t, array.root.IsData()) + + require.False(t, array.root.IsData()) testArray(t, storage, typeInfo, address, array, expectedValues, false) }) + + t.Run("stop", func(t *testing.T) { + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 
2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + const count = 10 + for i := uint64(0); i < count; i++ { + err := array.Append(Uint64Value(i)) + require.NoError(t, err) + } + + i := 0 + err = array.Iterate(func(_ Value) (bool, error) { + if i == count/2 { + return false, nil + } + i++ + return true, nil + }) + require.NoError(t, err) + require.Equal(t, count/2, i) + }) + + t.Run("error", func(t *testing.T) { + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + const count = 10 + for i := uint64(0); i < count; i++ { + err := array.Append(Uint64Value(i)) + require.NoError(t, err) + } + + testErr := errors.New("test") + + i := 0 + err = array.Iterate(func(_ Value) (bool, error) { + if i == count/2 { + return false, testErr + } + i++ + return true, nil + }) + // err is testErr wrapped in ExternalError. + require.Equal(t, 1, errorCategorizationCount(err)) + var externalError *ExternalError + require.ErrorAs(t, err, &externalError) + require.Equal(t, testErr, externalError.Unwrap()) + + require.Equal(t, count/2, i) + }) } func testArrayIterateRange(t *testing.T, array *Array, values []Value) { @@ -1029,7 +1953,7 @@ func testArrayIterateRange(t *testing.T, array *Array, values []Value) { } } -func TestArrayIterateRange(t *testing.T) { +func TestReadOnlyArrayIterateRange(t *testing.T) { typeInfo := testTypeInfo{42} address := Address{1, 2, 3, 4, 5, 6, 7, 8} @@ -1143,8 +2067,29 @@ func TestArrayIterateRange(t *testing.T) { require.Equal(t, testErr, externalError.Unwrap()) require.Equal(t, count/2, i) }) +} + +func TestMutableArrayIterateRange(t *testing.T) { + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + + t.Run("empty", func(t *testing.T) { + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + i := 0 + err = array.IterateRange(0, 0, func(v Value) (bool, error) { + i++ + return true, nil + }) + require.NoError(t, err) + require.Equal(t, 0, i) + }) - t.Run("mutation", func(t *testing.T) { + t.Run("mutate inlined container, root is data slab, no slab operation", func(t *testing.T) { SetThreshold(256) defer SetThreshold(1024) @@ -1207,6 +2152,67 @@ func TestArrayIterateRange(t *testing.T) { testArray(t, storage, typeInfo, address, array, expectedValues, false) }) + + t.Run("stop", func(t *testing.T) { + const arraySize = 10 + + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + for i := uint64(0); i < arraySize; i++ { + err := array.Append(Uint64Value(i)) + require.NoError(t, err) + } + + i := uint64(0) + startIndex := uint64(1) + endIndex := uint64(5) + count := endIndex - startIndex + err = array.IterateRange(startIndex, endIndex, func(_ Value) (bool, error) { + if i == count/2 { + return false, nil + } + i++ + return true, nil + }) + require.NoError(t, err) + require.Equal(t, count/2, i) + }) + + t.Run("error", func(t *testing.T) { + storage := newTestPersistentStorage(t) + + array, err := NewArray(storage, address, typeInfo) + require.NoError(t, err) + + const arraySize = 10 + for i := uint64(0); i < arraySize; i++ { + err := array.Append(Uint64Value(i)) + require.NoError(t, err) + } + + testErr := errors.New("test") + + i := uint64(0) + startIndex := uint64(1) + endIndex := uint64(5) + count := endIndex - startIndex + 
err = array.IterateRange(startIndex, endIndex, func(_ Value) (bool, error) { + if i == count/2 { + return false, testErr + } + i++ + return true, nil + }) + // err is testErr wrapped in ExternalError. + require.Equal(t, 1, errorCategorizationCount(err)) + var externalError *ExternalError + require.ErrorAs(t, err, &externalError) + require.Equal(t, testErr, externalError.Unwrap()) + require.Equal(t, count/2, i) + }) } func TestArrayRootSlabID(t *testing.T) { @@ -3206,7 +4212,7 @@ func TestEmptyArray(t *testing.T) { require.Nil(t, s) }) - t.Run("iterate", func(t *testing.T) { + t.Run("readonly iterate", func(t *testing.T) { i := uint64(0) err := array.IterateReadOnly(func(v Value) (bool, error) { i++ @@ -3216,6 +4222,16 @@ func TestEmptyArray(t *testing.T) { require.Equal(t, uint64(0), i) }) + t.Run("iterate", func(t *testing.T) { + i := uint64(0) + err := array.Iterate(func(v Value) (bool, error) { + i++ + return true, nil + }) + require.NoError(t, err) + require.Equal(t, uint64(0), i) + }) + t.Run("count", func(t *testing.T) { count := array.Count() require.Equal(t, uint64(0), count) diff --git a/map.go b/map.go index 63db77ab..57891d7a 100644 --- a/map.go +++ b/map.go @@ -114,6 +114,15 @@ type MapValue Storable type element interface { fmt.Stringer + getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + hkey Digest, + comparator ValueComparator, + key Value, + ) (MapKey, MapValue, MapKey, error) + Get( storage SlabStorage, digester Digester, @@ -173,6 +182,15 @@ type elementGroup interface { type elements interface { fmt.Stringer + getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + hkey Digest, + comparator ValueComparator, + key Value, + ) (MapKey, MapValue, MapKey, error) + Get( storage SlabStorage, digester Digester, @@ -317,6 +335,15 @@ var _ MapSlab = &MapMetaDataSlab{} type MapSlab interface { Slab + getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + hkey Digest, + comparator ValueComparator, + key Value, + ) (MapKey, MapValue, MapKey, error) + Get( storage SlabStorage, digester Digester, @@ -616,6 +643,20 @@ func (e *singleElement) Encode(enc *Encoder) error { return nil } +func (e *singleElement) getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + hkey Digest, + comparator ValueComparator, + key Value, +) (MapKey, MapValue, MapKey, error) { + k, v, err := e.Get(storage, digester, level, hkey, comparator, key) + + nextKey := MapKey(nil) + return k, v, nextKey, err +} + func (e *singleElement) Get(storage SlabStorage, _ Digester, _ uint, _ Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { equal, err := comparator(storage, key, e.key) if err != nil { @@ -788,6 +829,27 @@ func (e *inlineCollisionGroup) Encode(enc *Encoder) error { return nil } +func (e *inlineCollisionGroup) getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + _ Digest, + comparator ValueComparator, + key Value, +) (MapKey, MapValue, MapKey, error) { + + // Adjust level and hkey for collision group. + level++ + if level > digester.Levels() { + return nil, nil, nil, NewHashLevelErrorf("inline collision group digest level is %d, want <= %d", level, digester.Levels()) + } + hkey, _ := digester.Digest(level) + + // Search key in collision group with adjusted hkeyPrefix and hkey. + // Don't need to wrap error as external error because err is already categorized by elements.Get(). 
+ return e.elements.getElementAndNextKey(storage, digester, level, hkey, comparator, key) +} + func (e *inlineCollisionGroup) Get(storage SlabStorage, digester Digester, level uint, _ Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { // Adjust level and hkey for collision group @@ -977,6 +1039,32 @@ func (e *externalCollisionGroup) Encode(enc *Encoder) error { return nil } +func (e *externalCollisionGroup) getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + _ Digest, + comparator ValueComparator, + key Value, +) (MapKey, MapValue, MapKey, error) { + slab, err := getMapSlab(storage, e.slabID) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by getMapSlab(). + return nil, nil, nil, err + } + + // Adjust level and hkey for collision group. + level++ + if level > digester.Levels() { + return nil, nil, nil, NewHashLevelErrorf("external collision group digest level is %d, want <= %d", level, digester.Levels()) + } + hkey, _ := digester.Digest(level) + + // Search key in collision group with adjusted hkeyPrefix and hkey. + // Don't need to wrap error as external error because err is already categorized by MapSlab.getElementAndNextKey(). + return slab.getElementAndNextKey(storage, digester, level, hkey, comparator, key) +} + func (e *externalCollisionGroup) Get(storage SlabStorage, digester Digester, level uint, _ Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { slab, err := getMapSlab(storage, e.slabID) if err != nil { @@ -1329,10 +1417,15 @@ func (e *hkeyElements) Encode(enc *Encoder) error { return nil } -func (e *hkeyElements) Get(storage SlabStorage, digester Digester, level uint, hkey Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { +func (e *hkeyElements) getElement( + digester Digester, + level uint, + hkey Digest, + key Value, +) (element, int, error) { if level >= digester.Levels() { - return nil, nil, NewHashLevelErrorf("hkey elements digest level is %d, want < %d", level, digester.Levels()) + return nil, 0, NewHashLevelErrorf("hkey elements digest level is %d, want < %d", level, digester.Levels()) } // binary search by hkey @@ -1354,15 +1447,73 @@ func (e *hkeyElements) Get(storage SlabStorage, digester Digester, level uint, h // No matching hkey if equalIndex == -1 { - return nil, nil, NewKeyNotFoundError(key) + return nil, 0, NewKeyNotFoundError(key) } - elem := e.elems[equalIndex] + return e.elems[equalIndex], equalIndex, nil +} + +func (e *hkeyElements) Get(storage SlabStorage, digester Digester, level uint, hkey Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { + elem, _, err := e.getElement(digester, level, hkey, key) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by hkeyElements.getElement(). + return nil, nil, err + } // Don't need to wrap error as external error because err is already categorized by element.Get(). return elem.Get(storage, digester, level, hkey, comparator, key) } +func (e *hkeyElements) getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + hkey Digest, + comparator ValueComparator, + key Value, +) (MapKey, MapValue, MapKey, error) { + elem, index, err := e.getElement(digester, level, hkey, key) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by hkeyElements.getElement(). 
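The getElementAndNextKey methods being added throughout map.go return the matched element together with the key that follows it (nil when the element is the last one at that level). That "element plus next key" contract is what lets the mutable map iterator further down keep only the next key between steps. The snippet below is a simplified, standalone analogue of that idea, not atree code; the entry type and both functions are invented for illustration.

```go
type entry struct {
	key   string
	value int
}

// getAndNext returns the value stored under key and the key that follows it
// ("" when key is the last entry). entries are assumed to be sorted by key.
func getAndNext(entries []entry, key string) (value int, nextKey string, found bool) {
	for i, e := range entries {
		if e.key == key {
			if i+1 < len(entries) {
				nextKey = entries[i+1].key
			}
			return e.value, nextKey, true
		}
	}
	return 0, "", false
}

// iterate visits every entry while remembering only the next key, so the
// walk stays valid even if the backing slice is rebuilt between steps.
func iterate(entries []entry, firstKey string, visit func(string, int)) {
	for key := firstKey; key != ""; {
		v, next, ok := getAndNext(entries, key)
		if !ok {
			return
		}
		visit(key, v)
		key = next
	}
}
```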
+ return nil, nil, nil, err + } + + k, v, nk, err := elem.getElementAndNextKey(storage, digester, level, hkey, comparator, key) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by hkeyElements.get(). + return nil, nil, nil, err + } + + if nk != nil { + // Found next key in element group. + return k, v, nk, nil + } + + nextIndex := index + 1 + + switch { + case nextIndex < len(e.elems): + // Next element is still in the same hkeyElements group. + nextElement := e.elems[nextIndex] + + nextKey, err := firstKeyInElement(storage, nextElement) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by firstKeyInElement(). + return nil, nil, nil, err + } + + return k, v, nextKey, nil + + case nextIndex == len(e.elems): + // Next element is outside this hkeyElements group, so nextKey is nil. + return k, v, nil, nil + + default: // nextIndex > len(e.elems) + // This should never happen. + return nil, nil, nil, NewUnreachableError() + } +} + func (e *hkeyElements) Set( storage SlabStorage, address Address, @@ -1968,25 +2119,61 @@ func (e *singleElements) Encode(enc *Encoder) error { return nil } -func (e *singleElements) Get(storage SlabStorage, digester Digester, level uint, _ Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { +func (e *singleElements) get(storage SlabStorage, digester Digester, level uint, _ Digest, comparator ValueComparator, key Value) (MapKey, MapValue, int, error) { if level != digester.Levels() { - return nil, nil, NewHashLevelErrorf("single elements digest level is %d, want %d", level, digester.Levels()) + return nil, nil, 0, NewHashLevelErrorf("single elements digest level is %d, want %d", level, digester.Levels()) } // linear search by key - for _, elem := range e.elems { + for i, elem := range e.elems { equal, err := comparator(storage, key, elem.key) if err != nil { // Wrap err as external error (if needed) because err is returned by ValueComparator callback. - return nil, nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to compare keys") + return nil, nil, 0, wrapErrorfAsExternalErrorIfNeeded(err, "failed to compare keys") } if equal { - return elem.key, elem.value, nil + return elem.key, elem.value, i, nil } } - return nil, nil, NewKeyNotFoundError(key) + return nil, nil, 0, NewKeyNotFoundError(key) +} + +func (e *singleElements) Get(storage SlabStorage, digester Digester, level uint, hkey Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { + k, v, _, err := e.get(storage, digester, level, hkey, comparator, key) + return k, v, err +} + +func (e *singleElements) getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + hkey Digest, + comparator ValueComparator, + key Value, +) (MapKey, MapValue, MapKey, error) { + k, v, index, err := e.get(storage, digester, level, hkey, comparator, key) + if err != nil { + return nil, nil, nil, err + } + + nextIndex := index + 1 + + switch { + case nextIndex < len(e.elems): + // Next element is still in the same singleElements group. + nextKey := e.elems[nextIndex].key + return k, v, nextKey, nil + + case nextIndex == len(e.elems): + // Next element is outside this singleElements group, so nextKey is nil. + return k, v, nil, nil + + default: // nextIndex > len(e.elems) + // This should never happen. 
+ return nil, nil, nil, NewUnreachableError() + } } func (e *singleElements) Set( @@ -3818,7 +4005,7 @@ func (m *MapMetaDataSlab) ChildStorables() []Storable { return childIDs } -func (m *MapMetaDataSlab) Get(storage SlabStorage, digester Digester, level uint, hkey Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { +func (m *MapMetaDataSlab) getChildSlabByDigest(storage SlabStorage, hkey Digest, key Value) (MapSlab, int, error) { ans := -1 i, j := 0, len(m.childrenHeaders) @@ -3833,7 +4020,7 @@ func (m *MapMetaDataSlab) Get(storage SlabStorage, digester Digester, level uint } if ans == -1 { - return nil, nil, NewKeyNotFoundError(key) + return nil, 0, NewKeyNotFoundError(key) } childHeaderIndex := ans @@ -3842,7 +4029,15 @@ func (m *MapMetaDataSlab) Get(storage SlabStorage, digester Digester, level uint child, err := getMapSlab(storage, childID) if err != nil { - // Don't need to wrap error as external error because err is already categorized by getMapSlab(). + return nil, 0, err + } + + return child, childHeaderIndex, nil +} + +func (m *MapMetaDataSlab) Get(storage SlabStorage, digester Digester, level uint, hkey Digest, comparator ValueComparator, key Value) (MapKey, MapValue, error) { + child, _, err := m.getChildSlabByDigest(storage, hkey, key) + if err != nil { return nil, nil, err } @@ -3850,6 +4045,60 @@ func (m *MapMetaDataSlab) Get(storage SlabStorage, digester Digester, level uint return child.Get(storage, digester, level, hkey, comparator, key) } +func (m *MapMetaDataSlab) getElementAndNextKey( + storage SlabStorage, + digester Digester, + level uint, + hkey Digest, + comparator ValueComparator, + key Value, +) (MapKey, MapValue, MapKey, error) { + child, index, err := m.getChildSlabByDigest(storage, hkey, key) + if err != nil { + return nil, nil, nil, err + } + + k, v, nextKey, err := child.getElementAndNextKey(storage, digester, level, hkey, comparator, key) + if err != nil { + return nil, nil, nil, err + } + + if nextKey != nil { + // Next element is still in the same child slab. + return k, v, nextKey, nil + } + + // Next element is in the next child slab. + + nextIndex := index + 1 + + switch { + case nextIndex < len(m.childrenHeaders): + // Next element is in the next child of this MapMetaDataSlab. + nextChildID := m.childrenHeaders[nextIndex].slabID + + nextChild, err := getMapSlab(storage, nextChildID) + if err != nil { + return nil, nil, nil, err + } + + nextKey, err = firstKeyInMapSlab(storage, nextChild) + if err != nil { + return nil, nil, nil, err + } + + return k, v, nextKey, nil + + case nextIndex == len(m.childrenHeaders): + // Next element is outside this MapMetaDataSlab, so nextKey is nil. + return k, v, nil, nil + + default: // nextIndex > len(m.childrenHeaders) + // This should never happen. + return nil, nil, nil, NewUnreachableError() + } +} + func (m *MapMetaDataSlab) Set( storage SlabStorage, b DigesterBuilder, @@ -4853,6 +5102,92 @@ func (m *OrderedMap) get(comparator ValueComparator, hip HashInputProvider, key return m.root.Get(m.Storage, keyDigest, level, hkey, comparator, key) } +func (m *OrderedMap) getElementAndNextKey(comparator ValueComparator, hip HashInputProvider, key Value) (Value, Value, Value, error) { + + keyDigest, err := m.digesterBuilder.Digest(hip, key) + if err != nil { + // Wrap err as external error (if needed) because err is returned by DigesterBuilder interface. 
+ return nil, nil, nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to create map key digester") + } + defer putDigester(keyDigest) + + level := uint(0) + + hkey, err := keyDigest.Digest(level) + if err != nil { + // Wrap err as external error (if needed) because err is returned by Digesert interface. + return nil, nil, nil, wrapErrorfAsExternalErrorIfNeeded(err, fmt.Sprintf("failed to get map key digest at level %d", level)) + } + + keyStorable, valueStorable, nextKeyStorable, err := m.root.getElementAndNextKey(m.Storage, keyDigest, level, hkey, comparator, key) + if err != nil { + return nil, nil, nil, err + } + + k, err := keyStorable.StoredValue(m.Storage) + if err != nil { + // Wrap err as external error (if needed) because err is returned by Storable interface. + return nil, nil, nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get storable's stored value") + } + + v, err := valueStorable.StoredValue(m.Storage) + if err != nil { + // Wrap err as external error (if needed) because err is returned by Storable interface. + return nil, nil, nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get storable's stored value") + } + + var nextKey Value + if nextKeyStorable != nil { + nextKey, err = nextKeyStorable.StoredValue(m.Storage) + if err != nil { + // Wrap err as external error (if needed) because err is returned by Storable interface. + return nil, nil, nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get storable's stored value") + } + } + + // As a parent, this map (m) sets up notification callback with child + // value (v) so this map can be notified when child value is modified. + maxInlineSize := maxInlineMapValueSize(uint64(keyStorable.ByteSize())) + m.setCallbackWithChild(comparator, hip, key, v, maxInlineSize) + + return k, v, nextKey, nil +} + +func (m *OrderedMap) getNextKey(comparator ValueComparator, hip HashInputProvider, key Value) (Value, error) { + + keyDigest, err := m.digesterBuilder.Digest(hip, key) + if err != nil { + // Wrap err as external error (if needed) because err is returned by DigesterBuilder interface. + return nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to create map key digester") + } + defer putDigester(keyDigest) + + level := uint(0) + + hkey, err := keyDigest.Digest(level) + if err != nil { + // Wrap err as external error (if needed) because err is returned by Digesert interface. + return nil, wrapErrorfAsExternalErrorIfNeeded(err, fmt.Sprintf("failed to get map key digest at level %d", level)) + } + + _, _, nextKeyStorable, err := m.root.getElementAndNextKey(m.Storage, keyDigest, level, hkey, comparator, key) + if err != nil { + return nil, err + } + + if nextKeyStorable == nil { + return nil, nil + } + + nextKey, err := nextKeyStorable.StoredValue(m.Storage) + if err != nil { + // Wrap err as external error (if needed) because err is returned by Storable interface. 
+ return nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get storable's stored value") + } + + return nextKey, nil +} + func (m *OrderedMap) Set(comparator ValueComparator, hip HashInputProvider, key Value, value Value) (Storable, error) { storable, err := m.set(comparator, hip, key, value) if err != nil { @@ -5252,19 +5587,24 @@ func getMapSlab(storage SlabStorage, id SlabID) (MapSlab, error) { return mapSlab, nil } -func firstMapDataSlab(storage SlabStorage, slab MapSlab) (MapSlab, error) { - if slab.IsData() { +func firstMapDataSlab(storage SlabStorage, slab MapSlab) (*MapDataSlab, error) { + switch slab := slab.(type) { + case *MapDataSlab: return slab, nil + + case *MapMetaDataSlab: + firstChildID := slab.childrenHeaders[0].slabID + firstChild, err := getMapSlab(storage, firstChildID) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by getMapSlab(). + return nil, err + } + // Don't need to wrap error as external error because err is already categorized by firstMapDataSlab(). + return firstMapDataSlab(storage, firstChild) + + default: + return nil, NewUnreachableError() } - meta := slab.(*MapMetaDataSlab) - firstChildID := meta.childrenHeaders[0].slabID - firstChild, err := getMapSlab(storage, firstChildID) - if err != nil { - // Don't need to wrap error as external error because err is already categorized by getMapSlab(). - return nil, err - } - // Don't need to wrap error as external error because err is already categorized by firstMapDataSlab(). - return firstMapDataSlab(storage, firstChild) } func (m *MapExtraData) incrementCount() { @@ -5335,17 +5675,116 @@ func (i *mapElementIterator) next() (key MapKey, value MapValue, err error) { type MapEntryIterationFunc func(Value, Value) (resume bool, err error) type MapElementIterationFunc func(Value) (resume bool, err error) -type MapIterator struct { - m *OrderedMap - comparator ValueComparator - hip HashInputProvider - id SlabID - elemIterator *mapElementIterator +type MapIterator interface { + CanMutate() bool + Next() (Value, Value, error) + NextKey() (Value, error) + NextValue() (Value, error) +} + +type emptyMapIterator struct { + readOnly bool +} + +var _ MapIterator = &emptyMapIterator{} + +var emptyMutableMapIterator = &emptyMapIterator{readOnly: false} +var emptyReadOnlyMapIterator = &emptyMapIterator{readOnly: true} + +func (i *emptyMapIterator) CanMutate() bool { + return !i.readOnly +} + +func (*emptyMapIterator) Next() (Value, Value, error) { + return nil, nil, nil +} + +func (*emptyMapIterator) NextKey() (Value, error) { + return nil, nil +} + +func (*emptyMapIterator) NextValue() (Value, error) { + return nil, nil +} + +type mutableMapIterator struct { + m *OrderedMap + comparator ValueComparator + hip HashInputProvider + nextKey Value +} + +var _ MapIterator = &mutableMapIterator{} + +func (i *mutableMapIterator) CanMutate() bool { + return true +} + +func (i *mutableMapIterator) Next() (Value, Value, error) { + if i.nextKey == nil { + // No more elements. + return nil, nil, nil + } + + // Don't need to set up notification callback for v because + // getElementAndNextKey() returns value with notification already. + k, v, nk, err := i.m.getElementAndNextKey(i.comparator, i.hip, i.nextKey) + if err != nil { + return nil, nil, err + } + + i.nextKey = nk + + return k, v, nil +} + +func (i *mutableMapIterator) NextKey() (Value, error) { + if i.nextKey == nil { + // No more elements. 
+ return nil, nil + } + + key := i.nextKey + + nk, err := i.m.getNextKey(i.comparator, i.hip, key) + if err != nil { + return nil, err + } + + i.nextKey = nk + + return key, nil +} + +func (i *mutableMapIterator) NextValue() (Value, error) { + if i.nextKey == nil { + // No more elements. + return nil, nil + } + + // Don't need to set up notification callback for v because + // getElementAndNextKey() returns value with notification already. + _, v, nk, err := i.m.getElementAndNextKey(i.comparator, i.hip, i.nextKey) + if err != nil { + return nil, err + } + + i.nextKey = nk + + return v, nil +} + +type readOnlyMapIterator struct { + m *OrderedMap + nextDataSlabID SlabID + elemIterator *mapElementIterator } -func (i *MapIterator) Next() (key Value, value Value, err error) { +var _ MapIterator = &readOnlyMapIterator{} + +func (i *readOnlyMapIterator) Next() (key Value, value Value, err error) { if i.elemIterator == nil { - if i.id == SlabIDUndefined { + if i.nextDataSlabID == SlabIDUndefined { return nil, nil, nil } @@ -5375,11 +5814,6 @@ func (i *MapIterator) Next() (key Value, value Value, err error) { return nil, nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get map value's stored value") } - if i.CanMutate() { - maxInlineSize := maxInlineMapValueSize(uint64(ks.ByteSize())) - i.m.setCallbackWithChild(i.comparator, i.hip, key, value, maxInlineSize) - } - return key, value, nil } @@ -5389,9 +5823,9 @@ func (i *MapIterator) Next() (key Value, value Value, err error) { return i.Next() } -func (i *MapIterator) NextKey() (key Value, err error) { +func (i *readOnlyMapIterator) NextKey() (key Value, err error) { if i.elemIterator == nil { - if i.id == SlabIDUndefined { + if i.nextDataSlabID == SlabIDUndefined { return nil, nil } @@ -5424,9 +5858,9 @@ func (i *MapIterator) NextKey() (key Value, err error) { return i.NextKey() } -func (i *MapIterator) NextValue() (value Value, err error) { +func (i *readOnlyMapIterator) NextValue() (value Value, err error) { if i.elemIterator == nil { - if i.id == SlabIDUndefined { + if i.nextDataSlabID == SlabIDUndefined { return nil, nil } @@ -5437,8 +5871,8 @@ func (i *MapIterator) NextValue() (value Value, err error) { } } - var ks, vs Storable - ks, vs, err = i.elemIterator.next() + var vs Storable + _, vs, err = i.elemIterator.next() if err != nil { // Don't need to wrap error as external error because err is already categorized by MapElementIterator.Next(). return nil, err @@ -5450,17 +5884,6 @@ func (i *MapIterator) NextValue() (value Value, err error) { return nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get map value's stored value") } - if i.CanMutate() { - key, err := ks.StoredValue(i.m.Storage) - if err != nil { - // Wrap err as external error (if needed) because err is returned by Storable interface. - return nil, wrapErrorfAsExternalErrorIfNeeded(err, "failed to get map value's stored value") - } - - maxInlineSize := maxInlineMapValueSize(uint64(ks.ByteSize())) - i.m.setCallbackWithChild(i.comparator, i.hip, key, value, maxInlineSize) - } - return value, nil } @@ -5470,22 +5893,22 @@ func (i *MapIterator) NextValue() (value Value, err error) { return i.NextValue() } -func (i *MapIterator) advance() error { - slab, found, err := i.m.Storage.Retrieve(i.id) +func (i *readOnlyMapIterator) advance() error { + slab, found, err := i.m.Storage.Retrieve(i.nextDataSlabID) if err != nil { // Wrap err as external error (if needed) because err is returned by SlabStorage interface. 
- return wrapErrorfAsExternalErrorIfNeeded(err, fmt.Sprintf("failed to retrieve slab %s", i.id)) + return wrapErrorfAsExternalErrorIfNeeded(err, fmt.Sprintf("failed to retrieve slab %s", i.nextDataSlabID)) } if !found { - return NewSlabNotFoundErrorf(i.id, "slab not found during map iteration") + return NewSlabNotFoundErrorf(i.nextDataSlabID, "slab not found during map iteration") } dataSlab, ok := slab.(*MapDataSlab) if !ok { - return NewSlabDataErrorf("slab %s isn't MapDataSlab", i.id) + return NewSlabDataErrorf("slab %s isn't MapDataSlab", i.nextDataSlabID) } - i.id = dataSlab.next + i.nextDataSlabID = dataSlab.next i.elemIterator = &mapElementIterator{ storage: i.m.Storage, @@ -5495,50 +5918,72 @@ func (i *MapIterator) advance() error { return nil } -func (m *OrderedMap) iterator(comparator ValueComparator, hip HashInputProvider) (*MapIterator, error) { - slab, err := firstMapDataSlab(m.Storage, m.root) +func (i *readOnlyMapIterator) CanMutate() bool { + return false +} + +// Iterator returns mutable iterator for map elements. +// Mutable iterator handles: +// - indirect element mutation, such as modifying nested container +// - direct element mutation, such as overwriting existing element with new element +// Mutable iterator doesn't handle: +// - inserting new elements into the map +// - removing existing elements from the map +// NOTE: Use readonly iterator if mutation is not needed for better performance. +func (m *OrderedMap) Iterator(comparator ValueComparator, hip HashInputProvider) (MapIterator, error) { + if m.Count() == 0 { + return emptyMutableMapIterator, nil + } + + keyStorable, err := firstKeyInMapSlab(m.Storage, m.root) if err != nil { - // Don't need to wrap error as external error because err is already categorized by firstMapDataSlab(). + // Don't need to wrap error as external error because err is already categorized by firstKeyInMapSlab(). return nil, err } - dataSlab := slab.(*MapDataSlab) + if keyStorable == nil { + // This should never happen because m.Count() > 0. + return nil, NewSlabDataErrorf("failed to find first key in map while map count > 0") + } + + key, err := keyStorable.StoredValue(m.Storage) + if err != nil { + return nil, err + } - return &MapIterator{ + return &mutableMapIterator{ m: m, comparator: comparator, hip: hip, - id: dataSlab.next, - elemIterator: &mapElementIterator{ - storage: m.Storage, - elements: dataSlab.elements, - }, + nextKey: key, }, nil } -func (i *MapIterator) CanMutate() bool { - return i.comparator != nil && i.hip != nil -} +// ReadOnlyIterator returns readonly iterator for map elements. +// If elements are mutated, those changes are not guaranteed to persist. +// NOTE: Use readonly iterator if mutation is not needed for better performance. +func (m *OrderedMap) ReadOnlyIterator() (MapIterator, error) { + if m.Count() == 0 { + return emptyReadOnlyMapIterator, nil + } -func (m *OrderedMap) Iterator(comparator ValueComparator, hip HashInputProvider) (*MapIterator, error) { - iterator, err := m.iterator(comparator, hip) + dataSlab, err := firstMapDataSlab(m.Storage, m.root) if err != nil { + // Don't need to wrap error as external error because err is already categorized by firstMapDataSlab(). return nil, err } - if !iterator.CanMutate() { - return nil, NewUserError(fmt.Errorf("failed to create MapIterator: ValueComparator or HashInputProvider is nil")) - } - return iterator, nil -} -// ReadOnlyIterator returns readonly iterator for map elements. 
-// If elements of child containers are mutated, those changes -// are not guaranteed to persist. -func (m *OrderedMap) ReadOnlyIterator() (*MapIterator, error) { - return m.iterator(nil, nil) + return &readOnlyMapIterator{ + m: m, + nextDataSlabID: dataSlab.next, + elemIterator: &mapElementIterator{ + storage: m.Storage, + elements: dataSlab.elements, + }, + }, nil } -func iterateMap(iterator *MapIterator, fn MapEntryIterationFunc) error { +func iterateMap(iterator MapIterator, fn MapEntryIterationFunc) error { var err error var key, value Value for { @@ -5579,14 +6024,8 @@ func (m *OrderedMap) IterateReadOnly(fn MapEntryIterationFunc) error { return iterateMap(iterator, fn) } -func (m *OrderedMap) IterateReadOnlyKeys(fn MapElementIterationFunc) error { - - iterator, err := m.ReadOnlyIterator() - if err != nil { - // Don't need to wrap error as external error because err is already categorized by OrderedMap.Iterator(). - return err - } - +func iterateMapKeys(iterator MapIterator, fn MapElementIterationFunc) error { + var err error var key Value for { key, err = iterator.NextKey() @@ -5608,7 +6047,25 @@ func (m *OrderedMap) IterateReadOnlyKeys(fn MapElementIterationFunc) error { } } -func iterateMapValues(iterator *MapIterator, fn MapElementIterationFunc) error { +func (m *OrderedMap) IterateKeys(comparator ValueComparator, hip HashInputProvider, fn MapElementIterationFunc) error { + iterator, err := m.Iterator(comparator, hip) + if err != nil { + // Don't need to wrap error as external error because err is already categorized by OrderedMap.Iterator(). + return err + } + return iterateMapKeys(iterator, fn) +} + +func (m *OrderedMap) IterateReadOnlyKeys(fn MapElementIterationFunc) error { + iterator, err := m.ReadOnlyIterator() + if err != nil { + // Don't need to wrap error as external error because err is already categorized by OrderedMap.ReadOnlyIterator(). 
+ return err + } + return iterateMapKeys(iterator, fn) +} + +func iterateMapValues(iterator MapIterator, fn MapElementIterationFunc) error { var err error var value Value for { @@ -6313,3 +6770,49 @@ func (m *OrderedMap) IterateReadOnlyLoadedValues(fn MapEntryIterationFunc) error } } } + +func firstKeyInMapSlab(storage SlabStorage, slab MapSlab) (MapKey, error) { + dataSlab, err := firstMapDataSlab(storage, slab) + if err != nil { + return nil, err + } + return firstKeyInElements(storage, dataSlab.elements) +} + +func firstKeyInElements(storage SlabStorage, elems elements) (MapKey, error) { + switch elements := elems.(type) { + case *hkeyElements: + if len(elements.elems) == 0 { + return nil, nil + } + firstElem := elements.elems[0] + return firstKeyInElement(storage, firstElem) + + case *singleElements: + if len(elements.elems) == 0 { + return nil, nil + } + firstElem := elements.elems[0] + return firstElem.key, nil + + default: + return nil, NewUnreachableError() + } +} + +func firstKeyInElement(storage SlabStorage, elem element) (MapKey, error) { + switch elem := elem.(type) { + case *singleElement: + return elem.key, nil + + case elementGroup: + group, err := elem.Elements(storage) + if err != nil { + return nil, err + } + return firstKeyInElements(storage, group) + + default: + return nil, NewUnreachableError() + } +} diff --git a/map_test.go b/map_test.go index dccfd4bc..ba8ca3a0 100644 --- a/map_test.go +++ b/map_test.go @@ -1107,7 +1107,7 @@ func TestMapRemove(t *testing.T) { }) } -func TestMapIterate(t *testing.T) { +func TestReadOnlyMapIterate(t *testing.T) { t.Run("empty", func(t *testing.T) { @@ -1303,16 +1303,4504 @@ func TestMapIterate(t *testing.T) { testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) }) +} + +func TestMutableMapIterate(t *testing.T) { + + t.Run("empty", func(t *testing.T) { + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + // Iterate key value pairs + i := 0 + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + i++ + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, 0, i) + + testMap(t, storage, typeInfo, address, m, mapValue{}, nil, false) + }) + + t.Run("mutate primitive values, root is data slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + newValue := v.(Uint64Value) * 2 + + existingStorable, err := m.Set(compare, 
hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 25 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + sortedKeys[i] = k + keyValues[k] = v + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + newValue := v.(Uint64Value) * 2 + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is data slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + r := 'a' + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + newValue := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + 
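+		// Root slab type changed: replacing the Uint64 values with 25-byte string values pushed the root data slab over the 256-byte threshold, so it split and the root is now a metadata slab.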
+ testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 25 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + r := 'a' + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + newValue := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, merge slabs", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 10 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + r := 'a' + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + r++ + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + newValue := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate collision primitive values, 1 level", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := 
Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i * 2) + + digests := []Digest{ + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + newValue := v.(Uint64Value) / 2 + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, keyValues[k], existingValue) + + i++ + keyValues[k] = newValue + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate collision primitive values, 4 levels", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i * 2) + + digests := []Digest{ + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + newValue := v.(Uint64Value) / 2 + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, keyValues[k], existingValue) + + i++ + keyValues[k] = newValue + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate inlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, 
mapSize) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := Uint64Value(i) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (updating elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[childKey] = childNewValue + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 35 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := Uint64Value(i) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (updating elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, 
ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[childKey] = childNewValue + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is data slab, split slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 1 + mutatedChildMapSize = 5 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, split slab", func(t *testing.T) { + const ( + mapSize = 35 + childMapSize = 1 + mutatedChildMapSize = 5 + ) + + typeInfo := 
testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, merge slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 10 + mutatedChildMapSize = 1 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + 
require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize - 1; j >= mutatedChildMapSize; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate collision inlined container, 1 level", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + ck := Uint64Value(0) + cv := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + digests := []Digest{ + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + 
require.True(t, childMap.Inlined()) + + expectedChildMapValues[childKey] = childNewValue + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate collision inlined container, 4 levels", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + ck := Uint64Value(0) + cv := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + digests := []Digest{ + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues[childKey] = childNewValue + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate inlined container", func(t *testing.T) { + const ( + mapSize = 15 + valueStringSize = 16 + ) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + i := uint64(0) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := NewStringValue(randStr(r, valueStringSize)) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + sortedKeys[i] = k + + existingStorable, err = m.Set(compare, 
hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i = uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + newChildMapKey := Uint64Value(1) // Previous key is 0 + newChildMapValue := NewStringValue(randStr(r, valueStringSize)) + + existingStorable, err := childMap.Set(compare, hashInputProvider, newChildMapKey, newChildMapValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(2), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[newChildMapKey] = newChildMapValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Iterate and mutate child map (removing elements) + i = uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(2), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + // Remove key 0 + ck := Uint64Value(0) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, ck) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + delete(expectedChildMapValues, ck) + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("uninline inlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 1 + mutatedChildMapSize = 35 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := 
m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("uninline inlined container, root is metadata slab, merge slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 5 + mutatedChildMapSize = 35 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := 
keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("inline uninlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 35 + mutatedChildMapSize = 1 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize - 1; j > mutatedChildMapSize-1; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("inline uninlined container, root is data slab, split slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 35 + mutatedChildMapSize = 10 + ) + + typeInfo := testTypeInfo{42} + 
address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize - 1; j > mutatedChildMapSize-1; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) +} + +func TestMutableMapIterateKeys(t *testing.T) { + + t.Run("empty", func(t *testing.T) { + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + i := 0 + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + i++ + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, 0, i) + + testMap(t, storage, typeInfo, address, m, mapValue{}, nil, false) + }) + + t.Run("mutate primitive values, root is data slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < 
mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + + v := keyValues[k] + newValue := v.(Uint64Value) * 2 + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 25 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + sortedKeys[i] = k + keyValues[k] = v + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + + v := keyValues[k] + newValue := v.(Uint64Value) * 2 + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is data slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + r := 'a' 
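+		// Overwrite each value with a 25-byte string while iterating keys; the larger values push the root data slab over the 256-byte threshold and force it to split, leaving a metadata slab at the root.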
+ err = m.IterateKeys(compare, hashInputProvider, func(k Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + + v := keyValues[k] + newValue := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 25 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + r := 'a' + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + + v := keyValues[k] + newValue := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, merge slabs", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 10 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + r := 'a' + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + r++ + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (bool, error) { + valueEqual(t, sortedKeys[i], k) + + v := keyValues[k] + newValue := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := 
existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate collision primitive values, 1 level", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i * 2) + + digests := []Digest{ + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v := keyValues[k] + newValue := v.(Uint64Value) / 2 + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, keyValues[k], existingValue) + + i++ + keyValues[k] = newValue + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate collision primitive values, 4 levels", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i * 2) + + digests := []Digest{ + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v := keyValues[k] + newValue := v.(Uint64Value) / 2 + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, keyValues[k], existingValue) + + i++ + keyValues[k] = newValue + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + 
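(Editorial aside, not part of the patch: the collision subtests above and the inlined-container subtests below all exercise the same mutate-during-iteration pattern — keys are visited in digest order via IterateKeys, and the callback either overwrites the visited entry with Set or mutates the inlined child map it retrieves, asserting the returned storable of the replaced value. Below is a minimal, condensed sketch of that pattern under the assumption that it lives in this same test file, so it can reuse the helpers already used throughout the diff (newTestPersistentStorage, compare, hashInputProvider, testTypeInfo, Uint64Value, require); the function name doubleValuesDuringIteration is illustrative only.)

// doubleValuesDuringIteration is an illustrative sketch (hypothetical name):
// it seeds a map with primitive values, then overwrites each visited key's
// value from inside the IterateKeys callback, as the surrounding subtests do.
func doubleValuesDuringIteration(t *testing.T) {
	typeInfo := testTypeInfo{42}
	address := Address{1, 2, 3, 4, 5, 6, 7, 8}
	storage := newTestPersistentStorage(t)

	m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo)
	require.NoError(t, err)

	// Seed the map with a few primitive values.
	for i := uint64(0); i < 10; i++ {
		_, err := m.Set(compare, hashInputProvider, Uint64Value(i), Uint64Value(i))
		require.NoError(t, err)
	}

	// Mutate each visited entry from inside the iteration callback.
	err = m.IterateKeys(compare, hashInputProvider, func(k Value) (bool, error) {
		old, err := m.Get(compare, hashInputProvider, k)
		require.NoError(t, err)

		newValue := old.(Uint64Value) * 2

		// Set returns the overwritten value's storable; it must be non-nil
		// here because every visited key already exists in the map.
		existingStorable, err := m.Set(compare, hashInputProvider, k, newValue)
		require.NoError(t, err)
		require.NotNil(t, existingStorable)

		return true, nil // resume iteration
	})
	require.NoError(t, err)
}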
t.Run("mutate inlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := Uint64Value(i) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (updating elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[childKey] = childNewValue + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 35 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := Uint64Value(i) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + require.Equal(t, 
uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (updating elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[childKey] = childNewValue + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is data slab, split slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 1 + mutatedChildMapSize = 5 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, 
existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, split slab", func(t *testing.T) { + const ( + mapSize = 35 + childMapSize = 1 + mutatedChildMapSize = 5 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, merge slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 10 + mutatedChildMapSize = 1 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, 
mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize - 1; j >= mutatedChildMapSize; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate collision inlined container, 1 level", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + ck := Uint64Value(0) + cv := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + digests := []Digest{ + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = 
m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues[childKey] = childNewValue + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate collision inlined container, 4 levels", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + ck := Uint64Value(0) + cv := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + digests := []Digest{ + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues[childKey] = childNewValue + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate inlined container", func(t *testing.T) { + const ( + mapSize = 15 + valueStringSize = 16 + ) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := 
newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + i := uint64(0) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := NewStringValue(randStr(r, valueStringSize)) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + sortedKeys[i] = k + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i = uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + newChildMapKey := Uint64Value(1) // Previous key is 0 + newChildMapValue := NewStringValue(randStr(r, valueStringSize)) + + existingStorable, err := childMap.Set(compare, hashInputProvider, newChildMapKey, newChildMapValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(2), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[newChildMapKey] = newChildMapValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Iterate and mutate child map (removing elements) + i = uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(2), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + // Remove key 0 + ck := Uint64Value(0) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, ck) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + delete(expectedChildMapValues, ck) + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("uninline inlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 1 + mutatedChildMapSize = 35 + ) + + typeInfo := testTypeInfo{42} + address := 
Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("uninline inlined container, root is metadata slab, merge slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 5 + mutatedChildMapSize = 35 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + 
require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("inline uninlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 35 + mutatedChildMapSize = 1 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + 
require.True(t, ok) + + for j := childMapSize - 1; j > mutatedChildMapSize-1; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("inline uninlined container, root is data slab, split slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 35 + mutatedChildMapSize = 10 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateKeys(compare, hashInputProvider, func(k Value) (resume bool, err error) { + valueEqual(t, sortedKeys[i], k) + + v, err := m.Get(compare, hashInputProvider, k) + require.NoError(t, err) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize - 1; j > mutatedChildMapSize-1; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) +} + +func TestMutableMapIterateValues(t *testing.T) { + + t.Run("empty", func(t *testing.T) { + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 
7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + i := 0 + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + i++ + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, 0, i) + + testMap(t, storage, typeInfo, address, m, mapValue{}, nil, false) + }) + + t.Run("mutate primitive values, root is data slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.IterateValues(compare, hashInputProvider, func(v Value) (bool, error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + newValue := v.(Uint64Value) * 2 + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, no slab operation", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 25 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + sortedKeys[i] = k + keyValues[k] = v + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.IterateValues(compare, hashInputProvider, func(v Value) (bool, error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + newValue := v.(Uint64Value) * 2 + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) 
+ }) + + t.Run("mutate primitive values, root is data slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 15 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + r := 'a' + err = m.IterateValues(compare, hashInputProvider, func(v Value) (bool, error) { + k := sortedKeys[i] + + valueEqual(t, sortedKeys[i], k) + valueEqual(t, keyValues[k], v) + + newValue := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, split slab", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 25 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + r := 'a' + err = m.IterateValues(compare, hashInputProvider, func(v Value) (bool, error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + newValue := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate primitive values, root is metadata slab, merge slabs", func(t *testing.T) { + SetThreshold(256) + defer SetThreshold(1024) + + const mapSize = 10 + + typeInfo := testTypeInfo{42} + storage := newTestPersistentStorage(t) + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + 
digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + r := 'a' + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := NewStringValue(strings.Repeat(string(r), 25)) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + r++ + keyValues[k] = v + sortedKeys[i] = k + } + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + i := 0 + err = m.IterateValues(compare, hashInputProvider, func(v Value) (bool, error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + newValue := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, v, existingValue) + + keyValues[k] = newValue + + i++ + r++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, mapSize, i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate collision primitive values, 1 level", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i * 2) + + digests := []Digest{ + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + newValue := v.(Uint64Value) / 2 + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, keyValues[k], existingValue) + + i++ + keyValues[k] = newValue + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate collision primitive values, 4 levels", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + k := Uint64Value(i) + v := Uint64Value(i * 2) + + digests := []Digest{ + Digest(r.Intn(256)), + 
Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err := m.Set(compare, hashInputProvider, k, v) + require.NoError(t, err) + require.Nil(t, existingStorable) + + keyValues[k] = v + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + newValue := v.(Uint64Value) / 2 + existingStorable, err := m.Set(compare, hashInputProvider, k, newValue) + require.NoError(t, err) + + existingValue, err := existingStorable.StoredValue(storage) + require.NoError(t, err) + require.Equal(t, keyValues[k], existingValue) + + i++ + keyValues[k] = newValue + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate inlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := Uint64Value(i) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (updating elements) + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[childKey] = childNewValue + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, no slab operation", func(t *testing.T) { + const ( + mapSize 
= 35 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := Uint64Value(i) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (updating elements) + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[childKey] = childNewValue + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is data slab, split slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 1 + mutatedChildMapSize = 5 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), 
m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, split slab", func(t *testing.T) { + const ( + mapSize = 35 + childMapSize = 1 + mutatedChildMapSize = 5 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + 
expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate inlined container, root is metadata slab, merge slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 10 + mutatedChildMapSize = 1 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.False(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize - 1; j >= mutatedChildMapSize; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("mutate collision inlined container, 1 level", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + childMap, err := NewMap(storage, address, 
newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + ck := Uint64Value(0) + cv := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + digests := []Digest{ + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + childKey := Uint64Value(0) + childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues[childKey] = childNewValue + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate collision inlined container, 4 levels", func(t *testing.T) { + const ( + mapSize = 1024 + ) + + r := newRand(t) + + digesterBuilder := &mockDigesterBuilder{} + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + ck := Uint64Value(0) + cv := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + + digests := []Digest{ + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + Digest(r.Intn(256)), + } + digesterBuilder.On("Digest", k).Return(mockDigester{digests}) + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + sortedKeys[i] = k + } + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate key value pairs + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + childKey := Uint64Value(0) + 
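+ // Overwrite the child map's only entry (key 0); Set should return the previous storable because the key already exists.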
childNewValue := Uint64Value(i) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childNewValue) + require.NoError(t, err) + require.NotNil(t, existingStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues[childKey] = childNewValue + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, i, uint64(mapSize)) + + testMap(t, storage, typeInfo, address, m, keyValues, sortedKeys, false) + }) + + t.Run("mutate inlined container", func(t *testing.T) { + const ( + mapSize = 15 + valueStringSize = 16 + ) + + r := newRand(t) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + i := uint64(0) + for i := 0; i < mapSize; i++ { + ck := Uint64Value(0) + cv := NewStringValue(randStr(r, valueStringSize)) + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + k := Uint64Value(i) + sortedKeys[i] = k + + existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = mapValue{ck: cv} + } + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i = uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + newChildMapKey := Uint64Value(1) // Previous key is 0 + newChildMapValue := NewStringValue(randStr(r, valueStringSize)) + + existingStorable, err := childMap.Set(compare, hashInputProvider, newChildMapKey, newChildMapValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(2), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + expectedChildMapValues[newChildMapKey] = newChildMapValue + + i++ + + return true, nil + }) + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Iterate and mutate child map (removing elements) + i = uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(2), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + // Remove key 0 + ck := Uint64Value(0) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, ck) + 
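+ // Both returned storables should be non-nil because key 0 exists in every child map.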
require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + require.Equal(t, uint64(1), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + delete(expectedChildMapValues, ck) + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("uninline inlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 1 + mutatedChildMapSize = 35 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) - t.Run("mutation", func(t *testing.T) { - const ( - mapSize = 15 - valueStringSize = 16 - ) + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) - r := newRand(t) + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] - elementSize := digestSize + singleElementPrefixSize + Uint64Value(0).ByteSize() + NewStringValue(randStr(r, valueStringSize)).ByteSize() + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) + + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) + + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("uninline inlined container, root is metadata slab, merge slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 5 + mutatedChildMapSize = 35 + ) typeInfo := testTypeInfo{42} address := Address{1, 2, 3, 4, 5, 6, 7, 8} @@ -1323,109 +5811,271 @@ func TestMapIterate(t 
*testing.T) { require.NoError(t, err) keyValues := make(map[Value]Value, mapSize) - sortedKeys := make([]Value, 0, mapSize) - i := uint64(0) + sortedKeys := make([]Value, mapSize) for i := 0; i < mapSize; i++ { - ck := Uint64Value(0) - cv := NewStringValue(randStr(r, valueStringSize)) childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) require.NoError(t, err) - existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) - require.NoError(t, err) - require.Nil(t, existingStorable) + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } k := Uint64Value(i) - sortedKeys = append(sortedKeys, k) - existingStorable, err = m.Set(compare, hashInputProvider, k, childMap) + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) require.NoError(t, err) require.Nil(t, existingStorable) - - require.Equal(t, uint64(1), childMap.Count()) + require.Equal(t, uint64(childMapSize), childMap.Count()) require.True(t, childMap.Inlined()) - keyValues[k] = mapValue{ck: cv} + keyValues[k] = childMapValues + sortedKeys[i] = k } + require.Equal(t, uint64(mapSize), m.Count()) - require.True(t, m.root.IsData()) + require.False(t, m.root.IsData()) testMap(t, storage, typeInfo, address, m, keyValues, nil, false) // Sort keys by digest sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) - sizeBeforeMutation := m.root.Header().size - // Iterate and mutate child map (inserting elements) - i = uint64(0) - err = m.Iterate(compare, hashInputProvider, func(k Value, v Value) (resume bool, err error) { + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) childMap, ok := v.(*OrderedMap) require.True(t, ok) - require.Equal(t, uint64(1), childMap.Count()) + require.Equal(t, uint64(childMapSize), childMap.Count()) require.True(t, childMap.Inlined()) - newChildMapKey := Uint64Value(1) // Previous key is 0 - newChildMapValue := NewStringValue(randStr(r, valueStringSize)) - - existingStorable, err := childMap.Set(compare, hashInputProvider, newChildMapKey, newChildMapValue) - require.NoError(t, err) - require.Nil(t, existingStorable) - expectedChildMapValues, ok := keyValues[k].(mapValue) require.True(t, ok) - expectedChildMapValues[newChildMapKey] = newChildMapValue + for j := childMapSize; j < mutatedChildMapSize; j++ { + childKey := Uint64Value(j) + childValue := Uint64Value(j) - valueEqual(t, sortedKeys[i], k) - valueEqual(t, keyValues[k], v) - i++ + existingStorable, err := childMap.Set(compare, hashInputProvider, childKey, childValue) + require.NoError(t, err) + require.Nil(t, existingStorable) - require.Equal(t, m.root.Header().size, sizeBeforeMutation+uint32(i)*elementSize) + expectedChildMapValues[childKey] = childValue + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + i++ return true, nil }) require.NoError(t, err) require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("inline uninlined container, root is data slab, no slab operation", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 35 + mutatedChildMapSize = 1 + ) - sizeAfterInsertionMutation := 
m.root.Header().size + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() - // Iterate and mutate child map (removing elements) - i = uint64(0) + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + childMap, ok := v.(*OrderedMap) require.True(t, ok) - require.Equal(t, uint64(2), childMap.Count()) - require.True(t, childMap.Inlined()) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) - // Remove key 0 - ck := Uint64Value(0) + expectedChildMapValues, ok := keyValues[k].(mapValue) + require.True(t, ok) - existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, ck) - require.NoError(t, err) - require.NotNil(t, existingKeyStorable) - require.NotNil(t, existingValueStorable) + for j := childMapSize - 1; j > mutatedChildMapSize-1; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) i++ - require.Equal(t, m.root.Header().size, sizeAfterInsertionMutation-uint32(i)*elementSize) return true, nil }) require.NoError(t, err) require.Equal(t, uint64(mapSize), i) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + }) + + t.Run("inline uninlined container, root is data slab, split slab", func(t *testing.T) { + const ( + mapSize = 15 + childMapSize = 35 + mutatedChildMapSize = 10 + ) + + typeInfo := testTypeInfo{42} + address := Address{1, 2, 3, 4, 5, 6, 7, 8} + storage := newTestPersistentStorage(t) + digesterBuilder := newBasicDigesterBuilder() + + m, err := NewMap(storage, address, digesterBuilder, typeInfo) + require.NoError(t, err) + + keyValues := make(map[Value]Value, mapSize) + sortedKeys := make([]Value, mapSize) + for i := 0; i < mapSize; i++ { + + childMap, err := NewMap(storage, address, 
newBasicDigesterBuilder(), typeInfo) + require.NoError(t, err) + + childMapValues := make(mapValue) + for j := 0; j < childMapSize; j++ { + ck := Uint64Value(j) + cv := Uint64Value(j) + + existingStorable, err := childMap.Set(compare, hashInputProvider, ck, cv) + require.NoError(t, err) + require.Nil(t, existingStorable) + + childMapValues[ck] = cv + } + + k := Uint64Value(i) + + existingStorable, err := m.Set(compare, hashInputProvider, k, childMap) + require.NoError(t, err) + require.Nil(t, existingStorable) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) + + keyValues[k] = childMapValues + sortedKeys[i] = k + } + + require.Equal(t, uint64(mapSize), m.Count()) + require.True(t, m.root.IsData()) + + testMap(t, storage, typeInfo, address, m, keyValues, nil, false) + + // Sort keys by digest + sort.Stable(keysByDigest{sortedKeys, digesterBuilder}) + + // Iterate and mutate child map (inserting elements) + i := uint64(0) + err = m.IterateValues(compare, hashInputProvider, func(v Value) (resume bool, err error) { + k := sortedKeys[i] + + valueEqual(t, keyValues[k], v) + + childMap, ok := v.(*OrderedMap) + require.True(t, ok) + require.Equal(t, uint64(childMapSize), childMap.Count()) + require.False(t, childMap.Inlined()) - for k := range keyValues { expectedChildMapValues, ok := keyValues[k].(mapValue) require.True(t, ok) - delete(expectedChildMapValues, Uint64Value(0)) - } + for j := childMapSize - 1; j > mutatedChildMapSize-1; j-- { + childKey := Uint64Value(j) + + existingKeyStorable, existingValueStorable, err := childMap.Remove(compare, hashInputProvider, childKey) + require.NoError(t, err) + require.NotNil(t, existingKeyStorable) + require.NotNil(t, existingValueStorable) + + delete(expectedChildMapValues, childKey) + } + + require.Equal(t, uint64(mutatedChildMapSize), childMap.Count()) + require.True(t, childMap.Inlined()) + + i++ + + return true, nil + }) + + require.NoError(t, err) + require.Equal(t, uint64(mapSize), i) + require.False(t, m.root.IsData()) testMap(t, storage, typeInfo, address, m, keyValues, nil, false) }) @@ -8075,7 +12725,7 @@ func TestEmptyMap(t *testing.T) { require.Nil(t, existingMapValueStorable) }) - t.Run("iterate", func(t *testing.T) { + t.Run("readonly iterate", func(t *testing.T) { i := 0 err := m.IterateReadOnly(func(k Value, v Value) (bool, error) { i++ @@ -8085,6 +12735,16 @@ func TestEmptyMap(t *testing.T) { require.Equal(t, 0, i) }) + t.Run("iterate", func(t *testing.T) { + i := 0 + err := m.Iterate(compare, hashInputProvider, func(k Value, v Value) (bool, error) { + i++ + return true, nil + }) + require.NoError(t, err) + require.Equal(t, 0, i) + }) + t.Run("count", func(t *testing.T) { count := m.Count() require.Equal(t, uint64(0), count)
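+ // An empty map reports a count of zero, matching the empty results from the iterate subtests above.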