feat:blob decode #113

Merged (1 commit) on Feb 17, 2024
2 changes: 1 addition & 1 deletion hildr-node/build.gradle
@@ -206,7 +206,7 @@ jacocoTestCoverageVerification {
violationRules {
rule {
limit {
minimum = 0
minimum = 0.2
}
}
}
27 changes: 26 additions & 1 deletion hildr-utilities/build.gradle
@@ -106,6 +106,10 @@ dependencies {

test {
useJUnitPlatform()
testLogging {
events "passed", "skipped", "failed"
}
finalizedBy jacocoTestReport
}

//checkstyle {
@@ -130,12 +134,33 @@ jacocoTestCoverageVerification {
violationRules {
rule {
limit {
minimum = 0
minimum = 0.3
}
}
}
}

tasks.named('test') {
// Use JUnit Platform for unit tests.
useJUnitPlatform()
}

check {
dependsOn += jacocoTestCoverageVerification
// dependsOn += integrationTest
}

tasks.withType(Test).configureEach {
def outputDir = reports.junitXml.outputLocation
jvmArgumentProviders << ({
[
"-Djunit.platform.reporting.open.xml.enabled=true",
"-Djunit.platform.reporting.output.dir=${outputDir.get().asFile.absolutePath}",
"--enable-preview"
]
} as CommandLineArgumentProvider)
}

spotless {
// optional: limit format enforcement to just the files changed by this feature branch
// ratchetFrom 'origin/main'
125 changes: 125 additions & 0 deletions hildr-utilities/src/main/java/io/optimism/utilities/BlobCodec.java
@@ -0,0 +1,125 @@
package io.optimism.utilities;

import org.apache.commons.lang3.ArrayUtils;

/**
* The BlobCodec class decodes data blobs (encoding version 0) back into the raw payload they carry.
*
* @author grapebaba
* @since 0.2.6
*/
public final class BlobCodec {

private static final int BLOB_SIZE = 4096 * 32;
private static final int MAX_BLOB_DATA_SIZE = (4 * 31 + 3) * 1024 - 4;
private static final int ENCODING_VERSION = 0;
private static final int VERSION_OFFSET = 1; // offset of the version byte in the blob encoding
private static final int ROUNDS = 1024;

/**
* Instantiates a new Blob codec.
*/
public BlobCodec() {}

/**
* Decodes an encoded blob into its payload.
*
* @param blob the encoded blob (4096 field elements of 32 bytes each)
* @return the decoded payload bytes
*/
public static byte[] decode(byte[] blob) {
// check the version
if (blob[VERSION_OFFSET] != ENCODING_VERSION) {
throw new IllegalArgumentException("invalid encoding version: expected version %d, got %d"
.formatted(ENCODING_VERSION, blob[VERSION_OFFSET]));
}

// decode the 3-byte big-endian length value into a 4-byte integer,
// masking each byte to avoid sign extension of Java's signed bytes
int outputLength = (blob[2] & 0xFF) << 16 | (blob[3] & 0xFF) << 8 | (blob[4] & 0xFF);
if (outputLength > MAX_BLOB_DATA_SIZE) {
throw new IllegalArgumentException("invalid length for blob: output length %d exceeds maximum %d"
.formatted(outputLength, MAX_BLOB_DATA_SIZE));
}

// round 0 is special cased to copy only the remaining 27 bytes of the first field element into
// the output due to version/length encoding already occupying its first 5 bytes.
byte[] output = new byte[MAX_BLOB_DATA_SIZE];
System.arraycopy(blob, 5, output, 0, 27);

// now process remaining 3 field elements to complete round 0
int opos = 28; // current position into output buffer
int ipos = 32; // current position into the input blob

byte[] encoded = new byte[4];
encoded[0] = blob[0];
for (int i = 1; i < 4; i++) {
FieldElementMeta fieldElementMeta = decodeFieldElement(blob, output, opos, ipos);
encoded[i] = fieldElementMeta.first;
opos = fieldElementMeta.opos;
ipos = fieldElementMeta.ipos;
}

opos = reassembleBytes(opos, encoded, output);

// in each remaining round we decode 4 field elements (128 bytes) of the input into 127 bytes
// of output
for (int i = 1; i < ROUNDS && opos < outputLength; i++) {
for (int j = 0; j < 4; j++) {
// save the first byte of each field element for later re-assembly
FieldElementMeta fieldElementMeta = decodeFieldElement(blob, output, opos, ipos);
encoded[j] = fieldElementMeta.first;
opos = fieldElementMeta.opos;
ipos = fieldElementMeta.ipos;
}
opos = reassembleBytes(opos, encoded, output);
}

for (int i = outputLength; i < output.length; i++) {
if (output[i] != 0) {
throw new IllegalArgumentException(
"fe=%d: non-zero data encountered where field element should be empty".formatted(opos / 32));
}
}

output = ArrayUtils.subarray(output, 0, outputLength);
for (; ipos < BLOB_SIZE; ipos++) {
if (blob[ipos] != 0) {
throw new IllegalArgumentException(
"pos=%d: non-zero data encountered where blob should be empty".formatted(ipos));
}
}

return output;
}

/**
* Intermediate result of decoding one field element.
*
* @param first the first byte of the field element, kept for later re-assembly
* @param opos the updated position in the output buffer
* @param ipos the updated position in the input blob
*/
public record FieldElementMeta(byte first, int opos, int ipos) {}

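// decodeFieldElement copies the trailing 31 bytes of the 32-byte field element starting at ipos
// into the output buffer and returns the element's first byte so its packed bits can be re-assembled later.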
private static FieldElementMeta decodeFieldElement(byte[] blob, byte[] output, int opos, int ipos) {
// two highest order bits of the first byte of each field element should always be 0
if ((blob[ipos] & 0b1100_0000) != 0) {
throw new IllegalArgumentException("invalid field element: field element: %d".formatted(ipos));
}

System.arraycopy(blob, ipos + 1, output, opos, 31);
return new FieldElementMeta(blob[ipos], opos + 32, ipos + 32);
}

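// reassembleBytes rebuilds the three payload bytes that the encoding spreads across the six
// usable low-order bits of each round's four field-element first bytes (4 x 6 bits = 3 bytes).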
private static int reassembleBytes(int opos, byte[] encoded, byte[] output) {
opos--;
byte x = (byte) ((encoded[0] & 0b0011_1111) | ((encoded[1] & 0b0011_0000) << 2));
byte y = (byte) ((encoded[1] & 0b0000_1111) | ((encoded[3] & 0b0000_1111) << 4));
byte z = (byte) ((encoded[2] & 0b0011_1111) | ((encoded[3] & 0b0011_0000) << 2));
// put the re-assembled bytes in their appropriate output locations
output[opos - 32] = z;
output[opos - (32 * 2)] = y;
output[opos - (32 * 3)] = x;
return opos;
}
}
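A note on the layout decode relies on: in the first field element, index 0 carries packed bits for reassembleBytes, index 1 the version byte, indices 2-4 a 3-byte big-endian payload length, and indices 5-31 the first 27 payload bytes. Below is a minimal sketch of exercising BlobCodec.decode with a hand-built blob whose payload fits entirely in those 27 bytes (illustrative only; the BlobDecodeExample class is hypothetical and this PR does not include an encoder):

import java.nio.charset.StandardCharsets;
import io.optimism.utilities.BlobCodec;

class BlobDecodeExample {
    public static void main(String[] args) {
        byte[] payload = "hello blob".getBytes(StandardCharsets.UTF_8); // 10 bytes, fits in round 0
        byte[] blob = new byte[4096 * 32]; // one blob: 4096 field elements of 32 bytes, zero-filled
        blob[1] = 0; // encoding version 0
        blob[2] = (byte) (payload.length >> 16); // 3-byte big-endian length
        blob[3] = (byte) (payload.length >> 8);
        blob[4] = (byte) payload.length;
        System.arraycopy(payload, 0, blob, 5, payload.length); // payload begins at offset 5
        String decoded = new String(BlobCodec.decode(blob), StandardCharsets.UTF_8);
        System.out.println(decoded); // prints "hello blob"
    }
}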
67 changes: 67 additions & 0 deletions hildr-utilities/src/test/java/io/optimism/utilities/BlobCodecTest.java
@@ -0,0 +1,67 @@
package io.optimism.utilities;

import static org.junit.jupiter.api.Assertions.assertEquals;

import com.google.common.base.Charsets;
import com.google.common.io.Resources;
import java.io.IOException;
import java.net.URL;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.web3j.utils.Numeric;

/**
* The type Blob codec test.
*
* @author grapebaba
* @since 0.2.6
*/
class BlobCodecTest {

@Test
@DisplayName("test decode blob.")
void testBlobCodec() throws IOException {
URL url = Resources.getResource("blob1.txt");
String origin = Resources.toString(url, Charsets.UTF_8);

assertEquals(
"this is a test of blob encoding/decoding",
new String(BlobCodec.decode(Numeric.hexStringToByteArray(origin)), Charsets.UTF_8));

URL url1 = Resources.getResource("blob2.txt");
String origin1 = Resources.toString(url1, Charsets.UTF_8);
assertEquals("short", new String(BlobCodec.decode(Numeric.hexStringToByteArray(origin1)), Charsets.UTF_8));

URL url2 = Resources.getResource("blob3.txt");
String origin2 = Resources.toString(url2, Charsets.UTF_8);
assertEquals("\u0000", new String(BlobCodec.decode(Numeric.hexStringToByteArray(origin2)), Charsets.UTF_8));

URL url3 = Resources.getResource("blob4.txt");
String origin3 = Resources.toString(url3, Charsets.UTF_8);
assertEquals(
"\u0000\u0001\u0000",
new String(BlobCodec.decode(Numeric.hexStringToByteArray(origin3)), Charsets.UTF_8));

URL url4 = Resources.getResource("blob5.txt");
String origin4 = Resources.toString(url4, Charsets.UTF_8);
assertEquals(
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000",
new String(BlobCodec.decode(Numeric.hexStringToByteArray(origin4)), Charsets.UTF_8));

URL url5 = Resources.getResource("blob6.txt");
String origin5 = Resources.toString(url5, Charsets.UTF_8);
assertEquals(
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000",
new String(BlobCodec.decode(Numeric.hexStringToByteArray(origin5)), Charsets.UTF_8));

URL url6 = Resources.getResource("blob7.txt");
String origin6 = Resources.toString(url6, Charsets.UTF_8);
assertEquals(
"\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000",
new String(BlobCodec.decode(Numeric.hexStringToByteArray(origin6)), Charsets.UTF_8));

URL url7 = Resources.getResource("blob8.txt");
String origin7 = Resources.toString(url7, Charsets.UTF_8);
assertEquals("", new String(BlobCodec.decode(Numeric.hexStringToByteArray(origin7)), Charsets.UTF_8));
}
}
1 change: 1 addition & 0 deletions hildr-utilities/src/test/resources/blob1.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions hildr-utilities/src/test/resources/blob2.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions hildr-utilities/src/test/resources/blob3.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions hildr-utilities/src/test/resources/blob4.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions hildr-utilities/src/test/resources/blob5.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions hildr-utilities/src/test/resources/blob6.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions hildr-utilities/src/test/resources/blob7.txt

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions hildr-utilities/src/test/resources/blob8.txt

Large diffs are not rendered by default.
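The blob1.txt through blob8.txt fixtures are not rendered above; based on how BlobCodecTest reads them, each appears to hold one hex-encoded 131072-byte blob (4096 field elements of 32 bytes). A small sketch for sanity-checking a fixture's size (the BlobResourceCheck class is hypothetical, not part of this PR):

import com.google.common.base.Charsets;
import com.google.common.io.Resources;
import org.web3j.utils.Numeric;

class BlobResourceCheck {
    public static void main(String[] args) throws Exception {
        // read one hex-encoded fixture and check that it spans exactly one blob
        String hex = Resources.toString(Resources.getResource("blob1.txt"), Charsets.UTF_8);
        byte[] blob = Numeric.hexStringToByteArray(hex.trim());
        System.out.println(blob.length == 4096 * 32); // expected: true (131072 bytes)
    }
}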
