From fb9dd353929c35e95182fb0f241afa813cdecbc9 Mon Sep 17 00:00:00 2001
From: shuo-young <2857043555@qq.com>
Date: Tue, 30 Jan 2024 09:55:16 +0800
Subject: [PATCH] add workflow to publish package
---
.github/workflows/publish-docker-image.yml | 52 +++
README.md | 159 +++++--
gigahorse-toolchain/.gitignore | 2 +-
.../souffle-addon/keccak256.cpp | 74 ++++
.../souffle-addon/keccak256_test.cpp | 60 +++
gigahorse-toolchain/souffle-addon/lists.cpp | 92 ++++
.../souffle-addon/lists_test.cpp | 7 +
.../souffle-addon/mappings.cpp | 414 ++++++++++++++++++
.../souffle-addon/mappings_test.cpp | 261 +++++++++++
gigahorse-toolchain/souffle-addon/num256.cpp | 331 ++++++++++++++
.../souffle-addon/num256_test.cpp | 303 +++++++++++++
logo.png | Bin 0 -> 486837 bytes
12 files changed, 1725 insertions(+), 30 deletions(-)
create mode 100644 .github/workflows/publish-docker-image.yml
create mode 100644 gigahorse-toolchain/souffle-addon/keccak256.cpp
create mode 100644 gigahorse-toolchain/souffle-addon/keccak256_test.cpp
create mode 100644 gigahorse-toolchain/souffle-addon/lists.cpp
create mode 100644 gigahorse-toolchain/souffle-addon/lists_test.cpp
create mode 100644 gigahorse-toolchain/souffle-addon/mappings.cpp
create mode 100644 gigahorse-toolchain/souffle-addon/mappings_test.cpp
create mode 100644 gigahorse-toolchain/souffle-addon/num256.cpp
create mode 100644 gigahorse-toolchain/souffle-addon/num256_test.cpp
create mode 100644 logo.png
diff --git a/.github/workflows/publish-docker-image.yml b/.github/workflows/publish-docker-image.yml
new file mode 100644
index 0000000..4ecb289
--- /dev/null
+++ b/.github/workflows/publish-docker-image.yml
@@ -0,0 +1,52 @@
+name: Create and publish the lydia docker image
+
+# Configures this workflow to run every time a change is pushed to the branch called `release`.
+on:
+ push:
+ branches:
+ - "v*"
+ - "latest"
+ tags:
+ - "v*"
+
+# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds.
+env:
+ REGISTRY: ghcr.io
+ IMAGE_NAME: ${{ github.repository }}
+
+# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu.
+jobs:
+ build-and-push-image:
+ runs-on: ubuntu-latest
+ # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
+ permissions:
+ contents: read
+ packages: write
+ #
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ # Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
+ - name: Log in to the Container registry
+ uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
+ with:
+ registry: ${{ env.REGISTRY }}
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
+ - name: Extract metadata (tags, labels) for Docker
+ id: meta
+ uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
+ with:
+ images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+ # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
+ # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository.
+ # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
+ - name: Build and push Docker image
+ id: build-and-push
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ push: true
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
diff --git a/README.md b/README.md
index a756ae2..98e762e 100644
--- a/README.md
+++ b/README.md
@@ -1,46 +1,147 @@
-# Lydia
-
-![Static Badge](https://img.shields.io/badge/license-apache-blue)
-![Static Badge](https://img.shields.io/badge/language-rust-red)
-
-An Attacker Contract Identification Tool Implemented in Rust based on BlockWatchdog.
-
-## Quick Start
-
-### Rust Environment
-
-Before running Lydia, you need to have the Rust environment set up.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Table of Contents
+
+ -
+ About The Project
+
+ -
+ Prerequisites
+
+
+ -
+ Usage
+
+
+
+ - Features
+
+ - Contact
+ - License
+
+
+
+
+
+
+## About The Project
+
+An Attacker Contract Identification Tool Implemented in Rust based on [BlockWatchdog](https://github.com/shuo-young/BlockWatchdog).
+
+
+
+
+## Prerequisites
+
+- rust toolchain
+
+ ```bash
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+ ```
+
+- gigahorse-toolchain
+
+ Lydia requires Gigahorse to be set up for analyzing EVM bytecode. To set up Gigahorse, refer to its [repository](https://github.com/nevillegrech/gigahorse-toolchain).
+
+
+
+## Install
+
+1. Rust build locally.
+
+```sh
+cargo build --release
+```
-### Gigahorse Environment
+2. Or you can build or pull the docker image.
-Lydia requires Gigahorse to be set up for analyzing Ethereum bytecode. To set up Gigahorse, refer to its [repository](https://github.com/nevillegrech/gigahorse-toolchain)
+```sh
+docker build -t lydia:v0.1 .
+docker pull ghcr.io/shuo-young/lydia:latest
+```
-## Run
+## Usage
### Local
-To run Lydia locally, use the following command:
-
-```shell
+```sh
RUST_LOG=info cargo run -- ETH 0x10C509AA9ab291C76c45414e7CdBd375e1D5AcE8
+# or use build bin
+./target/release/lydia -- ETH 0x10C509AA9ab291C76c45414e7CdBd375e1D5AcE8
```
-Replace the address of the contract you want to analyze. Contracts on other platforms (e.g., BSC) are also supported.
-
### Docker
-To build and run Lydia using Docker, use the following commands:
+For the docker image, run with the following command.
-```shell
-docker build -t lydia:v1.0 .
+```sh
+docker run ghcr.io/shuo-young/lydia:latest ETH 0x10C509AA9ab291C76c45414e7CdBd375e1D5AcE8
```
-Run with the following command:
+## Features
-```shell
-docker run lydia:v1.0 ETH 0x10C509AA9ab291C76c45414e7CdBd375e1D5AcE8
-```
+> faster at identifying attackers and contracts with malicious intentions
+
+- Recover all possible call chains in attacker contract (each public function).
+- Report critical attack semantics, e.g., calls in hook functions, selfdestruct, use of random numbers, creation (sole and multi), etc.
+- Locating call sites that could perform reentrancy and possible reentrancy targets.
+
+## Contact
+
+👤 **Shuo Yang**
+
+- Website: [shuo-young.github.io](https://shuo-young.github.io/)
+- Twitter: [@shall_yangshuo](https://twitter.com/shall_yangshuo)
+- Github: [@shuo-young](https://github.com/shuo-young)
-## Publication
+## License
-Based on _*BlockWatchdog*_, the ICSE'24 paper: Uncover the Premeditated Attacks: Detecting Exploitable Reentrancy Vulnerabilities by Identifying Attacker Contracts.
+Copyright © 2024 [Shuo Yang](https://github.com/shuo-young).
+This project is [Apache](https://github.com/NFTDefects/nftdefects/blob/master/LICENSE) licensed.
diff --git a/gigahorse-toolchain/.gitignore b/gigahorse-toolchain/.gitignore
index 2dc1d38..508800a 100644
--- a/gigahorse-toolchain/.gitignore
+++ b/gigahorse-toolchain/.gitignore
@@ -52,7 +52,7 @@ results.json
*_compiled
*_compiled
*.ccerr
-*.cpp
+# *.cpp
# testing files
.tests/**
diff --git a/gigahorse-toolchain/souffle-addon/keccak256.cpp b/gigahorse-toolchain/souffle-addon/keccak256.cpp
new file mode 100644
index 0000000..035064f
--- /dev/null
+++ b/gigahorse-toolchain/souffle-addon/keccak256.cpp
@@ -0,0 +1,74 @@
+#include <boost/multiprecision/cpp_int.hpp>
+#include <cstring>
+
+using namespace boost::multiprecision;
+using namespace std;
+
+char num_to_hex(char num) {
+ return num < 10 ? num + '0' : (num - 10) + 'a';
+}
+
+char hex_to_num(char hex_char) {
+ return hex_char <= '9' ? hex_char - '0' : (hex_char - 'a') + 10;
+}
+
+extern "C"
+{
+ #include "keccak/KeccakHash.h"
+ const char* keccak_256(const char* input) {
+ thread_local static char out_str[67] = {"0x"};
+ thread_local static char out[32] = {0};
+
+ Keccak_HashInstance hi;
+ Keccak_HashInitialize(&hi, 1088, 512, 256, 0x01);
+ Keccak_HashUpdate(&hi, (const unsigned char*)input, strlen(input) * 8);
+ Keccak_HashFinal(&hi, (unsigned char*)out);
+
+ for (int i = 0; i < 32; ++i) {
+ unsigned char c = out[i];
+ out_str[2 + 2*i] = num_to_hex(c >> 4);
+ out_str[2 + 2*i + 1] = num_to_hex(c & 0x0f);
+ }
+
+ return out_str;
+ }
+
+ const char* hex_keccak_256(const char* input) {
+ thread_local static char out_str[67] = {"0x"};
+ thread_local static char out[32] = {0};
+
+ const size_t input_len = strlen(input);
+ const size_t input_byte_len = input_len/2 - 1;
+
+ char* input_bytes = (char*) malloc(sizeof(char) * input_byte_len);
+
+ for (size_t i = 0; i < input_byte_len; ++i)
+ input_bytes[i] = (hex_to_num(input[2 + 2*i]) << 4) + hex_to_num(input[2 + 2*i + 1]);
+
+ Keccak_HashInstance hi;
+ Keccak_HashInitialize(&hi, 1088, 512, 256, 0x01);
+ Keccak_HashUpdate(&hi, (const unsigned char*)input_bytes, input_byte_len * 8);
+ Keccak_HashFinal(&hi, (unsigned char*)out);
+
+ free(input_bytes);
+
+ for (int i = 0; i < 32; ++i) {
+ unsigned char c = out[i];
+ out_str[2 + 2*i] = num_to_hex(c >> 4);
+ out_str[2 + 2*i + 1] = num_to_hex(c & 0x0f);
+ }
+
+ return out_str;
+ }
+
+ const char* hex_to_str(const char* input) {
+ thread_local static char* out = (char*) malloc(sizeof(char) * (strlen(input)/2));
+
+ for (int i = 1; i < strlen(input)/2; i++){
+ out[i - 1] = hex_to_num(input[2*i])*16 + hex_to_num(input[2*i + 1]);
+ }
+ out[strlen(input)/2 - 1] = '\0';
+
+ return out;
+ }
+}
diff --git a/gigahorse-toolchain/souffle-addon/keccak256_test.cpp b/gigahorse-toolchain/souffle-addon/keccak256_test.cpp
new file mode 100644
index 0000000..5bb00b9
--- /dev/null
+++ b/gigahorse-toolchain/souffle-addon/keccak256_test.cpp
@@ -0,0 +1,60 @@
+#define BOOST_TEST_MODULE Keccak Tests
+#include <boost/test/included/unit_test.hpp>
+
+#include "keccak256.cpp"
+
+BOOST_AUTO_TEST_CASE(test_hash_empty) {
+ BOOST_TEST(
+ keccak_256("")
+ ==
+ "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"
+ );
+}
+
+BOOST_AUTO_TEST_CASE(test_hash_simple) {
+ BOOST_TEST(
+ keccak_256("hi")
+ ==
+ "0x7624778dedc75f8b322b9fa1632a610d40b85e106c7d9bf0e743a9ce291b9c6f"
+ );
+}
+
+BOOST_AUTO_TEST_CASE(test_hash_signature) {
+ BOOST_TEST(
+ keccak_256("transfer(address,uint256)")
+ ==
+ "0xa9059cbb2ab09eb219583f4a59a5d0623ade346d962bcd4e46b11da047c9049b"
+ );
+}
+
+BOOST_AUTO_TEST_CASE(test_hex_to_str) {
+ BOOST_TEST(
+ hex_to_str("0x72656365697665417070726f76616c28616464726573732c75696e743235362c616464726573732c627974657329")
+ ==
+ "receiveApproval(address,uint256,address,bytes)"
+ );
+}
+
+BOOST_AUTO_TEST_CASE(test_hash_hex_to_str) {
+ BOOST_TEST(
+ keccak_256(hex_to_str("0x7472616e7366657228616464726573732c75696e7432353629"))
+ ==
+ "0xa9059cbb2ab09eb219583f4a59a5d0623ade346d962bcd4e46b11da047c9049b"
+ );
+}
+
+BOOST_AUTO_TEST_CASE(test_hash_hex_keccak_256_single_byte) {
+ BOOST_TEST(
+ hex_keccak_256("0x61")
+ ==
+ "0x3ac225168df54212a25c1c01fd35bebfea408fdac2e31ddd6f80a4bbf9a5f1cb"
+ );
+}
+
+BOOST_AUTO_TEST_CASE(test_hash_hex_keccak_256_two_bytes) {
+ BOOST_TEST(
+ hex_keccak_256("0x6162")
+ ==
+ "0x67fad3bfa1e0321bd021ca805ce14876e50acac8ca8532eda8cbf924da565160"
+ );
+}
diff --git a/gigahorse-toolchain/souffle-addon/lists.cpp b/gigahorse-toolchain/souffle-addon/lists.cpp
new file mode 100644
index 0000000..53671e6
--- /dev/null
+++ b/gigahorse-toolchain/souffle-addon/lists.cpp
@@ -0,0 +1,92 @@
+// A data structure and API for maintaining and manipulating maps of
+// dependencies. These come from a Datalog program analysis and are
+// treated opaquely. They are supposed to be mappings of variables to
+// expressions.
+
+#include "souffle/RecordTable.h"
+#include "souffle/SymbolTable.h"
+#include <cstring>
+#include <cassert>
+// #include <vector>
+// #include <iostream> // only debugging
+
+#include <list>
+
+
+#include <string>
+using namespace std;
+
+extern "C" {
+
+ souffle::RamDomain list_concat(
+ souffle::SymbolTable* symbolTable, souffle::RecordTable* recordTable,
+ souffle::RamDomain arg1, souffle::RamDomain arg2) {
+ assert(symbolTable && "NULL symbol table");
+ assert(recordTable && "NULL record table");
+
+ if(arg1 == 0)
+ return arg2;
+
+ if(arg2 == 0)
+ return arg1;
+
+ const souffle::RamDomain* myTuple1 = recordTable->unpack(arg1, 2);
+ const souffle::RamDomain* myTuple2 = recordTable->unpack(arg2, 2);
+ std::list<souffle::RamDomain> l = {};
+ // cout << endl;
+ // cout << myTuple1 << endl;
+ // cout << myTuple2 << endl;
+ while (1) {
+ // const std::string& sarg1 = symbolTable->decode(myTuple1[0]);
+ // const std::string& sarg2 = symbolTable->decode(myTuple1[1]);
+ // cout << sarg1 << " " << sarg2 << endl;
+ l.push_front(myTuple1[0]);
+ if (myTuple1[1] == 0)
+ break;
+ myTuple1 = recordTable->unpack(myTuple1[1], 2);
+ }
+
+ souffle::RamDomain curr = 0;
+ souffle::RamDomain myTuple3[2] = {myTuple2[0], myTuple2[1]};
+ curr = recordTable->pack(myTuple3, 2);
+ while (l.size() > 0) {
+ souffle::RamDomain myTuple4[2] = {l.front(), curr};
+ // cout << l.front() << " "<< symbolTable->decode(l.front()) << " " << l.size() << endl;
+ l.pop_front();
+ curr = recordTable->pack(myTuple4, 2);
+ }
+ // cout << endl;
+
+ return curr;
+ }
+
+ souffle::RamDomain list_append(
+ souffle::SymbolTable* symbolTable, souffle::RecordTable* recordTable,
+ souffle::RamDomain list, souffle::RamDomain elem) {
+ assert(symbolTable && "NULL symbol table");
+ assert(recordTable && "NULL record table");
+
+ souffle::RamDomain innerMost[2] = {elem, 0};
+
+ if (list == 0)
+ return recordTable->pack(innerMost, 2);
+
+ const souffle::RamDomain* myTuple1 = recordTable->unpack(list, 2);
+ std::list<souffle::RamDomain> l = {};
+
+ while (1) {
+ l.push_back(myTuple1[0]);
+ if (myTuple1[1] == 0)
+ break;
+ myTuple1 = recordTable->unpack(myTuple1[1], 2);
+ }
+
+ souffle::RamDomain curr = recordTable->pack(innerMost, 2);
+ while (l.size() > 0) {
+ souffle::RamDomain temp[2] = {l.front(), curr};
+ l.pop_front();
+ curr = recordTable->pack(temp, 2);
+ }
+ return curr;
+ }
+}
\ No newline at end of file
diff --git a/gigahorse-toolchain/souffle-addon/lists_test.cpp b/gigahorse-toolchain/souffle-addon/lists_test.cpp
new file mode 100644
index 0000000..4c39e8d
--- /dev/null
+++ b/gigahorse-toolchain/souffle-addon/lists_test.cpp
@@ -0,0 +1,7 @@
+// Just a skeleton for meaningful test cases to be added later
+// No way to test in c++ without running souffle, will come up with datalog tests
+
+#define BOOST_TEST_MODULE Lists Tests
+#include <boost/test/included/unit_test.hpp>
+
+#include "lists.cpp" // shouldn't include .cpps, so ... sue me
diff --git a/gigahorse-toolchain/souffle-addon/mappings.cpp b/gigahorse-toolchain/souffle-addon/mappings.cpp
new file mode 100644
index 0000000..228da5f
--- /dev/null
+++ b/gigahorse-toolchain/souffle-addon/mappings.cpp
@@ -0,0 +1,414 @@
+// A data structure and API for maintaining and manipulating maps of
+// dependencies. These come from a Datalog program analysis and are
+// treated opaquely. They are supposed to be mappings of variables to
+// expressions.
+
+#include <map>
+#include <string>
+#include <cstring>
+#include <cassert>