Commit

Merge branch 'develop' into tooling-wallet/improve-fetching-visual-assets
brancoder authored Dec 23, 2024
2 parents 12f00ec + 0ca0076 commit c524070
Showing 6 changed files with 162 additions and 29 deletions.
10 changes: 8 additions & 2 deletions .github/workflows/apps_wallet_prod_build.yml
@@ -20,6 +20,11 @@ jobs:
     steps:
       - name: Checking out the repository
         uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
+        with:
+          # Number of commits to fetch. 0 indicates all history for all branches and tags. Default: 1
+          fetch-depth: 0
+          # Whether to fetch tags, even if fetch-depth > 0.
+          fetch-tags: "true"
       - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
       - name: Install Nodejs
         uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
@@ -61,8 +66,9 @@ jobs:
echo "No previous tag found. Skipping changelog generation."
echo "changelog=No previous tag found. Changelog generation skipped." >> $GITHUB_OUTPUT
else
echo "## Changelog" >> CHANGELOG.md
git log ${{ env.PREV_TAG }}..${{ env.CURRENT_TAG }} --pretty=format:"- %s in #%h" -- ./apps/wallet > CHANGELOG.md
echo "## Changelog" > CHANGELOG.md
git log ${{ env.PREV_TAG }}..${{ env.CURRENT_TAG }} --pretty=format:"- %s in #%h" -- ./apps/wallet >> CHANGELOG.md
cat CHANGELOG.md
fi
- name: Get version from tag
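The changelog step above fixes a shell redirection bug: previously the heading was appended with `>>` and the `git log` output then overwrote the entire file with `>`, discarding the heading. The `fetch-depth: 0` and `fetch-tags` options added to the checkout step make the full tag history available so the `PREV_TAG..CURRENT_TAG` range actually resolves. A minimal sketch of the corrected redirection pattern (tag names illustrative):

```bash
# '>' truncates/creates the file, so write the heading first...
echo "## Changelog" > CHANGELOG.md
# ...then '>>' appends the commit list without clobbering it.
git log v1.0.0..v1.1.0 --pretty=format:"- %s in #%h" -- ./apps/wallet >> CHANGELOG.md
cat CHANGELOG.md  # print the result into the job log for inspection
```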
1 change: 1 addition & 0 deletions apps/core/src/constants/coins.constants.ts
@@ -3,3 +3,4 @@

 export const COINS_QUERY_REFETCH_INTERVAL = 20_000;
 export const COINS_QUERY_STALE_TIME = 20_000;
+export const COIN_TYPE = '0x2::coin::Coin';
35 changes: 19 additions & 16 deletions apps/wallet-dashboard/app/(protected)/assets/page.tsx
@@ -4,7 +4,7 @@
 'use client';

 import { Panel, Title, Chip, TitleSize } from '@iota/apps-ui-kit';
-import { hasDisplayData, useGetOwnedObjects } from '@iota/core';
+import { COIN_TYPE, hasDisplayData, useGetOwnedObjects } from '@iota/core';
 import { useCurrentAccount } from '@iota/dapp-kit';
 import { IotaObjectData } from '@iota/iota-sdk/client';
 import { useState } from 'react';
@@ -31,25 +31,28 @@ export default function AssetsDashboardPage(): React.JSX.Element {
     const account = useCurrentAccount();
     const { data, isFetching, fetchNextPage, hasNextPage, refetch } = useGetOwnedObjects(
         account?.address,
-        undefined,
+        {
+            MatchNone: [{ StructType: COIN_TYPE }],
+        },
         OBJECTS_PER_REQ,
     );

-    const assets: IotaObjectData[] = [];
-
-    for (const page of data?.pages || []) {
-        for (const asset of page.data) {
-            if (asset.data && asset.data.objectId) {
-                if (selectedCategory == AssetCategory.Visual) {
-                    if (hasDisplayData(asset)) {
-                        assets.push(asset.data);
-                    }
-                } else if (selectedCategory == AssetCategory.Other) {
-                    assets.push(asset.data);
-                }
-            }
-        }
-    }
+    const assets = (data?.pages || [])
+        .flatMap((page) => page.data)
+        .filter((asset) => {
+            if (!asset.data || !asset.data.objectId) {
+                return false;
+            }
+            if (selectedCategory === AssetCategory.Visual) {
+                return hasDisplayData(asset);
+            }
+            if (selectedCategory === AssetCategory.Other) {
+                return !hasDisplayData(asset);
+            }
+            return false;
+        })
+        .map((asset) => asset.data)
+        .filter((data): data is IotaObjectData => data !== null && data !== undefined);

     function onAssetClick(asset: IotaObjectData) {
         setSelectedAsset(asset);
2 changes: 1 addition & 1 deletion apps/wallet-dashboard/components/tiles/AssetTileLink.tsx
@@ -25,7 +25,7 @@ export function AssetTileLink({ asset, type, onClick }: AssetTileLinkProps): Rea
             {type === AssetCategory.Visual ? (
                 <VisualAssetTile asset={asset} icon={<VisibilityOff />} onClick={handleClick} />
             ) : (
-                <NonVisualAssetCard asset={asset} onClick={handleClick} />
+                <NonVisualAssetCard asset={asset} />
             )}
         </>
     );
24 changes: 14 additions & 10 deletions apps/wallet-dashboard/components/tiles/NonVisualAssetTile.tsx
@@ -5,21 +5,25 @@ import { Card, CardAction, CardActionType, CardBody, CardType } from '@iota/apps
 import { IotaObjectData } from '@iota/iota-sdk/client';
 import { formatAddress, parseStructTag } from '@iota/iota-sdk/utils';
 import { ArrowTopRight } from '@iota/ui-icons';
+import { ExplorerLink } from '../ExplorerLink';
+import { ExplorerLinkType } from '@iota/core';

 type NonVisualAssetCardProps = {
     asset: IotaObjectData;
-} & Pick<React.ComponentProps<typeof Card>, 'onClick'>;
+} & React.ComponentProps<typeof Card>;

-export function NonVisualAssetCard({ asset, onClick }: NonVisualAssetCardProps): React.JSX.Element {
+export function NonVisualAssetCard({ asset }: NonVisualAssetCardProps): React.JSX.Element {
     const { address, module, name } = parseStructTag(asset.type!);
     return (
-        <Card type={CardType.Default} isHoverable onClick={onClick}>
-            <CardBody
-                title={formatAddress(asset.objectId!)}
-                subtitle={`${formatAddress(address)}::${module}::${name}`}
-                isTextTruncated
-            />
-            <CardAction type={CardActionType.Link} icon={<ArrowTopRight />} />
-        </Card>
+        <ExplorerLink objectID={asset.objectId} type={ExplorerLinkType.Object}>
+            <Card type={CardType.Default} isHoverable>
+                <CardBody
+                    title={formatAddress(asset.objectId!)}
+                    subtitle={`${formatAddress(address)}::${module}::${name}`}
+                    isTextTruncated
+                />
+                <CardAction type={CardActionType.Link} icon={<ArrowTopRight />} />
+            </Card>
+        </ExplorerLink>
     );
 }
119 changes: 119 additions & 0 deletions crates/iota-analytics-indexer/README.md
@@ -0,0 +1,119 @@
# IOTA Analytics Indexer

The IOTA Analytics Indexer is a service that exports data from the main IOTA network to a remote big object store (S3/GCS/Azure) for further analytical processing. It does not perform any analysis on its own.

## **Key Features**

- Exports data from the IOTA network to a remote big object store
- Provides BigQuery and Snowflake schemas for the exported data

> [!NOTE]
> BigQuery and Snowflake are cloud-based data warehousing solutions.
> Once the data is there, it can be analyzed in the cloud using SQL queries.
>
> BigQuery is part of Google Cloud Platform: <https://cloud.google.com/bigquery>
>
> Snowflake isn't part of any large cloud provider: <https://snowflake.com>

## **Relation to iota-indexer**

### iota-indexer

Currently, `iota-indexer` computes and stores analytical metrics about:

- network statistics (number of transactions, transactions per second)
- (active) addresses (transaction senders/recipients)
- move calls

Those metrics are computed by a separate analytical worker instance of the indexer, but that worker uses the same database as the main indexer instance.

Some of the values stored in the main indexer tables by iota-indexer's `fullnode_sync_worker` (move calls, transaction recipients) appear to be kept only for analytical purposes and could potentially be excluded from further processing if analytics were handled elsewhere.

### iota-analytics-indexer

The `iota-analytics-indexer` does not compute any analytical metrics directly.
It only exports data for further processing by external tools (BigQuery/Snowflake).

On this premise, the functionality in `iota-indexer` that is currently used for extracting analytics (and thus unrelated to the JSON-RPC/GraphQL service) could be moved out and delegated to another tool that processes data exported by `iota-analytics-indexer`.
The sync logic in `iota-indexer` could then be simplified as well, storing only the data needed by the RPC APIs.

## **Schemas**

The crate provides:

- [BigQuery Schemas](src/store/bq/schemas/)
- [Snowflake Schemas](src/store/snowflake/schemas/)
- [Rust struct representations](src/tables.rs)

for the data that it exports.

The tables covered by the schemas:

- CHECKPOINT
- EVENT
- MOVE_CALL
- OBJECT
- MOVE_PACKAGE
- TRANSACTION_OBJECT - input and output objects for given transactions
- TRANSACTION

> [!NOTE]
> The following Rust structs currently do not have DB schemas prepared:
>
> - DynamicFieldEntry
> - WrappedObjectEntry

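For orientation, the Rust struct representations in [src/tables.rs](src/tables.rs) map one-to-one onto these tables. A hypothetical sketch of a CHECKPOINT row (struct and field names below are illustrative, not copied from the crate):

```rust
use serde::Serialize;

// Hypothetical shape of a CHECKPOINT row as serialized to CSV/Parquet;
// see src/tables.rs for the actual definitions.
#[derive(Serialize)]
pub struct CheckpointEntry {
    pub sequence_number: u64,
    pub checkpoint_digest: String,
    pub epoch: u64,
    pub timestamp_ms: u64,
    pub total_transactions: u64,
}
```
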
## **Architecture**

When running the indexer, one needs to specify the object type to be extracted from checkpoints and uploaded to the cloud.

The following object types are supported:

- Checkpoint
- Object
- Transaction
- TransactionObjects
- Event
- MoveCall
- MovePackage
- DynamicField
- WrappedObject

Only one object type can be passed per run; to process multiple object types, run multiple analytics indexer instances.

In general, the data flow is as follows:

- Checkpoints are read via JSON RPC, reusing code from `iota_data_ingestion_core`.
- Checkpoints are processed by an appropriate handler (e.g. `EventHandler`), which extracts relevant objects from each transaction of the checkpoint.
- Objects are passed to the Writer, which writes the objects to a local temporary store in CSV or Parquet format.
- The `AnalyticsProcessor` syncs the objects from the local store to the remote store (S3/GCS/Azure, or a local store for testing purposes).
- Every 5 minutes the last processed checkpoint ID is fetched from BigQuery/Snowflake and reported as a metric.

> [!NOTE]
> It is assumed that data from the big object store will be readable from BigQuery/Snowflake automatically; the indexer does not put the data into BigQuery/Snowflake tables explicitly.

Here is a graph summarizing the data flow:

```mermaid
flowchart TD
FNODE["Fullnode/Indexer"] <-->|JSON RPC| CPREADER["`IndexerExecutor/CheckpointReader from the **iota_data_ingestion_core** package`"];
subgraph "`**iota-analytics-indexer**`"
CPREADER -->|"`Executor calls **AnalyticsProcessor** for each checkpoint, which in turn passes the checkpoint to appropriate Handler`"| HANDLER["CheckpointHandler/EventHandler etc., depending on indexer configuration"]
HANDLER -->|"`**AnalyticsProcessor** reads object data extracted from the checkpoint by the Handler and passes it to the Writer`"| WRITER["CSVWriter/ParquetWriter"]
WRITER -->|Writes objects to temporary local storage| DISK[Temporary Local Storage]
DISK --> REMOTESYNC["`Task inside of **AnalyticsProcessor** that removes files from Local Storage and uploads them to Remote Storage(S3/GCS/Azure)`"]
WRITER -->|"`Once every few checkpoints, **AnalyticsProcessor** calls cut() to prepare file to be sent, FileMetadata is sent to the Remote Sync Task which triggers the sync`"| REMOTESYNC
REMOTESYNC -->|Some process outside of analytics indexer makes the newly uploaded data available via BigQuery/Snowflake tables| BQSF["BigQuery/Snowflake"]
BQSF -->|"Every 5 minutes max processed checkpoint number is read from the tables"| METRICS[Analytics Indexer Prometheus Metrics]
end
linkStyle 6 stroke:red,stroke-width:2px,stroke-dasharray:3;
```
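
For a concrete picture of the handler hook in this flow, here is a sketch assuming the `Worker` trait of `iota_data_ingestion_core` mirrors the upstream data-ingestion crate; the trait shape and the `CheckpointData` import path are assumptions, not taken from this crate:

```rust
use anyhow::Result;
use async_trait::async_trait;
use iota_types::full_checkpoint_content::CheckpointData; // assumed path

// Assumed shape of the ingestion hook: the executor invokes the worker once
// per checkpoint; a handler extracts its configured object type from it.
#[async_trait]
pub trait Worker: Send + Sync {
    async fn process_checkpoint(&self, checkpoint: CheckpointData) -> Result<()>;
}

// Hypothetical handler: pull events out of each transaction and hand the
// rows to the Writer for CSV/Parquet serialization.
pub struct EventHandler { /* writer handle, config, ... */ }

#[async_trait]
impl Worker for EventHandler {
    async fn process_checkpoint(&self, checkpoint: CheckpointData) -> Result<()> {
        for tx in &checkpoint.transactions {
            // extract event rows from the transaction and buffer them
            let _ = tx;
        }
        Ok(())
    }
}
```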

## **Metrics**

The following Prometheus metrics are served by `iota-analytics-indexer` to monitor the indexer's execution:

- **total_received**: count of checkpoints processed in a given run
- **last_uploaded_checkpoint**: ID of the last checkpoint uploaded to the big object store
- **max_checkpoint_on_store**: ID of the last checkpoint available via BigQuery/Snowflake tables
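
Because all three are exposed as Prometheus gauges, simple health checks can be built on top of them; for example, a hypothetical PromQL expression (threshold illustrative) that flags when data uploaded to the object store has not yet become visible in BigQuery/Snowflake:

```
# upload-to-warehouse lag in checkpoints; alert when it grows too large
last_uploaded_checkpoint - max_checkpoint_on_store > 1000
```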
