Skip to content

Commit

Permalink
feat: tx to/from address options (#2012)
Browse files Browse the repository at this point in the history
  • Loading branch information
He1DAr authored Jun 20, 2024
1 parent ae78773 commit 542973c
Show file tree
Hide file tree
Showing 4 changed files with 186 additions and 33 deletions.
12 changes: 12 additions & 0 deletions docs/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,18 @@ paths:
items:
type: string
enum: [coinbase, token_transfer, smart_contract, contract_call, poison_microblock, tenure_change]
- name: from_address
in: query
description: Option to filter results by sender address
required: false
schema:
type: string
- name: to_address
in: query
description: Option to filter results by recipient address
required: false
schema:
type: string
- name: sort_by
in: query
description: Option to sort results by block height, timestamp, or fee
Expand Down
24 changes: 24 additions & 0 deletions src/api/routes/tx.ts
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,28 @@ export function createTxRouter(db: PgStore): express.Router {
}
}

// Optional `from_address` query param: when present it must be a valid
// standard STX (c32) address, otherwise the request is rejected with a 400.
let fromAddress: string | undefined;
const fromParam = req.query.from_address;
if (typeof fromParam === 'string') {
  if (!isValidC32Address(fromParam)) {
    throw new InvalidRequestError(
      `Invalid query parameter for "from_address": "${fromParam}" is not a valid STX address`,
      InvalidRequestErrorType.invalid_param
    );
  }
  fromAddress = fromParam;
}

// Optional `to_address` query param: validated with isValidPrincipal, which
// accepts both standard addresses and contract principals.
// NOTE(review): the error message says "STX address" while the validator
// accepts principals — confirm the wording/validator pairing is intended.
let toAddress: string | undefined;
const toParam = req.query.to_address;
if (typeof toParam === 'string') {
  if (!isValidPrincipal(toParam)) {
    throw new InvalidRequestError(
      `Invalid query parameter for "to_address": "${toParam}" is not a valid STX address`,
      InvalidRequestErrorType.invalid_param
    );
  }
  toAddress = toParam;
}

let sortBy: 'block_height' | 'burn_block_time' | 'fee' | undefined;
if (req.query.sort_by) {
if (
Expand All @@ -100,6 +122,8 @@ export function createTxRouter(db: PgStore): express.Router {
limit,
txTypeFilter,
includeUnanchored,
fromAddress,
toAddress,
order,
sortBy,
});
Expand Down
75 changes: 42 additions & 33 deletions src/datastore/pg-store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1416,18 +1416,20 @@ export class PgStore extends BasePgStore {
offset,
txTypeFilter,
includeUnanchored,
fromAddress,
toAddress,
order,
sortBy,
}: {
limit: number;
offset: number;
txTypeFilter: TransactionType[];
includeUnanchored: boolean;
fromAddress?: string;
toAddress?: string;
order?: 'desc' | 'asc';
sortBy?: 'block_height' | 'burn_block_time' | 'fee';
}): Promise<{ results: DbTx[]; total: number }> {
let totalQuery: { count: number }[];
let resultQuery: ContractTxQueryResult[];
return await this.sqlTransaction(async sql => {
const maxHeight = await this.getMaxBlockHeight(sql, { includeUnanchored });
const orderSql = order === 'asc' ? sql`ASC` : sql`DESC`;
Expand All @@ -1448,37 +1450,44 @@ export class PgStore extends BasePgStore {
throw new Error(`Invalid sortBy param: ${sortBy}`);
}

if (txTypeFilter.length === 0) {
totalQuery = await sql<{ count: number }[]>`
SELECT ${includeUnanchored ? sql('tx_count_unanchored') : sql('tx_count')} AS count
FROM chain_tip
`;
resultQuery = await sql<ContractTxQueryResult[]>`
SELECT ${sql(TX_COLUMNS)}, ${abiColumn(sql)}
FROM txs
WHERE canonical = true AND microblock_canonical = true AND block_height <= ${maxHeight}
${orderBySql}
LIMIT ${limit}
OFFSET ${offset}
`;
} else {
const txTypeIds = txTypeFilter.flatMap<number>(t => getTxTypeId(t));
totalQuery = await sql<{ count: number }[]>`
SELECT COUNT(*)::integer
FROM txs
WHERE canonical = true AND microblock_canonical = true
AND type_id IN ${sql(txTypeIds)} AND block_height <= ${maxHeight}
`;
resultQuery = await sql<ContractTxQueryResult[]>`
SELECT ${sql(TX_COLUMNS)}, ${abiColumn(sql)}
FROM txs
WHERE canonical = true AND microblock_canonical = true
AND type_id IN ${sql(txTypeIds)} AND block_height <= ${maxHeight}
${orderBySql}
LIMIT ${limit}
OFFSET ${offset}
`;
}
// Build optional WHERE-clause fragments; an empty sql`` fragment composes
// into the query as a no-op when the corresponding filter was not requested.
const txTypeFilterSql =
  txTypeFilter.length > 0
    ? sql`AND type_id IN ${sql(txTypeFilter.flatMap<number>(t => getTxTypeId(t)))}`
    : sql``;
// Sender filter matches the txs.sender_address column directly.
const fromAddressFilterSql = fromAddress ? sql`AND sender_address = ${fromAddress}` : sql``;
// NOTE(review): recipient filter matches `token_transfer_recipient_address`,
// so it presumably only selects token-transfer txs — confirm whether other
// tx types (e.g. contract-call recipients) should also be covered.
const toAddressFilterSql = toAddress
  ? sql`AND token_transfer_recipient_address = ${toAddress}`
  : sql``;

const noFilters = txTypeFilter.length === 0 && !fromAddress && !toAddress;

// Fast path: with no filters active the total tx count is read from the
// precomputed chain_tip counters, avoiding a full COUNT(*) scan over txs.
const totalQuery: { count: number }[] = noFilters
  ? await sql<{ count: number }[]>`
    SELECT ${includeUnanchored ? sql('tx_count_unanchored') : sql('tx_count')} AS count
    FROM chain_tip
  `
  : await sql<{ count: number }[]>`
    SELECT COUNT(*)::integer AS count
    FROM txs
    WHERE canonical = true AND microblock_canonical = true AND block_height <= ${maxHeight}
    ${txTypeFilterSql}
    ${fromAddressFilterSql}
    ${toAddressFilterSql}
  `;

// Page of matching txs using the same filter fragments as the count query,
// ordered by the caller-selected column/direction (orderBySql built above).
const resultQuery: ContractTxQueryResult[] = await sql<ContractTxQueryResult[]>`
  SELECT ${sql(TX_COLUMNS)}, ${abiColumn(sql)}
  FROM txs
  WHERE canonical = true AND microblock_canonical = true AND block_height <= ${maxHeight}
  ${txTypeFilterSql}
  ${fromAddressFilterSql}
  ${toAddressFilterSql}
  ${orderBySql}
  LIMIT ${limit}
  OFFSET ${offset}
`;

// Convert raw rows to DbTx objects; total comes from the single count row.
const parsed = resultQuery.map(r => parseTxQueryResult(r));
return { results: parsed, total: totalQuery[0].count };
});
Expand Down
108 changes: 108 additions & 0 deletions src/tests/tx-tests.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2090,6 +2090,114 @@ describe('tx tests', () => {
);
});

// Verifies the /extended/v1/tx `from_address` / `to_address` query filters:
// results must be restricted to txs whose sender / token-transfer recipient
// matches, and are expected newest-first (block 2 txs before block 1's).
test('tx list - filter by to/from address', async () => {
  const fromAddress = 'ST1HB1T8WRNBYB0Y3T7WXZS38NKKPTBR3EG9EPJKR';
  const toAddress = 'STRYYQQ9M8KAF4NS7WNZQYY59X93XEKR31JP64CP';
  // Used to create txs that should be EXCLUDED by one filter but not the other.
  const differentAddress = 'STF9B75ADQAVXQHNEQ6KGHXTG7JP305J2GRWF3A2';

  // Block 1: one tx matching both filters (0x0001).
  const block1 = new TestBlockBuilder({ block_height: 1, index_block_hash: '0x01' })
    .addTx({
      tx_id: '0x0001',
      sender_address: fromAddress,
      token_transfer_recipient_address: toAddress,
    })
    .build();
  await db.update(block1);

  // Block 2: txs[0]/txs[1] match both filters, txs[2] matches only
  // from_address, txs[3] matches only to_address.
  const block2 = new TestBlockBuilder({
    block_height: 2,
    index_block_hash: '0x02',
    parent_block_hash: block1.block.block_hash,
    parent_index_block_hash: block1.block.index_block_hash,
  })
    .addTx({
      tx_id: '0x0002',
      sender_address: fromAddress,
      token_transfer_recipient_address: toAddress,
    })
    .addTx({
      tx_id: '0x0003',
      sender_address: fromAddress,
      token_transfer_recipient_address: toAddress,
    })
    .addTx({
      tx_id: '0x0004',
      sender_address: fromAddress,
      token_transfer_recipient_address: differentAddress,
    })
    .addTx({
      tx_id: '0x0005',
      sender_address: differentAddress,
      token_transfer_recipient_address: toAddress,
    })
    .build();
  await db.update(block2);

  // from_address only: expect 0x0004, 0x0003, 0x0002, 0x0001 (0x0005 excluded).
  const txsReqFrom = await supertest(api.server).get(
    `/extended/v1/tx?from_address=${fromAddress}`
  );
  expect(txsReqFrom.status).toBe(200);
  expect(txsReqFrom.body).toEqual(
    expect.objectContaining({
      results: [
        expect.objectContaining({
          tx_id: block2.txs[2].tx.tx_id,
        }),
        expect.objectContaining({
          tx_id: block2.txs[1].tx.tx_id,
        }),
        expect.objectContaining({
          tx_id: block2.txs[0].tx.tx_id,
        }),
        expect.objectContaining({
          tx_id: block1.txs[0].tx.tx_id,
        }),
      ],
    })
  );

  // to_address only: expect 0x0005, 0x0003, 0x0002, 0x0001 (0x0004 excluded).
  const txsReqTo = await supertest(api.server).get(`/extended/v1/tx?to_address=${toAddress}`);
  expect(txsReqTo.status).toBe(200);
  expect(txsReqTo.body).toEqual(
    expect.objectContaining({
      results: [
        expect.objectContaining({
          tx_id: block2.txs[3].tx.tx_id,
        }),
        expect.objectContaining({
          tx_id: block2.txs[1].tx.tx_id,
        }),
        expect.objectContaining({
          tx_id: block2.txs[0].tx.tx_id,
        }),
        expect.objectContaining({
          tx_id: block1.txs[0].tx.tx_id,
        }),
      ],
    })
  );

  // Both filters combined (AND semantics): only txs matching sender AND
  // recipient remain — 0x0003, 0x0002, 0x0001.
  const txsReqFromTo = await supertest(api.server).get(
    `/extended/v1/tx?from_address=${fromAddress}&to_address=${toAddress}`
  );
  expect(txsReqFromTo.status).toBe(200);
  expect(txsReqFromTo.body).toEqual(
    expect.objectContaining({
      results: [
        expect.objectContaining({
          tx_id: block2.txs[1].tx.tx_id,
        }),
        expect.objectContaining({
          tx_id: block2.txs[0].tx.tx_id,
        }),
        expect.objectContaining({
          tx_id: block1.txs[0].tx.tx_id,
        }),
      ],
    })
  );
});

test('fetch raw tx', async () => {
const block: DbBlock = {
block_hash: '0x1234',
Expand Down

0 comments on commit 542973c

Please sign in to comment.