Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Cut v8.2.0 #2143

Merged
merged 7 commits into from
Oct 25, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
/* eslint-disable camelcase */

/** @param { import("node-pg-migrate").MigrationBuilder } pgm */
exports.up = pgm => {

pgm.addColumn('blocks', {
tenure_height: {
type: 'integer',
}
});

pgm.addColumn('blocks', {
signer_signatures: {
type: 'bytea[]',
}
});

pgm.createIndex('blocks', 'signer_signatures', { method: 'gin' });

};
2 changes: 2 additions & 0 deletions src/api/controllers/db-controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -682,6 +682,8 @@ function parseDbBlock(
dbBlock.block_time > 0
? unixEpochToIso(dbBlock.block_time)
: unixEpochToIso(dbBlock.burn_block_time),
// If `tenure_height` is not available: epoch2.x blocks (no `signer_bitvec`) have a tenure height equal to `block_height`; for Nakamoto blocks (`signer_bitvec` set) fall back to -1
tenure_height: dbBlock.tenure_height ?? (dbBlock.signer_bitvec ? -1 : dbBlock.block_height),
index_block_hash: dbBlock.index_block_hash,
parent_block_hash: dbBlock.parent_block_hash,
burn_block_time: dbBlock.burn_block_time,
Expand Down
5 changes: 5 additions & 0 deletions src/api/pagination.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ export enum ResourceType {
Signer,
PoxCycle,
TokenHolders,
BlockSignerSignature,
}

export const pagingQueryLimits: Record<ResourceType, { defaultLimit: number; maxLimit: number }> = {
Expand Down Expand Up @@ -94,6 +95,10 @@ export const pagingQueryLimits: Record<ResourceType, { defaultLimit: number; max
defaultLimit: 100,
maxLimit: 200,
},
[ResourceType.BlockSignerSignature]: {
defaultLimit: 500,
maxLimit: 1000,
},
};

export function getPagingQueryLimit(
Expand Down
59 changes: 57 additions & 2 deletions src/api/routes/v2/blocks.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,16 @@ import { Server } from 'node:http';
import { CursorOffsetParam, LimitParam, OffsetParam } from '../../schemas/params';
import { getPagingQueryLimit, pagingQueryLimits, ResourceType } from '../../pagination';
import { PaginatedResponse } from '../../schemas/util';
import { NakamotoBlock, NakamotoBlockSchema } from '../../schemas/entities/block';
import {
NakamotoBlock,
NakamotoBlockSchema,
SignerSignatureSchema,
} from '../../schemas/entities/block';
import { TransactionSchema } from '../../schemas/entities/transactions';
import { BlockListV2ResponseSchema } from '../../schemas/responses/responses';
import {
BlockListV2ResponseSchema,
BlockSignerSignatureResponseSchema,
} from '../../schemas/responses/responses';

export const BlockRoutesV2: FastifyPluginAsync<
Record<never, never>,
Expand Down Expand Up @@ -174,5 +181,53 @@ export const BlockRoutesV2: FastifyPluginAsync<
}
);

// GET /:height_or_hash/signer-signatures
// Returns a paginated list of the signer signatures recorded for a single block.
// Pagination is applied server-side in the DB query (array slicing), so only the
// requested page of signatures is transferred.
fastify.get(
  '/:height_or_hash/signer-signatures',
  {
    // Serve/validate ETag cache headers for the resolved block.
    preHandler: handleBlockCache,
    // Normalize the `height_or_hash` path param (e.g. strip prefixes) before
    // schema validation runs.
    preValidation: (req, _reply, done) => {
      cleanBlockHeightOrHashParam(req.params);
      done();
    },
    schema: {
      operationId: 'get_signer_signatures_for_block',
      summary: 'Get signer signatures for block',
      description: `Retrieves the signer signatures (an array of signature byte strings) in a single block`,
      tags: ['Blocks'],
      params: BlockParamsSchema,
      querystring: Type.Object({
        limit: LimitParam(ResourceType.BlockSignerSignature),
        offset: OffsetParam(),
      }),
      response: {
        200: BlockSignerSignatureResponseSchema,
      },
    },
  },
  async (req, reply) => {
    // Resolve the param into a typed block identifier (latest | hash | height).
    const params = parseBlockParam(req.params.height_or_hash);
    const query = req.query;

    try {
      const { limit, offset, results, total } = await fastify.db.v2.getBlockSignerSignature({
        blockId: params,
        ...query,
      });
      const response = {
        limit,
        offset,
        total,
        results: results,
      };
      await reply.send(response);
    } catch (error) {
      // The store raises InvalidRequestError when the block identifier does not
      // resolve to a canonical block; surface that as a 404.
      if (error instanceof InvalidRequestError) {
        throw new NotFoundError('Block not found');
      }
      throw error;
    }
  }
);

await Promise.resolve();
};
2 changes: 2 additions & 0 deletions src/api/routes/v2/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ export function parseDbNakamotoBlock(block: DbBlock): NakamotoBlock {
hash: block.block_hash,
block_time: block.block_time,
block_time_iso: unixEpochToIso(block.block_time),
// If `tenure_height` is not available: epoch2.x blocks (no `signer_bitvec`) have a tenure height equal to `block_height`; for Nakamoto blocks (`signer_bitvec` set) fall back to -1
tenure_height: block.tenure_height ?? (block.signer_bitvec ? -1 : block.block_height),
index_block_hash: block.index_block_hash,
parent_block_hash: block.parent_block_hash,
parent_index_block_hash: block.parent_index_block_hash,
Expand Down
8 changes: 8 additions & 0 deletions src/api/routes/v2/schemas.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,14 @@ export const TransactionLimitParamSchema = Type.Integer({
description: 'Transactions per page',
});

/** Query-param schema for the `limit` on block signer-signature pages; bounds come from the shared paging config. */
export const BlockSignerSignatureLimitParamSchema = Type.Integer({
  minimum: 1,
  maximum: pagingQueryLimits[ResourceType.BlockSignerSignature].maxLimit,
  default: pagingQueryLimits[ResourceType.BlockSignerSignature].defaultLimit,
  title: 'Block signer signature limit',
  description: 'Block signer signatures per page',
});

export const PoxCycleLimitParamSchema = Type.Integer({
minimum: 1,
maximum: pagingQueryLimits[ResourceType.PoxCycle].maxLimit,
Expand Down
10 changes: 10 additions & 0 deletions src/api/schemas/entities/block.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ export const BlockSchema = Type.Object(
block_time_iso: Type.String({
description: 'An ISO 8601 (YYYY-MM-DDTHH:mm:ss.sssZ) indicating when this block was mined.',
}),
tenure_height: Type.Integer({
description: 'The tenure height (AKA coinbase height) of this block',
}),
index_block_hash: Type.String({
description:
'The only hash that can uniquely identify an anchored block or an unconfirmed state trie',
Expand Down Expand Up @@ -93,6 +96,9 @@ export const NakamotoBlockSchema = Type.Object({
block_time_iso: Type.String({
description: 'An ISO 8601 (YYYY-MM-DDTHH:mm:ss.sssZ) indicating when this block was mined.',
}),
tenure_height: Type.Integer({
description: 'The tenure height (AKA coinbase height) of this block',
}),
index_block_hash: Type.String({
description:
'The only hash that can uniquely identify an anchored block or an unconfirmed state trie',
Expand All @@ -116,3 +122,7 @@ export const NakamotoBlockSchema = Type.Object({
execution_cost_write_length: Type.Integer({ description: 'Execution cost write length.' }),
});
export type NakamotoBlock = Static<typeof NakamotoBlockSchema>;

/**
 * Element schema for the block signer-signatures endpoint. Each entry in the
 * paginated response is a single signature, hex-encoded.
 * (The previous description incorrectly said "Array of hex strings" — this
 * schema describes one string; the array is the surrounding paginated response.)
 */
export const SignerSignatureSchema = Type.String({
  description: 'A hex string representing one signer signature for the block',
});
5 changes: 4 additions & 1 deletion src/api/schemas/responses/responses.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import {
BurnchainRewardSchema,
BurnchainRewardSlotHolderSchema,
} from '../entities/burnchain-rewards';
import { NakamotoBlockSchema } from '../entities/block';
import { NakamotoBlockSchema, SignerSignatureSchema } from '../entities/block';

export const ErrorResponseSchema = Type.Object(
{
Expand Down Expand Up @@ -182,3 +182,6 @@ export type RunFaucetResponse = Static<typeof RunFaucetResponseSchema>;

export const BlockListV2ResponseSchema = PaginatedCursorResponse(NakamotoBlockSchema);
export type BlockListV2Response = Static<typeof BlockListV2ResponseSchema>;

/** Paginated list of hex-encoded signer signatures for a single block. */
export const BlockSignerSignatureResponseSchema = PaginatedResponse(SignerSignatureSchema);
export type BlockSignerSignatureResponse = Static<typeof BlockSignerSignatureResponseSchema>;
5 changes: 5 additions & 0 deletions src/datastore/common.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@ export interface DbBlock {
tx_count: number;
block_time: number;
signer_bitvec: string | null;
signer_signatures: string[] | null;
tenure_height: number | null;
}

/** An interface representing the microblock data that can be constructed _only_ from the /new_microblocks payload */
Expand Down Expand Up @@ -862,6 +864,7 @@ export interface BlockQueryResult {
execution_cost_write_length: string;
tx_count: number;
signer_bitvec: string | null;
tenure_height: number | null;
}

export interface MicroblockQueryResult {
Expand Down Expand Up @@ -1286,6 +1289,8 @@ export interface BlockInsertValues {
execution_cost_write_length: number;
tx_count: number;
signer_bitvec: string | null;
signer_signatures: PgBytea[] | null;
tenure_height: number | null;
}

export interface MicroblockInsertValues {
Expand Down
41 changes: 3 additions & 38 deletions src/datastore/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,7 @@ export const BLOCK_COLUMNS = [
'execution_cost_write_length',
'tx_count',
'signer_bitvec',
'tenure_height',
];

export const MICROBLOCK_COLUMNS = [
Expand Down Expand Up @@ -465,7 +466,6 @@ export function parseFaucetRequestQueryResult(result: FaucetRequestQueryResult):
}

export function parseBlockQueryResult(row: BlockQueryResult): DbBlock {
// TODO(mb): is the tx_index preserved between microblocks and committed anchor blocks?
const block: DbBlock = {
block_hash: row.block_hash,
index_block_hash: row.index_block_hash,
Expand All @@ -487,6 +487,8 @@ export function parseBlockQueryResult(row: BlockQueryResult): DbBlock {
execution_cost_write_length: Number.parseInt(row.execution_cost_write_length),
tx_count: row.tx_count,
signer_bitvec: row.signer_bitvec,
signer_signatures: null, // this field is not queried from db by default due to size constraints
tenure_height: row.tenure_height,
};
return block;
}
Expand Down Expand Up @@ -1282,43 +1284,6 @@ export function convertTxQueryResultToDbMempoolTx(txs: TxQueryResult[]): DbMempo
return dbMempoolTxs;
}

/**
 * Sums the execution costs of every tx in a block-update payload and writes
 * the five totals onto `data.block` (mutates the argument in place).
 */
export function setTotalBlockUpdateDataExecutionCost(data: DataStoreBlockUpdateData) {
  let readCount = 0;
  let readLength = 0;
  let runtime = 0;
  let writeCount = 0;
  let writeLength = 0;
  // Accumulate each cost dimension across all txs; an empty tx list leaves
  // every total at zero, matching the reduce-with-initial-value behavior.
  for (const { tx } of data.txs) {
    readCount += tx.execution_cost_read_count;
    readLength += tx.execution_cost_read_length;
    runtime += tx.execution_cost_runtime;
    writeCount += tx.execution_cost_write_count;
    writeLength += tx.execution_cost_write_length;
  }
  data.block.execution_cost_read_count = readCount;
  data.block.execution_cost_read_length = readLength;
  data.block.execution_cost_runtime = runtime;
  data.block.execution_cost_write_count = writeCount;
  data.block.execution_cost_write_length = writeLength;
}

export function markBlockUpdateDataAsNonCanonical(data: DataStoreBlockUpdateData): void {
data.block = { ...data.block, canonical: false };
data.microblocks = data.microblocks.map(mb => ({ ...mb, canonical: false }));
Expand Down
43 changes: 43 additions & 0 deletions src/datastore/pg-store-v2.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import {
PoxSignerPaginationQueryParams,
PoxSignerLimitParamSchema,
BlockIdParam,
BlockSignerSignatureLimitParamSchema,
} from '../api/routes/v2/schemas';
import { InvalidRequestError, InvalidRequestErrorType } from '../errors';
import { normalizeHashString } from '../helpers';
Expand Down Expand Up @@ -226,6 +227,48 @@ export class PgStoreV2 extends BasePgStoreModule {
});
}

/**
 * Returns a page of signer signatures for a single canonical block.
 * Pagination is done in SQL via Postgres array slicing (1-based, inclusive)
 * so only the requested slice of the `bytea[]` column is read.
 *
 * Fix: when `signer_signatures` is NULL (e.g. epoch 2.x blocks, or rows
 * inserted before this column existed), `array_length(...)` and the array
 * slice both evaluate to NULL, which previously produced
 * `{ results: null, total: null }` and violated `DbPaginatedResult<string>`.
 * COALESCE in SQL plus a `?? []` guard now return an empty page instead.
 */
async getBlockSignerSignature(args: {
  blockId: BlockIdParam;
  limit?: number;
  offset?: number;
}): Promise<DbPaginatedResult<string>> {
  return await this.sqlTransaction(async sql => {
    const limit = args.limit ?? BlockSignerSignatureLimitParamSchema.default;
    const offset = args.offset ?? 0;
    const blockId = args.blockId;
    // Build the WHERE fragment for the requested block identifier:
    // latest canonical tip, block/index hash, or block height.
    const filter =
      blockId.type === 'latest'
        ? sql`index_block_hash = (SELECT index_block_hash FROM blocks WHERE canonical = TRUE ORDER BY block_height DESC LIMIT 1)`
        : blockId.type === 'hash'
        ? sql`(
            block_hash = ${normalizeHashString(blockId.hash)}
            OR index_block_hash = ${normalizeHashString(blockId.hash)}
          )`
        : sql`block_height = ${blockId.height}`;
    const blockQuery = await sql<{ signer_signatures: string[]; total: number }[]>`
      SELECT
        COALESCE(signer_signatures[${offset + 1}:${offset + limit}], '{}'::bytea[]) as signer_signatures,
        COALESCE(array_length(signer_signatures, 1), 0)::integer AS total
      FROM blocks
      WHERE canonical = true AND ${filter}
      LIMIT 1
    `;
    // No canonical block matched the identifier.
    if (blockQuery.count === 0)
      return {
        limit,
        offset,
        results: [],
        total: 0,
      };
    return {
      limit,
      offset,
      // Defensive guard in case the driver maps a NULL array to null.
      results: blockQuery[0].signer_signatures ?? [],
      total: blockQuery[0].total,
    };
  });
}

async getAverageBlockTimes(): Promise<{
last_1h: number;
last_24h: number;
Expand Down
6 changes: 4 additions & 2 deletions src/datastore/pg-write-store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,6 @@ import {
} from './common';
import {
BLOCK_COLUMNS,
setTotalBlockUpdateDataExecutionCost,
convertTxQueryResultToDbMempoolTx,
markBlockUpdateDataAsNonCanonical,
MICROBLOCK_COLUMNS,
Expand Down Expand Up @@ -210,7 +209,6 @@ export class PgWriteStore extends PgStore {
}));
await this.pruneMempoolTxs(sql, prunableTxs);
}
setTotalBlockUpdateDataExecutionCost(data);

// Insert microblocks, if any. Clear already inserted microblock txs from the anchor-block
// update data to avoid duplicate inserts.
Expand Down Expand Up @@ -484,6 +482,8 @@ export class PgWriteStore extends PgStore {
execution_cost_write_length: block.execution_cost_write_length,
tx_count: block.tx_count,
signer_bitvec: block.signer_bitvec,
signer_signatures: block.signer_signatures,
tenure_height: block.tenure_height,
};
const result = await sql`
INSERT INTO blocks ${sql(values)}
Expand Down Expand Up @@ -3384,6 +3384,8 @@ export class PgWriteStore extends PgStore {
execution_cost_write_length: block.execution_cost_write_length,
tx_count: block.tx_count,
signer_bitvec: block.signer_bitvec,
signer_signatures: block.signer_signatures,
tenure_height: block.tenure_height,
}));
await sql`
INSERT INTO blocks ${sql(values)}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ const populateBatchInserters = (db: PgWriteStore) => {
write: async (data: CoreNodeBlockMessage, _encoding, next) => {
let dbData: DataStoreBlockUpdateData;
try {
dbData = parseNewBlockMessage(chainID, data);
({ dbData } = parseNewBlockMessage(chainID, data, true));
} catch (err) {
logger.error({ component: 'event-replay' }, 'Error when parsing new_block event');
console.error(err);
Expand Down
Loading
Loading