diff --git a/apps/api/mvm.lock b/apps/api/mvm.lock
index 52c70cbb2..7e5d99ebf 100644
--- a/apps/api/mvm.lock
+++ b/apps/api/mvm.lock
@@ -3,6 +3,6 @@
"@akashnetwork/database": "1.0.0",
"@akashnetwork/env-loader": "1.0.1",
"@akashnetwork/http-sdk": "1.0.8",
- "@akashnetwork/logging": "1.0.1"
+ "@akashnetwork/logging": "2.0.0"
}
}
diff --git a/apps/api/src/app.ts b/apps/api/src/app.ts
index 7fb26eb37..4f33addd8 100644
--- a/apps/api/src/app.ts
+++ b/apps/api/src/app.ts
@@ -101,7 +101,7 @@ function startScheduler() {
scheduler.start();
}
-const appLogger = new LoggerService({ context: "APP" });
+const appLogger = LoggerService.forContext("APP");
/**
* Initialize database
diff --git a/apps/api/src/billing/repositories/checkout-session/checkout-session.repository.ts b/apps/api/src/billing/repositories/checkout-session/checkout-session.repository.ts
index 173a48eb3..f87d64466 100644
--- a/apps/api/src/billing/repositories/checkout-session/checkout-session.repository.ts
+++ b/apps/api/src/billing/repositories/checkout-session/checkout-session.repository.ts
@@ -12,7 +12,7 @@ export type CheckoutSessionsOutput = Table["$inferSelect"];
@singleton()
export class CheckoutSessionRepository extends BaseRepository
{
- private readonly logger = new LoggerService({ context: CheckoutSessionRepository.name });
+ private readonly logger = LoggerService.forContext(CheckoutSessionRepository.name);
constructor(
@InjectPg() protected readonly pg: ApiPgDatabase,
diff --git a/apps/api/src/billing/services/managed-user-wallet/managed-user-wallet.service.ts b/apps/api/src/billing/services/managed-user-wallet/managed-user-wallet.service.ts
index 2a60e0a1a..d5a28b010 100644
--- a/apps/api/src/billing/services/managed-user-wallet/managed-user-wallet.service.ts
+++ b/apps/api/src/billing/services/managed-user-wallet/managed-user-wallet.service.ts
@@ -32,7 +32,7 @@ export class ManagedUserWalletService {
private readonly HD_PATH = "m/44'/118'/0'/0";
- private readonly logger = new LoggerService({ context: ManagedUserWalletService.name });
+ private readonly logger = LoggerService.forContext(ManagedUserWalletService.name);
constructor(
@InjectBillingConfig() private readonly config: BillingConfig,
diff --git a/apps/api/src/billing/services/master-signing-client/master-signing-client.service.ts b/apps/api/src/billing/services/master-signing-client/master-signing-client.service.ts
index 75d881b92..6768342ea 100644
--- a/apps/api/src/billing/services/master-signing-client/master-signing-client.service.ts
+++ b/apps/api/src/billing/services/master-signing-client/master-signing-client.service.ts
@@ -49,7 +49,7 @@ export class MasterSigningClientService {
{ cache: false, batchScheduleFn: callback => setTimeout(callback, this.config.MASTER_WALLET_BATCHING_INTERVAL_MS) }
);
- private readonly logger = new LoggerService({ context: this.loggerContext });
+ private readonly logger = LoggerService.forContext(this.loggerContext);
constructor(
private readonly config: BillingConfig,
diff --git a/apps/api/src/billing/services/refill/refill.service.ts b/apps/api/src/billing/services/refill/refill.service.ts
index 4e97c6026..98ec02fef 100644
--- a/apps/api/src/billing/services/refill/refill.service.ts
+++ b/apps/api/src/billing/services/refill/refill.service.ts
@@ -11,7 +11,7 @@ import { SentryEventService } from "@src/core/services/sentry-event/sentry-event
@singleton()
export class RefillService {
- private readonly logger = new LoggerService({ context: RefillService.name });
+ private readonly logger = LoggerService.forContext(RefillService.name);
constructor(
@InjectBillingConfig() private readonly config: BillingConfig,
diff --git a/apps/api/src/billing/services/stripe-webhook/stripe-webhook.service.ts b/apps/api/src/billing/services/stripe-webhook/stripe-webhook.service.ts
index a89c5ad10..f26cb99b6 100644
--- a/apps/api/src/billing/services/stripe-webhook/stripe-webhook.service.ts
+++ b/apps/api/src/billing/services/stripe-webhook/stripe-webhook.service.ts
@@ -9,7 +9,7 @@ import { WithTransaction } from "@src/core";
@singleton()
export class StripeWebhookService {
- private readonly logger = new LoggerService({ context: StripeWebhookService.name });
+ private readonly logger = LoggerService.forContext(StripeWebhookService.name);
constructor(
private readonly stripe: StripeService,
diff --git a/apps/api/src/caching/helpers.ts b/apps/api/src/caching/helpers.ts
index d93ac2ee3..ce8ad0ead 100644
--- a/apps/api/src/caching/helpers.ts
+++ b/apps/api/src/caching/helpers.ts
@@ -4,7 +4,7 @@ import { differenceInSeconds } from "date-fns";
import MemoryCacheEngine from "./memoryCacheEngine";
-const logger = new LoggerService({ context: "Caching" });
+const logger = LoggerService.forContext("Caching");
export const cacheEngine = new MemoryCacheEngine();
const pendingRequests: { [key: string]: Promise } = {};
diff --git a/apps/api/src/chain/services/block-http/block-http.service.spec.ts b/apps/api/src/chain/services/block-http/block-http.service.spec.ts
index 552ab9c9c..802d3d863 100644
--- a/apps/api/src/chain/services/block-http/block-http.service.spec.ts
+++ b/apps/api/src/chain/services/block-http/block-http.service.spec.ts
@@ -1,10 +1,10 @@
+import "@test/mocks/logger-service.mock";
+
import { BlockHttpService as BlockHttpServiceCommon } from "@akashnetwork/http-sdk";
import { faker } from "@faker-js/faker";
import { BlockHttpService } from "./block-http.service";
-jest.mock("@akashnetwork/logging");
-
describe(BlockHttpService.name, () => {
let service: BlockHttpService;
let blockHttpService: BlockHttpServiceCommon;
diff --git a/apps/api/src/console.ts b/apps/api/src/console.ts
index 6f915f2bd..41b8900bf 100644
--- a/apps/api/src/console.ts
+++ b/apps/api/src/console.ts
@@ -45,7 +45,7 @@ program
});
});
-const logger = new LoggerService({ context: "CLI" });
+const logger = LoggerService.forContext("CLI");
async function executeCliHandler(name: string, handler: () => Promise) {
await context.with(trace.setSpan(context.active(), tracer.startSpan(name)), async () => {
diff --git a/apps/api/src/core/providers/postgres.provider.ts b/apps/api/src/core/providers/postgres.provider.ts
index c9dd85bd9..caea10879 100644
--- a/apps/api/src/core/providers/postgres.provider.ts
+++ b/apps/api/src/core/providers/postgres.provider.ts
@@ -10,7 +10,7 @@ import { config } from "@src/core/config";
import { PostgresLoggerService } from "@src/core/services/postgres-logger/postgres-logger.service";
import * as userSchemas from "@src/user/model-schemas";
-const logger = new LoggerService({ context: "POSTGRES" });
+const logger = LoggerService.forContext("POSTGRES");
const migrationClient = postgres(config.POSTGRES_DB_URI, { max: 1, onnotice: logger.info.bind(logger) });
const appClient = postgres(config.POSTGRES_DB_URI, { max: config.POSTGRES_MAX_CONNECTIONS, onnotice: logger.info.bind(logger) });
diff --git a/apps/api/src/core/services/error/error.service.spec.ts b/apps/api/src/core/services/error/error.service.spec.ts
index c67806d12..e081d07ba 100644
--- a/apps/api/src/core/services/error/error.service.spec.ts
+++ b/apps/api/src/core/services/error/error.service.spec.ts
@@ -1,3 +1,5 @@
+import "@test/mocks/logger-service.mock";
+
import { LoggerService } from "@akashnetwork/logging";
import { faker } from "@faker-js/faker";
@@ -5,8 +7,6 @@ import { Sentry } from "@src/core/providers/sentry.provider";
import { SentryEventService } from "@src/core/services/sentry-event/sentry-event.service";
import { ErrorService } from "./error.service";
-jest.mock("@akashnetwork/logging");
-
describe(ErrorService.name, () => {
const sentryEventService = new SentryEventService();
let sentry: Sentry;
diff --git a/apps/api/src/core/services/error/error.service.ts b/apps/api/src/core/services/error/error.service.ts
index 312b821f3..3065e1060 100644
--- a/apps/api/src/core/services/error/error.service.ts
+++ b/apps/api/src/core/services/error/error.service.ts
@@ -6,7 +6,7 @@ import { SentryEventService } from "@src/core/services/sentry-event/sentry-event
@singleton()
export class ErrorService {
- private readonly logger = new LoggerService();
+ private readonly logger = LoggerService.forContext(ErrorService.name);
constructor(
@InjectSentry() private readonly sentry: Sentry,
diff --git a/apps/api/src/core/services/hono-error-handler/hono-error-handler.service.ts b/apps/api/src/core/services/hono-error-handler/hono-error-handler.service.ts
index f643b8abe..8cb53a4ad 100644
--- a/apps/api/src/core/services/hono-error-handler/hono-error-handler.service.ts
+++ b/apps/api/src/core/services/hono-error-handler/hono-error-handler.service.ts
@@ -12,7 +12,7 @@ import { SentryEventService } from "@src/core/services/sentry-event/sentry-event
@singleton()
export class HonoErrorHandlerService {
- private readonly logger = new LoggerService({ context: "ErrorHandler" });
+ private readonly logger = LoggerService.forContext("ErrorHandler");
constructor(
@InjectSentry() private readonly sentry: Sentry,
diff --git a/apps/api/src/core/services/http-logger/http-logger.service.ts b/apps/api/src/core/services/http-logger/http-logger.service.ts
index a8465f7b6..6e7cc35ef 100644
--- a/apps/api/src/core/services/http-logger/http-logger.service.ts
+++ b/apps/api/src/core/services/http-logger/http-logger.service.ts
@@ -6,7 +6,7 @@ import type { HonoInterceptor } from "@src/core/types/hono-interceptor.type";
@singleton()
export class HttpLoggerService implements HonoInterceptor {
- private readonly logger = new LoggerService({ context: "HTTP" });
+ private readonly logger = LoggerService.forContext("HTTP");
intercept() {
return async (c: Context, next: Next) => {
diff --git a/apps/api/src/core/services/postgres-logger/postgres-logger.service.ts b/apps/api/src/core/services/postgres-logger/postgres-logger.service.ts
index 67ee72e8e..f52278406 100644
--- a/apps/api/src/core/services/postgres-logger/postgres-logger.service.ts
+++ b/apps/api/src/core/services/postgres-logger/postgres-logger.service.ts
@@ -17,7 +17,7 @@ export class PostgresLoggerService implements LogWriter {
constructor(options?: PostgresLoggerServiceOptions) {
const orm = options?.orm || "drizzle";
- this.logger = new LoggerService({ context: "POSTGRES", orm, database: options?.database });
+ this.logger = new LoggerService({ base: { context: "POSTGRES", orm, database: options?.database } });
this.isDrizzle = orm === "drizzle";
this.useFormat = options?.useFormat || false;
}
diff --git a/apps/api/src/deployment/services/stale-managed-deployments-cleaner/stale-managed-deployments-cleaner.service.ts b/apps/api/src/deployment/services/stale-managed-deployments-cleaner/stale-managed-deployments-cleaner.service.ts
index b5bd4d52e..1fe067e01 100644
--- a/apps/api/src/deployment/services/stale-managed-deployments-cleaner/stale-managed-deployments-cleaner.service.ts
+++ b/apps/api/src/deployment/services/stale-managed-deployments-cleaner/stale-managed-deployments-cleaner.service.ts
@@ -13,7 +13,7 @@ import { averageBlockTime } from "@src/utils/constants";
@singleton()
export class StaleManagedDeploymentsCleanerService {
- private readonly logger = new LoggerService({ context: StaleManagedDeploymentsCleanerService.name });
+ private readonly logger = LoggerService.forContext(StaleManagedDeploymentsCleanerService.name);
private readonly MAX_LIVE_BLOCKS = Math.floor((10 * secondsInMinute) / averageBlockTime);
diff --git a/apps/api/src/deployment/services/top-up-custodial-deployments/top-up-custodial-deployments.service.spec.ts b/apps/api/src/deployment/services/top-up-custodial-deployments/top-up-custodial-deployments.service.spec.ts
index 0e9e20f78..2bc4354ce 100644
--- a/apps/api/src/deployment/services/top-up-custodial-deployments/top-up-custodial-deployments.service.spec.ts
+++ b/apps/api/src/deployment/services/top-up-custodial-deployments/top-up-custodial-deployments.service.spec.ts
@@ -1,3 +1,5 @@
+import "@test/mocks/logger-service.mock";
+
import { AllowanceHttpService, BalanceHttpService, Denom } from "@akashnetwork/http-sdk";
import { faker } from "@faker-js/faker";
import { MsgExec } from "cosmjs-types/cosmos/authz/v1beta1/tx";
@@ -23,8 +25,6 @@ import { DrainingDeploymentSeeder } from "@test/seeders/draining-deployment.seed
import { FeesAuthorizationSeeder } from "@test/seeders/fees-authorization.seeder";
import { stub } from "@test/services/stub";
-jest.mock("@akashnetwork/logging");
-
describe(TopUpCustodialDeploymentsService.name, () => {
const CURRENT_BLOCK_HEIGHT = 7481457;
const UAKT_TOP_UP_MASTER_WALLET_ADDRESS = AkashAddressSeeder.create();
diff --git a/apps/api/src/deployment/services/top-up-custodial-deployments/top-up-custodial-deployments.service.ts b/apps/api/src/deployment/services/top-up-custodial-deployments/top-up-custodial-deployments.service.ts
index cefe05f74..53f021bbe 100644
--- a/apps/api/src/deployment/services/top-up-custodial-deployments/top-up-custodial-deployments.service.ts
+++ b/apps/api/src/deployment/services/top-up-custodial-deployments/top-up-custodial-deployments.service.ts
@@ -30,7 +30,7 @@ export class TopUpCustodialDeploymentsService implements DeploymentsRefiller {
private readonly MIN_FEES_AVAILABLE = 5000;
- private readonly logger = new LoggerService({ context: TopUpCustodialDeploymentsService.name });
+ private readonly logger = LoggerService.forContext(TopUpCustodialDeploymentsService.name);
constructor(
private readonly topUpToolsService: TopUpToolsService,
diff --git a/apps/api/src/deployment/services/top-up-managed-deployments/top-up-managed-deployments.service.spec.ts b/apps/api/src/deployment/services/top-up-managed-deployments/top-up-managed-deployments.service.spec.ts
index 755b8e456..700b3232d 100644
--- a/apps/api/src/deployment/services/top-up-managed-deployments/top-up-managed-deployments.service.spec.ts
+++ b/apps/api/src/deployment/services/top-up-managed-deployments/top-up-managed-deployments.service.spec.ts
@@ -1,3 +1,5 @@
+import "@test/mocks/logger-service.mock";
+
import { faker } from "@faker-js/faker";
import { BillingConfig } from "@src/billing/providers";
@@ -17,8 +19,6 @@ import { DrainingDeploymentSeeder } from "@test/seeders/draining-deployment.seed
import { UserWalletSeeder } from "@test/seeders/user-wallet.seeder";
import { stub } from "@test/services/stub";
-jest.mock("@akashnetwork/logging");
-
describe(TopUpManagedDeploymentsService.name, () => {
const CURRENT_BLOCK_HEIGHT = 7481457;
const MANAGED_MASTER_WALLET_ADDRESS = AkashAddressSeeder.create();
diff --git a/apps/api/src/deployment/services/top-up-managed-deployments/top-up-managed-deployments.service.ts b/apps/api/src/deployment/services/top-up-managed-deployments/top-up-managed-deployments.service.ts
index f22192969..38428f58c 100644
--- a/apps/api/src/deployment/services/top-up-managed-deployments/top-up-managed-deployments.service.ts
+++ b/apps/api/src/deployment/services/top-up-managed-deployments/top-up-managed-deployments.service.ts
@@ -17,7 +17,7 @@ import { DeploymentsRefiller, TopUpDeploymentsOptions } from "@src/deployment/ty
export class TopUpManagedDeploymentsService implements DeploymentsRefiller {
private readonly CONCURRENCY = 10;
- private readonly logger = new LoggerService({ context: TopUpManagedDeploymentsService.name });
+ private readonly logger = LoggerService.forContext(TopUpManagedDeploymentsService.name);
constructor(
private readonly userWalletRepository: UserWalletRepository,
diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts
index f90cbfbe9..c058d6fbc 100644
--- a/apps/api/src/index.ts
+++ b/apps/api/src/index.ts
@@ -1,6 +1,6 @@
-import "./open-telemetry";
import "reflect-metadata";
import "@akashnetwork/env-loader";
+import "./open-telemetry";
async function bootstrap() {
/* eslint-disable @typescript-eslint/no-var-requires */
diff --git a/apps/api/src/open-telemetry.ts b/apps/api/src/open-telemetry.ts
index cf80b36d8..fce294c48 100644
--- a/apps/api/src/open-telemetry.ts
+++ b/apps/api/src/open-telemetry.ts
@@ -1,3 +1,5 @@
+import { LoggerService } from "@akashnetwork/logging";
+import { context, trace } from "@opentelemetry/api";
import { registerInstrumentations } from "@opentelemetry/instrumentation";
import { HttpInstrumentation } from "@opentelemetry/instrumentation-http";
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
@@ -7,3 +9,8 @@ new NodeTracerProvider().register();
registerInstrumentations({
instrumentations: [new HttpInstrumentation()]
});
+
+LoggerService.mixin = () => {
+ const currentSpan = trace.getSpan(context.active());
+ return { ...currentSpan?.spanContext() };
+};
diff --git a/apps/api/src/routes/v1/dashboardData.ts b/apps/api/src/routes/v1/dashboardData.ts
index 9d04f3dd1..4269396e9 100644
--- a/apps/api/src/routes/v1/dashboardData.ts
+++ b/apps/api/src/routes/v1/dashboardData.ts
@@ -8,9 +8,8 @@ import { getTransactions } from "@src/services/db/transactionsService";
import { getChainStats } from "@src/services/external/apiNodeService";
import { createLoggingExecutor } from "@src/utils/logging";
-
-const logger = new LoggerService({ context: "Dashboard" });
-const runOrLog = createLoggingExecutor(logger)
+const logger = LoggerService.forContext("Dashboard");
+const runOrLog = createLoggingExecutor(logger);
const route = createRoute({
method: "get",
@@ -167,8 +166,8 @@ export default new OpenAPIHono().openapi(route, async c => {
const chainStats = {
...chainStatsQuery,
height: latestBlocks && latestBlocks.length > 0 ? latestBlocks[0].height : undefined,
- transactionCount: latestBlocks && latestBlocks.length > 0 ? latestBlocks[0].totalTransactionCount : undefined,
- }
+ transactionCount: latestBlocks && latestBlocks.length > 0 ? latestBlocks[0].totalTransactionCount : undefined
+ };
return c.json({
chainStats,
@@ -179,4 +178,4 @@ export default new OpenAPIHono().openapi(route, async c => {
latestBlocks,
latestTransactions
});
-});
\ No newline at end of file
+});
diff --git a/apps/api/src/services/db/userDataService.ts b/apps/api/src/services/db/userDataService.ts
index f5f1c5f23..ccfda6e13 100644
--- a/apps/api/src/services/db/userDataService.ts
+++ b/apps/api/src/services/db/userDataService.ts
@@ -4,7 +4,7 @@ import pick from "lodash/pick";
import { Transaction } from "sequelize";
import { container } from "tsyringe";
-const logger = new LoggerService({ context: "UserDataService" });
+const logger = LoggerService.forContext("UserDataService");
function randomIntFromInterval(min: number, max: number) {
return Math.floor(Math.random() * (max - min + 1) + min);
diff --git a/apps/api/src/services/external/apiNodeService.ts b/apps/api/src/services/external/apiNodeService.ts
index ce80ea0fb..97058ae4c 100644
--- a/apps/api/src/services/external/apiNodeService.ts
+++ b/apps/api/src/services/external/apiNodeService.ts
@@ -34,49 +34,35 @@ import { getDeploymentRelatedMessages } from "../db/deploymentService";
import { getProviderList } from "../db/providerStatusService";
export async function getChainStats() {
- const logger = new LoggerService({ context: "ApiNode" })
- const runOrLog = createLoggingExecutor(logger)
+ const logger = LoggerService.forContext("ApiNode");
+ const runOrLog = createLoggingExecutor(logger);
const result = await cacheResponse(
60 * 5, // 5 minutes
cacheKeys.getChainStats,
async () => {
const bondedTokensAsPromised = await runOrLog(async () => {
- const bondedTokensQuery = await axios.get(
- `${apiNodeUrl}/cosmos/staking/v1beta1/pool`
- );
+ const bondedTokensQuery = await axios.get(`${apiNodeUrl}/cosmos/staking/v1beta1/pool`);
return parseInt(bondedTokensQuery.data.pool.bonded_tokens);
});
const totalSupplyAsPromised = await runOrLog(async () => {
- const supplyQuery = await axios.get(
- `${apiNodeUrl}/cosmos/bank/v1beta1/supply?pagination.limit=1000`
- );
- return parseInt(
- supplyQuery.data.supply.find((x) => x.denom === "uakt")?.amount || "0"
- );
+ const supplyQuery = await axios.get(`${apiNodeUrl}/cosmos/bank/v1beta1/supply?pagination.limit=1000`);
+ return parseInt(supplyQuery.data.supply.find(x => x.denom === "uakt")?.amount || "0");
});
const communityPoolAsPromised = await runOrLog(async () => {
- const communityPoolQuery = await axios.get(
- `${apiNodeUrl}/cosmos/distribution/v1beta1/community_pool`
- );
- return parseFloat(
- communityPoolQuery.data.pool.find((x) => x.denom === "uakt")?.amount || "0"
- );
+ const communityPoolQuery = await axios.get(`${apiNodeUrl}/cosmos/distribution/v1beta1/community_pool`);
+ return parseFloat(communityPoolQuery.data.pool.find(x => x.denom === "uakt")?.amount || "0");
});
const inflationAsPromised = await runOrLog(async () => {
- const inflationQuery = await axios.get(
- `${apiNodeUrl}/cosmos/mint/v1beta1/inflation`
- );
+ const inflationQuery = await axios.get(`${apiNodeUrl}/cosmos/mint/v1beta1/inflation`);
return parseFloat(inflationQuery.data.inflation || "0");
});
const communityTaxAsPromised = await runOrLog(async () => {
- const distributionQuery = await axios.get(
- `${apiNodeUrl}/cosmos/distribution/v1beta1/params`
- );
+ const distributionQuery = await axios.get(`${apiNodeUrl}/cosmos/distribution/v1beta1/params`);
return parseFloat(distributionQuery.data.params.community_tax || "0");
});
@@ -93,7 +79,7 @@ export async function getChainStats() {
inflation,
communityTax,
bondedTokens,
- totalSupply,
+ totalSupply
};
},
true
@@ -101,7 +87,7 @@ export async function getChainStats() {
let stakingAPR: number | undefined;
if (result.bondedTokens && result.bondedTokens > 0 && result.inflation && result.communityTax && result.totalSupply) {
- stakingAPR = result.inflation * (1 - result.communityTax) * result.totalSupply / result.bondedTokens
+ stakingAPR = (result.inflation * (1 - result.communityTax) * result.totalSupply) / result.bondedTokens;
}
return {
@@ -109,7 +95,7 @@ export async function getChainStats() {
totalSupply: result.totalSupply,
communityPool: result.communityPool,
inflation: result.inflation,
- stakingAPR,
+ stakingAPR
};
}
diff --git a/apps/api/src/utils/coin.ts b/apps/api/src/utils/coin.ts
index 8578f1fcd..cc2f6383b 100644
--- a/apps/api/src/utils/coin.ts
+++ b/apps/api/src/utils/coin.ts
@@ -2,8 +2,7 @@ import { LoggerService } from "@akashnetwork/logging";
import { asset_lists } from "@chain-registry/assets";
import { Coin } from "cosmjs-types/cosmos/base/v1beta1/coin";
-
-const logger = new LoggerService({ context: "CoinUtil" });
+const logger = LoggerService.forContext("CoinUtil");
export function coinToAsset(coin: Coin) {
if (coin.denom === "uakt") {
diff --git a/apps/api/test/mocks/logger-service.mock.ts b/apps/api/test/mocks/logger-service.mock.ts
new file mode 100644
index 000000000..2fa854e76
--- /dev/null
+++ b/apps/api/test/mocks/logger-service.mock.ts
@@ -0,0 +1,19 @@
+jest.mock("@akashnetwork/logging", () => {
+  class LoggerService {
+    static forContext() {
+      return new LoggerService();
+    }
+
+    error() {}
+    warn() {}
+    debug() {}
+    info() {}
+  }
+
+  jest.spyOn(LoggerService.prototype, "error");
+  jest.spyOn(LoggerService.prototype, "warn");
+  jest.spyOn(LoggerService.prototype, "debug");
+  jest.spyOn(LoggerService.prototype, "info");
+
+  return { LoggerService };
+});
diff --git a/packages/logging/package.json b/packages/logging/package.json
index 43271cfe0..38cbe9f25 100644
--- a/packages/logging/package.json
+++ b/packages/logging/package.json
@@ -1,6 +1,6 @@
{
"name": "@akashnetwork/logging",
- "version": "1.0.1",
+ "version": "2.0.0",
"description": "Package containing logging tools",
"main": "src/index.ts",
"scripts": {
diff --git a/packages/logging/src/servicies/logger/logger.service.spec.ts b/packages/logging/src/servicies/logger/logger.service.spec.ts
index f2d684800..c06b973d6 100644
--- a/packages/logging/src/servicies/logger/logger.service.spec.ts
+++ b/packages/logging/src/servicies/logger/logger.service.spec.ts
@@ -1,41 +1,20 @@
import createHttpError from "http-errors";
import pino from "pino";
-import pinoFluentd from "pino-fluentd";
-import pretty from "pino-pretty";
+import { gcpLogOptions } from "pino-cloud-logging";
import { config } from "../../config";
import { Logger, LoggerService } from "./logger.service";
jest.mock("pino");
-jest.mock("pino-fluentd");
-jest.mock("pino-pretty");
+jest.mock("pino-cloud-logging");
+
+(gcpLogOptions as jest.Mock).mockImplementation(options => options);
describe("LoggerService", () => {
- let loggerService: LoggerService;
- let mockLogger: jest.Mocked;
const defaultLogFormat = config.STD_OUT_LOG_FORMAT;
- const defaultFluentdTag = config.FLUENTD_TAG;
- const defaultFluentdHost = config.FLUENTD_HOST;
- const defaultFluentdPort = config.FLUENTD_PORT;
-
- beforeEach(() => {
- mockLogger = {
- info: jest.fn(),
- error: jest.fn(),
- warn: jest.fn(),
- debug: jest.fn(),
- child: jest.fn().mockReturnThis()
- } as unknown as jest.Mocked;
-
- (pino as unknown as jest.Mock).mockReturnValue(mockLogger);
- loggerService = new LoggerService();
- });
afterEach(() => {
config.STD_OUT_LOG_FORMAT = defaultLogFormat;
- config.FLUENTD_TAG = defaultFluentdTag;
- config.FLUENTD_HOST = defaultFluentdHost;
- config.FLUENTD_PORT = defaultFluentdPort;
jest.clearAllMocks();
});
@@ -43,80 +22,117 @@ describe("LoggerService", () => {
it("should initialize pino with pretty formatting when STD_OUT_LOG_FORMAT is 'pretty'", () => {
config.STD_OUT_LOG_FORMAT = "pretty";
new LoggerService();
- expect(pretty).toHaveBeenCalledWith({ sync: true });
+
+ expect(pino).toHaveBeenCalledWith({
+ level: "info",
+ mixin: undefined,
+ transport: { target: "pino-pretty", options: { colorize: true, sync: true } }
+ });
+ expect(gcpLogOptions).not.toHaveBeenCalled();
});
it("should initialize pino without pretty formatting for other formats", () => {
config.STD_OUT_LOG_FORMAT = "json";
new LoggerService();
- expect(pretty).not.toHaveBeenCalled();
- expect(pino).toHaveBeenCalled();
- });
- it("should initialize fluentd if configuration is enabled", () => {
- config.FLUENTD_HOST = "localhost";
- config.FLUENTD_PORT = 24224;
- config.FLUENTD_TAG = "app";
+ expect(pino).toHaveBeenCalledWith({ level: "info", mixin: undefined });
+ expect(gcpLogOptions).toHaveBeenCalled();
+ });
+ it("should initialize pino with global mixin", () => {
+ function globalMixin() {
+ return {};
+ }
+ LoggerService.mixin = globalMixin;
new LoggerService();
- expect(pinoFluentd).toHaveBeenCalledWith({
- tag: config.FLUENTD_TAG,
- host: config.FLUENTD_HOST,
- port: config.FLUENTD_PORT,
- "trace-level": config.LOG_LEVEL
- });
+
+ expect(pino).toHaveBeenCalledWith({ level: "info", mixin: globalMixin });
+
+ LoggerService.mixin = undefined;
});
- it("should not initialize fluentd if configuration is missing", () => {
- config.FLUENTD_HOST = "";
- config.FLUENTD_PORT = 0;
- config.FLUENTD_TAG = "";
+ it("should initialize pino with local mixin overriding global mixin", () => {
+ function globalMixin() {
+ return {};
+ }
+ function localMixin() {
+ return {};
+ }
+ LoggerService.mixin = globalMixin;
+ new LoggerService({ mixin: localMixin });
- new LoggerService();
- expect(pinoFluentd).not.toHaveBeenCalled();
+ expect(pino).toHaveBeenCalledWith({ level: "info", mixin: localMixin });
+
+ LoggerService.mixin = undefined;
});
- });
- const methods: (keyof Logger)[] = ["info", "error", "warn", "debug"];
- describe.each(methods)("prototype.%s", method => {
- const logMessage = "Test message";
+ it("should initialize pino with provided log level overriding global log level", () => {
+ new LoggerService({ level: "debug" });
+
+ expect(pino).toHaveBeenCalledWith({ level: "debug" });
- it(`should call pino.${method} on info method`, () => {
- loggerService[method](logMessage);
- expect(mockLogger[method]).toHaveBeenCalledWith(logMessage);
+ LoggerService.mixin = undefined;
});
});
- describe("prototype.toLoggableInput", () => {
- it("should return status, message, stack, and data for HttpError", () => {
- const httpError = createHttpError(404, {
- status: 404,
- message: "Not found",
- stack: "stack trace",
- data: { key: "value" },
- originalError: new Error("Original error")
- });
+ describe("methods", () => {
+ let loggerService: LoggerService;
+ let mockLogger: jest.Mocked;
+
+ beforeEach(() => {
+ mockLogger = {
+ info: jest.fn(),
+ error: jest.fn(),
+ warn: jest.fn(),
+ debug: jest.fn(),
+ child: jest.fn().mockReturnThis()
+ } as unknown as jest.Mocked;
+
+ (pino as unknown as jest.Mock).mockReturnValue(mockLogger);
+ loggerService = new LoggerService();
+ });
+
+ const methods: (keyof Logger)[] = ["info", "error", "warn", "debug"];
+ describe.each(methods)("prototype.%s", method => {
+ const logMessage = "Test message";
- const loggable = loggerService["toLoggableInput"](httpError);
- expect(loggable).toEqual({
- status: 404,
- message: "Not found",
- stack: "stack trace",
- data: { key: "value" },
- originalError: "stack trace"
+ it(`should call pino.${method} on info method`, () => {
+ loggerService[method](logMessage);
+ expect(mockLogger[method]).toHaveBeenCalledWith(logMessage);
});
});
- it("should return stack for general Error instance", () => {
- const error = new Error("Test error");
- const loggable = loggerService["toLoggableInput"](error);
- expect(loggable).toBe(error.stack);
- });
+ describe("prototype.toLoggableInput", () => {
+ it("should return status, message, stack, and data for HttpError", () => {
+ const httpError = createHttpError(404, {
+ status: 404,
+ message: "Not found",
+ stack: "stack trace",
+ data: { key: "value" },
+ originalError: new Error("Original error")
+ });
+
+ const loggable = loggerService["toLoggableInput"](httpError);
+ expect(loggable).toEqual({
+ status: 404,
+ message: "Not found",
+ stack: "stack trace",
+ data: { key: "value" },
+ originalError: "stack trace"
+ });
+ });
- it("should return the original message if it is not an error", () => {
- const message = "Test message";
- const loggable = loggerService["toLoggableInput"](message);
- expect(loggable).toBe(message);
+ it("should return stack for general Error instance", () => {
+ const error = new Error("Test error");
+ const loggable = loggerService["toLoggableInput"](error);
+ expect(loggable).toBe(error.stack);
+ });
+
+ it("should return the original message if it is not an error", () => {
+ const message = "Test message";
+ const loggable = loggerService["toLoggableInput"](message);
+ expect(loggable).toBe(message);
+ });
});
});
});
diff --git a/packages/logging/src/servicies/logger/logger.service.ts b/packages/logging/src/servicies/logger/logger.service.ts
index 6998b6300..b856ea929 100644
--- a/packages/logging/src/servicies/logger/logger.service.ts
+++ b/packages/logging/src/servicies/logger/logger.service.ts
@@ -1,78 +1,61 @@
-import { context, trace } from "@opentelemetry/api";
import { isHttpError } from "http-errors";
-import pino, { Bindings, Logger as PinoLogger, LoggerOptions } from "pino";
+import pino from "pino";
import { gcpLogOptions } from "pino-cloud-logging";
-import pinoFluentd from "pino-fluentd";
-import pretty from "pino-pretty";
-import { Writable } from "stream";
import { config } from "../../config";
-export type Logger = Pick;
+export type Logger = Pick;
-export class LoggerService implements Logger {
- protected pino: Logger;
+interface Bindings extends pino.Bindings {
+ context?: string;
+}
+
+interface LoggerOptions extends pino.LoggerOptions {
+ base?: Bindings | null;
+}
- constructor(bindings?: Bindings) {
- this.pino = this.initPino(bindings);
+export class LoggerService implements Logger {
+ static forContext(context: string) {
+ return new LoggerService().setContext(context);
}
- private initPino(bindings?: Bindings): Logger {
- const destinations: Writable[] = [];
+  static mixin?: (mergeObject: object) => object;
+
+ protected pino: pino.Logger;
+
+ constructor(options?: LoggerOptions) {
+ this.pino = this.initPino(options);
+ }
+ private initPino(inputOptions: LoggerOptions = {}): pino.Logger {
let options: LoggerOptions = {
level: config.LOG_LEVEL,
- mixin: () => {
- const currentSpan = trace.getSpan(context.active());
- return { ...currentSpan?.spanContext() };
- }
+ mixin: LoggerService.mixin,
+ ...inputOptions
};
- if (config.STD_OUT_LOG_FORMAT === "pretty") {
- destinations.push(pretty({ sync: true }));
+ if (typeof window === "undefined" && config.STD_OUT_LOG_FORMAT === "pretty") {
+ options.transport = {
+ target: "pino-pretty",
+ options: { colorize: true, sync: true }
+ };
} else {
options = gcpLogOptions(options as any) as LoggerOptions;
- destinations.push(process.stdout);
}
- const fluentd = this.initFluentd();
-
- if (fluentd) {
- destinations.push(fluentd);
- }
-
- let instance = pino(options, this.combineDestinations(destinations));
-
- if (bindings) {
- instance = instance.child(bindings);
- }
-
- return instance;
+ return pino(options);
}
- private initFluentd(): Writable | undefined {
- const isFluentdEnabled = !!(config.FLUENTD_HOST && config.FLUENTD_PORT && config.FLUENTD_TAG);
+ setContext(context: string) {
+ this.pino.setBindings({ context });
- if (isFluentdEnabled) {
- return pinoFluentd({
- tag: config.FLUENTD_TAG,
- host: config.FLUENTD_HOST,
- port: config.FLUENTD_PORT,
- "trace-level": config.LOG_LEVEL
- });
- }
+ return this;
}
- private combineDestinations(destinations: Writable[]): Writable {
- return new Writable({
- write(chunk, encoding, callback) {
- for (const destination of destinations) {
- destination.write(chunk, encoding);
- }
+ bind(bindings: pino.Bindings) {
+ this.pino.setBindings(bindings);
- callback();
- }
- });
+ return this;
}
info(message: any) {