diff --git a/packages/bsky/src/api/com/atproto/admin/getAccountInfos.ts b/packages/bsky/src/api/com/atproto/admin/getAccountInfos.ts new file mode 100644 index 00000000000..9a71da9eb7f --- /dev/null +++ b/packages/bsky/src/api/com/atproto/admin/getAccountInfos.ts @@ -0,0 +1,42 @@ +import { Server } from '../../../../lexicon' +import AppContext from '../../../../context' +import { Actor } from '../../../../db/tables/actor' +import { mapDefined } from '@atproto/common' +import { INVALID_HANDLE } from '@atproto/syntax' + +export default function (server: Server, ctx: AppContext) { + server.com.atproto.admin.getAccountInfos({ + auth: ctx.roleVerifier, + handler: async ({ params }) => { + const { dids } = params + const db = ctx.db.getPrimary() + const actorService = ctx.services.actor(db) + const [actors, profiles] = await Promise.all([ + actorService.getActors(dids, true), + actorService.getProfileRecords(dids, true), + ]) + const actorByDid = actors.reduce((acc, cur) => { + return acc.set(cur.did, cur) + }, new Map<string, Actor>()) + + const infos = mapDefined(dids, (did) => { + const info = actorByDid.get(did) + if (!info) return + const profile = profiles.get(did) + return { + did, + handle: info.handle ?? INVALID_HANDLE, + relatedRecords: profile ?
[profile] : undefined, + indexedAt: info.indexedAt, + } + }) + + return { + encoding: 'application/json', + body: { + infos, + }, + } + }, + }) +} diff --git a/packages/bsky/src/api/index.ts b/packages/bsky/src/api/index.ts index da21b582019..95c07ad90c1 100644 --- a/packages/bsky/src/api/index.ts +++ b/packages/bsky/src/api/index.ts @@ -45,6 +45,7 @@ import emitModerationEvent from './com/atproto/admin/emitModerationEvent' import searchRepos from './com/atproto/admin/searchRepos' import adminGetRecord from './com/atproto/admin/getRecord' import getRepo from './com/atproto/admin/getRepo' +import getAccountInfos from './com/atproto/admin/getAccountInfos' import queryModerationStatuses from './com/atproto/admin/queryModerationStatuses' import resolveHandle from './com/atproto/identity/resolveHandle' import getRecord from './com/atproto/repo/getRecord' @@ -106,6 +107,7 @@ export default function (server: Server, ctx: AppContext) { searchRepos(server, ctx) adminGetRecord(server, ctx) getRepo(server, ctx) + getAccountInfos(server, ctx) getModerationEvent(server, ctx) queryModerationEvents(server, ctx) queryModerationStatuses(server, ctx) diff --git a/packages/bsky/src/services/actor/index.ts b/packages/bsky/src/services/actor/index.ts index 7ef61529926..b8898570688 100644 --- a/packages/bsky/src/services/actor/index.ts +++ b/packages/bsky/src/services/actor/index.ts @@ -10,6 +10,8 @@ import { SearchKeyset, getUserSearchQuery } from '../util/search' import { FromDb } from '../types' import { GraphService } from '../graph' import { LabelService } from '../label' +import { AtUri } from '@atproto/syntax' +import { ids } from '../../lexicon/lexicons' export * from './types' @@ -96,6 +98,26 @@ export class ActorService { }) } + async getProfileRecords(dids: string[], includeSoftDeleted = false) { + if (dids.length === 0) return new Map() + const profileUris = dids.map((did) => + AtUri.make(did, ids.AppBskyActorProfile, 'self').toString(), + ) + const { ref } = 
this.db.db.dynamic + const res = await this.db.db + .selectFrom('record') + .innerJoin('actor', 'actor.did', 'record.did') + .if(!includeSoftDeleted, (qb) => + qb.where(notSoftDeletedClause(ref('actor'))), + ) + .where('uri', 'in', profileUris) + .select(['record.did', 'record.json']) + .execute() + return res.reduce((acc, cur) => { + return acc.set(cur.did, JSON.parse(cur.json)) + }, new Map()) + } + async getSearchResults({ cursor, limit = 25, diff --git a/packages/dev-env/src/network.ts b/packages/dev-env/src/network.ts index 784ffa7e6c0..fa9081015d1 100644 --- a/packages/dev-env/src/network.ts +++ b/packages/dev-env/src/network.ts @@ -36,16 +36,6 @@ export class TestNetwork extends TestNetworkNoAppView { const plc = await TestPlc.create(params.plc ?? {}) - let ozone: TestOzone | undefined = undefined - if (params.ozone?.enabled) { - ozone = await TestOzone.create({ - plcUrl: plc.url, - dbPostgresSchema: `ozone_${dbPostgresSchema}`, - dbPrimaryPostgresUrl: dbPostgresUrl, - ...params.ozone, - }) - } - const bskyPort = params.bsky?.port ?? (await getPort()) const pdsPort = params.pds?.port ?? 
(await getPort()) const bsky = await TestBsky.create({ @@ -59,6 +49,19 @@ export class TestNetwork extends TestNetworkNoAppView { moderationPushUrl: `http://admin:${ADMIN_PASSWORD}@localhost:${pdsPort}`, ...params.bsky, }) + + let ozone: TestOzone | undefined = undefined + if (params.ozone?.enabled) { + ozone = await TestOzone.create({ + plcUrl: plc.url, + dbPostgresSchema: `ozone_${dbPostgresSchema}`, + dbPrimaryPostgresUrl: dbPostgresUrl, + appviewUrl: bsky.url, + moderationPushUrl: `http://admin:${ADMIN_PASSWORD}@localhost:${pdsPort}`, // @TODO fix this + ...params.ozone, + }) + } + const pds = await TestPds.create({ port: pdsPort, didPlcUrl: plc.url, @@ -126,6 +129,7 @@ export class TestNetwork extends TestNetworkNoAppView { async close() { await Promise.all(this.feedGens.map((fg) => fg.close())) + await this.ozone?.close() await this.bsky.close() await this.pds.close() await this.plc.close() diff --git a/packages/dev-env/src/ozone.ts b/packages/dev-env/src/ozone.ts index 14c4e321d77..4bcf70386c5 100644 --- a/packages/dev-env/src/ozone.ts +++ b/packages/dev-env/src/ozone.ts @@ -40,7 +40,6 @@ export class TestOzone { labelCacheStaleTTL: 30 * SECOND, labelCacheMaxTTL: MINUTE, ...cfg, - // Each test suite gets its own lock id for the repo subscription adminPassword: ADMIN_PASSWORD, moderatorPassword: MOD_PASSWORD, triagePassword: TRIAGE_PASSWORD, @@ -49,13 +48,6 @@ export class TestOzone { rateLimitsEnabled: false, }) - // shared across server, ingester, and indexer in order to share pool, avoid too many pg connections. - const db = new ozone.Database({ - schema: cfg.dbPostgresSchema, - url: cfg.dbPrimaryPostgresUrl, - poolSize: 10, - }) - // Separate migration db in case migration changes some connection state that we need in the tests, e.g. "alter database ... set ..." 
const migrationDb = new ozone.Database({ schema: cfg.dbPostgresSchema, @@ -68,6 +60,12 @@ export class TestOzone { } await migrationDb.close() + const db = new ozone.Database({ + schema: cfg.dbPostgresSchema, + url: cfg.dbPrimaryPostgresUrl, + poolSize: 10, + }) + // api server const server = ozone.OzoneService.create({ db, diff --git a/packages/dev-env/src/types.ts b/packages/dev-env/src/types.ts index e186c68c2fc..321bc0baec3 100644 --- a/packages/dev-env/src/types.ts +++ b/packages/dev-env/src/types.ts @@ -29,6 +29,7 @@ export type BskyConfig = Partial & { export type OzoneConfig = Partial & { enabled?: boolean plcUrl: string + appviewUrl: string dbPrimaryPostgresUrl: string migration?: string } diff --git a/packages/ozone/src/config.ts b/packages/ozone/src/config.ts index 04134e69e21..76d431311e3 100644 --- a/packages/ozone/src/config.ts +++ b/packages/ozone/src/config.ts @@ -14,6 +14,7 @@ export interface ServerConfigValues { publicUrl?: string serverDid: string feedGenDid?: string + appviewUrl: string dbPrimaryPostgresUrl: string dbReplicaPostgresUrls?: string[] dbReplicaTags?: Record // E.g. { timeline: [0], thread: [1] } @@ -53,6 +54,8 @@ export class ServerConfig { const feedGenDid = process.env.FEED_GEN_DID const envPort = parseInt(process.env.PORT || '', 10) const port = isNaN(envPort) ? 
2584 : envPort + const appviewUrl = process.env.APPVIEW_URL + assert(appviewUrl) const redisHost = overrides?.redisHost || process.env.REDIS_HOST || undefined const redisSentinelName = @@ -133,6 +136,7 @@ export class ServerConfig { publicUrl, serverDid, feedGenDid, + appviewUrl, dbPrimaryPostgresUrl, dbReplicaPostgresUrls, dbReplicaTags, @@ -199,6 +203,10 @@ export class ServerConfig { return this.cfg.feedGenDid } + get appviewUrl() { + return this.cfg.appviewUrl + } + get dbPrimaryPostgresUrl() { return this.cfg.dbPrimaryPostgresUrl } diff --git a/packages/ozone/src/db/migrations/20231219T205730722Z-init.ts b/packages/ozone/src/db/migrations/20231219T205730722Z-init.ts index beac37d7277..f719e284241 100644 --- a/packages/ozone/src/db/migrations/20231219T205730722Z-init.ts +++ b/packages/ozone/src/db/migrations/20231219T205730722Z-init.ts @@ -80,21 +80,50 @@ export async function up(db: Kysely): Promise { .column('uri') .execute() - // PushEvent + // Push Events await db.schema - .createTable('push_event') + .createTable('repo_push_event') .addColumn('eventType', 'varchar', (col) => col.notNull()) .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectUri', 'varchar') + .addColumn('takedownId', 'integer') + .addColumn('confirmedAt', 'varchar') + .addPrimaryKeyConstraint('repo_push_event_pkey', [ + 'subjectDid', + 'eventType', + ]) + .execute() + + await db.schema + .createTable('record_push_event') + .addColumn('eventType', 'varchar', (col) => col.notNull()) + .addColumn('subjectDid', 'varchar', (col) => col.notNull()) + .addColumn('subjectUri', 'varchar', (col) => col.notNull()) .addColumn('subjectCid', 'varchar') - .addColumn('subjectBlobCid', 'varchar') .addColumn('takedownId', 'integer') .addColumn('confirmedAt', 'varchar') - .addPrimaryKeyConstraint('push_event_pkey', [ + .addPrimaryKeyConstraint('record_push_event_pkey', [ + 'subjectUri', 'eventType', + ]) + .execute() + await db.schema + 
.createIndex('record_push_event_did_type_idx') + .on('record_push_event') + .columns(['subjectDid', 'eventType']) + .execute() + + await db.schema + .createTable('blob_push_event') + .addColumn('eventType', 'varchar', (col) => col.notNull()) + .addColumn('subjectDid', 'varchar', (col) => col.notNull()) + .addColumn('subjectBlobCid', 'varchar', (col) => col.notNull()) + .addColumn('subjectUri', 'varchar') + .addColumn('takedownId', 'integer') + .addColumn('confirmedAt', 'varchar') + .addPrimaryKeyConstraint('blob_push_event_pkey', [ 'subjectDid', - 'subjectUri', 'subjectBlobCid', + 'eventType', ]) .execute() } @@ -103,5 +132,7 @@ export async function down(db: Kysely): Promise { await db.schema.dropTable('moderation_event').execute() await db.schema.dropTable('moderation_subject_status').execute() await db.schema.dropTable('label').execute() - await db.schema.dropTable('push_event').execute() + await db.schema.dropTable('repo_push_event').execute() + await db.schema.dropTable('record_push_event').execute() + await db.schema.dropTable('blob_push_event').execute() } diff --git a/packages/ozone/src/db/schema/blob_push_event.ts b/packages/ozone/src/db/schema/blob_push_event.ts new file mode 100644 index 00000000000..915165d53b6 --- /dev/null +++ b/packages/ozone/src/db/schema/blob_push_event.ts @@ -0,0 +1,16 @@ +export const eventTableName = 'blob_push_event' + +export type BlobPushEventType = 'takedown' + +export interface BlobPushEvent { + eventType: BlobPushEventType + subjectDid: string + subjectBlobCid: string + subjectUri: string | null + takedownId: number | null + confirmedAt: string | null +} + +export type PartialDB = { + [eventTableName]: BlobPushEvent +} diff --git a/packages/ozone/src/db/schema/index.ts b/packages/ozone/src/db/schema/index.ts index 5d80398c559..cee3124ea7c 100644 --- a/packages/ozone/src/db/schema/index.ts +++ b/packages/ozone/src/db/schema/index.ts @@ -1,13 +1,17 @@ import { Kysely } from 'kysely' import * as modEvent from 
'./moderation_event' import * as modSubjectStatus from './moderation_subject_status' -import * as pushEvent from './push_event' +import * as repoPushEvent from './repo_push_event' +import * as recordPushEvent from './record_push_event' +import * as blobPushEvent from './blob_push_event' import * as label from './label' export type DatabaseSchemaType = modEvent.PartialDB & modSubjectStatus.PartialDB & - pushEvent.PartialDB & - label.PartialDB + label.PartialDB & + repoPushEvent.PartialDB & + recordPushEvent.PartialDB & + blobPushEvent.PartialDB export type DatabaseSchema = Kysely diff --git a/packages/ozone/src/db/schema/push_event.ts b/packages/ozone/src/db/schema/push_event.ts deleted file mode 100644 index a56621a9b91..00000000000 --- a/packages/ozone/src/db/schema/push_event.ts +++ /dev/null @@ -1,20 +0,0 @@ -export const eventTableName = 'push_event' - -export type PushEventType = - | 'repo_takedown' - | 'record_takedown' - | 'blob_takedown' - -export interface PushEvent { - eventType: PushEventType - subjectDid: string - subjectUri: string | null - subjectCid: string | null - subjectBlobCid: string | null - takedownId: number | null - confirmedAt: string | null -} - -export type PartialDB = { - [eventTableName]: PushEvent -} diff --git a/packages/ozone/src/db/schema/record_push_event.ts b/packages/ozone/src/db/schema/record_push_event.ts new file mode 100644 index 00000000000..26436f1b51b --- /dev/null +++ b/packages/ozone/src/db/schema/record_push_event.ts @@ -0,0 +1,16 @@ +export const eventTableName = 'record_push_event' + +export type RecordPushEventType = 'takedown' + +export interface RecordPushEvent { + eventType: RecordPushEventType + subjectDid: string + subjectUri: string + subjectCid: string | null + takedownId: number | null + confirmedAt: string | null +} + +export type PartialDB = { + [eventTableName]: RecordPushEvent +} diff --git a/packages/ozone/src/db/schema/repo_push_event.ts b/packages/ozone/src/db/schema/repo_push_event.ts new file mode 
100644 index 00000000000..66ad1252b3f --- /dev/null +++ b/packages/ozone/src/db/schema/repo_push_event.ts @@ -0,0 +1,14 @@ +export const eventTableName = 'repo_push_event' + +export type RepoPushEventType = 'takedown' + +export interface RepoPushEvent { + eventType: RepoPushEventType + subjectDid: string + takedownId: number | null + confirmedAt: string | null +} + +export type PartialDB = { + [eventTableName]: RepoPushEvent +} diff --git a/packages/ozone/src/index.ts b/packages/ozone/src/index.ts index c06ae6b91c9..d52beceb1f8 100644 --- a/packages/ozone/src/index.ts +++ b/packages/ozone/src/index.ts @@ -8,7 +8,7 @@ import compression from 'compression' import { IdResolver } from '@atproto/identity' import API, { health, wellKnown } from './api' import * as error from './error' -import { loggerMiddleware } from './logger' +import { dbLogger, loggerMiddleware } from './logger' import { ServerConfig } from './config' import { createServer } from './lexicon' import { createServices } from './services' @@ -17,6 +17,7 @@ import { BackgroundQueue } from './background' import { AtpAgent } from '@atproto/api' import { Keypair } from '@atproto/crypto' import Database from './db' +import * as auth from './auth' export type { ServerConfigValues } from './config' export { ServerConfig } from './config' @@ -54,8 +55,12 @@ export class OzoneService { }) const backgroundQueue = new BackgroundQueue(db) + const appviewAgent = new AtpAgent({ service: config.appviewUrl }) - const appviewAgent = new AtpAgent({ service: '@TODO' }) + appviewAgent.api.setHeader( + 'authorization', + auth.buildBasicAuth('admin', config.adminPassword), + ) const services = createServices(appviewAgent) @@ -89,36 +94,24 @@ export class OzoneService { } async start(): Promise { - // const { db, backgroundQueue } = this.ctx - // this.dbStatsInterval = setInterval(() => { - // dbLogger.info( - // { - // idleCount: replicas.reduce( - // (tot, replica) => tot + replica.pool.idleCount, - // 0, - // ), - // 
totalCount: replicas.reduce( - // (tot, replica) => tot + replica.pool.totalCount, - // 0, - // ), - // waitingCount: replicas.reduce( - // (tot, replica) => tot + replica.pool.waitingCount, - // 0, - // ), - // primaryIdleCount: primary.pool.idleCount, - // primaryTotalCount: primary.pool.totalCount, - // primaryWaitingCount: primary.pool.waitingCount, - // }, - // 'db pool stats', - // ) - // dbLogger.info( - // { - // runningCount: backgroundQueue.queue.pending, - // waitingCount: backgroundQueue.queue.size, - // }, - // 'background queue stats', - // ) - // }, 10000) + const { db, backgroundQueue } = this.ctx + this.dbStatsInterval = setInterval(() => { + dbLogger.info( + { + idleCount: db.pool.idleCount, + totalCount: db.pool.totalCount, + waitingCount: db.pool.waitingCount, + }, + 'db pool stats', + ) + dbLogger.info( + { + runningCount: backgroundQueue.queue.pending, + waitingCount: backgroundQueue.queue.size, + }, + 'background queue stats', + ) + }, 10000) const server = this.app.listen(this.ctx.cfg.port) this.server = server server.keepAliveTimeout = 90000 @@ -129,10 +122,10 @@ export class OzoneService { return server } - async destroy(opts?: { skipDb: boolean }): Promise { + async destroy(): Promise { await this.terminator?.terminate() await this.ctx.backgroundQueue.destroy() - if (!opts?.skipDb) await this.ctx.db.close() + await this.ctx.db.close() clearInterval(this.dbStatsInterval) } } diff --git a/packages/ozone/src/services/moderation/index.ts b/packages/ozone/src/services/moderation/index.ts index db548c7c6a2..965276ba2b1 100644 --- a/packages/ozone/src/services/moderation/index.ts +++ b/packages/ozone/src/services/moderation/index.ts @@ -403,15 +403,15 @@ export class ModerationService { }): Promise { const { takedownId, did } = info await this.db.db - .insertInto('push_event') + .insertInto('repo_push_event') .values({ - eventType: 'repo_takedown', + eventType: 'takedown', subjectDid: did, takedownId, }) .onConflict((oc) => oc - 
.columns(['eventType', 'subjectDid']) + .columns(['subjectDid', 'eventType']) .doUpdateSet({ confirmedAt: null, takedownId }), ) .execute() @@ -429,8 +429,8 @@ export class ModerationService { async reverseTakedownRepo(info: { did: string }) { await this.db.db - .updateTable('push_event') - .where('eventType', '=', 'repo_takedown') + .updateTable('repo_push_event') + .where('eventType', '=', 'takedown') .where('subjectDid', '=', info.did) .set({ takedownId: null, confirmedAt: null }) .execute() @@ -445,9 +445,9 @@ export class ModerationService { const did = uri.hostname this.db.assertTransaction() await this.db.db - .insertInto('push_event') + .insertInto('record_push_event') .values({ - eventType: 'record_takedown', + eventType: 'takedown', subjectDid: uri.hostname, subjectUri: uri.toString(), subjectCid: cid.toString(), @@ -455,7 +455,7 @@ export class ModerationService { }) .onConflict((oc) => oc - .columns(['eventType', 'subjectDid', 'subjectUri']) + .columns(['subjectUri', 'eventType']) .doUpdateSet({ confirmedAt: null, takedownId }), ) .execute() @@ -474,8 +474,8 @@ export class ModerationService { async reverseTakedownRecord(info: { uri: AtUri }) { this.db.assertTransaction() await this.db.db - .updateTable('push_event') - .where('eventType', '=', 'record_takedown') + .updateTable('record_push_event') + .where('eventType', '=', 'takedown') .where('subjectDid', '=', info.uri.hostname) .where('subjectUri', '=', info.uri.toString()) .set({ takedownId: null, confirmedAt: null }) @@ -492,10 +492,10 @@ export class ModerationService { if (blobCids.length > 0) { await this.db.db - .insertInto('push_event') + .insertInto('blob_push_event') .values( blobCids.map((cid) => ({ - eventType: 'blob_takedown' as const, + eventType: 'takedown' as const, subjectDid: did, subjectBlobCid: cid.toString(), takedownId, @@ -503,7 +503,7 @@ export class ModerationService { ) .onConflict((oc) => oc - .columns(['eventType', 'subjectDid', 'subjectBlobCid']) + .columns(['subjectDid', 
'subjectBlobCid', 'eventType']) .doUpdateSet({ confirmedAt: null, takedownId }), ) .execute() @@ -523,8 +523,8 @@ export class ModerationService { const { did, blobCids } = info if (blobCids.length < 1) return await this.db.db - .updateTable('push_event') - .where('eventType', '=', 'blob_takedown') + .updateTable('blob_push_event') + .where('eventType', '=', 'takedown') .where('subjectDid', '=', did) .where( 'subjectBlobCid', diff --git a/packages/ozone/src/services/moderation/views.ts b/packages/ozone/src/services/moderation/views.ts index 045a2c8c0f4..eda9ecdb1c0 100644 --- a/packages/ozone/src/services/moderation/views.ts +++ b/packages/ozone/src/services/moderation/views.ts @@ -193,25 +193,36 @@ export class ModerationViews { } } - async fetchRecords( - uris: AtUri[], - ): Promise< - Map }> + async fetchRecords(uris: AtUri[]): Promise< + Map< + string, + { + uri: string + cid: string + value: Record + indexedAt: string + } + > > { const fetched = await Promise.all( uris.map((uri) => - this.appviewAgent.api.com.atproto.repo.getRecord({ - repo: uri.hostname, - collection: uri.collection, - rkey: uri.rkey, - }), + this.appviewAgent.api.com.atproto.repo + .getRecord({ + repo: uri.hostname, + collection: uri.collection, + rkey: uri.rkey, + }) + .catch(() => null), ), ) return fetched.reduce((acc, cur) => { + if (!cur) return acc // @TODO fix this up // @ts-ignore - return acc.set(cur.data.uri, { ...cur, cid: cur.cid ?? '' }) - }, new Map }>()) + const data = cur.data + const indexedAt = new Date().toISOString() + return acc.set(data.uri, { ...data, cid: data.cid ?? 
'', indexedAt }) + }, new Map; indexedAt: string }>()) } async records(uris: AtUri[]): Promise> { @@ -225,17 +236,16 @@ export class ModerationViews { return uris.reduce((acc, uri) => { const repo = repos.get(uri.hostname) - if (!repo) throw new Error(`Record repo is missing: ${uri.toString()}`) + if (!repo) return acc const record = records.get(uri.toString()) - if (!record) throw new Error(`Record is missing`) + if (!record) return acc const subjectStatus = subjectStatuses.get(uri.toString()) return acc.set(uri.toString(), { uri: uri.toString(), cid: record.cid, value: record.value, blobCids: findBlobRefs(record.value).map((blob) => blob.ref.toString()), - // indexedAt: res.indexedAt, - indexedAt: '', // @TODO fix + indexedAt: record.indexedAt, repo, moderation: { subjectStatus: subjectStatus diff --git a/packages/ozone/tests/admin/get-repo.test.ts b/packages/ozone/tests/admin/get-repo.test.ts index 1e95f8cc0fc..74969b87adf 100644 --- a/packages/ozone/tests/admin/get-repo.test.ts +++ b/packages/ozone/tests/admin/get-repo.test.ts @@ -15,6 +15,7 @@ describe('admin get repo view', () => { beforeAll(async () => { network = await TestNetwork.create({ dbPostgresSchema: 'views_admin_get_repo', + ozone: { enabled: true }, }) agent = network.pds.getClient() sc = network.getSeedClient() diff --git a/packages/xrpc/src/client.ts b/packages/xrpc/src/client.ts index 6603345608a..e9e41080dca 100644 --- a/packages/xrpc/src/client.ts +++ b/packages/xrpc/src/client.ts @@ -115,6 +115,9 @@ export class ServiceClient { this.baseClient.lex.assertValidXrpcOutput(methodNsid, res.body) } catch (e: any) { if (e instanceof ValidationError) { + console.log(methodNsid) + console.log(res.body) + console.log('E: ', e) throw new XRPCInvalidResponseError(methodNsid, e, res.body) } else { throw e